1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
32 #include "coretypes.h"
36 #include "stor-layout.h"
45 #include "hard-reg-set.h"
49 #include "toplev.h" /* get_random_seed */
51 #include "filenames.h"
54 #include "common/common-target.h"
55 #include "langhooks.h"
56 #include "tree-inline.h"
57 #include "tree-iterator.h"
59 #include "dominance.h"
61 #include "basic-block.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
68 #include "gimple-iterator.h"
70 #include "gimple-ssa.h"
72 #include "plugin-api.h"
75 #include "tree-phinodes.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
81 #include "tree-pass.h"
82 #include "langhooks-def.h"
83 #include "diagnostic.h"
84 #include "tree-diagnostic.h"
85 #include "tree-pretty-print.h"
92 /* Tree code classes. */
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
95 #define END_OF_BASE_TREE_CODES tcc_exceptional,
97 const enum tree_code_class tree_code_type
[] = {
98 #include "all-tree.def"
102 #undef END_OF_BASE_TREE_CODES
104 /* Table indexed by tree code giving number of expression
105 operands beyond the fixed part of the node structure.
106 Not used for types or decls. */
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
109 #define END_OF_BASE_TREE_CODES 0,
111 const unsigned char tree_code_length
[] = {
112 #include "all-tree.def"
116 #undef END_OF_BASE_TREE_CODES
118 /* Names of tree components.
119 Used for printing out the tree and error messages. */
120 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
121 #define END_OF_BASE_TREE_CODES "@dummy",
123 static const char *const tree_code_name
[] = {
124 #include "all-tree.def"
128 #undef END_OF_BASE_TREE_CODES
130 /* Each tree code class has an associated string representation.
131 These must correspond to the tree_code_class entries. */
133 const char *const tree_code_class_strings
[] =
148 /* obstack.[ch] explicitly declined to prototype this. */
149 extern int _obstack_allocated_p (struct obstack
*h
, void *obj
);
151 /* Statistics-gathering stuff. */
153 static int tree_code_counts
[MAX_TREE_CODES
];
154 int tree_node_counts
[(int) all_kinds
];
155 int tree_node_sizes
[(int) all_kinds
];
157 /* Keep in sync with tree.h:enum tree_node_kind. */
158 static const char * const tree_node_kind_names
[] = {
177 /* Unique id for next decl created. */
178 static GTY(()) int next_decl_uid
;
179 /* Unique id for next type created. */
180 static GTY(()) int next_type_uid
= 1;
181 /* Unique id for next debug decl created. Use negative numbers,
182 to catch erroneous uses. */
183 static GTY(()) int next_debug_decl_uid
;
185 /* Since we cannot rehash a type after it is in the table, we have to
186 keep the hash code. */
188 struct GTY(()) type_hash
{
193 /* Initial size of the hash table (rounded to next prime). */
194 #define TYPE_HASH_INITIAL_SIZE 1000
196 /* Now here is the hash table. When recording a type, it is added to
197 the slot whose index is the hash code. Note that the hash table is
198 used for several kinds of types (function types, array types and
199 array index range types, for now). While all these live in the
200 same table, they are completely independent, and the hash code is
201 computed differently for each of these. */
203 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash
)))
204 htab_t type_hash_table
;
206 /* Hash table and temporary node for larger integer const values. */
207 static GTY (()) tree int_cst_node
;
208 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
209 htab_t int_cst_hash_table
;
211 /* Hash table for optimization flags and target option flags. Use the same
212 hash table for both sets of options. Nodes for building the current
213 optimization and target option nodes. The assumption is most of the time
214 the options created will already be in the hash table, so we avoid
215 allocating and freeing up a node repeatably. */
216 static GTY (()) tree cl_optimization_node
;
217 static GTY (()) tree cl_target_option_node
;
218 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
219 htab_t cl_option_hash_table
;
221 /* General tree->tree mapping structure for use in hash tables. */
224 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map
)))
225 htab_t debug_expr_for_decl
;
227 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map
)))
228 htab_t value_expr_for_decl
;
230 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map
)))
231 htab_t debug_args_for_decl
;
233 static void set_type_quals (tree
, int);
234 static int type_hash_eq (const void *, const void *);
235 static hashval_t
type_hash_hash (const void *);
236 static hashval_t
int_cst_hash_hash (const void *);
237 static int int_cst_hash_eq (const void *, const void *);
238 static hashval_t
cl_option_hash_hash (const void *);
239 static int cl_option_hash_eq (const void *, const void *);
240 static void print_type_hash_statistics (void);
241 static void print_debug_expr_statistics (void);
242 static void print_value_expr_statistics (void);
243 static int type_hash_marked_p (const void *);
244 static void type_hash_list (const_tree
, inchash::hash
&);
245 static void attribute_hash_list (const_tree
, inchash::hash
&);
247 tree global_trees
[TI_MAX
];
248 tree integer_types
[itk_none
];
250 bool int_n_enabled_p
[NUM_INT_N_ENTS
];
251 struct int_n_trees_t int_n_trees
[NUM_INT_N_ENTS
];
253 unsigned char tree_contains_struct
[MAX_TREE_CODES
][64];
255 /* Number of operands for each OpenMP clause. */
256 unsigned const char omp_clause_num_ops
[] =
258 0, /* OMP_CLAUSE_ERROR */
259 1, /* OMP_CLAUSE_PRIVATE */
260 1, /* OMP_CLAUSE_SHARED */
261 1, /* OMP_CLAUSE_FIRSTPRIVATE */
262 2, /* OMP_CLAUSE_LASTPRIVATE */
263 4, /* OMP_CLAUSE_REDUCTION */
264 1, /* OMP_CLAUSE_COPYIN */
265 1, /* OMP_CLAUSE_COPYPRIVATE */
266 3, /* OMP_CLAUSE_LINEAR */
267 2, /* OMP_CLAUSE_ALIGNED */
268 1, /* OMP_CLAUSE_DEPEND */
269 1, /* OMP_CLAUSE_UNIFORM */
270 2, /* OMP_CLAUSE_FROM */
271 2, /* OMP_CLAUSE_TO */
272 2, /* OMP_CLAUSE_MAP */
273 1, /* OMP_CLAUSE__LOOPTEMP_ */
274 1, /* OMP_CLAUSE_IF */
275 1, /* OMP_CLAUSE_NUM_THREADS */
276 1, /* OMP_CLAUSE_SCHEDULE */
277 0, /* OMP_CLAUSE_NOWAIT */
278 0, /* OMP_CLAUSE_ORDERED */
279 0, /* OMP_CLAUSE_DEFAULT */
280 3, /* OMP_CLAUSE_COLLAPSE */
281 0, /* OMP_CLAUSE_UNTIED */
282 1, /* OMP_CLAUSE_FINAL */
283 0, /* OMP_CLAUSE_MERGEABLE */
284 1, /* OMP_CLAUSE_DEVICE */
285 1, /* OMP_CLAUSE_DIST_SCHEDULE */
286 0, /* OMP_CLAUSE_INBRANCH */
287 0, /* OMP_CLAUSE_NOTINBRANCH */
288 1, /* OMP_CLAUSE_NUM_TEAMS */
289 1, /* OMP_CLAUSE_THREAD_LIMIT */
290 0, /* OMP_CLAUSE_PROC_BIND */
291 1, /* OMP_CLAUSE_SAFELEN */
292 1, /* OMP_CLAUSE_SIMDLEN */
293 0, /* OMP_CLAUSE_FOR */
294 0, /* OMP_CLAUSE_PARALLEL */
295 0, /* OMP_CLAUSE_SECTIONS */
296 0, /* OMP_CLAUSE_TASKGROUP */
297 1, /* OMP_CLAUSE__SIMDUID_ */
298 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
301 const char * const omp_clause_code_name
[] =
347 /* Return the tree node structure used by tree code CODE. */
349 static inline enum tree_node_structure_enum
350 tree_node_structure_for_code (enum tree_code code
)
352 switch (TREE_CODE_CLASS (code
))
354 case tcc_declaration
:
359 return TS_FIELD_DECL
;
365 return TS_LABEL_DECL
;
367 return TS_RESULT_DECL
;
368 case DEBUG_EXPR_DECL
:
371 return TS_CONST_DECL
;
375 return TS_FUNCTION_DECL
;
376 case TRANSLATION_UNIT_DECL
:
377 return TS_TRANSLATION_UNIT_DECL
;
379 return TS_DECL_NON_COMMON
;
383 return TS_TYPE_NON_COMMON
;
392 default: /* tcc_constant and tcc_exceptional */
397 /* tcc_constant cases. */
398 case VOID_CST
: return TS_TYPED
;
399 case INTEGER_CST
: return TS_INT_CST
;
400 case REAL_CST
: return TS_REAL_CST
;
401 case FIXED_CST
: return TS_FIXED_CST
;
402 case COMPLEX_CST
: return TS_COMPLEX
;
403 case VECTOR_CST
: return TS_VECTOR
;
404 case STRING_CST
: return TS_STRING
;
405 /* tcc_exceptional cases. */
406 case ERROR_MARK
: return TS_COMMON
;
407 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
408 case TREE_LIST
: return TS_LIST
;
409 case TREE_VEC
: return TS_VEC
;
410 case SSA_NAME
: return TS_SSA_NAME
;
411 case PLACEHOLDER_EXPR
: return TS_COMMON
;
412 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
413 case BLOCK
: return TS_BLOCK
;
414 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
415 case TREE_BINFO
: return TS_BINFO
;
416 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
417 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
418 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
426 /* Initialize tree_contains_struct to describe the hierarchy of tree
430 initialize_tree_contains_struct (void)
434 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
437 enum tree_node_structure_enum ts_code
;
439 code
= (enum tree_code
) i
;
440 ts_code
= tree_node_structure_for_code (code
);
442 /* Mark the TS structure itself. */
443 tree_contains_struct
[code
][ts_code
] = 1;
445 /* Mark all the structures that TS is derived from. */
463 case TS_STATEMENT_LIST
:
464 MARK_TS_TYPED (code
);
468 case TS_DECL_MINIMAL
:
474 case TS_OPTIMIZATION
:
475 case TS_TARGET_OPTION
:
476 MARK_TS_COMMON (code
);
479 case TS_TYPE_WITH_LANG_SPECIFIC
:
480 MARK_TS_TYPE_COMMON (code
);
483 case TS_TYPE_NON_COMMON
:
484 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
488 MARK_TS_DECL_MINIMAL (code
);
493 MARK_TS_DECL_COMMON (code
);
496 case TS_DECL_NON_COMMON
:
497 MARK_TS_DECL_WITH_VIS (code
);
500 case TS_DECL_WITH_VIS
:
504 MARK_TS_DECL_WRTL (code
);
508 MARK_TS_DECL_COMMON (code
);
512 MARK_TS_DECL_WITH_VIS (code
);
516 case TS_FUNCTION_DECL
:
517 MARK_TS_DECL_NON_COMMON (code
);
520 case TS_TRANSLATION_UNIT_DECL
:
521 MARK_TS_DECL_COMMON (code
);
529 /* Basic consistency checks for attributes used in fold. */
530 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
531 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
532 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
533 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
534 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
535 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
536 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
537 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
538 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
539 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
540 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
541 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
542 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
543 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
544 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
545 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
546 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
547 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
548 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
549 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
550 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
551 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
552 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
553 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
554 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
555 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
556 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
557 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
558 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
559 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
560 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
561 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
562 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
563 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
564 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
565 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
566 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
567 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
568 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
569 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
578 /* Initialize the hash table of types. */
579 type_hash_table
= htab_create_ggc (TYPE_HASH_INITIAL_SIZE
, type_hash_hash
,
582 debug_expr_for_decl
= htab_create_ggc (512, tree_decl_map_hash
,
583 tree_decl_map_eq
, 0);
585 value_expr_for_decl
= htab_create_ggc (512, tree_decl_map_hash
,
586 tree_decl_map_eq
, 0);
588 int_cst_hash_table
= htab_create_ggc (1024, int_cst_hash_hash
,
589 int_cst_hash_eq
, NULL
);
591 int_cst_node
= make_int_cst (1, 1);
593 cl_option_hash_table
= htab_create_ggc (64, cl_option_hash_hash
,
594 cl_option_hash_eq
, NULL
);
596 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
597 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
599 /* Initialize the tree_contains_struct array. */
600 initialize_tree_contains_struct ();
601 lang_hooks
.init_ts ();
605 /* The name of the object as the assembler will see it (but before any
606 translations made by ASM_OUTPUT_LABELREF). Often this is the same
607 as DECL_NAME. It is an IDENTIFIER_NODE. */
609 decl_assembler_name (tree decl
)
611 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
612 lang_hooks
.set_decl_assembler_name (decl
);
613 return DECL_WITH_VIS_CHECK (decl
)->decl_with_vis
.assembler_name
;
616 /* When the target supports COMDAT groups, this indicates which group the
617 DECL is associated with. This can be either an IDENTIFIER_NODE or a
618 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
620 decl_comdat_group (const_tree node
)
622 struct symtab_node
*snode
= symtab_node::get (node
);
625 return snode
->get_comdat_group ();
628 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
630 decl_comdat_group_id (const_tree node
)
632 struct symtab_node
*snode
= symtab_node::get (node
);
635 return snode
->get_comdat_group_id ();
638 /* When the target supports named section, return its name as IDENTIFIER_NODE
639 or NULL if it is in no section. */
641 decl_section_name (const_tree node
)
643 struct symtab_node
*snode
= symtab_node::get (node
);
646 return snode
->get_section ();
649 /* Set section section name of NODE to VALUE (that is expected to
650 be identifier node) */
652 set_decl_section_name (tree node
, const char *value
)
654 struct symtab_node
*snode
;
658 snode
= symtab_node::get (node
);
662 else if (TREE_CODE (node
) == VAR_DECL
)
663 snode
= varpool_node::get_create (node
);
665 snode
= cgraph_node::get_create (node
);
666 snode
->set_section (value
);
669 /* Return TLS model of a variable NODE. */
671 decl_tls_model (const_tree node
)
673 struct varpool_node
*snode
= varpool_node::get (node
);
675 return TLS_MODEL_NONE
;
676 return snode
->tls_model
;
679 /* Set TLS model of variable NODE to MODEL. */
681 set_decl_tls_model (tree node
, enum tls_model model
)
683 struct varpool_node
*vnode
;
685 if (model
== TLS_MODEL_NONE
)
687 vnode
= varpool_node::get (node
);
692 vnode
= varpool_node::get_create (node
);
693 vnode
->tls_model
= model
;
696 /* Compute the number of bytes occupied by a tree with code CODE.
697 This function cannot be used for nodes that have variable sizes,
698 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
700 tree_code_size (enum tree_code code
)
702 switch (TREE_CODE_CLASS (code
))
704 case tcc_declaration
: /* A decl node */
709 return sizeof (struct tree_field_decl
);
711 return sizeof (struct tree_parm_decl
);
713 return sizeof (struct tree_var_decl
);
715 return sizeof (struct tree_label_decl
);
717 return sizeof (struct tree_result_decl
);
719 return sizeof (struct tree_const_decl
);
721 return sizeof (struct tree_type_decl
);
723 return sizeof (struct tree_function_decl
);
724 case DEBUG_EXPR_DECL
:
725 return sizeof (struct tree_decl_with_rtl
);
726 case TRANSLATION_UNIT_DECL
:
727 return sizeof (struct tree_translation_unit_decl
);
731 return sizeof (struct tree_decl_non_common
);
733 return lang_hooks
.tree_size (code
);
737 case tcc_type
: /* a type node */
738 return sizeof (struct tree_type_non_common
);
740 case tcc_reference
: /* a reference */
741 case tcc_expression
: /* an expression */
742 case tcc_statement
: /* an expression with side effects */
743 case tcc_comparison
: /* a comparison expression */
744 case tcc_unary
: /* a unary arithmetic expression */
745 case tcc_binary
: /* a binary arithmetic expression */
746 return (sizeof (struct tree_exp
)
747 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
749 case tcc_constant
: /* a constant */
752 case VOID_CST
: return sizeof (struct tree_typed
);
753 case INTEGER_CST
: gcc_unreachable ();
754 case REAL_CST
: return sizeof (struct tree_real_cst
);
755 case FIXED_CST
: return sizeof (struct tree_fixed_cst
);
756 case COMPLEX_CST
: return sizeof (struct tree_complex
);
757 case VECTOR_CST
: return sizeof (struct tree_vector
);
758 case STRING_CST
: gcc_unreachable ();
760 return lang_hooks
.tree_size (code
);
763 case tcc_exceptional
: /* something random, like an identifier. */
766 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
767 case TREE_LIST
: return sizeof (struct tree_list
);
770 case PLACEHOLDER_EXPR
: return sizeof (struct tree_common
);
773 case OMP_CLAUSE
: gcc_unreachable ();
775 case SSA_NAME
: return sizeof (struct tree_ssa_name
);
777 case STATEMENT_LIST
: return sizeof (struct tree_statement_list
);
778 case BLOCK
: return sizeof (struct tree_block
);
779 case CONSTRUCTOR
: return sizeof (struct tree_constructor
);
780 case OPTIMIZATION_NODE
: return sizeof (struct tree_optimization_option
);
781 case TARGET_OPTION_NODE
: return sizeof (struct tree_target_option
);
784 return lang_hooks
.tree_size (code
);
792 /* Compute the number of bytes occupied by NODE. This routine only
793 looks at TREE_CODE, except for those nodes that have variable sizes. */
795 tree_size (const_tree node
)
797 const enum tree_code code
= TREE_CODE (node
);
801 return (sizeof (struct tree_int_cst
)
802 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
805 return (offsetof (struct tree_binfo
, base_binfos
)
807 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
810 return (sizeof (struct tree_vec
)
811 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
814 return (sizeof (struct tree_vector
)
815 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node
)) - 1) * sizeof (tree
));
818 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
821 return (sizeof (struct tree_omp_clause
)
822 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
826 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
827 return (sizeof (struct tree_exp
)
828 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
830 return tree_code_size (code
);
834 /* Record interesting allocation statistics for a tree node with CODE
838 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED
,
839 size_t length ATTRIBUTE_UNUSED
)
841 enum tree_code_class type
= TREE_CODE_CLASS (code
);
844 if (!GATHER_STATISTICS
)
849 case tcc_declaration
: /* A decl node */
853 case tcc_type
: /* a type node */
857 case tcc_statement
: /* an expression with side effects */
861 case tcc_reference
: /* a reference */
865 case tcc_expression
: /* an expression */
866 case tcc_comparison
: /* a comparison expression */
867 case tcc_unary
: /* a unary arithmetic expression */
868 case tcc_binary
: /* a binary arithmetic expression */
872 case tcc_constant
: /* a constant */
876 case tcc_exceptional
: /* something random, like an identifier. */
879 case IDENTIFIER_NODE
:
892 kind
= ssa_name_kind
;
904 kind
= omp_clause_kind
;
921 tree_code_counts
[(int) code
]++;
922 tree_node_counts
[(int) kind
]++;
923 tree_node_sizes
[(int) kind
] += length
;
926 /* Allocate and return a new UID from the DECL_UID namespace. */
929 allocate_decl_uid (void)
931 return next_decl_uid
++;
934 /* Return a newly allocated node of code CODE. For decl and type
935 nodes, some other fields are initialized. The rest of the node is
936 initialized to zero. This function cannot be used for TREE_VEC,
937 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
940 Achoo! I got a code in the node. */
943 make_node_stat (enum tree_code code MEM_STAT_DECL
)
946 enum tree_code_class type
= TREE_CODE_CLASS (code
);
947 size_t length
= tree_code_size (code
);
949 record_node_allocation_statistics (code
, length
);
951 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
952 TREE_SET_CODE (t
, code
);
957 TREE_SIDE_EFFECTS (t
) = 1;
960 case tcc_declaration
:
961 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
963 if (code
== FUNCTION_DECL
)
965 DECL_ALIGN (t
) = FUNCTION_BOUNDARY
;
966 DECL_MODE (t
) = FUNCTION_MODE
;
971 DECL_SOURCE_LOCATION (t
) = input_location
;
972 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
973 DECL_UID (t
) = --next_debug_decl_uid
;
976 DECL_UID (t
) = allocate_decl_uid ();
977 SET_DECL_PT_UID (t
, -1);
979 if (TREE_CODE (t
) == LABEL_DECL
)
980 LABEL_DECL_UID (t
) = -1;
985 TYPE_UID (t
) = next_type_uid
++;
986 TYPE_ALIGN (t
) = BITS_PER_UNIT
;
987 TYPE_USER_ALIGN (t
) = 0;
988 TYPE_MAIN_VARIANT (t
) = t
;
989 TYPE_CANONICAL (t
) = t
;
991 /* Default to no attributes for type, but let target change that. */
992 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
993 targetm
.set_default_type_attributes (t
);
995 /* We have not yet computed the alias set for this type. */
996 TYPE_ALIAS_SET (t
) = -1;
1000 TREE_CONSTANT (t
) = 1;
1003 case tcc_expression
:
1009 case PREDECREMENT_EXPR
:
1010 case PREINCREMENT_EXPR
:
1011 case POSTDECREMENT_EXPR
:
1012 case POSTINCREMENT_EXPR
:
1013 /* All of these have side-effects, no matter what their
1015 TREE_SIDE_EFFECTS (t
) = 1;
1024 /* Other classes need no special treatment. */
1031 /* Return a new node with the same contents as NODE except that its
1032 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1035 copy_node_stat (tree node MEM_STAT_DECL
)
1038 enum tree_code code
= TREE_CODE (node
);
1041 gcc_assert (code
!= STATEMENT_LIST
);
1043 length
= tree_size (node
);
1044 record_node_allocation_statistics (code
, length
);
1045 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1046 memcpy (t
, node
, length
);
1048 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
1050 TREE_ASM_WRITTEN (t
) = 0;
1051 TREE_VISITED (t
) = 0;
1053 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
1055 if (code
== DEBUG_EXPR_DECL
)
1056 DECL_UID (t
) = --next_debug_decl_uid
;
1059 DECL_UID (t
) = allocate_decl_uid ();
1060 if (DECL_PT_UID_SET_P (node
))
1061 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
1063 if ((TREE_CODE (node
) == PARM_DECL
|| TREE_CODE (node
) == VAR_DECL
)
1064 && DECL_HAS_VALUE_EXPR_P (node
))
1066 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
1067 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1069 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1070 if (TREE_CODE (node
) == VAR_DECL
)
1072 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
1073 t
->decl_with_vis
.symtab_node
= NULL
;
1075 if (TREE_CODE (node
) == VAR_DECL
&& DECL_HAS_INIT_PRIORITY_P (node
))
1077 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
1078 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
1080 if (TREE_CODE (node
) == FUNCTION_DECL
)
1082 DECL_STRUCT_FUNCTION (t
) = NULL
;
1083 t
->decl_with_vis
.symtab_node
= NULL
;
1086 else if (TREE_CODE_CLASS (code
) == tcc_type
)
1088 TYPE_UID (t
) = next_type_uid
++;
1089 /* The following is so that the debug code for
1090 the copy is different from the original type.
1091 The two statements usually duplicate each other
1092 (because they clear fields of the same union),
1093 but the optimizer should catch that. */
1094 TYPE_SYMTAB_POINTER (t
) = 0;
1095 TYPE_SYMTAB_ADDRESS (t
) = 0;
1097 /* Do not copy the values cache. */
1098 if (TYPE_CACHED_VALUES_P (t
))
1100 TYPE_CACHED_VALUES_P (t
) = 0;
1101 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1108 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1109 For example, this can copy a list made of TREE_LIST nodes. */
1112 copy_list (tree list
)
1120 head
= prev
= copy_node (list
);
1121 next
= TREE_CHAIN (list
);
1124 TREE_CHAIN (prev
) = copy_node (next
);
1125 prev
= TREE_CHAIN (prev
);
1126 next
= TREE_CHAIN (next
);
1132 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1133 INTEGER_CST with value CST and type TYPE. */
1136 get_int_cst_ext_nunits (tree type
, const wide_int
&cst
)
1138 gcc_checking_assert (cst
.get_precision () == TYPE_PRECISION (type
));
1139 /* We need an extra zero HWI if CST is an unsigned integer with its
1140 upper bit set, and if CST occupies a whole number of HWIs. */
1141 if (TYPE_UNSIGNED (type
)
1143 && (cst
.get_precision () % HOST_BITS_PER_WIDE_INT
) == 0)
1144 return cst
.get_precision () / HOST_BITS_PER_WIDE_INT
+ 1;
1145 return cst
.get_len ();
1148 /* Return a new INTEGER_CST with value CST and type TYPE. */
1151 build_new_int_cst (tree type
, const wide_int
&cst
)
1153 unsigned int len
= cst
.get_len ();
1154 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1155 tree nt
= make_int_cst (len
, ext_len
);
1160 TREE_INT_CST_ELT (nt
, ext_len
) = 0;
1161 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1162 TREE_INT_CST_ELT (nt
, i
) = -1;
1164 else if (TYPE_UNSIGNED (type
)
1165 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
1168 TREE_INT_CST_ELT (nt
, len
)
1169 = zext_hwi (cst
.elt (len
),
1170 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1173 for (unsigned int i
= 0; i
< len
; i
++)
1174 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1175 TREE_TYPE (nt
) = type
;
1179 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1182 build_int_cst (tree type
, HOST_WIDE_INT low
)
1184 /* Support legacy code. */
1186 type
= integer_type_node
;
1188 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1192 build_int_cstu (tree type
, unsigned HOST_WIDE_INT cst
)
1194 return wide_int_to_tree (type
, wi::uhwi (cst
, TYPE_PRECISION (type
)));
1197 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1200 build_int_cst_type (tree type
, HOST_WIDE_INT low
)
1203 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1206 /* Constructs tree in type TYPE from with value given by CST. Signedness
1207 of CST is assumed to be the same as the signedness of TYPE. */
1210 double_int_to_tree (tree type
, double_int cst
)
1212 return wide_int_to_tree (type
, widest_int::from (cst
, TYPE_SIGN (type
)));
1215 /* We force the wide_int CST to the range of the type TYPE by sign or
1216 zero extending it. OVERFLOWABLE indicates if we are interested in
1217 overflow of the value, when >0 we are only interested in signed
1218 overflow, for <0 we are interested in any overflow. OVERFLOWED
1219 indicates whether overflow has already occurred. CONST_OVERFLOWED
1220 indicates whether constant overflow has already occurred. We force
1221 T's value to be within range of T's type (by setting to 0 or 1 all
1222 the bits outside the type's range). We set TREE_OVERFLOWED if,
1223 OVERFLOWED is nonzero,
1224 or OVERFLOWABLE is >0 and signed overflow occurs
1225 or OVERFLOWABLE is <0 and any overflow occurs
1226 We return a new tree node for the extended wide_int. The node
1227 is shared if no overflow flags are set. */
1231 force_fit_type (tree type
, const wide_int_ref
&cst
,
1232 int overflowable
, bool overflowed
)
1234 signop sign
= TYPE_SIGN (type
);
1236 /* If we need to set overflow flags, return a new unshared node. */
1237 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1241 || (overflowable
> 0 && sign
== SIGNED
))
1243 wide_int tmp
= wide_int::from (cst
, TYPE_PRECISION (type
), sign
);
1244 tree t
= build_new_int_cst (type
, tmp
);
1245 TREE_OVERFLOW (t
) = 1;
1250 /* Else build a shared node. */
1251 return wide_int_to_tree (type
, cst
);
1254 /* These are the hash table functions for the hash table of INTEGER_CST
1255 nodes of a sizetype. */
1257 /* Return the hash code code X, an INTEGER_CST. */
1260 int_cst_hash_hash (const void *x
)
1262 const_tree
const t
= (const_tree
) x
;
1263 hashval_t code
= htab_hash_pointer (TREE_TYPE (t
));
1266 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1267 code
^= TREE_INT_CST_ELT (t
, i
);
1272 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1273 is the same as that given by *Y, which is the same. */
1276 int_cst_hash_eq (const void *x
, const void *y
)
1278 const_tree
const xt
= (const_tree
) x
;
1279 const_tree
const yt
= (const_tree
) y
;
1281 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1282 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1283 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1286 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1287 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1293 /* Create an INT_CST node of TYPE and value CST.
1294 The returned node is always shared. For small integers we use a
1295 per-type vector cache, for larger ones we use a single hash table.
1296 The value is extended from its precision according to the sign of
1297 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1298 the upper bits and ensures that hashing and value equality based
1299 upon the underlying HOST_WIDE_INTs works without masking. */
1302 wide_int_to_tree (tree type
, const wide_int_ref
&pcst
)
1309 unsigned int prec
= TYPE_PRECISION (type
);
1310 signop sgn
= TYPE_SIGN (type
);
1312 /* Verify that everything is canonical. */
1313 int l
= pcst
.get_len ();
1316 if (pcst
.elt (l
- 1) == 0)
1317 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1318 if (pcst
.elt (l
- 1) == (HOST_WIDE_INT
) -1)
1319 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1322 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1323 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1327 /* We just need to store a single HOST_WIDE_INT. */
1329 if (TYPE_UNSIGNED (type
))
1330 hwi
= cst
.to_uhwi ();
1332 hwi
= cst
.to_shwi ();
1334 switch (TREE_CODE (type
))
1337 gcc_assert (hwi
== 0);
1341 case REFERENCE_TYPE
:
1342 /* Cache NULL pointer. */
1351 /* Cache false or true. */
1359 if (TYPE_SIGN (type
) == UNSIGNED
)
1362 limit
= INTEGER_SHARE_LIMIT
;
1363 if (IN_RANGE (hwi
, 0, INTEGER_SHARE_LIMIT
- 1))
1368 /* Cache [-1, N). */
1369 limit
= INTEGER_SHARE_LIMIT
+ 1;
1370 if (IN_RANGE (hwi
, -1, INTEGER_SHARE_LIMIT
- 1))
1384 /* Look for it in the type's vector of small shared ints. */
1385 if (!TYPE_CACHED_VALUES_P (type
))
1387 TYPE_CACHED_VALUES_P (type
) = 1;
1388 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1391 t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
);
1393 /* Make sure no one is clobbering the shared constant. */
1394 gcc_checking_assert (TREE_TYPE (t
) == type
1395 && TREE_INT_CST_NUNITS (t
) == 1
1396 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1397 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1398 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1401 /* Create a new shared int. */
1402 t
= build_new_int_cst (type
, cst
);
1403 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1408 /* Use the cache of larger shared ints, using int_cst_node as
1412 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1413 TREE_TYPE (int_cst_node
) = type
;
1415 slot
= htab_find_slot (int_cst_hash_table
, int_cst_node
, INSERT
);
1419 /* Insert this one into the hash table. */
1422 /* Make a new node for next time round. */
1423 int_cst_node
= make_int_cst (1, 1);
1429 /* The value either hashes properly or we drop it on the floor
1430 for the gc to take care of. There will not be enough of them
1434 tree nt
= build_new_int_cst (type
, cst
);
1435 slot
= htab_find_slot (int_cst_hash_table
, nt
, INSERT
);
1439 /* Insert this one into the hash table. */
1449 cache_integer_cst (tree t
)
1451 tree type
= TREE_TYPE (t
);
1454 int prec
= TYPE_PRECISION (type
);
1456 gcc_assert (!TREE_OVERFLOW (t
));
1458 switch (TREE_CODE (type
))
1461 gcc_assert (integer_zerop (t
));
1465 case REFERENCE_TYPE
:
1466 /* Cache NULL pointer. */
1467 if (integer_zerop (t
))
1475 /* Cache false or true. */
1477 if (wi::ltu_p (t
, 2))
1478 ix
= TREE_INT_CST_ELT (t
, 0);
1483 if (TYPE_UNSIGNED (type
))
1486 limit
= INTEGER_SHARE_LIMIT
;
1488 /* This is a little hokie, but if the prec is smaller than
1489 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1490 obvious test will not get the correct answer. */
1491 if (prec
< HOST_BITS_PER_WIDE_INT
)
1493 if (tree_to_uhwi (t
) < (unsigned HOST_WIDE_INT
) INTEGER_SHARE_LIMIT
)
1494 ix
= tree_to_uhwi (t
);
1496 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1497 ix
= tree_to_uhwi (t
);
1502 limit
= INTEGER_SHARE_LIMIT
+ 1;
1504 if (integer_minus_onep (t
))
1506 else if (!wi::neg_p (t
))
1508 if (prec
< HOST_BITS_PER_WIDE_INT
)
1510 if (tree_to_shwi (t
) < INTEGER_SHARE_LIMIT
)
1511 ix
= tree_to_shwi (t
) + 1;
1513 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1514 ix
= tree_to_shwi (t
) + 1;
1528 /* Look for it in the type's vector of small shared ints. */
1529 if (!TYPE_CACHED_VALUES_P (type
))
1531 TYPE_CACHED_VALUES_P (type
) = 1;
1532 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1535 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) == NULL_TREE
);
1536 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1540 /* Use the cache of larger shared ints. */
1543 slot
= htab_find_slot (int_cst_hash_table
, t
, INSERT
);
1544 /* If there is already an entry for the number verify it's the
1547 gcc_assert (wi::eq_p (tree (*slot
), t
));
1549 /* Otherwise insert this one into the hash table. */
1555 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1556 and the rest are zeros. */
1559 build_low_bits_mask (tree type
, unsigned bits
)
1561 gcc_assert (bits
<= TYPE_PRECISION (type
));
1563 return wide_int_to_tree (type
, wi::mask (bits
, false,
1564 TYPE_PRECISION (type
)));
1567 /* Checks that X is integer constant that can be expressed in (unsigned)
1568 HOST_WIDE_INT without loss of precision. */
1571 cst_and_fits_in_hwi (const_tree x
)
1573 if (TREE_CODE (x
) != INTEGER_CST
)
1576 if (TYPE_PRECISION (TREE_TYPE (x
)) > HOST_BITS_PER_WIDE_INT
)
1579 return TREE_INT_CST_NUNITS (x
) == 1;
1582 /* Build a newly constructed TREE_VEC node of length LEN. */
1585 make_vector_stat (unsigned len MEM_STAT_DECL
)
1588 unsigned length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vector
);
1590 record_node_allocation_statistics (VECTOR_CST
, length
);
1592 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1594 TREE_SET_CODE (t
, VECTOR_CST
);
1595 TREE_CONSTANT (t
) = 1;
1600 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1601 are in a list pointed to by VALS. */
1604 build_vector_stat (tree type
, tree
*vals MEM_STAT_DECL
)
1608 tree v
= make_vector (TYPE_VECTOR_SUBPARTS (type
));
1609 TREE_TYPE (v
) = type
;
1611 /* Iterate through elements and check for overflow. */
1612 for (cnt
= 0; cnt
< TYPE_VECTOR_SUBPARTS (type
); ++cnt
)
1614 tree value
= vals
[cnt
];
1616 VECTOR_CST_ELT (v
, cnt
) = value
;
1618 /* Don't crash if we get an address constant. */
1619 if (!CONSTANT_CLASS_P (value
))
1622 over
|= TREE_OVERFLOW (value
);
1625 TREE_OVERFLOW (v
) = over
;
1629 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1630 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1633 build_vector_from_ctor (tree type
, vec
<constructor_elt
, va_gc
> *v
)
1635 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
1636 unsigned HOST_WIDE_INT idx
;
1639 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
1641 for (; idx
< TYPE_VECTOR_SUBPARTS (type
); ++idx
)
1642 vec
[idx
] = build_zero_cst (TREE_TYPE (type
));
1644 return build_vector (type
, vec
);
1647 /* Build a vector of type VECTYPE where all the elements are SCs. */
1649 build_vector_from_val (tree vectype
, tree sc
)
1651 int i
, nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
1653 if (sc
== error_mark_node
)
1656 /* Verify that the vector type is suitable for SC. Note that there
1657 is some inconsistency in the type-system with respect to restrict
1658 qualifications of pointers. Vector types always have a main-variant
1659 element type and the qualification is applied to the vector-type.
1660 So TREE_TYPE (vector-type) does not return a properly qualified
1661 vector element-type. */
1662 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
1663 TREE_TYPE (vectype
)));
1665 if (CONSTANT_CLASS_P (sc
))
1667 tree
*v
= XALLOCAVEC (tree
, nunits
);
1668 for (i
= 0; i
< nunits
; ++i
)
1670 return build_vector (vectype
, v
);
1674 vec
<constructor_elt
, va_gc
> *v
;
1675 vec_alloc (v
, nunits
);
1676 for (i
= 0; i
< nunits
; ++i
)
1677 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
1678 return build_constructor (vectype
, v
);
1682 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1683 are in the vec pointed to by VALS. */
1685 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals
)
1687 tree c
= make_node (CONSTRUCTOR
);
1689 constructor_elt
*elt
;
1690 bool constant_p
= true;
1691 bool side_effects_p
= false;
1693 TREE_TYPE (c
) = type
;
1694 CONSTRUCTOR_ELTS (c
) = vals
;
1696 FOR_EACH_VEC_SAFE_ELT (vals
, i
, elt
)
1698 /* Mostly ctors will have elts that don't have side-effects, so
1699 the usual case is to scan all the elements. Hence a single
1700 loop for both const and side effects, rather than one loop
1701 each (with early outs). */
1702 if (!TREE_CONSTANT (elt
->value
))
1704 if (TREE_SIDE_EFFECTS (elt
->value
))
1705 side_effects_p
= true;
1708 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
1709 TREE_CONSTANT (c
) = constant_p
;
1714 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1717 build_constructor_single (tree type
, tree index
, tree value
)
1719 vec
<constructor_elt
, va_gc
> *v
;
1720 constructor_elt elt
= {index
, value
};
1723 v
->quick_push (elt
);
1725 return build_constructor (type
, v
);
1729 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1730 are in a list pointed to by VALS. */
1732 build_constructor_from_list (tree type
, tree vals
)
1735 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1739 vec_alloc (v
, list_length (vals
));
1740 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
1741 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
1744 return build_constructor (type
, v
);
1747 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1748 of elements, provided as index/value pairs. */
1751 build_constructor_va (tree type
, int nelts
, ...)
1753 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1756 va_start (p
, nelts
);
1757 vec_alloc (v
, nelts
);
1760 tree index
= va_arg (p
, tree
);
1761 tree value
= va_arg (p
, tree
);
1762 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
1765 return build_constructor (type
, v
);
1768 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1771 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
1774 FIXED_VALUE_TYPE
*fp
;
1776 v
= make_node (FIXED_CST
);
1777 fp
= ggc_alloc
<fixed_value
> ();
1778 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
1780 TREE_TYPE (v
) = type
;
1781 TREE_FIXED_CST_PTR (v
) = fp
;
1785 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1788 build_real (tree type
, REAL_VALUE_TYPE d
)
1791 REAL_VALUE_TYPE
*dp
;
1794 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1795 Consider doing it via real_convert now. */
1797 v
= make_node (REAL_CST
);
1798 dp
= ggc_alloc
<real_value
> ();
1799 memcpy (dp
, &d
, sizeof (REAL_VALUE_TYPE
));
1801 TREE_TYPE (v
) = type
;
1802 TREE_REAL_CST_PTR (v
) = dp
;
1803 TREE_OVERFLOW (v
) = overflow
;
1807 /* Return a new REAL_CST node whose type is TYPE
1808 and whose value is the integer value of the INTEGER_CST node I. */
1811 real_value_from_int_cst (const_tree type
, const_tree i
)
1815 /* Clear all bits of the real value type so that we can later do
1816 bitwise comparisons to see if two values are the same. */
1817 memset (&d
, 0, sizeof d
);
1819 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, i
,
1820 TYPE_SIGN (TREE_TYPE (i
)));
1824 /* Given a tree representing an integer constant I, return a tree
1825 representing the same value as a floating-point constant of type TYPE. */
1828 build_real_from_int_cst (tree type
, const_tree i
)
1831 int overflow
= TREE_OVERFLOW (i
);
1833 v
= build_real (type
, real_value_from_int_cst (type
, i
));
1835 TREE_OVERFLOW (v
) |= overflow
;
1839 /* Return a newly constructed STRING_CST node whose value is
1840 the LEN characters at STR.
1841 Note that for a C string literal, LEN should include the trailing NUL.
1842 The TREE_TYPE is not initialized. */
1845 build_string (int len
, const char *str
)
1850 /* Do not waste bytes provided by padding of struct tree_string. */
1851 length
= len
+ offsetof (struct tree_string
, str
) + 1;
1853 record_node_allocation_statistics (STRING_CST
, length
);
1855 s
= (tree
) ggc_internal_alloc (length
);
1857 memset (s
, 0, sizeof (struct tree_typed
));
1858 TREE_SET_CODE (s
, STRING_CST
);
1859 TREE_CONSTANT (s
) = 1;
1860 TREE_STRING_LENGTH (s
) = len
;
1861 memcpy (s
->string
.str
, str
, len
);
1862 s
->string
.str
[len
] = '\0';
1867 /* Return a newly constructed COMPLEX_CST node whose value is
1868 specified by the real and imaginary parts REAL and IMAG.
1869 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1870 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1873 build_complex (tree type
, tree real
, tree imag
)
1875 tree t
= make_node (COMPLEX_CST
);
1877 TREE_REALPART (t
) = real
;
1878 TREE_IMAGPART (t
) = imag
;
1879 TREE_TYPE (t
) = type
? type
: build_complex_type (TREE_TYPE (real
));
1880 TREE_OVERFLOW (t
) = TREE_OVERFLOW (real
) | TREE_OVERFLOW (imag
);
1884 /* Return a constant of arithmetic type TYPE which is the
1885 multiplicative identity of the set TYPE. */
1888 build_one_cst (tree type
)
1890 switch (TREE_CODE (type
))
1892 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1893 case POINTER_TYPE
: case REFERENCE_TYPE
:
1895 return build_int_cst (type
, 1);
1898 return build_real (type
, dconst1
);
1900 case FIXED_POINT_TYPE
:
1901 /* We can only generate 1 for accum types. */
1902 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
1903 return build_fixed (type
, FCONST1 (TYPE_MODE (type
)));
1907 tree scalar
= build_one_cst (TREE_TYPE (type
));
1909 return build_vector_from_val (type
, scalar
);
1913 return build_complex (type
,
1914 build_one_cst (TREE_TYPE (type
)),
1915 build_zero_cst (TREE_TYPE (type
)));
1922 /* Return an integer of type TYPE containing all 1's in as much precision as
1923 it contains, or a complex or vector whose subparts are such integers. */
1926 build_all_ones_cst (tree type
)
1928 if (TREE_CODE (type
) == COMPLEX_TYPE
)
1930 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
1931 return build_complex (type
, scalar
, scalar
);
1934 return build_minus_one_cst (type
);
1937 /* Return a constant of arithmetic type TYPE which is the
1938 opposite of the multiplicative identity of the set TYPE. */
1941 build_minus_one_cst (tree type
)
1943 switch (TREE_CODE (type
))
1945 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1946 case POINTER_TYPE
: case REFERENCE_TYPE
:
1948 return build_int_cst (type
, -1);
1951 return build_real (type
, dconstm1
);
1953 case FIXED_POINT_TYPE
:
1954 /* We can only generate 1 for accum types. */
1955 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
1956 return build_fixed (type
, fixed_from_double_int (double_int_minus_one
,
1961 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
1963 return build_vector_from_val (type
, scalar
);
1967 return build_complex (type
,
1968 build_minus_one_cst (TREE_TYPE (type
)),
1969 build_zero_cst (TREE_TYPE (type
)));
1976 /* Build 0 constant of type TYPE. This is used by constructor folding
1977 and thus the constant should be represented in memory by
1981 build_zero_cst (tree type
)
1983 switch (TREE_CODE (type
))
1985 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1986 case POINTER_TYPE
: case REFERENCE_TYPE
:
1987 case OFFSET_TYPE
: case NULLPTR_TYPE
:
1988 return build_int_cst (type
, 0);
1991 return build_real (type
, dconst0
);
1993 case FIXED_POINT_TYPE
:
1994 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
1998 tree scalar
= build_zero_cst (TREE_TYPE (type
));
2000 return build_vector_from_val (type
, scalar
);
2005 tree zero
= build_zero_cst (TREE_TYPE (type
));
2007 return build_complex (type
, zero
, zero
);
2011 if (!AGGREGATE_TYPE_P (type
))
2012 return fold_convert (type
, integer_zero_node
);
2013 return build_constructor (type
, NULL
);
2018 /* Build a BINFO with LEN language slots. */
2021 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL
)
2024 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2025 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2027 record_node_allocation_statistics (TREE_BINFO
, length
);
2029 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2031 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2033 TREE_SET_CODE (t
, TREE_BINFO
);
2035 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2040 /* Create a CASE_LABEL_EXPR tree node and return it. */
2043 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2045 tree t
= make_node (CASE_LABEL_EXPR
);
2047 TREE_TYPE (t
) = void_type_node
;
2048 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2050 CASE_LOW (t
) = low_value
;
2051 CASE_HIGH (t
) = high_value
;
2052 CASE_LABEL (t
) = label_decl
;
2053 CASE_CHAIN (t
) = NULL_TREE
;
2058 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2059 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2060 The latter determines the length of the HOST_WIDE_INT vector. */
2063 make_int_cst_stat (int len
, int ext_len MEM_STAT_DECL
)
2066 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2067 + sizeof (struct tree_int_cst
));
2070 record_node_allocation_statistics (INTEGER_CST
, length
);
2072 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2074 TREE_SET_CODE (t
, INTEGER_CST
);
2075 TREE_INT_CST_NUNITS (t
) = len
;
2076 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2077 /* to_offset can only be applied to trees that are offset_int-sized
2078 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2079 must be exactly the precision of offset_int and so LEN is correct. */
2080 if (ext_len
<= OFFSET_INT_ELTS
)
2081 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2083 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2085 TREE_CONSTANT (t
) = 1;
2090 /* Build a newly constructed TREE_VEC node of length LEN. */
2093 make_tree_vec_stat (int len MEM_STAT_DECL
)
2096 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2098 record_node_allocation_statistics (TREE_VEC
, length
);
2100 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2102 TREE_SET_CODE (t
, TREE_VEC
);
2103 TREE_VEC_LENGTH (t
) = len
;
2108 /* Grow a TREE_VEC node to new length LEN. */
2111 grow_tree_vec_stat (tree v
, int len MEM_STAT_DECL
)
2113 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2115 int oldlen
= TREE_VEC_LENGTH (v
);
2116 gcc_assert (len
> oldlen
);
2118 int oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2119 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2121 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2123 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2125 TREE_VEC_LENGTH (v
) = len
;
2130 /* Return 1 if EXPR is the integer constant zero or a complex constant
2134 integer_zerop (const_tree expr
)
2138 switch (TREE_CODE (expr
))
2141 return wi::eq_p (expr
, 0);
2143 return (integer_zerop (TREE_REALPART (expr
))
2144 && integer_zerop (TREE_IMAGPART (expr
)));
2148 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2149 if (!integer_zerop (VECTOR_CST_ELT (expr
, i
)))
2158 /* Return 1 if EXPR is the integer constant one or the corresponding
2159 complex constant. */
2162 integer_onep (const_tree expr
)
2166 switch (TREE_CODE (expr
))
2169 return wi::eq_p (wi::to_widest (expr
), 1);
2171 return (integer_onep (TREE_REALPART (expr
))
2172 && integer_zerop (TREE_IMAGPART (expr
)));
2176 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2177 if (!integer_onep (VECTOR_CST_ELT (expr
, i
)))
2186 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2187 return 1 if every piece is the integer constant one. */
2190 integer_each_onep (const_tree expr
)
2194 if (TREE_CODE (expr
) == COMPLEX_CST
)
2195 return (integer_onep (TREE_REALPART (expr
))
2196 && integer_onep (TREE_IMAGPART (expr
)));
2198 return integer_onep (expr
);
2201 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2202 it contains, or a complex or vector whose subparts are such integers. */
2205 integer_all_onesp (const_tree expr
)
2209 if (TREE_CODE (expr
) == COMPLEX_CST
2210 && integer_all_onesp (TREE_REALPART (expr
))
2211 && integer_all_onesp (TREE_IMAGPART (expr
)))
2214 else if (TREE_CODE (expr
) == VECTOR_CST
)
2217 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2218 if (!integer_all_onesp (VECTOR_CST_ELT (expr
, i
)))
2223 else if (TREE_CODE (expr
) != INTEGER_CST
)
2226 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
) == expr
;
2229 /* Return 1 if EXPR is the integer constant minus one. */
2232 integer_minus_onep (const_tree expr
)
2236 if (TREE_CODE (expr
) == COMPLEX_CST
)
2237 return (integer_all_onesp (TREE_REALPART (expr
))
2238 && integer_zerop (TREE_IMAGPART (expr
)));
2240 return integer_all_onesp (expr
);
2243 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2247 integer_pow2p (const_tree expr
)
2251 if (TREE_CODE (expr
) == COMPLEX_CST
2252 && integer_pow2p (TREE_REALPART (expr
))
2253 && integer_zerop (TREE_IMAGPART (expr
)))
2256 if (TREE_CODE (expr
) != INTEGER_CST
)
2259 return wi::popcount (expr
) == 1;
2262 /* Return 1 if EXPR is an integer constant other than zero or a
2263 complex constant other than zero. */
2266 integer_nonzerop (const_tree expr
)
2270 return ((TREE_CODE (expr
) == INTEGER_CST
2271 && !wi::eq_p (expr
, 0))
2272 || (TREE_CODE (expr
) == COMPLEX_CST
2273 && (integer_nonzerop (TREE_REALPART (expr
))
2274 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2277 /* Return 1 if EXPR is the fixed-point constant zero. */
2280 fixed_zerop (const_tree expr
)
2282 return (TREE_CODE (expr
) == FIXED_CST
2283 && TREE_FIXED_CST (expr
).data
.is_zero ());
2286 /* Return the power of two represented by a tree node known to be a
2290 tree_log2 (const_tree expr
)
2294 if (TREE_CODE (expr
) == COMPLEX_CST
)
2295 return tree_log2 (TREE_REALPART (expr
));
2297 return wi::exact_log2 (expr
);
2300 /* Similar, but return the largest integer Y such that 2 ** Y is less
2301 than or equal to EXPR. */
2304 tree_floor_log2 (const_tree expr
)
2308 if (TREE_CODE (expr
) == COMPLEX_CST
)
2309 return tree_log2 (TREE_REALPART (expr
));
2311 return wi::floor_log2 (expr
);
2314 /* Return number of known trailing zero bits in EXPR, or, if the value of
2315 EXPR is known to be zero, the precision of it's type. */
2318 tree_ctz (const_tree expr
)
2320 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2321 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2324 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2325 switch (TREE_CODE (expr
))
2328 ret1
= wi::ctz (expr
);
2329 return MIN (ret1
, prec
);
2331 ret1
= wi::ctz (get_nonzero_bits (expr
));
2332 return MIN (ret1
, prec
);
2339 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2342 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2343 return MIN (ret1
, ret2
);
2344 case POINTER_PLUS_EXPR
:
2345 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2346 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2347 /* Second operand is sizetype, which could be in theory
2348 wider than pointer's precision. Make sure we never
2349 return more than prec. */
2350 ret2
= MIN (ret2
, prec
);
2351 return MIN (ret1
, ret2
);
2353 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2354 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2355 return MAX (ret1
, ret2
);
2357 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2358 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2359 return MIN (ret1
+ ret2
, prec
);
2361 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2362 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2363 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2365 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2366 return MIN (ret1
+ ret2
, prec
);
2370 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2371 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2373 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2374 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2379 case TRUNC_DIV_EXPR
:
2381 case FLOOR_DIV_EXPR
:
2382 case ROUND_DIV_EXPR
:
2383 case EXACT_DIV_EXPR
:
2384 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
2385 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
2387 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
2390 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2398 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2399 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
2401 return MIN (ret1
, prec
);
2403 return tree_ctz (TREE_OPERAND (expr
, 0));
2405 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
2408 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
2409 return MIN (ret1
, ret2
);
2411 return tree_ctz (TREE_OPERAND (expr
, 1));
2413 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
2414 if (ret1
> BITS_PER_UNIT
)
2416 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
2417 return MIN (ret1
, prec
);
2425 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2426 decimal float constants, so don't return 1 for them. */
2429 real_zerop (const_tree expr
)
2433 switch (TREE_CODE (expr
))
2436 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconst0
)
2437 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2439 return real_zerop (TREE_REALPART (expr
))
2440 && real_zerop (TREE_IMAGPART (expr
));
2444 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2445 if (!real_zerop (VECTOR_CST_ELT (expr
, i
)))
2454 /* Return 1 if EXPR is the real constant one in real or complex form.
2455 Trailing zeroes matter for decimal float constants, so don't return
2459 real_onep (const_tree expr
)
2463 switch (TREE_CODE (expr
))
2466 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconst1
)
2467 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2469 return real_onep (TREE_REALPART (expr
))
2470 && real_zerop (TREE_IMAGPART (expr
));
2474 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2475 if (!real_onep (VECTOR_CST_ELT (expr
, i
)))
2484 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2485 matter for decimal float constants, so don't return 1 for them. */
2488 real_minus_onep (const_tree expr
)
2492 switch (TREE_CODE (expr
))
2495 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconstm1
)
2496 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2498 return real_minus_onep (TREE_REALPART (expr
))
2499 && real_zerop (TREE_IMAGPART (expr
));
2503 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2504 if (!real_minus_onep (VECTOR_CST_ELT (expr
, i
)))
2513 /* Nonzero if EXP is a constant or a cast of a constant. */
2516 really_constant_p (const_tree exp
)
2518 /* This is not quite the same as STRIP_NOPS. It does more. */
2519 while (CONVERT_EXPR_P (exp
)
2520 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
2521 exp
= TREE_OPERAND (exp
, 0);
2522 return TREE_CONSTANT (exp
);
2525 /* Return first list element whose TREE_VALUE is ELEM.
2526 Return 0 if ELEM is not in LIST. */
2529 value_member (tree elem
, tree list
)
2533 if (elem
== TREE_VALUE (list
))
2535 list
= TREE_CHAIN (list
);
2540 /* Return first list element whose TREE_PURPOSE is ELEM.
2541 Return 0 if ELEM is not in LIST. */
2544 purpose_member (const_tree elem
, tree list
)
2548 if (elem
== TREE_PURPOSE (list
))
2550 list
= TREE_CHAIN (list
);
2555 /* Return true if ELEM is in V. */
2558 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
2562 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
2568 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2572 chain_index (int idx
, tree chain
)
2574 for (; chain
&& idx
> 0; --idx
)
2575 chain
= TREE_CHAIN (chain
);
2579 /* Return nonzero if ELEM is part of the chain CHAIN. */
2582 chain_member (const_tree elem
, const_tree chain
)
2588 chain
= DECL_CHAIN (chain
);
2594 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2595 We expect a null pointer to mark the end of the chain.
2596 This is the Lisp primitive `length'. */
2599 list_length (const_tree t
)
2602 #ifdef ENABLE_TREE_CHECKING
2610 #ifdef ENABLE_TREE_CHECKING
2613 gcc_assert (p
!= q
);
2621 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2622 UNION_TYPE TYPE, or NULL_TREE if none. */
2625 first_field (const_tree type
)
2627 tree t
= TYPE_FIELDS (type
);
2628 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
2633 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2634 by modifying the last node in chain 1 to point to chain 2.
2635 This is the Lisp primitive `nconc'. */
2638 chainon (tree op1
, tree op2
)
2647 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
2649 TREE_CHAIN (t1
) = op2
;
2651 #ifdef ENABLE_TREE_CHECKING
2654 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
2655 gcc_assert (t2
!= t1
);
2662 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2665 tree_last (tree chain
)
2669 while ((next
= TREE_CHAIN (chain
)))
2674 /* Reverse the order of elements in the chain T,
2675 and return the new head of the chain (old last element). */
2680 tree prev
= 0, decl
, next
;
2681 for (decl
= t
; decl
; decl
= next
)
2683 /* We shouldn't be using this function to reverse BLOCK chains; we
2684 have blocks_nreverse for that. */
2685 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
2686 next
= TREE_CHAIN (decl
);
2687 TREE_CHAIN (decl
) = prev
;
2693 /* Return a newly created TREE_LIST node whose
2694 purpose and value fields are PARM and VALUE. */
2697 build_tree_list_stat (tree parm
, tree value MEM_STAT_DECL
)
2699 tree t
= make_node_stat (TREE_LIST PASS_MEM_STAT
);
2700 TREE_PURPOSE (t
) = parm
;
2701 TREE_VALUE (t
) = value
;
2705 /* Build a chain of TREE_LIST nodes from a vector. */
2708 build_tree_list_vec_stat (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
2710 tree ret
= NULL_TREE
;
2714 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
2716 *pp
= build_tree_list_stat (NULL
, t PASS_MEM_STAT
);
2717 pp
= &TREE_CHAIN (*pp
);
2722 /* Return a newly created TREE_LIST node whose
2723 purpose and value fields are PURPOSE and VALUE
2724 and whose TREE_CHAIN is CHAIN. */
2727 tree_cons_stat (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
2731 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
2732 memset (node
, 0, sizeof (struct tree_common
));
2734 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
2736 TREE_SET_CODE (node
, TREE_LIST
);
2737 TREE_CHAIN (node
) = chain
;
2738 TREE_PURPOSE (node
) = purpose
;
2739 TREE_VALUE (node
) = value
;
2743 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2747 ctor_to_vec (tree ctor
)
2749 vec
<tree
, va_gc
> *vec
;
2750 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
2754 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
2755 vec
->quick_push (val
);
2760 /* Return the size nominally occupied by an object of type TYPE
2761 when it resides in memory. The value is measured in units of bytes,
2762 and its data type is that normally used for type sizes
2763 (which is the first type created by make_signed_type or
2764 make_unsigned_type). */
2767 size_in_bytes (const_tree type
)
2771 if (type
== error_mark_node
)
2772 return integer_zero_node
;
2774 type
= TYPE_MAIN_VARIANT (type
);
2775 t
= TYPE_SIZE_UNIT (type
);
2779 lang_hooks
.types
.incomplete_type_error (NULL_TREE
, type
);
2780 return size_zero_node
;
2786 /* Return the size of TYPE (in bytes) as a wide integer
2787 or return -1 if the size can vary or is larger than an integer. */
2790 int_size_in_bytes (const_tree type
)
2794 if (type
== error_mark_node
)
2797 type
= TYPE_MAIN_VARIANT (type
);
2798 t
= TYPE_SIZE_UNIT (type
);
2800 if (t
&& tree_fits_uhwi_p (t
))
2801 return TREE_INT_CST_LOW (t
);
2806 /* Return the maximum size of TYPE (in bytes) as a wide integer
2807 or return -1 if the size can vary or is larger than an integer. */
2810 max_int_size_in_bytes (const_tree type
)
2812 HOST_WIDE_INT size
= -1;
2815 /* If this is an array type, check for a possible MAX_SIZE attached. */
2817 if (TREE_CODE (type
) == ARRAY_TYPE
)
2819 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
2821 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2822 size
= tree_to_uhwi (size_tree
);
2825 /* If we still haven't been able to get a size, see if the language
2826 can compute a maximum size. */
2830 size_tree
= lang_hooks
.types
.max_size (type
);
2832 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2833 size
= tree_to_uhwi (size_tree
);
2839 /* Return the bit position of FIELD, in bits from the start of the record.
2840 This is a tree of type bitsizetype. */
2843 bit_position (const_tree field
)
2845 return bit_from_pos (DECL_FIELD_OFFSET (field
),
2846 DECL_FIELD_BIT_OFFSET (field
));
2849 /* Return the byte position of FIELD, in bytes from the start of the record.
2850 This is a tree of type sizetype. */
2853 byte_position (const_tree field
)
2855 return byte_from_pos (DECL_FIELD_OFFSET (field
),
2856 DECL_FIELD_BIT_OFFSET (field
));
2859 /* Likewise, but return as an integer. It must be representable in
2860 that way (since it could be a signed value, we don't have the
2861 option of returning -1 like int_size_in_byte can. */
2864 int_byte_position (const_tree field
)
2866 return tree_to_shwi (byte_position (field
));
2869 /* Return the strictest alignment, in bits, that T is known to have. */
2872 expr_align (const_tree t
)
2874 unsigned int align0
, align1
;
2876 switch (TREE_CODE (t
))
2878 CASE_CONVERT
: case NON_LVALUE_EXPR
:
2879 /* If we have conversions, we know that the alignment of the
2880 object must meet each of the alignments of the types. */
2881 align0
= expr_align (TREE_OPERAND (t
, 0));
2882 align1
= TYPE_ALIGN (TREE_TYPE (t
));
2883 return MAX (align0
, align1
);
2885 case SAVE_EXPR
: case COMPOUND_EXPR
: case MODIFY_EXPR
:
2886 case INIT_EXPR
: case TARGET_EXPR
: case WITH_CLEANUP_EXPR
:
2887 case CLEANUP_POINT_EXPR
:
2888 /* These don't change the alignment of an object. */
2889 return expr_align (TREE_OPERAND (t
, 0));
2892 /* The best we can do is say that the alignment is the least aligned
2894 align0
= expr_align (TREE_OPERAND (t
, 1));
2895 align1
= expr_align (TREE_OPERAND (t
, 2));
2896 return MIN (align0
, align1
);
2898 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2899 meaningfully, it's always 1. */
2900 case LABEL_DECL
: case CONST_DECL
:
2901 case VAR_DECL
: case PARM_DECL
: case RESULT_DECL
:
2903 gcc_assert (DECL_ALIGN (t
) != 0);
2904 return DECL_ALIGN (t
);
2910 /* Otherwise take the alignment from that of the type. */
2911 return TYPE_ALIGN (TREE_TYPE (t
));
2914 /* Return, as a tree node, the number of elements for TYPE (which is an
2915 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2918 array_type_nelts (const_tree type
)
2920 tree index_type
, min
, max
;
2922 /* If they did it with unspecified bounds, then we should have already
2923 given an error about it before we got here. */
2924 if (! TYPE_DOMAIN (type
))
2925 return error_mark_node
;
2927 index_type
= TYPE_DOMAIN (type
);
2928 min
= TYPE_MIN_VALUE (index_type
);
2929 max
= TYPE_MAX_VALUE (index_type
);
2931 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2933 return error_mark_node
;
2935 return (integer_zerop (min
)
2937 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
2940 /* If arg is static -- a reference to an object in static storage -- then
2941 return the object. This is not the same as the C meaning of `static'.
2942 If arg isn't static, return NULL. */
2947 switch (TREE_CODE (arg
))
2950 /* Nested functions are static, even though taking their address will
2951 involve a trampoline as we unnest the nested function and create
2952 the trampoline on the tree level. */
2956 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
2957 && ! DECL_THREAD_LOCAL_P (arg
)
2958 && ! DECL_DLLIMPORT_P (arg
)
2962 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
2966 return TREE_STATIC (arg
) ? arg
: NULL
;
2973 /* If the thing being referenced is not a field, then it is
2974 something language specific. */
2975 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
2977 /* If we are referencing a bitfield, we can't evaluate an
2978 ADDR_EXPR at compile time and so it isn't a constant. */
2979 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
2982 return staticp (TREE_OPERAND (arg
, 0));
2988 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
2991 case ARRAY_RANGE_REF
:
2992 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
2993 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
2994 return staticp (TREE_OPERAND (arg
, 0));
2998 case COMPOUND_LITERAL_EXPR
:
2999 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3009 /* Return whether OP is a DECL whose address is function-invariant. */
3012 decl_address_invariant_p (const_tree op
)
3014 /* The conditions below are slightly less strict than the one in
3017 switch (TREE_CODE (op
))
3026 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3027 || DECL_THREAD_LOCAL_P (op
)
3028 || DECL_CONTEXT (op
) == current_function_decl
3029 || decl_function_context (op
) == current_function_decl
)
3034 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3035 || decl_function_context (op
) == current_function_decl
)
3046 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3049 decl_address_ip_invariant_p (const_tree op
)
3051 /* The conditions below are slightly less strict than the one in
3054 switch (TREE_CODE (op
))
3062 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3063 && !DECL_DLLIMPORT_P (op
))
3064 || DECL_THREAD_LOCAL_P (op
))
3069 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3081 /* Return true if T is function-invariant (internal function, does
3082 not handle arithmetic; that's handled in skip_simple_arithmetic and
3083 tree_invariant_p). */
3085 static bool tree_invariant_p (tree t
);
3088 tree_invariant_p_1 (tree t
)
3092 if (TREE_CONSTANT (t
)
3093 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3096 switch (TREE_CODE (t
))
3102 op
= TREE_OPERAND (t
, 0);
3103 while (handled_component_p (op
))
3105 switch (TREE_CODE (op
))
3108 case ARRAY_RANGE_REF
:
3109 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3110 || TREE_OPERAND (op
, 2) != NULL_TREE
3111 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3116 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3122 op
= TREE_OPERAND (op
, 0);
3125 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3134 /* Return true if T is function-invariant. */
3137 tree_invariant_p (tree t
)
3139 tree inner
= skip_simple_arithmetic (t
);
3140 return tree_invariant_p_1 (inner
);
3143 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3144 Do this to any expression which may be used in more than one place,
3145 but must be evaluated only once.
3147 Normally, expand_expr would reevaluate the expression each time.
3148 Calling save_expr produces something that is evaluated and recorded
3149 the first time expand_expr is called on it. Subsequent calls to
3150 expand_expr just reuse the recorded value.
3152 The call to expand_expr that generates code that actually computes
3153 the value is the first call *at compile time*. Subsequent calls
3154 *at compile time* generate code to use the saved value.
3155 This produces correct result provided that *at run time* control
3156 always flows through the insns made by the first expand_expr
3157 before reaching the other places where the save_expr was evaluated.
3158 You, the caller of save_expr, must make sure this is so.
3160 Constants, and certain read-only nodes, are returned with no
3161 SAVE_EXPR because that is safe. Expressions containing placeholders
3162 are not touched; see tree.def for an explanation of what these
3166 save_expr (tree expr
)
3168 tree t
= fold (expr
);
3171 /* If the tree evaluates to a constant, then we don't want to hide that
3172 fact (i.e. this allows further folding, and direct checks for constants).
3173 However, a read-only object that has side effects cannot be bypassed.
3174 Since it is no problem to reevaluate literals, we just return the
3176 inner
= skip_simple_arithmetic (t
);
3177 if (TREE_CODE (inner
) == ERROR_MARK
)
3180 if (tree_invariant_p_1 (inner
))
3183 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3184 it means that the size or offset of some field of an object depends on
3185 the value within another field.
3187 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3188 and some variable since it would then need to be both evaluated once and
3189 evaluated more than once. Front-ends must assure this case cannot
3190 happen by surrounding any such subexpressions in their own SAVE_EXPR
3191 and forcing evaluation at the proper time. */
3192 if (contains_placeholder_p (inner
))
3195 t
= build1 (SAVE_EXPR
, TREE_TYPE (expr
), t
);
3196 SET_EXPR_LOCATION (t
, EXPR_LOCATION (expr
));
3198 /* This expression might be placed ahead of a jump to ensure that the
3199 value was computed on both sides of the jump. So make sure it isn't
3200 eliminated as dead. */
3201 TREE_SIDE_EFFECTS (t
) = 1;
3205 /* Look inside EXPR into any simple arithmetic operations. Return the
3206 outermost non-arithmetic or non-invariant node. */
3209 skip_simple_arithmetic (tree expr
)
3211 /* We don't care about whether this can be used as an lvalue in this
3213 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3214 expr
= TREE_OPERAND (expr
, 0);
3216 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3217 a constant, it will be more efficient to not make another SAVE_EXPR since
3218 it will allow better simplification and GCSE will be able to merge the
3219 computations if they actually occur. */
3222 if (UNARY_CLASS_P (expr
))
3223 expr
= TREE_OPERAND (expr
, 0);
3224 else if (BINARY_CLASS_P (expr
))
3226 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3227 expr
= TREE_OPERAND (expr
, 0);
3228 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3229 expr
= TREE_OPERAND (expr
, 1);
3240 /* Look inside EXPR into simple arithmetic operations involving constants.
3241 Return the outermost non-arithmetic or non-constant node. */
3244 skip_simple_constant_arithmetic (tree expr
)
3246 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3247 expr
= TREE_OPERAND (expr
, 0);
3251 if (UNARY_CLASS_P (expr
))
3252 expr
= TREE_OPERAND (expr
, 0);
3253 else if (BINARY_CLASS_P (expr
))
3255 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3256 expr
= TREE_OPERAND (expr
, 0);
3257 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3258 expr
= TREE_OPERAND (expr
, 1);
3269 /* Return which tree structure is used by T. */
3271 enum tree_node_structure_enum
3272 tree_node_structure (const_tree t
)
3274 const enum tree_code code
= TREE_CODE (t
);
3275 return tree_node_structure_for_code (code
);
3278 /* Set various status flags when building a CALL_EXPR object T. */
3281 process_call_operands (tree t
)
3283 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3284 bool read_only
= false;
3285 int i
= call_expr_flags (t
);
3287 /* Calls have side-effects, except those to const or pure functions. */
3288 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3289 side_effects
= true;
3290 /* Propagate TREE_READONLY of arguments for const functions. */
3294 if (!side_effects
|| read_only
)
3295 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3297 tree op
= TREE_OPERAND (t
, i
);
3298 if (op
&& TREE_SIDE_EFFECTS (op
))
3299 side_effects
= true;
3300 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3304 TREE_SIDE_EFFECTS (t
) = side_effects
;
3305 TREE_READONLY (t
) = read_only
;
3308 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3309 size or offset that depends on a field within a record. */
3312 contains_placeholder_p (const_tree exp
)
3314 enum tree_code code
;
3319 code
= TREE_CODE (exp
);
3320 if (code
== PLACEHOLDER_EXPR
)
3323 switch (TREE_CODE_CLASS (code
))
3326 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3327 position computations since they will be converted into a
3328 WITH_RECORD_EXPR involving the reference, which will assume
3329 here will be valid. */
3330 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3332 case tcc_exceptional
:
3333 if (code
== TREE_LIST
)
3334 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
3335 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
3340 case tcc_comparison
:
3341 case tcc_expression
:
3345 /* Ignoring the first operand isn't quite right, but works best. */
3346 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
3349 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3350 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
3351 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
3354 /* The save_expr function never wraps anything containing
3355 a PLACEHOLDER_EXPR. */
3362 switch (TREE_CODE_LENGTH (code
))
3365 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3367 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3368 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
3379 const_call_expr_arg_iterator iter
;
3380 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
3381 if (CONTAINS_PLACEHOLDER_P (arg
))
3395 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3396 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3400 type_contains_placeholder_1 (const_tree type
)
3402 /* If the size contains a placeholder or the parent type (component type in
3403 the case of arrays) type involves a placeholder, this type does. */
3404 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
3405 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
3406 || (!POINTER_TYPE_P (type
)
3408 && type_contains_placeholder_p (TREE_TYPE (type
))))
3411 /* Now do type-specific checks. Note that the last part of the check above
3412 greatly limits what we have to do below. */
3413 switch (TREE_CODE (type
))
3421 case REFERENCE_TYPE
:
3430 case FIXED_POINT_TYPE
:
3431 /* Here we just check the bounds. */
3432 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
3433 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
3436 /* We have already checked the component type above, so just check the
3438 return type_contains_placeholder_p (TYPE_DOMAIN (type
));
3442 case QUAL_UNION_TYPE
:
3446 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3447 if (TREE_CODE (field
) == FIELD_DECL
3448 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
3449 || (TREE_CODE (type
) == QUAL_UNION_TYPE
3450 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
3451 || type_contains_placeholder_p (TREE_TYPE (field
))))
3462 /* Wrapper around above function used to cache its result. */
3465 type_contains_placeholder_p (tree type
)
3469 /* If the contains_placeholder_bits field has been initialized,
3470 then we know the answer. */
3471 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
3472 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
3474 /* Indicate that we've seen this type node, and the answer is false.
3475 This is what we want to return if we run into recursion via fields. */
3476 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
3478 /* Compute the real value. */
3479 result
= type_contains_placeholder_1 (type
);
3481 /* Store the real value. */
3482 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
3487 /* Push tree EXP onto vector QUEUE if it is not already present. */
3490 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
3495 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
3496 if (simple_cst_equal (iter
, exp
) == 1)
3500 queue
->safe_push (exp
);
3503 /* Given a tree EXP, find all occurrences of references to fields
3504 in a PLACEHOLDER_EXPR and place them in vector REFS without
3505 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3506 we assume here that EXP contains only arithmetic expressions
3507 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3511 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
3513 enum tree_code code
= TREE_CODE (exp
);
3517 /* We handle TREE_LIST and COMPONENT_REF separately. */
3518 if (code
== TREE_LIST
)
3520 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
3521 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
3523 else if (code
== COMPONENT_REF
)
3525 for (inner
= TREE_OPERAND (exp
, 0);
3526 REFERENCE_CLASS_P (inner
);
3527 inner
= TREE_OPERAND (inner
, 0))
3530 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3531 push_without_duplicates (exp
, refs
);
3533 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
3536 switch (TREE_CODE_CLASS (code
))
3541 case tcc_declaration
:
3542 /* Variables allocated to static storage can stay. */
3543 if (!TREE_STATIC (exp
))
3544 push_without_duplicates (exp
, refs
);
3547 case tcc_expression
:
3548 /* This is the pattern built in ada/make_aligning_type. */
3549 if (code
== ADDR_EXPR
3550 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
3552 push_without_duplicates (exp
, refs
);
3556 /* Fall through... */
3558 case tcc_exceptional
:
3561 case tcc_comparison
:
3563 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
3564 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3568 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3569 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3577 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3578 return a tree with all occurrences of references to F in a
3579 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3580 CONST_DECLs. Note that we assume here that EXP contains only
3581 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3582 occurring only in their argument list. */
3585 substitute_in_expr (tree exp
, tree f
, tree r
)
3587 enum tree_code code
= TREE_CODE (exp
);
3588 tree op0
, op1
, op2
, op3
;
3591 /* We handle TREE_LIST and COMPONENT_REF separately. */
3592 if (code
== TREE_LIST
)
3594 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
3595 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
3596 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3599 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3601 else if (code
== COMPONENT_REF
)
3605 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3606 and it is the right field, replace it with R. */
3607 for (inner
= TREE_OPERAND (exp
, 0);
3608 REFERENCE_CLASS_P (inner
);
3609 inner
= TREE_OPERAND (inner
, 0))
3613 op1
= TREE_OPERAND (exp
, 1);
3615 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
3618 /* If this expression hasn't been completed let, leave it alone. */
3619 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
3622 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3623 if (op0
== TREE_OPERAND (exp
, 0))
3627 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
3630 switch (TREE_CODE_CLASS (code
))
3635 case tcc_declaration
:
3641 case tcc_expression
:
3645 /* Fall through... */
3647 case tcc_exceptional
:
3650 case tcc_comparison
:
3652 switch (TREE_CODE_LENGTH (code
))
3658 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3659 if (op0
== TREE_OPERAND (exp
, 0))
3662 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3666 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3667 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3669 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3672 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3676 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3677 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3678 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3680 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3681 && op2
== TREE_OPERAND (exp
, 2))
3684 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3688 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3689 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3690 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3691 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
3693 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3694 && op2
== TREE_OPERAND (exp
, 2)
3695 && op3
== TREE_OPERAND (exp
, 3))
3699 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3711 new_tree
= NULL_TREE
;
3713 /* If we are trying to replace F with a constant, inline back
3714 functions which do nothing else than computing a value from
3715 the arguments they are passed. This makes it possible to
3716 fold partially or entirely the replacement expression. */
3717 if (CONSTANT_CLASS_P (r
) && code
== CALL_EXPR
)
3719 tree t
= maybe_inline_call_in_expr (exp
);
3721 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
3724 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3726 tree op
= TREE_OPERAND (exp
, i
);
3727 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
3731 new_tree
= copy_node (exp
);
3732 TREE_OPERAND (new_tree
, i
) = new_op
;
3738 new_tree
= fold (new_tree
);
3739 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3740 process_call_operands (new_tree
);
3751 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3753 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3754 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3759 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3760 for it within OBJ, a tree that is an object or a chain of references. */
3763 substitute_placeholder_in_expr (tree exp
, tree obj
)
3765 enum tree_code code
= TREE_CODE (exp
);
3766 tree op0
, op1
, op2
, op3
;
3769 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3770 in the chain of OBJ. */
3771 if (code
== PLACEHOLDER_EXPR
)
3773 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
3776 for (elt
= obj
; elt
!= 0;
3777 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3778 || TREE_CODE (elt
) == COND_EXPR
)
3779 ? TREE_OPERAND (elt
, 1)
3780 : (REFERENCE_CLASS_P (elt
)
3781 || UNARY_CLASS_P (elt
)
3782 || BINARY_CLASS_P (elt
)
3783 || VL_EXP_CLASS_P (elt
)
3784 || EXPRESSION_CLASS_P (elt
))
3785 ? TREE_OPERAND (elt
, 0) : 0))
3786 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
3789 for (elt
= obj
; elt
!= 0;
3790 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3791 || TREE_CODE (elt
) == COND_EXPR
)
3792 ? TREE_OPERAND (elt
, 1)
3793 : (REFERENCE_CLASS_P (elt
)
3794 || UNARY_CLASS_P (elt
)
3795 || BINARY_CLASS_P (elt
)
3796 || VL_EXP_CLASS_P (elt
)
3797 || EXPRESSION_CLASS_P (elt
))
3798 ? TREE_OPERAND (elt
, 0) : 0))
3799 if (POINTER_TYPE_P (TREE_TYPE (elt
))
3800 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
3802 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
3804 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3805 survives until RTL generation, there will be an error. */
3809 /* TREE_LIST is special because we need to look at TREE_VALUE
3810 and TREE_CHAIN, not TREE_OPERANDS. */
3811 else if (code
== TREE_LIST
)
3813 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
3814 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
3815 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3818 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3821 switch (TREE_CODE_CLASS (code
))
3824 case tcc_declaration
:
3827 case tcc_exceptional
:
3830 case tcc_comparison
:
3831 case tcc_expression
:
3834 switch (TREE_CODE_LENGTH (code
))
3840 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3841 if (op0
== TREE_OPERAND (exp
, 0))
3844 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3848 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3849 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3851 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3854 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3858 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3859 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3860 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
3862 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3863 && op2
== TREE_OPERAND (exp
, 2))
3866 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3870 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3871 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3872 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
3873 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
3875 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3876 && op2
== TREE_OPERAND (exp
, 2)
3877 && op3
== TREE_OPERAND (exp
, 3))
3881 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3893 new_tree
= NULL_TREE
;
3895 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3897 tree op
= TREE_OPERAND (exp
, i
);
3898 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
3902 new_tree
= copy_node (exp
);
3903 TREE_OPERAND (new_tree
, i
) = new_op
;
3909 new_tree
= fold (new_tree
);
3910 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3911 process_call_operands (new_tree
);
3922 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3924 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3925 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3931 /* Subroutine of stabilize_reference; this is called for subtrees of
3932 references. Any expression with side-effects must be put in a SAVE_EXPR
3933 to ensure that it is only evaluated once.
3935 We don't put SAVE_EXPR nodes around everything, because assigning very
3936 simple expressions to temporaries causes us to miss good opportunities
3937 for optimizations. Among other things, the opportunity to fold in the
3938 addition of a constant into an addressing mode often gets lost, e.g.
3939 "y[i+1] += x;". In general, we take the approach that we should not make
3940 an assignment unless we are forced into it - i.e., that any non-side effect
3941 operator should be allowed, and that cse should take care of coalescing
3942 multiple utterances of the same expression should that prove fruitful. */
3945 stabilize_reference_1 (tree e
)
3948 enum tree_code code
= TREE_CODE (e
);
3950 /* We cannot ignore const expressions because it might be a reference
3951 to a const array but whose index contains side-effects. But we can
3952 ignore things that are actual constant or that already have been
3953 handled by this function. */
3955 if (tree_invariant_p (e
))
3958 switch (TREE_CODE_CLASS (code
))
3960 case tcc_exceptional
:
3962 case tcc_declaration
:
3963 case tcc_comparison
:
3965 case tcc_expression
:
3968 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3969 so that it will only be evaluated once. */
3970 /* The reference (r) and comparison (<) classes could be handled as
3971 below, but it is generally faster to only evaluate them once. */
3972 if (TREE_SIDE_EFFECTS (e
))
3973 return save_expr (e
);
3977 /* Constants need no processing. In fact, we should never reach
3982 /* Division is slow and tends to be compiled with jumps,
3983 especially the division by powers of 2 that is often
3984 found inside of an array reference. So do it just once. */
3985 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
3986 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
3987 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
3988 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
3989 return save_expr (e
);
3990 /* Recursively stabilize each operand. */
3991 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
3992 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
3996 /* Recursively stabilize each operand. */
3997 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
4004 TREE_TYPE (result
) = TREE_TYPE (e
);
4005 TREE_READONLY (result
) = TREE_READONLY (e
);
4006 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4007 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4012 /* Stabilize a reference so that we can use it any number of times
4013 without causing its operands to be evaluated more than once.
4014 Returns the stabilized reference. This works by means of save_expr,
4015 so see the caveats in the comments about save_expr.
4017 Also allows conversion expressions whose operands are references.
4018 Any other kind of expression is returned unchanged. */
4021 stabilize_reference (tree ref
)
4024 enum tree_code code
= TREE_CODE (ref
);
4031 /* No action is needed in this case. */
4036 case FIX_TRUNC_EXPR
:
4037 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4041 result
= build_nt (INDIRECT_REF
,
4042 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4046 result
= build_nt (COMPONENT_REF
,
4047 stabilize_reference (TREE_OPERAND (ref
, 0)),
4048 TREE_OPERAND (ref
, 1), NULL_TREE
);
4052 result
= build_nt (BIT_FIELD_REF
,
4053 stabilize_reference (TREE_OPERAND (ref
, 0)),
4054 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4058 result
= build_nt (ARRAY_REF
,
4059 stabilize_reference (TREE_OPERAND (ref
, 0)),
4060 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4061 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4064 case ARRAY_RANGE_REF
:
4065 result
= build_nt (ARRAY_RANGE_REF
,
4066 stabilize_reference (TREE_OPERAND (ref
, 0)),
4067 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4068 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4072 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4073 it wouldn't be ignored. This matters when dealing with
4075 return stabilize_reference_1 (ref
);
4077 /* If arg isn't a kind of lvalue we recognize, make no change.
4078 Caller should recognize the error for an invalid lvalue. */
4083 return error_mark_node
;
4086 TREE_TYPE (result
) = TREE_TYPE (ref
);
4087 TREE_READONLY (result
) = TREE_READONLY (ref
);
4088 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4089 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4094 /* Low-level constructors for expressions. */
4096 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4097 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4100 recompute_tree_invariant_for_addr_expr (tree t
)
4103 bool tc
= true, se
= false;
4105 /* We started out assuming this address is both invariant and constant, but
4106 does not have side effects. Now go down any handled components and see if
4107 any of them involve offsets that are either non-constant or non-invariant.
4108 Also check for side-effects.
4110 ??? Note that this code makes no attempt to deal with the case where
4111 taking the address of something causes a copy due to misalignment. */
4113 #define UPDATE_FLAGS(NODE) \
4114 do { tree _node = (NODE); \
4115 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4116 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4118 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4119 node
= TREE_OPERAND (node
, 0))
4121 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4122 array reference (probably made temporarily by the G++ front end),
4123 so ignore all the operands. */
4124 if ((TREE_CODE (node
) == ARRAY_REF
4125 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4126 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4128 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4129 if (TREE_OPERAND (node
, 2))
4130 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4131 if (TREE_OPERAND (node
, 3))
4132 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4134 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4135 FIELD_DECL, apparently. The G++ front end can put something else
4136 there, at least temporarily. */
4137 else if (TREE_CODE (node
) == COMPONENT_REF
4138 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4140 if (TREE_OPERAND (node
, 2))
4141 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4145 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4147 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4148 the address, since &(*a)->b is a form of addition. If it's a constant, the
4149 address is constant too. If it's a decl, its address is constant if the
4150 decl is static. Everything else is not constant and, furthermore,
4151 taking the address of a volatile variable is not volatile. */
4152 if (TREE_CODE (node
) == INDIRECT_REF
4153 || TREE_CODE (node
) == MEM_REF
)
4154 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4155 else if (CONSTANT_CLASS_P (node
))
4157 else if (DECL_P (node
))
4158 tc
&= (staticp (node
) != NULL_TREE
);
4162 se
|= TREE_SIDE_EFFECTS (node
);
4166 TREE_CONSTANT (t
) = tc
;
4167 TREE_SIDE_EFFECTS (t
) = se
;
4171 /* Build an expression of code CODE, data type TYPE, and operands as
4172 specified. Expressions and reference nodes can be created this way.
4173 Constants, decls, types and misc nodes cannot be.
4175 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4176 enough for all extant tree codes. */
4179 build0_stat (enum tree_code code
, tree tt MEM_STAT_DECL
)
4183 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4185 t
= make_node_stat (code PASS_MEM_STAT
);
4192 build1_stat (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4194 int length
= sizeof (struct tree_exp
);
4197 record_node_allocation_statistics (code
, length
);
4199 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4201 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4203 memset (t
, 0, sizeof (struct tree_common
));
4205 TREE_SET_CODE (t
, code
);
4207 TREE_TYPE (t
) = type
;
4208 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4209 TREE_OPERAND (t
, 0) = node
;
4210 if (node
&& !TYPE_P (node
))
4212 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4213 TREE_READONLY (t
) = TREE_READONLY (node
);
4216 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4217 TREE_SIDE_EFFECTS (t
) = 1;
4221 /* All of these have side-effects, no matter what their
4223 TREE_SIDE_EFFECTS (t
) = 1;
4224 TREE_READONLY (t
) = 0;
4228 /* Whether a dereference is readonly has nothing to do with whether
4229 its operand is readonly. */
4230 TREE_READONLY (t
) = 0;
4235 recompute_tree_invariant_for_addr_expr (t
);
4239 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4240 && node
&& !TYPE_P (node
)
4241 && TREE_CONSTANT (node
))
4242 TREE_CONSTANT (t
) = 1;
4243 if (TREE_CODE_CLASS (code
) == tcc_reference
4244 && node
&& TREE_THIS_VOLATILE (node
))
4245 TREE_THIS_VOLATILE (t
) = 1;
/* Record operand N of the expression T being built and fold its
   TREE_SIDE_EFFECTS / TREE_READONLY / TREE_CONSTANT flags into the
   local side_effects / read_only / constant accumulators.  Used by
   build2 through build5.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4268 build2_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4270 bool constant
, read_only
, side_effects
;
4273 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4275 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4276 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4277 /* When sizetype precision doesn't match that of pointers
4278 we need to be able to build explicit extensions or truncations
4279 of the offset argument. */
4280 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4281 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4282 && TREE_CODE (arg1
) == INTEGER_CST
);
4284 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4285 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4286 && ptrofftype_p (TREE_TYPE (arg1
)));
4288 t
= make_node_stat (code PASS_MEM_STAT
);
4291 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4292 result based on those same flags for the arguments. But if the
4293 arguments aren't really even `tree' expressions, we shouldn't be trying
4296 /* Expressions without side effects may be constant if their
4297 arguments are as well. */
4298 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4299 || TREE_CODE_CLASS (code
) == tcc_binary
);
4301 side_effects
= TREE_SIDE_EFFECTS (t
);
4306 TREE_READONLY (t
) = read_only
;
4307 TREE_CONSTANT (t
) = constant
;
4308 TREE_SIDE_EFFECTS (t
) = side_effects
;
4309 TREE_THIS_VOLATILE (t
)
4310 = (TREE_CODE_CLASS (code
) == tcc_reference
4311 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4318 build3_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4319 tree arg2 MEM_STAT_DECL
)
4321 bool constant
, read_only
, side_effects
;
4324 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
4325 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4327 t
= make_node_stat (code PASS_MEM_STAT
);
4332 /* As a special exception, if COND_EXPR has NULL branches, we
4333 assume that it is a gimple statement and always consider
4334 it to have side effects. */
4335 if (code
== COND_EXPR
4336 && tt
== void_type_node
4337 && arg1
== NULL_TREE
4338 && arg2
== NULL_TREE
)
4339 side_effects
= true;
4341 side_effects
= TREE_SIDE_EFFECTS (t
);
4347 if (code
== COND_EXPR
)
4348 TREE_READONLY (t
) = read_only
;
4350 TREE_SIDE_EFFECTS (t
) = side_effects
;
4351 TREE_THIS_VOLATILE (t
)
4352 = (TREE_CODE_CLASS (code
) == tcc_reference
4353 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4359 build4_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4360 tree arg2
, tree arg3 MEM_STAT_DECL
)
4362 bool constant
, read_only
, side_effects
;
4365 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
4367 t
= make_node_stat (code PASS_MEM_STAT
);
4370 side_effects
= TREE_SIDE_EFFECTS (t
);
4377 TREE_SIDE_EFFECTS (t
) = side_effects
;
4378 TREE_THIS_VOLATILE (t
)
4379 = (TREE_CODE_CLASS (code
) == tcc_reference
4380 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4386 build5_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4387 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
4389 bool constant
, read_only
, side_effects
;
4392 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
4394 t
= make_node_stat (code PASS_MEM_STAT
);
4397 side_effects
= TREE_SIDE_EFFECTS (t
);
4405 TREE_SIDE_EFFECTS (t
) = side_effects
;
4406 TREE_THIS_VOLATILE (t
)
4407 = (TREE_CODE_CLASS (code
) == tcc_reference
4408 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4413 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4414 on the pointer PTR. */
4417 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
4419 HOST_WIDE_INT offset
= 0;
4420 tree ptype
= TREE_TYPE (ptr
);
4422 /* For convenience allow addresses that collapse to a simple base
4424 if (TREE_CODE (ptr
) == ADDR_EXPR
4425 && (handled_component_p (TREE_OPERAND (ptr
, 0))
4426 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
4428 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
4430 ptr
= build_fold_addr_expr (ptr
);
4431 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
4433 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
4434 ptr
, build_int_cst (ptype
, offset
));
4435 SET_EXPR_LOCATION (tem
, loc
);
4439 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4442 mem_ref_offset (const_tree t
)
4444 return offset_int::from (TREE_OPERAND (t
, 1), SIGNED
);
4447 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4448 offsetted by OFFSET units. */
4451 build_invariant_address (tree type
, tree base
, HOST_WIDE_INT offset
)
4453 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
4454 build_fold_addr_expr (base
),
4455 build_int_cst (ptr_type_node
, offset
));
4456 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
4457 recompute_tree_invariant_for_addr_expr (addr
);
4461 /* Similar except don't specify the TREE_TYPE
4462 and leave the TREE_SIDE_EFFECTS as 0.
4463 It is permissible for arguments to be null,
4464 or even garbage if their values do not matter. */
4467 build_nt (enum tree_code code
, ...)
4474 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4478 t
= make_node (code
);
4479 length
= TREE_CODE_LENGTH (code
);
4481 for (i
= 0; i
< length
; i
++)
4482 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
4488 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4492 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
4497 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
4498 CALL_EXPR_FN (ret
) = fn
;
4499 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
4500 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
4501 CALL_EXPR_ARG (ret
, ix
) = t
;
4505 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4506 We do NOT enter this node in any sort of symbol table.
4508 LOC is the location of the decl.
4510 layout_decl is used to set up the decl's storage layout.
4511 Other slots are initialized to 0 or null pointers. */
4514 build_decl_stat (location_t loc
, enum tree_code code
, tree name
,
4515 tree type MEM_STAT_DECL
)
4519 t
= make_node_stat (code PASS_MEM_STAT
);
4520 DECL_SOURCE_LOCATION (t
) = loc
;
4522 /* if (type == error_mark_node)
4523 type = integer_type_node; */
4524 /* That is not done, deliberately, so that having error_mark_node
4525 as the type can suppress useless errors in the use of this variable. */
4527 DECL_NAME (t
) = name
;
4528 TREE_TYPE (t
) = type
;
4530 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
4536 /* Builds and returns function declaration with NAME and TYPE. */
4539 build_fn_decl (const char *name
, tree type
)
4541 tree id
= get_identifier (name
);
4542 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
4544 DECL_EXTERNAL (decl
) = 1;
4545 TREE_PUBLIC (decl
) = 1;
4546 DECL_ARTIFICIAL (decl
) = 1;
4547 TREE_NOTHROW (decl
) = 1;
4552 vec
<tree
, va_gc
> *all_translation_units
;
4554 /* Builds a new translation-unit decl with name NAME, queues it in the
4555 global list of translation-unit decls and returns it. */
4558 build_translation_unit_decl (tree name
)
4560 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
4562 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
4563 vec_safe_push (all_translation_units
, tu
);
4568 /* BLOCK nodes are used to represent the structure of binding contours
4569 and declarations, once those contours have been exited and their contents
4570 compiled. This information is used for outputting debugging info. */
4573 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
4575 tree block
= make_node (BLOCK
);
4577 BLOCK_VARS (block
) = vars
;
4578 BLOCK_SUBBLOCKS (block
) = subblocks
;
4579 BLOCK_SUPERCONTEXT (block
) = supercontext
;
4580 BLOCK_CHAIN (block
) = chain
;
4585 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4587 LOC is the location to use in tree T. */
4590 protected_set_expr_location (tree t
, location_t loc
)
4592 if (CAN_HAVE_LOCATION_P (t
))
4593 SET_EXPR_LOCATION (t
, loc
);
4596 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4600 build_decl_attribute_variant (tree ddecl
, tree attribute
)
4602 DECL_ATTRIBUTES (ddecl
) = attribute
;
4606 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4607 is ATTRIBUTE and its qualifiers are QUALS.
4609 Record such modified types already made so we don't make duplicates. */
4612 build_type_attribute_qual_variant (tree ttype
, tree attribute
, int quals
)
4614 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype
), attribute
))
4616 inchash::hash hstate
;
4620 enum tree_code code
= TREE_CODE (ttype
);
4622 /* Building a distinct copy of a tagged type is inappropriate; it
4623 causes breakage in code that expects there to be a one-to-one
4624 relationship between a struct and its fields.
4625 build_duplicate_type is another solution (as used in
4626 handle_transparent_union_attribute), but that doesn't play well
4627 with the stronger C++ type identity model. */
4628 if (TREE_CODE (ttype
) == RECORD_TYPE
4629 || TREE_CODE (ttype
) == UNION_TYPE
4630 || TREE_CODE (ttype
) == QUAL_UNION_TYPE
4631 || TREE_CODE (ttype
) == ENUMERAL_TYPE
)
4633 warning (OPT_Wattributes
,
4634 "ignoring attributes applied to %qT after definition",
4635 TYPE_MAIN_VARIANT (ttype
));
4636 return build_qualified_type (ttype
, quals
);
4639 ttype
= build_qualified_type (ttype
, TYPE_UNQUALIFIED
);
4640 ntype
= build_distinct_type_copy (ttype
);
4642 TYPE_ATTRIBUTES (ntype
) = attribute
;
4644 hstate
.add_int (code
);
4645 if (TREE_TYPE (ntype
))
4646 hstate
.add_object (TYPE_HASH (TREE_TYPE (ntype
)));
4647 attribute_hash_list (attribute
, hstate
);
4649 switch (TREE_CODE (ntype
))
4652 type_hash_list (TYPE_ARG_TYPES (ntype
), hstate
);
4655 if (TYPE_DOMAIN (ntype
))
4656 hstate
.add_object (TYPE_HASH (TYPE_DOMAIN (ntype
)));
4659 t
= TYPE_MAX_VALUE (ntype
);
4660 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
4661 hstate
.add_object (TREE_INT_CST_ELT (t
, i
));
4664 case FIXED_POINT_TYPE
:
4666 unsigned int precision
= TYPE_PRECISION (ntype
);
4667 hstate
.add_object (precision
);
4674 ntype
= type_hash_canon (hstate
.end(), ntype
);
4676 /* If the target-dependent attributes make NTYPE different from
4677 its canonical type, we will need to use structural equality
4678 checks for this type. */
4679 if (TYPE_STRUCTURAL_EQUALITY_P (ttype
)
4680 || !comp_type_attributes (ntype
, ttype
))
4681 SET_TYPE_STRUCTURAL_EQUALITY (ntype
);
4682 else if (TYPE_CANONICAL (ntype
) == ntype
)
4683 TYPE_CANONICAL (ntype
) = TYPE_CANONICAL (ttype
);
4685 ttype
= build_qualified_type (ntype
, quals
);
4687 else if (TYPE_QUALS (ttype
) != quals
)
4688 ttype
= build_qualified_type (ttype
, quals
);
4693 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4697 omp_declare_simd_clauses_equal (tree clauses1
, tree clauses2
)
4700 for (cl1
= clauses1
, cl2
= clauses2
;
4702 cl1
= OMP_CLAUSE_CHAIN (cl1
), cl2
= OMP_CLAUSE_CHAIN (cl2
))
4704 if (OMP_CLAUSE_CODE (cl1
) != OMP_CLAUSE_CODE (cl2
))
4706 if (OMP_CLAUSE_CODE (cl1
) != OMP_CLAUSE_SIMDLEN
)
4708 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1
),
4709 OMP_CLAUSE_DECL (cl2
)) != 1)
4712 switch (OMP_CLAUSE_CODE (cl1
))
4714 case OMP_CLAUSE_ALIGNED
:
4715 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1
),
4716 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2
)) != 1)
4719 case OMP_CLAUSE_LINEAR
:
4720 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1
),
4721 OMP_CLAUSE_LINEAR_STEP (cl2
)) != 1)
4724 case OMP_CLAUSE_SIMDLEN
:
4725 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1
),
4726 OMP_CLAUSE_SIMDLEN_EXPR (cl2
)) != 1)
4735 /* Compare two constructor-element-type constants. Return 1 if the lists
4736 are known to be equal; otherwise return 0. */
4739 simple_cst_list_equal (const_tree l1
, const_tree l2
)
4741 while (l1
!= NULL_TREE
&& l2
!= NULL_TREE
)
4743 if (simple_cst_equal (TREE_VALUE (l1
), TREE_VALUE (l2
)) != 1)
4746 l1
= TREE_CHAIN (l1
);
4747 l2
= TREE_CHAIN (l2
);
4753 /* Compare two attributes for their value identity. Return true if the
4754 attribute values are known to be equal; otherwise return false.
4758 attribute_value_equal (const_tree attr1
, const_tree attr2
)
4760 if (TREE_VALUE (attr1
) == TREE_VALUE (attr2
))
4763 if (TREE_VALUE (attr1
) != NULL_TREE
4764 && TREE_CODE (TREE_VALUE (attr1
)) == TREE_LIST
4765 && TREE_VALUE (attr2
) != NULL
4766 && TREE_CODE (TREE_VALUE (attr2
)) == TREE_LIST
)
4767 return (simple_cst_list_equal (TREE_VALUE (attr1
),
4768 TREE_VALUE (attr2
)) == 1);
4770 if ((flag_openmp
|| flag_openmp_simd
)
4771 && TREE_VALUE (attr1
) && TREE_VALUE (attr2
)
4772 && TREE_CODE (TREE_VALUE (attr1
)) == OMP_CLAUSE
4773 && TREE_CODE (TREE_VALUE (attr2
)) == OMP_CLAUSE
)
4774 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1
),
4775 TREE_VALUE (attr2
));
4777 return (simple_cst_equal (TREE_VALUE (attr1
), TREE_VALUE (attr2
)) == 1);
4780 /* Return 0 if the attributes for two types are incompatible, 1 if they
4781 are compatible, and 2 if they are nearly compatible (which causes a
4782 warning to be generated). */
4784 comp_type_attributes (const_tree type1
, const_tree type2
)
4786 const_tree a1
= TYPE_ATTRIBUTES (type1
);
4787 const_tree a2
= TYPE_ATTRIBUTES (type2
);
4792 for (a
= a1
; a
!= NULL_TREE
; a
= TREE_CHAIN (a
))
4794 const struct attribute_spec
*as
;
4797 as
= lookup_attribute_spec (get_attribute_name (a
));
4798 if (!as
|| as
->affects_type_identity
== false)
4801 attr
= lookup_attribute (as
->name
, CONST_CAST_TREE (a2
));
4802 if (!attr
|| !attribute_value_equal (a
, attr
))
4807 for (a
= a2
; a
!= NULL_TREE
; a
= TREE_CHAIN (a
))
4809 const struct attribute_spec
*as
;
4811 as
= lookup_attribute_spec (get_attribute_name (a
));
4812 if (!as
|| as
->affects_type_identity
== false)
4815 if (!lookup_attribute (as
->name
, CONST_CAST_TREE (a1
)))
4817 /* We don't need to compare trees again, as we did this
4818 already in first loop. */
4820 /* All types - affecting identity - are equal, so
4821 there is no need to call target hook for comparison. */
4825 /* As some type combinations - like default calling-convention - might
4826 be compatible, we have to call the target hook to get the final result. */
4827 return targetm
.comp_type_attributes (type1
, type2
);
4830 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4833 Record such modified types already made so we don't make duplicates. */
4836 build_type_attribute_variant (tree ttype
, tree attribute
)
4838 return build_type_attribute_qual_variant (ttype
, attribute
,
4839 TYPE_QUALS (ttype
));
4843 /* Reset the expression *EXPR_P, a size or position.
4845 ??? We could reset all non-constant sizes or positions. But it's cheap
4846 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4848 We need to reset self-referential sizes or positions because they cannot
4849 be gimplified and thus can contain a CALL_EXPR after the gimplification
4850 is finished, which will run afoul of LTO streaming. And they need to be
4851 reset to something essentially dummy but not constant, so as to preserve
4852 the properties of the object they are attached to. */
4855 free_lang_data_in_one_sizepos (tree
*expr_p
)
4857 tree expr
= *expr_p
;
4858 if (CONTAINS_PLACEHOLDER_P (expr
))
4859 *expr_p
= build0 (PLACEHOLDER_EXPR
, TREE_TYPE (expr
));
4863 /* Reset all the fields in a binfo node BINFO. We only keep
4864 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4867 free_lang_data_in_binfo (tree binfo
)
4872 gcc_assert (TREE_CODE (binfo
) == TREE_BINFO
);
4874 BINFO_VIRTUALS (binfo
) = NULL_TREE
;
4875 BINFO_BASE_ACCESSES (binfo
) = NULL
;
4876 BINFO_INHERITANCE_CHAIN (binfo
) = NULL_TREE
;
4877 BINFO_SUBVTT_INDEX (binfo
) = NULL_TREE
;
4879 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo
), i
, t
)
4880 free_lang_data_in_binfo (t
);
4884 /* Reset all language specific information still present in TYPE. */
4887 free_lang_data_in_type (tree type
)
4889 gcc_assert (TYPE_P (type
));
4891 /* Give the FE a chance to remove its own data first. */
4892 lang_hooks
.free_lang_data (type
);
4894 TREE_LANG_FLAG_0 (type
) = 0;
4895 TREE_LANG_FLAG_1 (type
) = 0;
4896 TREE_LANG_FLAG_2 (type
) = 0;
4897 TREE_LANG_FLAG_3 (type
) = 0;
4898 TREE_LANG_FLAG_4 (type
) = 0;
4899 TREE_LANG_FLAG_5 (type
) = 0;
4900 TREE_LANG_FLAG_6 (type
) = 0;
4902 if (TREE_CODE (type
) == FUNCTION_TYPE
)
4904 /* Remove the const and volatile qualifiers from arguments. The
4905 C++ front end removes them, but the C front end does not,
4906 leading to false ODR violation errors when merging two
4907 instances of the same function signature compiled by
4908 different front ends. */
4911 for (p
= TYPE_ARG_TYPES (type
); p
; p
= TREE_CHAIN (p
))
4913 tree arg_type
= TREE_VALUE (p
);
4915 if (TYPE_READONLY (arg_type
) || TYPE_VOLATILE (arg_type
))
4917 int quals
= TYPE_QUALS (arg_type
)
4919 & ~TYPE_QUAL_VOLATILE
;
4920 TREE_VALUE (p
) = build_qualified_type (arg_type
, quals
);
4921 free_lang_data_in_type (TREE_VALUE (p
));
4926 /* Remove members that are not actually FIELD_DECLs from the field
4927 list of an aggregate. These occur in C++. */
4928 if (RECORD_OR_UNION_TYPE_P (type
))
4932 /* Note that TYPE_FIELDS can be shared across distinct
4933 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4934 to be removed, we cannot set its TREE_CHAIN to NULL.
4935 Otherwise, we would not be able to find all the other fields
4936 in the other instances of this TREE_TYPE.
4938 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4940 member
= TYPE_FIELDS (type
);
4943 if (TREE_CODE (member
) == FIELD_DECL
4944 || TREE_CODE (member
) == TYPE_DECL
)
4947 TREE_CHAIN (prev
) = member
;
4949 TYPE_FIELDS (type
) = member
;
4953 member
= TREE_CHAIN (member
);
4957 TREE_CHAIN (prev
) = NULL_TREE
;
4959 TYPE_FIELDS (type
) = NULL_TREE
;
4961 TYPE_METHODS (type
) = NULL_TREE
;
4962 if (TYPE_BINFO (type
))
4963 free_lang_data_in_binfo (TYPE_BINFO (type
));
4967 /* For non-aggregate types, clear out the language slot (which
4968 overloads TYPE_BINFO). */
4969 TYPE_LANG_SLOT_1 (type
) = NULL_TREE
;
4971 if (INTEGRAL_TYPE_P (type
)
4972 || SCALAR_FLOAT_TYPE_P (type
)
4973 || FIXED_POINT_TYPE_P (type
))
4975 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type
));
4976 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type
));
4980 free_lang_data_in_one_sizepos (&TYPE_SIZE (type
));
4981 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type
));
4983 if (TYPE_CONTEXT (type
)
4984 && TREE_CODE (TYPE_CONTEXT (type
)) == BLOCK
)
4986 tree ctx
= TYPE_CONTEXT (type
);
4989 ctx
= BLOCK_SUPERCONTEXT (ctx
);
4991 while (ctx
&& TREE_CODE (ctx
) == BLOCK
);
4992 TYPE_CONTEXT (type
) = ctx
;
4997 /* Return true if DECL may need an assembler name to be set. */
5000 need_assembler_name_p (tree decl
)
5002 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5004 if (flag_lto_odr_type_mering
5005 && TREE_CODE (decl
) == TYPE_DECL
5007 && decl
== TYPE_NAME (TREE_TYPE (decl
))
5008 && !is_lang_specific (TREE_TYPE (decl
))
5009 && AGGREGATE_TYPE_P (TREE_TYPE (decl
))
5010 && !variably_modified_type_p (TREE_TYPE (decl
), NULL_TREE
)
5011 && !type_in_anonymous_namespace_p (TREE_TYPE (decl
)))
5012 return !DECL_ASSEMBLER_NAME_SET_P (decl
);
5013 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5014 if (TREE_CODE (decl
) != FUNCTION_DECL
5015 && TREE_CODE (decl
) != VAR_DECL
)
5018 /* If DECL already has its assembler name set, it does not need a
5020 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
5021 || DECL_ASSEMBLER_NAME_SET_P (decl
))
5024 /* Abstract decls do not need an assembler name. */
5025 if (DECL_ABSTRACT_P (decl
))
5028 /* For VAR_DECLs, only static, public and external symbols need an
5030 if (TREE_CODE (decl
) == VAR_DECL
5031 && !TREE_STATIC (decl
)
5032 && !TREE_PUBLIC (decl
)
5033 && !DECL_EXTERNAL (decl
))
5036 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5038 /* Do not set assembler name on builtins. Allow RTL expansion to
5039 decide whether to expand inline or via a regular call. */
5040 if (DECL_BUILT_IN (decl
)
5041 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
5044 /* Functions represented in the callgraph need an assembler name. */
5045 if (cgraph_node::get (decl
) != NULL
)
5048 /* Unused and not public functions don't need an assembler name. */
5049 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
5057 /* Reset all language specific information still present in symbol
5061 free_lang_data_in_decl (tree decl
)
5063 gcc_assert (DECL_P (decl
));
5065 /* Give the FE a chance to remove its own data first. */
5066 lang_hooks
.free_lang_data (decl
);
5068 TREE_LANG_FLAG_0 (decl
) = 0;
5069 TREE_LANG_FLAG_1 (decl
) = 0;
5070 TREE_LANG_FLAG_2 (decl
) = 0;
5071 TREE_LANG_FLAG_3 (decl
) = 0;
5072 TREE_LANG_FLAG_4 (decl
) = 0;
5073 TREE_LANG_FLAG_5 (decl
) = 0;
5074 TREE_LANG_FLAG_6 (decl
) = 0;
5076 free_lang_data_in_one_sizepos (&DECL_SIZE (decl
));
5077 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl
));
5078 if (TREE_CODE (decl
) == FIELD_DECL
)
5080 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl
));
5081 if (TREE_CODE (DECL_CONTEXT (decl
)) == QUAL_UNION_TYPE
)
5082 DECL_QUALIFIER (decl
) = NULL_TREE
;
5085 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5087 struct cgraph_node
*node
;
5088 if (!(node
= cgraph_node::get (decl
))
5089 || (!node
->definition
&& !node
->clones
))
5092 node
->release_body ();
5095 release_function_body (decl
);
5096 DECL_ARGUMENTS (decl
) = NULL
;
5097 DECL_RESULT (decl
) = NULL
;
5098 DECL_INITIAL (decl
) = error_mark_node
;
5101 if (gimple_has_body_p (decl
))
5105 /* If DECL has a gimple body, then the context for its
5106 arguments must be DECL. Otherwise, it doesn't really
5107 matter, as we will not be emitting any code for DECL. In
5108 general, there may be other instances of DECL created by
5109 the front end and since PARM_DECLs are generally shared,
5110 their DECL_CONTEXT changes as the replicas of DECL are
5111 created. The only time where DECL_CONTEXT is important
5112 is for the FUNCTION_DECLs that have a gimple body (since
5113 the PARM_DECL will be used in the function's body). */
5114 for (t
= DECL_ARGUMENTS (decl
); t
; t
= TREE_CHAIN (t
))
5115 DECL_CONTEXT (t
) = decl
;
5118 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5119 At this point, it is not needed anymore. */
5120 DECL_SAVED_TREE (decl
) = NULL_TREE
;
5122 /* Clear the abstract origin if it refers to a method. Otherwise
5123 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5124 origin will not be output correctly. */
5125 if (DECL_ABSTRACT_ORIGIN (decl
)
5126 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))
5127 && RECORD_OR_UNION_TYPE_P
5128 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))))
5129 DECL_ABSTRACT_ORIGIN (decl
) = NULL_TREE
;
5131 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5132 DECL_VINDEX referring to itself into a vtable slot number as it
5133 should. Happens with functions that are copied and then forgotten
5134 about. Just clear it, it won't matter anymore. */
5135 if (DECL_VINDEX (decl
) && !tree_fits_shwi_p (DECL_VINDEX (decl
)))
5136 DECL_VINDEX (decl
) = NULL_TREE
;
5138 else if (TREE_CODE (decl
) == VAR_DECL
)
5140 if ((DECL_EXTERNAL (decl
)
5141 && (!TREE_STATIC (decl
) || !TREE_READONLY (decl
)))
5142 || (decl_function_context (decl
) && !TREE_STATIC (decl
)))
5143 DECL_INITIAL (decl
) = NULL_TREE
;
5145 else if (TREE_CODE (decl
) == TYPE_DECL
5146 || TREE_CODE (decl
) == FIELD_DECL
)
5147 DECL_INITIAL (decl
) = NULL_TREE
;
5148 else if (TREE_CODE (decl
) == TRANSLATION_UNIT_DECL
5149 && DECL_INITIAL (decl
)
5150 && TREE_CODE (DECL_INITIAL (decl
)) == BLOCK
)
5152 /* Strip builtins from the translation-unit BLOCK. We still have targets
5153 without builtin_decl_explicit support and also builtins are shared
5154 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5155 tree
*nextp
= &BLOCK_VARS (DECL_INITIAL (decl
));
5159 if (TREE_CODE (var
) == FUNCTION_DECL
5160 && DECL_BUILT_IN (var
))
5161 *nextp
= TREE_CHAIN (var
);
5163 nextp
= &TREE_CHAIN (var
);
5169 /* Data used when collecting DECLs and TYPEs for language data removal. */
5171 struct free_lang_data_d
5173 /* Worklist to avoid excessive recursion. */
5176 /* Set of traversed objects. Used to avoid duplicate visits. */
5177 hash_set
<tree
> *pset
;
5179 /* Array of symbols to process with free_lang_data_in_decl. */
5182 /* Array of types to process with free_lang_data_in_type. */
5187 /* Save all language fields needed to generate proper debug information
5188 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5191 save_debug_info_for_decl (tree t
)
5193 /*struct saved_debug_info_d *sdi;*/
5195 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& DECL_P (t
));
5197 /* FIXME. Partial implementation for saving debug info removed. */
5201 /* Save all language fields needed to generate proper debug information
5202 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5205 save_debug_info_for_type (tree t
)
5207 /*struct saved_debug_info_d *sdi;*/
5209 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& TYPE_P (t
));
5211 /* FIXME. Partial implementation for saving debug info removed. */
5215 /* Add type or decl T to one of the list of tree nodes that need their
5216 language data removed. The lists are held inside FLD. */
5219 add_tree_to_fld_list (tree t
, struct free_lang_data_d
*fld
)
5223 fld
->decls
.safe_push (t
);
5224 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5225 save_debug_info_for_decl (t
);
5227 else if (TYPE_P (t
))
5229 fld
->types
.safe_push (t
);
5230 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5231 save_debug_info_for_type (t
);
5237 /* Push tree node T into FLD->WORKLIST. */
5240 fld_worklist_push (tree t
, struct free_lang_data_d
*fld
)
5242 if (t
&& !is_lang_specific (t
) && !fld
->pset
->contains (t
))
5243 fld
->worklist
.safe_push ((t
));
5247 /* Operand callback helper for free_lang_data_in_node. *TP is the
5248 subtree operand being considered. */
5251 find_decls_types_r (tree
*tp
, int *ws
, void *data
)
5254 struct free_lang_data_d
*fld
= (struct free_lang_data_d
*) data
;
5256 if (TREE_CODE (t
) == TREE_LIST
)
5259 /* Language specific nodes will be removed, so there is no need
5260 to gather anything under them. */
5261 if (is_lang_specific (t
))
5269 /* Note that walk_tree does not traverse every possible field in
5270 decls, so we have to do our own traversals here. */
5271 add_tree_to_fld_list (t
, fld
);
5273 fld_worklist_push (DECL_NAME (t
), fld
);
5274 fld_worklist_push (DECL_CONTEXT (t
), fld
);
5275 fld_worklist_push (DECL_SIZE (t
), fld
);
5276 fld_worklist_push (DECL_SIZE_UNIT (t
), fld
);
5278 /* We are going to remove everything under DECL_INITIAL for
5279 TYPE_DECLs. No point walking them. */
5280 if (TREE_CODE (t
) != TYPE_DECL
)
5281 fld_worklist_push (DECL_INITIAL (t
), fld
);
5283 fld_worklist_push (DECL_ATTRIBUTES (t
), fld
);
5284 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t
), fld
);
5286 if (TREE_CODE (t
) == FUNCTION_DECL
)
5288 fld_worklist_push (DECL_ARGUMENTS (t
), fld
);
5289 fld_worklist_push (DECL_RESULT (t
), fld
);
5291 else if (TREE_CODE (t
) == TYPE_DECL
)
5293 fld_worklist_push (DECL_ORIGINAL_TYPE (t
), fld
);
5295 else if (TREE_CODE (t
) == FIELD_DECL
)
5297 fld_worklist_push (DECL_FIELD_OFFSET (t
), fld
);
5298 fld_worklist_push (DECL_BIT_FIELD_TYPE (t
), fld
);
5299 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t
), fld
);
5300 fld_worklist_push (DECL_FCONTEXT (t
), fld
);
5303 if ((TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
5304 && DECL_HAS_VALUE_EXPR_P (t
))
5305 fld_worklist_push (DECL_VALUE_EXPR (t
), fld
);
5307 if (TREE_CODE (t
) != FIELD_DECL
5308 && TREE_CODE (t
) != TYPE_DECL
)
5309 fld_worklist_push (TREE_CHAIN (t
), fld
);
5312 else if (TYPE_P (t
))
5314 /* Note that walk_tree does not traverse every possible field in
5315 types, so we have to do our own traversals here. */
5316 add_tree_to_fld_list (t
, fld
);
5318 if (!RECORD_OR_UNION_TYPE_P (t
))
5319 fld_worklist_push (TYPE_CACHED_VALUES (t
), fld
);
5320 fld_worklist_push (TYPE_SIZE (t
), fld
);
5321 fld_worklist_push (TYPE_SIZE_UNIT (t
), fld
);
5322 fld_worklist_push (TYPE_ATTRIBUTES (t
), fld
);
5323 fld_worklist_push (TYPE_POINTER_TO (t
), fld
);
5324 fld_worklist_push (TYPE_REFERENCE_TO (t
), fld
);
5325 fld_worklist_push (TYPE_NAME (t
), fld
);
5326 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5327 them and thus do not and want not to reach unused pointer types
5329 if (!POINTER_TYPE_P (t
))
5330 fld_worklist_push (TYPE_MINVAL (t
), fld
);
5331 if (!RECORD_OR_UNION_TYPE_P (t
))
5332 fld_worklist_push (TYPE_MAXVAL (t
), fld
);
5333 fld_worklist_push (TYPE_MAIN_VARIANT (t
), fld
);
5334 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5335 do not and want not to reach unused variants this way. */
5336 if (TYPE_CONTEXT (t
))
5338 tree ctx
= TYPE_CONTEXT (t
);
5339 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5340 So push that instead. */
5341 while (ctx
&& TREE_CODE (ctx
) == BLOCK
)
5342 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5343 fld_worklist_push (ctx
, fld
);
5345 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5346 and want not to reach unused types this way. */
5348 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
))
5352 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t
)), i
, tem
)
5353 fld_worklist_push (TREE_TYPE (tem
), fld
);
5354 tem
= BINFO_VIRTUALS (TYPE_BINFO (t
));
5356 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5357 && TREE_CODE (tem
) == TREE_LIST
)
5360 fld_worklist_push (TREE_VALUE (tem
), fld
);
5361 tem
= TREE_CHAIN (tem
);
5365 if (RECORD_OR_UNION_TYPE_P (t
))
5368 /* Push all TYPE_FIELDS - there can be interleaving interesting
5369 and non-interesting things. */
5370 tem
= TYPE_FIELDS (t
);
5373 if (TREE_CODE (tem
) == FIELD_DECL
5374 || TREE_CODE (tem
) == TYPE_DECL
)
5375 fld_worklist_push (tem
, fld
);
5376 tem
= TREE_CHAIN (tem
);
5380 fld_worklist_push (TYPE_STUB_DECL (t
), fld
);
5383 else if (TREE_CODE (t
) == BLOCK
)
5386 for (tem
= BLOCK_VARS (t
); tem
; tem
= TREE_CHAIN (tem
))
5387 fld_worklist_push (tem
, fld
);
5388 for (tem
= BLOCK_SUBBLOCKS (t
); tem
; tem
= BLOCK_CHAIN (tem
))
5389 fld_worklist_push (tem
, fld
);
5390 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t
), fld
);
5393 if (TREE_CODE (t
) != IDENTIFIER_NODE
5394 && CODE_CONTAINS_STRUCT (TREE_CODE (t
), TS_TYPED
))
5395 fld_worklist_push (TREE_TYPE (t
), fld
);
5401 /* Find decls and types in T. */
5404 find_decls_types (tree t
, struct free_lang_data_d
*fld
)
5408 if (!fld
->pset
->contains (t
))
5409 walk_tree (&t
, find_decls_types_r
, fld
, fld
->pset
);
5410 if (fld
->worklist
.is_empty ())
5412 t
= fld
->worklist
.pop ();
5416 /* Translate all the types in LIST with the corresponding runtime
5420 get_eh_types_for_runtime (tree list
)
5424 if (list
== NULL_TREE
)
5427 head
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5429 list
= TREE_CHAIN (list
);
5432 tree n
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5433 TREE_CHAIN (prev
) = n
;
5434 prev
= TREE_CHAIN (prev
);
5435 list
= TREE_CHAIN (list
);
5442 /* Find decls and types referenced in EH region R and store them in
5443 FLD->DECLS and FLD->TYPES. */
5446 find_decls_types_in_eh_region (eh_region r
, struct free_lang_data_d
*fld
)
5457 /* The types referenced in each catch must first be changed to the
5458 EH types used at runtime. This removes references to FE types
5460 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
5462 c
->type_list
= get_eh_types_for_runtime (c
->type_list
);
5463 walk_tree (&c
->type_list
, find_decls_types_r
, fld
, fld
->pset
);
5468 case ERT_ALLOWED_EXCEPTIONS
:
5469 r
->u
.allowed
.type_list
5470 = get_eh_types_for_runtime (r
->u
.allowed
.type_list
);
5471 walk_tree (&r
->u
.allowed
.type_list
, find_decls_types_r
, fld
, fld
->pset
);
5474 case ERT_MUST_NOT_THROW
:
5475 walk_tree (&r
->u
.must_not_throw
.failure_decl
,
5476 find_decls_types_r
, fld
, fld
->pset
);
5482 /* Find decls and types referenced in cgraph node N and store them in
5483 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5484 look for *every* kind of DECL and TYPE node reachable from N,
5485 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5486 NAMESPACE_DECLs, etc). */
5489 find_decls_types_in_node (struct cgraph_node
*n
, struct free_lang_data_d
*fld
)
5492 struct function
*fn
;
5496 find_decls_types (n
->decl
, fld
);
5498 if (!gimple_has_body_p (n
->decl
))
5501 gcc_assert (current_function_decl
== NULL_TREE
&& cfun
== NULL
);
5503 fn
= DECL_STRUCT_FUNCTION (n
->decl
);
5505 /* Traverse locals. */
5506 FOR_EACH_LOCAL_DECL (fn
, ix
, t
)
5507 find_decls_types (t
, fld
);
5509 /* Traverse EH regions in FN. */
5512 FOR_ALL_EH_REGION_FN (r
, fn
)
5513 find_decls_types_in_eh_region (r
, fld
);
5516 /* Traverse every statement in FN. */
5517 FOR_EACH_BB_FN (bb
, fn
)
5519 gimple_stmt_iterator si
;
5522 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
5524 gimple phi
= gsi_stmt (si
);
5526 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5528 tree
*arg_p
= gimple_phi_arg_def_ptr (phi
, i
);
5529 find_decls_types (*arg_p
, fld
);
5533 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
5535 gimple stmt
= gsi_stmt (si
);
5537 if (is_gimple_call (stmt
))
5538 find_decls_types (gimple_call_fntype (stmt
), fld
);
5540 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
5542 tree arg
= gimple_op (stmt
, i
);
5543 find_decls_types (arg
, fld
);
5550 /* Find decls and types referenced in varpool node N and store them in
5551 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5552 look for *every* kind of DECL and TYPE node reachable from N,
5553 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5554 NAMESPACE_DECLs, etc). */
5557 find_decls_types_in_var (varpool_node
*v
, struct free_lang_data_d
*fld
)
5559 find_decls_types (v
->decl
, fld
);
5562 /* If T needs an assembler name, have one created for it. */
5565 assign_assembler_name_if_neeeded (tree t
)
5567 if (need_assembler_name_p (t
))
5569 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5570 diagnostics that use input_location to show locus
5571 information. The problem here is that, at this point,
5572 input_location is generally anchored to the end of the file
5573 (since the parser is long gone), so we don't have a good
5574 position to pin it to.
5576 To alleviate this problem, this uses the location of T's
5577 declaration. Examples of this are
5578 testsuite/g++.dg/template/cond2.C and
5579 testsuite/g++.dg/template/pr35240.C. */
5580 location_t saved_location
= input_location
;
5581 input_location
= DECL_SOURCE_LOCATION (t
);
5583 decl_assembler_name (t
);
5585 input_location
= saved_location
;
5590 /* Free language specific information for every operand and expression
5591 in every node of the call graph. This process operates in three stages:
5593 1- Every callgraph node and varpool node is traversed looking for
5594 decls and types embedded in them. This is a more exhaustive
5595 search than that done by find_referenced_vars, because it will
5596 also collect individual fields, decls embedded in types, etc.
5598 2- All the decls found are sent to free_lang_data_in_decl.
5600 3- All the types found are sent to free_lang_data_in_type.
5602 The ordering between decls and types is important because
5603 free_lang_data_in_decl sets assembler names, which includes
5604 mangling. So types cannot be freed up until assembler names have
5608 free_lang_data_in_cgraph (void)
5610 struct cgraph_node
*n
;
5612 struct free_lang_data_d fld
;
5617 /* Initialize sets and arrays to store referenced decls and types. */
5618 fld
.pset
= new hash_set
<tree
>;
5619 fld
.worklist
.create (0);
5620 fld
.decls
.create (100);
5621 fld
.types
.create (100);
5623 /* Find decls and types in the body of every function in the callgraph. */
5624 FOR_EACH_FUNCTION (n
)
5625 find_decls_types_in_node (n
, &fld
);
5627 FOR_EACH_VEC_SAFE_ELT (alias_pairs
, i
, p
)
5628 find_decls_types (p
->decl
, &fld
);
5630 /* Find decls and types in every varpool symbol. */
5631 FOR_EACH_VARIABLE (v
)
5632 find_decls_types_in_var (v
, &fld
);
5634 /* Set the assembler name on every decl found. We need to do this
5635 now because free_lang_data_in_decl will invalidate data needed
5636 for mangling. This breaks mangling on interdependent decls. */
5637 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5638 assign_assembler_name_if_neeeded (t
);
5640 /* Traverse every decl found freeing its language data. */
5641 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5642 free_lang_data_in_decl (t
);
5644 /* Traverse every type found freeing its language data. */
5645 FOR_EACH_VEC_ELT (fld
.types
, i
, t
)
5646 free_lang_data_in_type (t
);
5649 fld
.worklist
.release ();
5650 fld
.decls
.release ();
5651 fld
.types
.release ();
5655 /* Free resources that are used by FE but are not needed once they are done. */
5658 free_lang_data (void)
5662 /* If we are the LTO frontend we have freed lang-specific data already. */
5664 || !flag_generate_lto
)
5667 /* Allocate and assign alias sets to the standard integer types
5668 while the slots are still in the way the frontends generated them. */
5669 for (i
= 0; i
< itk_none
; ++i
)
5670 if (integer_types
[i
])
5671 TYPE_ALIAS_SET (integer_types
[i
]) = get_alias_set (integer_types
[i
]);
5673 /* Traverse the IL resetting language specific information for
5674 operands, expressions, etc. */
5675 free_lang_data_in_cgraph ();
5677 /* Create gimple variants for common types. */
5678 ptrdiff_type_node
= integer_type_node
;
5679 fileptr_type_node
= ptr_type_node
;
5681 /* Reset some langhooks. Do not reset types_compatible_p, it may
5682 still be used indirectly via the get_alias_set langhook. */
5683 lang_hooks
.dwarf_name
= lhd_dwarf_name
;
5684 lang_hooks
.decl_printable_name
= gimple_decl_printable_name
;
5685 /* We do not want the default decl_assembler_name implementation,
5686 rather if we have fixed everything we want a wrapper around it
5687 asserting that all non-local symbols already got their assembler
5688 name and only produce assembler names for local symbols. Or rather
5689 make sure we never call decl_assembler_name on local symbols and
5690 devise a separate, middle-end private scheme for it. */
5692 /* Reset diagnostic machinery. */
5693 tree_diagnostics_defaults (global_dc
);
5701 const pass_data pass_data_ipa_free_lang_data
=
5703 SIMPLE_IPA_PASS
, /* type */
5704 "*free_lang_data", /* name */
5705 OPTGROUP_NONE
, /* optinfo_flags */
5706 TV_IPA_FREE_LANG_DATA
, /* tv_id */
5707 0, /* properties_required */
5708 0, /* properties_provided */
5709 0, /* properties_destroyed */
5710 0, /* todo_flags_start */
5711 0, /* todo_flags_finish */
5714 class pass_ipa_free_lang_data
: public simple_ipa_opt_pass
5717 pass_ipa_free_lang_data (gcc::context
*ctxt
)
5718 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data
, ctxt
)
5721 /* opt_pass methods: */
5722 virtual unsigned int execute (function
*) { return free_lang_data (); }
5724 }; // class pass_ipa_free_lang_data
5728 simple_ipa_opt_pass
*
5729 make_pass_ipa_free_lang_data (gcc::context
*ctxt
)
5731 return new pass_ipa_free_lang_data (ctxt
);
5734 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5735 ATTR_NAME. Also used internally by remove_attribute(). */
5737 private_is_attribute_p (const char *attr_name
, size_t attr_len
, const_tree ident
)
5739 size_t ident_len
= IDENTIFIER_LENGTH (ident
);
5741 if (ident_len
== attr_len
)
5743 if (strcmp (attr_name
, IDENTIFIER_POINTER (ident
)) == 0)
5746 else if (ident_len
== attr_len
+ 4)
5748 /* There is the possibility that ATTR is 'text' and IDENT is
5750 const char *p
= IDENTIFIER_POINTER (ident
);
5751 if (p
[0] == '_' && p
[1] == '_'
5752 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5753 && strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5760 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5761 of ATTR_NAME, and LIST is not NULL_TREE. */
5763 private_lookup_attribute (const char *attr_name
, size_t attr_len
, tree list
)
5767 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5769 if (ident_len
== attr_len
)
5771 if (!strcmp (attr_name
,
5772 IDENTIFIER_POINTER (get_attribute_name (list
))))
5775 /* TODO: If we made sure that attributes were stored in the
5776 canonical form without '__...__' (ie, as in 'text' as opposed
5777 to '__text__') then we could avoid the following case. */
5778 else if (ident_len
== attr_len
+ 4)
5780 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5781 if (p
[0] == '_' && p
[1] == '_'
5782 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5783 && strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5786 list
= TREE_CHAIN (list
);
5792 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5793 return a pointer to the attribute's list first element if the attribute
5794 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5798 private_lookup_attribute_by_prefix (const char *attr_name
, size_t attr_len
,
5803 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5805 if (attr_len
> ident_len
)
5807 list
= TREE_CHAIN (list
);
5811 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5813 if (strncmp (attr_name
, p
, attr_len
) == 0)
5816 /* TODO: If we made sure that attributes were stored in the
5817 canonical form without '__...__' (ie, as in 'text' as opposed
5818 to '__text__') then we could avoid the following case. */
5819 if (p
[0] == '_' && p
[1] == '_' &&
5820 strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5823 list
= TREE_CHAIN (list
);
5830 /* A variant of lookup_attribute() that can be used with an identifier
5831 as the first argument, and where the identifier can be either
5832 'text' or '__text__'.
5834 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5835 return a pointer to the attribute's list element if the attribute
5836 is part of the list, or NULL_TREE if not found. If the attribute
5837 appears more than once, this only returns the first occurrence; the
5838 TREE_CHAIN of the return value should be passed back in if further
5839 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5840 can be in the form 'text' or '__text__'. */
5842 lookup_ident_attribute (tree attr_identifier
, tree list
)
5844 gcc_checking_assert (TREE_CODE (attr_identifier
) == IDENTIFIER_NODE
);
5848 gcc_checking_assert (TREE_CODE (get_attribute_name (list
))
5849 == IDENTIFIER_NODE
);
5851 /* Identifiers can be compared directly for equality. */
5852 if (attr_identifier
== get_attribute_name (list
))
5855 /* If they are not equal, they may still be one in the form
5856 'text' while the other one is in the form '__text__'. TODO:
5857 If we were storing attributes in normalized 'text' form, then
5858 this could all go away and we could take full advantage of
5859 the fact that we're comparing identifiers. :-) */
5861 size_t attr_len
= IDENTIFIER_LENGTH (attr_identifier
);
5862 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5864 if (ident_len
== attr_len
+ 4)
5866 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5867 const char *q
= IDENTIFIER_POINTER (attr_identifier
);
5868 if (p
[0] == '_' && p
[1] == '_'
5869 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5870 && strncmp (q
, p
+ 2, attr_len
) == 0)
5873 else if (ident_len
+ 4 == attr_len
)
5875 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5876 const char *q
= IDENTIFIER_POINTER (attr_identifier
);
5877 if (q
[0] == '_' && q
[1] == '_'
5878 && q
[attr_len
- 2] == '_' && q
[attr_len
- 1] == '_'
5879 && strncmp (q
+ 2, p
, ident_len
) == 0)
5883 list
= TREE_CHAIN (list
);
5889 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5893 remove_attribute (const char *attr_name
, tree list
)
5896 size_t attr_len
= strlen (attr_name
);
5898 gcc_checking_assert (attr_name
[0] != '_');
5900 for (p
= &list
; *p
; )
5903 /* TODO: If we were storing attributes in normalized form, here
5904 we could use a simple strcmp(). */
5905 if (private_is_attribute_p (attr_name
, attr_len
, get_attribute_name (l
)))
5906 *p
= TREE_CHAIN (l
);
5908 p
= &TREE_CHAIN (l
);
5914 /* Return an attribute list that is the union of a1 and a2. */
5917 merge_attributes (tree a1
, tree a2
)
5921 /* Either one unset? Take the set one. */
5923 if ((attributes
= a1
) == 0)
5926 /* One that completely contains the other? Take it. */
5928 else if (a2
!= 0 && ! attribute_list_contained (a1
, a2
))
5930 if (attribute_list_contained (a2
, a1
))
5934 /* Pick the longest list, and hang on the other list. */
5936 if (list_length (a1
) < list_length (a2
))
5937 attributes
= a2
, a2
= a1
;
5939 for (; a2
!= 0; a2
= TREE_CHAIN (a2
))
5942 for (a
= lookup_ident_attribute (get_attribute_name (a2
),
5944 a
!= NULL_TREE
&& !attribute_value_equal (a
, a2
);
5945 a
= lookup_ident_attribute (get_attribute_name (a2
),
5950 a1
= copy_node (a2
);
5951 TREE_CHAIN (a1
) = attributes
;
5960 /* Given types T1 and T2, merge their attributes and return
5964 merge_type_attributes (tree t1
, tree t2
)
5966 return merge_attributes (TYPE_ATTRIBUTES (t1
),
5967 TYPE_ATTRIBUTES (t2
));
5970 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5974 merge_decl_attributes (tree olddecl
, tree newdecl
)
5976 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
5977 DECL_ATTRIBUTES (newdecl
));
5980 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5982 /* Specialization of merge_decl_attributes for various Windows targets.
5984 This handles the following situation:
5986 __declspec (dllimport) int foo;
5989 The second instance of `foo' nullifies the dllimport. */
5992 merge_dllimport_decl_attributes (tree old
, tree new_tree
)
5995 int delete_dllimport_p
= 1;
5997 /* What we need to do here is remove from `old' dllimport if it doesn't
5998 appear in `new'. dllimport behaves like extern: if a declaration is
5999 marked dllimport and a definition appears later, then the object
6000 is not dllimport'd. We also remove a `new' dllimport if the old list
6001 contains dllexport: dllexport always overrides dllimport, regardless
6002 of the order of declaration. */
6003 if (!VAR_OR_FUNCTION_DECL_P (new_tree
))
6004 delete_dllimport_p
= 0;
6005 else if (DECL_DLLIMPORT_P (new_tree
)
6006 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old
)))
6008 DECL_DLLIMPORT_P (new_tree
) = 0;
6009 warning (OPT_Wattributes
, "%q+D already declared with dllexport attribute: "
6010 "dllimport ignored", new_tree
);
6012 else if (DECL_DLLIMPORT_P (old
) && !DECL_DLLIMPORT_P (new_tree
))
6014 /* Warn about overriding a symbol that has already been used, e.g.:
6015 extern int __attribute__ ((dllimport)) foo;
6016 int* bar () {return &foo;}
6019 if (TREE_USED (old
))
6021 warning (0, "%q+D redeclared without dllimport attribute "
6022 "after being referenced with dll linkage", new_tree
);
6023 /* If we have used a variable's address with dllimport linkage,
6024 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6025 decl may already have had TREE_CONSTANT computed.
6026 We still remove the attribute so that assembler code refers
6027 to '&foo rather than '_imp__foo'. */
6028 if (TREE_CODE (old
) == VAR_DECL
&& TREE_ADDRESSABLE (old
))
6029 DECL_DLLIMPORT_P (new_tree
) = 1;
6032 /* Let an inline definition silently override the external reference,
6033 but otherwise warn about attribute inconsistency. */
6034 else if (TREE_CODE (new_tree
) == VAR_DECL
6035 || !DECL_DECLARED_INLINE_P (new_tree
))
6036 warning (OPT_Wattributes
, "%q+D redeclared without dllimport attribute: "
6037 "previous dllimport ignored", new_tree
);
6040 delete_dllimport_p
= 0;
6042 a
= merge_attributes (DECL_ATTRIBUTES (old
), DECL_ATTRIBUTES (new_tree
));
6044 if (delete_dllimport_p
)
6045 a
= remove_attribute ("dllimport", a
);
6050 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6051 struct attribute_spec.handler. */
6054 handle_dll_attribute (tree
* pnode
, tree name
, tree args
, int flags
,
6060 /* These attributes may apply to structure and union types being created,
6061 but otherwise should pass to the declaration involved. */
6064 if (flags
& ((int) ATTR_FLAG_DECL_NEXT
| (int) ATTR_FLAG_FUNCTION_NEXT
6065 | (int) ATTR_FLAG_ARRAY_NEXT
))
6067 *no_add_attrs
= true;
6068 return tree_cons (name
, args
, NULL_TREE
);
6070 if (TREE_CODE (node
) == RECORD_TYPE
6071 || TREE_CODE (node
) == UNION_TYPE
)
6073 node
= TYPE_NAME (node
);
6079 warning (OPT_Wattributes
, "%qE attribute ignored",
6081 *no_add_attrs
= true;
6086 if (TREE_CODE (node
) != FUNCTION_DECL
6087 && TREE_CODE (node
) != VAR_DECL
6088 && TREE_CODE (node
) != TYPE_DECL
)
6090 *no_add_attrs
= true;
6091 warning (OPT_Wattributes
, "%qE attribute ignored",
6096 if (TREE_CODE (node
) == TYPE_DECL
6097 && TREE_CODE (TREE_TYPE (node
)) != RECORD_TYPE
6098 && TREE_CODE (TREE_TYPE (node
)) != UNION_TYPE
)
6100 *no_add_attrs
= true;
6101 warning (OPT_Wattributes
, "%qE attribute ignored",
6106 is_dllimport
= is_attribute_p ("dllimport", name
);
6108 /* Report error on dllimport ambiguities seen now before they cause
6112 /* Honor any target-specific overrides. */
6113 if (!targetm
.valid_dllimport_attribute_p (node
))
6114 *no_add_attrs
= true;
6116 else if (TREE_CODE (node
) == FUNCTION_DECL
6117 && DECL_DECLARED_INLINE_P (node
))
6119 warning (OPT_Wattributes
, "inline function %q+D declared as "
6120 " dllimport: attribute ignored", node
);
6121 *no_add_attrs
= true;
6123 /* Like MS, treat definition of dllimported variables and
6124 non-inlined functions on declaration as syntax errors. */
6125 else if (TREE_CODE (node
) == FUNCTION_DECL
&& DECL_INITIAL (node
))
6127 error ("function %q+D definition is marked dllimport", node
);
6128 *no_add_attrs
= true;
6131 else if (TREE_CODE (node
) == VAR_DECL
)
6133 if (DECL_INITIAL (node
))
6135 error ("variable %q+D definition is marked dllimport",
6137 *no_add_attrs
= true;
6140 /* `extern' needn't be specified with dllimport.
6141 Specify `extern' now and hope for the best. Sigh. */
6142 DECL_EXTERNAL (node
) = 1;
6143 /* Also, implicitly give dllimport'd variables declared within
6144 a function global scope, unless declared static. */
6145 if (current_function_decl
!= NULL_TREE
&& !TREE_STATIC (node
))
6146 TREE_PUBLIC (node
) = 1;
6149 if (*no_add_attrs
== false)
6150 DECL_DLLIMPORT_P (node
) = 1;
6152 else if (TREE_CODE (node
) == FUNCTION_DECL
6153 && DECL_DECLARED_INLINE_P (node
)
6154 && flag_keep_inline_dllexport
)
6155 /* An exported function, even if inline, must be emitted. */
6156 DECL_EXTERNAL (node
) = 0;
6158 /* Report error if symbol is not accessible at global scope. */
6159 if (!TREE_PUBLIC (node
)
6160 && (TREE_CODE (node
) == VAR_DECL
6161 || TREE_CODE (node
) == FUNCTION_DECL
))
6163 error ("external linkage required for symbol %q+D because of "
6164 "%qE attribute", node
, name
);
6165 *no_add_attrs
= true;
6168 /* A dllexport'd entity must have default visibility so that other
6169 program units (shared libraries or the main executable) can see
6170 it. A dllimport'd entity must have default visibility so that
6171 the linker knows that undefined references within this program
6172 unit can be resolved by the dynamic linker. */
6175 if (DECL_VISIBILITY_SPECIFIED (node
)
6176 && DECL_VISIBILITY (node
) != VISIBILITY_DEFAULT
)
6177 error ("%qE implies default visibility, but %qD has already "
6178 "been declared with a different visibility",
6180 DECL_VISIBILITY (node
) = VISIBILITY_DEFAULT
;
6181 DECL_VISIBILITY_SPECIFIED (node
) = 1;
6187 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6189 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6190 of the various TYPE_QUAL values. */
6193 set_type_quals (tree type
, int type_quals
)
6195 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
6196 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
6197 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
6198 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
6199 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
6202 /* Returns true iff unqualified CAND and BASE are equivalent. */
6205 check_base_type (const_tree cand
, const_tree base
)
6207 return (TYPE_NAME (cand
) == TYPE_NAME (base
)
6208 /* Apparently this is needed for Objective-C. */
6209 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6210 /* Check alignment. */
6211 && TYPE_ALIGN (cand
) == TYPE_ALIGN (base
)
6212 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6213 TYPE_ATTRIBUTES (base
)));
6216 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6219 check_qualified_type (const_tree cand
, const_tree base
, int type_quals
)
6221 return (TYPE_QUALS (cand
) == type_quals
6222 && check_base_type (cand
, base
));
6225 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6228 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
6230 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
6231 && TYPE_NAME (cand
) == TYPE_NAME (base
)
6232 /* Apparently this is needed for Objective-C. */
6233 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6234 /* Check alignment. */
6235 && TYPE_ALIGN (cand
) == align
6236 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6237 TYPE_ATTRIBUTES (base
)));
6240 /* This function checks to see if TYPE matches the size one of the built-in
6241 atomic types, and returns that core atomic type. */
6244 find_atomic_core_type (tree type
)
6246 tree base_atomic_type
;
6248 /* Only handle complete types. */
6249 if (TYPE_SIZE (type
) == NULL_TREE
)
6252 HOST_WIDE_INT type_size
= tree_to_uhwi (TYPE_SIZE (type
));
6256 base_atomic_type
= atomicQI_type_node
;
6260 base_atomic_type
= atomicHI_type_node
;
6264 base_atomic_type
= atomicSI_type_node
;
6268 base_atomic_type
= atomicDI_type_node
;
6272 base_atomic_type
= atomicTI_type_node
;
6276 base_atomic_type
= NULL_TREE
;
6279 return base_atomic_type
;
6282 /* Return a version of the TYPE, qualified as indicated by the
6283 TYPE_QUALS, if one exists. If no qualified version exists yet,
6284 return NULL_TREE. */
6287 get_qualified_type (tree type
, int type_quals
)
6291 if (TYPE_QUALS (type
) == type_quals
)
6294 /* Search the chain of variants to see if there is already one there just
6295 like the one we need to have. If so, use that existing one. We must
6296 preserve the TYPE_NAME, since there is code that depends on this. */
6297 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6298 if (check_qualified_type (t
, type
, type_quals
))
6304 /* Like get_qualified_type, but creates the type if it does not
6305 exist. This function never returns NULL_TREE. */
6308 build_qualified_type (tree type
, int type_quals
)
6312 /* See if we already have the appropriate qualified variant. */
6313 t
= get_qualified_type (type
, type_quals
);
6315 /* If not, build it. */
6318 t
= build_variant_type_copy (type
);
6319 set_type_quals (t
, type_quals
);
6321 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
6323 /* See if this object can map to a basic atomic type. */
6324 tree atomic_type
= find_atomic_core_type (type
);
6327 /* Ensure the alignment of this type is compatible with
6328 the required alignment of the atomic type. */
6329 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
6330 TYPE_ALIGN (t
) = TYPE_ALIGN (atomic_type
);
6334 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6335 /* Propagate structural equality. */
6336 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6337 else if (TYPE_CANONICAL (type
) != type
)
6338 /* Build the underlying canonical type, since it is different
6341 tree c
= build_qualified_type (TYPE_CANONICAL (type
), type_quals
);
6342 TYPE_CANONICAL (t
) = TYPE_CANONICAL (c
);
6345 /* T is its own canonical type. */
6346 TYPE_CANONICAL (t
) = t
;
6353 /* Create a variant of type T with alignment ALIGN. */
6356 build_aligned_type (tree type
, unsigned int align
)
6360 if (TYPE_PACKED (type
)
6361 || TYPE_ALIGN (type
) == align
)
6364 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6365 if (check_aligned_type (t
, type
, align
))
6368 t
= build_variant_type_copy (type
);
6369 TYPE_ALIGN (t
) = align
;
6374 /* Create a new distinct copy of TYPE. The new type is made its own
6375 MAIN_VARIANT. If TYPE requires structural equality checks, the
6376 resulting type requires structural equality checks; otherwise, its
6377 TYPE_CANONICAL points to itself. */
6380 build_distinct_type_copy (tree type
)
6382 tree t
= copy_node (type
);
6384 TYPE_POINTER_TO (t
) = 0;
6385 TYPE_REFERENCE_TO (t
) = 0;
6387 /* Set the canonical type either to a new equivalence class, or
6388 propagate the need for structural equality checks. */
6389 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6390 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6392 TYPE_CANONICAL (t
) = t
;
6394 /* Make it its own variant. */
6395 TYPE_MAIN_VARIANT (t
) = t
;
6396 TYPE_NEXT_VARIANT (t
) = 0;
6398 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6399 whose TREE_TYPE is not t. This can also happen in the Ada
6400 frontend when using subtypes. */
6405 /* Create a new variant of TYPE, equivalent but distinct. This is so
6406 the caller can modify it. TYPE_CANONICAL for the return type will
6407 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6408 are considered equal by the language itself (or that both types
6409 require structural equality checks). */
6412 build_variant_type_copy (tree type
)
6414 tree t
, m
= TYPE_MAIN_VARIANT (type
);
6416 t
= build_distinct_type_copy (type
);
6418 /* Since we're building a variant, assume that it is a non-semantic
6419 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6420 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
6422 /* Add the new type to the chain of variants of TYPE. */
6423 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
6424 TYPE_NEXT_VARIANT (m
) = t
;
6425 TYPE_MAIN_VARIANT (t
) = m
;
6430 /* Return true if the from tree in both tree maps are equal. */
6433 tree_map_base_eq (const void *va
, const void *vb
)
6435 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
6436 *const b
= (const struct tree_map_base
*) vb
;
6437 return (a
->from
== b
->from
);
6440 /* Hash a from tree in a tree_base_map. */
6443 tree_map_base_hash (const void *item
)
6445 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
6448 /* Return true if this tree map structure is marked for garbage collection
6449 purposes. We simply return true if the from tree is marked, so that this
6450 structure goes away when the from tree goes away. */
6453 tree_map_base_marked_p (const void *p
)
6455 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
6458 /* Hash a from tree in a tree_map. */
6461 tree_map_hash (const void *item
)
6463 return (((const struct tree_map
*) item
)->hash
);
6466 /* Hash a from tree in a tree_decl_map. */
6469 tree_decl_map_hash (const void *item
)
6471 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
6474 /* Return the initialization priority for DECL. */
6477 decl_init_priority_lookup (tree decl
)
6479 symtab_node
*snode
= symtab_node::get (decl
);
6482 return DEFAULT_INIT_PRIORITY
;
6484 snode
->get_init_priority ();
6487 /* Return the finalization priority for DECL. */
6490 decl_fini_priority_lookup (tree decl
)
6492 cgraph_node
*node
= cgraph_node::get (decl
);
6495 return DEFAULT_INIT_PRIORITY
;
6497 node
->get_fini_priority ();
6500 /* Set the initialization priority for DECL to PRIORITY. */
6503 decl_init_priority_insert (tree decl
, priority_type priority
)
6505 struct symtab_node
*snode
;
6507 if (priority
== DEFAULT_INIT_PRIORITY
)
6509 snode
= symtab_node::get (decl
);
6513 else if (TREE_CODE (decl
) == VAR_DECL
)
6514 snode
= varpool_node::get_create (decl
);
6516 snode
= cgraph_node::get_create (decl
);
6517 snode
->set_init_priority (priority
);
6520 /* Set the finalization priority for DECL to PRIORITY. */
6523 decl_fini_priority_insert (tree decl
, priority_type priority
)
6525 struct cgraph_node
*node
;
6527 if (priority
== DEFAULT_INIT_PRIORITY
)
6529 node
= cgraph_node::get (decl
);
6534 node
= cgraph_node::get_create (decl
);
6535 node
->set_fini_priority (priority
);
6538 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6541 print_debug_expr_statistics (void)
6543 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6544 (long) htab_size (debug_expr_for_decl
),
6545 (long) htab_elements (debug_expr_for_decl
),
6546 htab_collisions (debug_expr_for_decl
));
6549 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6552 print_value_expr_statistics (void)
6554 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6555 (long) htab_size (value_expr_for_decl
),
6556 (long) htab_elements (value_expr_for_decl
),
6557 htab_collisions (value_expr_for_decl
));
6560 /* Lookup a debug expression for FROM, and return it if we find one. */
6563 decl_debug_expr_lookup (tree from
)
6565 struct tree_decl_map
*h
, in
;
6566 in
.base
.from
= from
;
6568 h
= (struct tree_decl_map
*)
6569 htab_find_with_hash (debug_expr_for_decl
, &in
, DECL_UID (from
));
6575 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6578 decl_debug_expr_insert (tree from
, tree to
)
6580 struct tree_decl_map
*h
;
6583 h
= ggc_alloc
<tree_decl_map
> ();
6584 h
->base
.from
= from
;
6586 loc
= htab_find_slot_with_hash (debug_expr_for_decl
, h
, DECL_UID (from
),
6588 *(struct tree_decl_map
**) loc
= h
;
6591 /* Lookup a value expression for FROM, and return it if we find one. */
6594 decl_value_expr_lookup (tree from
)
6596 struct tree_decl_map
*h
, in
;
6597 in
.base
.from
= from
;
6599 h
= (struct tree_decl_map
*)
6600 htab_find_with_hash (value_expr_for_decl
, &in
, DECL_UID (from
));
6606 /* Insert a mapping FROM->TO in the value expression hashtable. */
6609 decl_value_expr_insert (tree from
, tree to
)
6611 struct tree_decl_map
*h
;
6614 h
= ggc_alloc
<tree_decl_map
> ();
6615 h
->base
.from
= from
;
6617 loc
= htab_find_slot_with_hash (value_expr_for_decl
, h
, DECL_UID (from
),
6619 *(struct tree_decl_map
**) loc
= h
;
6622 /* Lookup a vector of debug arguments for FROM, and return it if we
6626 decl_debug_args_lookup (tree from
)
6628 struct tree_vec_map
*h
, in
;
6630 if (!DECL_HAS_DEBUG_ARGS_P (from
))
6632 gcc_checking_assert (debug_args_for_decl
!= NULL
);
6633 in
.base
.from
= from
;
6634 h
= (struct tree_vec_map
*)
6635 htab_find_with_hash (debug_args_for_decl
, &in
, DECL_UID (from
));
6641 /* Insert a mapping FROM->empty vector of debug arguments in the value
6642 expression hashtable. */
6645 decl_debug_args_insert (tree from
)
6647 struct tree_vec_map
*h
;
6650 if (DECL_HAS_DEBUG_ARGS_P (from
))
6651 return decl_debug_args_lookup (from
);
6652 if (debug_args_for_decl
== NULL
)
6653 debug_args_for_decl
= htab_create_ggc (64, tree_vec_map_hash
,
6654 tree_vec_map_eq
, 0);
6655 h
= ggc_alloc
<tree_vec_map
> ();
6656 h
->base
.from
= from
;
6658 loc
= htab_find_slot_with_hash (debug_args_for_decl
, h
, DECL_UID (from
),
6660 *(struct tree_vec_map
**) loc
= h
;
6661 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
6665 /* Hashing of types so that we don't make duplicates.
6666 The entry point is `type_hash_canon'. */
6668 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6669 with types in the TREE_VALUE slots), by adding the hash codes
6670 of the individual types. */
6673 type_hash_list (const_tree list
, inchash::hash
&hstate
)
6677 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
6678 if (TREE_VALUE (tail
) != error_mark_node
)
6679 hstate
.add_object (TYPE_HASH (TREE_VALUE (tail
)));
6682 /* These are the Hashtable callback functions. */
6684 /* Returns true iff the types are equivalent. */
6687 type_hash_eq (const void *va
, const void *vb
)
6689 const struct type_hash
*const a
= (const struct type_hash
*) va
,
6690 *const b
= (const struct type_hash
*) vb
;
6692 /* First test the things that are the same for all types. */
6693 if (a
->hash
!= b
->hash
6694 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
6695 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
6696 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
6697 TYPE_ATTRIBUTES (b
->type
))
6698 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
6699 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
6702 /* Be careful about comparing arrays before and after the element type
6703 has been completed; don't compare TYPE_ALIGN unless both types are
6705 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
6706 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
6707 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
6710 switch (TREE_CODE (a
->type
))
6715 case REFERENCE_TYPE
:
6720 return TYPE_VECTOR_SUBPARTS (a
->type
) == TYPE_VECTOR_SUBPARTS (b
->type
);
6723 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
6724 && !(TYPE_VALUES (a
->type
)
6725 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
6726 && TYPE_VALUES (b
->type
)
6727 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
6728 && type_list_equal (TYPE_VALUES (a
->type
),
6729 TYPE_VALUES (b
->type
))))
6732 /* ... fall through ... */
6737 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
6739 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
6740 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
6741 TYPE_MAX_VALUE (b
->type
)))
6742 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6743 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6744 TYPE_MIN_VALUE (b
->type
))));
6746 case FIXED_POINT_TYPE
:
6747 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6750 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6753 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6754 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6755 || (TYPE_ARG_TYPES (a
->type
)
6756 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6757 && TYPE_ARG_TYPES (b
->type
)
6758 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6759 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6760 TYPE_ARG_TYPES (b
->type
)))))
6764 return TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
);
6768 case QUAL_UNION_TYPE
:
6769 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6770 || (TYPE_FIELDS (a
->type
)
6771 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6772 && TYPE_FIELDS (b
->type
)
6773 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6774 && type_list_equal (TYPE_FIELDS (a
->type
),
6775 TYPE_FIELDS (b
->type
))));
6778 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6779 || (TYPE_ARG_TYPES (a
->type
)
6780 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6781 && TYPE_ARG_TYPES (b
->type
)
6782 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6783 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6784 TYPE_ARG_TYPES (b
->type
))))
6792 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6793 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6798 /* Return the cached hash value. */
6801 type_hash_hash (const void *item
)
6803 return ((const struct type_hash
*) item
)->hash
;
6806 /* Given TYPE, and HASHCODE its hash code, return the canonical
6807 object for an identical type if one already exists.
6808 Otherwise, return TYPE, and record it as the canonical object.
6810 To use this function, first create a type of the sort you want.
6811 Then compute its hash code from the fields of the type that
6812 make it different from other similar types.
6813 Then call this function and use the value. */
6816 type_hash_canon (unsigned int hashcode
, tree type
)
6821 /* The hash table only contains main variants, so ensure that's what we're
6823 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6825 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6826 must call that routine before comparing TYPE_ALIGNs. */
6832 loc
= htab_find_slot_with_hash (type_hash_table
, &in
, hashcode
, INSERT
);
6835 tree t1
= ((type_hash
*) *loc
)->type
;
6836 gcc_assert (TYPE_MAIN_VARIANT (t1
) == t1
);
6837 if (GATHER_STATISTICS
)
6839 tree_code_counts
[(int) TREE_CODE (type
)]--;
6840 tree_node_counts
[(int) t_kind
]--;
6841 tree_node_sizes
[(int) t_kind
] -= sizeof (struct tree_type_non_common
);
6847 struct type_hash
*h
;
6849 h
= ggc_alloc
<type_hash
> ();
6858 /* See if the data pointed to by the type hash table is marked. We consider
6859 it marked if the type is marked or if a debug type number or symbol
6860 table entry has been made for the type. */
6863 type_hash_marked_p (const void *p
)
6865 const_tree
const type
= ((const struct type_hash
*) p
)->type
;
6867 return ggc_marked_p (type
);
6871 print_type_hash_statistics (void)
6873 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6874 (long) htab_size (type_hash_table
),
6875 (long) htab_elements (type_hash_table
),
6876 htab_collisions (type_hash_table
));
6879 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6880 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6881 by adding the hash codes of the individual attributes. */
6884 attribute_hash_list (const_tree list
, inchash::hash
&hstate
)
6888 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
6889 /* ??? Do we want to add in TREE_VALUE too? */
6890 hstate
.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail
)));
6893 /* Given two lists of attributes, return true if list l2 is
6894 equivalent to l1. */
6897 attribute_list_equal (const_tree l1
, const_tree l2
)
6902 return attribute_list_contained (l1
, l2
)
6903 && attribute_list_contained (l2
, l1
);
6906 /* Given two lists of attributes, return true if list L2 is
6907 completely contained within L1. */
6908 /* ??? This would be faster if attribute names were stored in a canonicalized
6909 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6910 must be used to show these elements are equivalent (which they are). */
6911 /* ??? It's not clear that attributes with arguments will always be handled
6915 attribute_list_contained (const_tree l1
, const_tree l2
)
6919 /* First check the obvious, maybe the lists are identical. */
6923 /* Maybe the lists are similar. */
6924 for (t1
= l1
, t2
= l2
;
6926 && get_attribute_name (t1
) == get_attribute_name (t2
)
6927 && TREE_VALUE (t1
) == TREE_VALUE (t2
);
6928 t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6931 /* Maybe the lists are equal. */
6932 if (t1
== 0 && t2
== 0)
6935 for (; t2
!= 0; t2
= TREE_CHAIN (t2
))
6938 /* This CONST_CAST is okay because lookup_attribute does not
6939 modify its argument and the return value is assigned to a
6941 for (attr
= lookup_ident_attribute (get_attribute_name (t2
),
6942 CONST_CAST_TREE (l1
));
6943 attr
!= NULL_TREE
&& !attribute_value_equal (t2
, attr
);
6944 attr
= lookup_ident_attribute (get_attribute_name (t2
),
6948 if (attr
== NULL_TREE
)
6955 /* Given two lists of types
6956 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6957 return 1 if the lists contain the same types in the same order.
6958 Also, the TREE_PURPOSEs must match. */
6961 type_list_equal (const_tree l1
, const_tree l2
)
6965 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6966 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
6967 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
6968 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
6969 && (TREE_TYPE (TREE_PURPOSE (t1
))
6970 == TREE_TYPE (TREE_PURPOSE (t2
))))))
6976 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6977 given by TYPE. If the argument list accepts variable arguments,
6978 then this function counts only the ordinary arguments. */
6981 type_num_arguments (const_tree type
)
6986 for (t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
6987 /* If the function does not take a variable number of arguments,
6988 the last element in the list will have type `void'. */
6989 if (VOID_TYPE_P (TREE_VALUE (t
)))
6997 /* Nonzero if integer constants T1 and T2
6998 represent the same constant value. */
7001 tree_int_cst_equal (const_tree t1
, const_tree t2
)
7006 if (t1
== 0 || t2
== 0)
7009 if (TREE_CODE (t1
) == INTEGER_CST
7010 && TREE_CODE (t2
) == INTEGER_CST
7011 && wi::to_widest (t1
) == wi::to_widest (t2
))
7017 /* Return true if T is an INTEGER_CST whose numerical value (extended
7018 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7021 tree_fits_shwi_p (const_tree t
)
7023 return (t
!= NULL_TREE
7024 && TREE_CODE (t
) == INTEGER_CST
7025 && wi::fits_shwi_p (wi::to_widest (t
)));
7028 /* Return true if T is an INTEGER_CST whose numerical value (extended
7029 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7032 tree_fits_uhwi_p (const_tree t
)
7034 return (t
!= NULL_TREE
7035 && TREE_CODE (t
) == INTEGER_CST
7036 && wi::fits_uhwi_p (wi::to_widest (t
)));
7039 /* T is an INTEGER_CST whose numerical value (extended according to
7040 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7044 tree_to_shwi (const_tree t
)
7046 gcc_assert (tree_fits_shwi_p (t
));
7047 return TREE_INT_CST_LOW (t
);
7050 /* T is an INTEGER_CST whose numerical value (extended according to
7051 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7054 unsigned HOST_WIDE_INT
7055 tree_to_uhwi (const_tree t
)
7057 gcc_assert (tree_fits_uhwi_p (t
));
7058 return TREE_INT_CST_LOW (t
);
7061 /* Return the most significant (sign) bit of T. */
7064 tree_int_cst_sign_bit (const_tree t
)
7066 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
7068 return wi::extract_uhwi (t
, bitno
, 1);
7071 /* Return an indication of the sign of the integer constant T.
7072 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7073 Note that -1 will never be returned if T's type is unsigned. */
7076 tree_int_cst_sgn (const_tree t
)
7078 if (wi::eq_p (t
, 0))
7080 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
7082 else if (wi::neg_p (t
))
7088 /* Return the minimum number of bits needed to represent VALUE in a
7089 signed or unsigned type, UNSIGNEDP says which. */
7092 tree_int_cst_min_precision (tree value
, signop sgn
)
7094 /* If the value is negative, compute its negative minus 1. The latter
7095 adjustment is because the absolute value of the largest negative value
7096 is one larger than the largest positive value. This is equivalent to
7097 a bit-wise negation, so use that operation instead. */
7099 if (tree_int_cst_sgn (value
) < 0)
7100 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
7102 /* Return the number of bits needed, taking into account the fact
7103 that we need one more bit for a signed than unsigned type.
7104 If value is 0 or -1, the minimum precision is 1 no matter
7105 whether unsignedp is true or false. */
7107 if (integer_zerop (value
))
7110 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
7113 /* Return truthvalue of whether T1 is the same tree structure as T2.
7114 Return 1 if they are the same.
7115 Return 0 if they are understandably different.
7116 Return -1 if either contains tree structure not understood by
7120 simple_cst_equal (const_tree t1
, const_tree t2
)
7122 enum tree_code code1
, code2
;
7128 if (t1
== 0 || t2
== 0)
7131 code1
= TREE_CODE (t1
);
7132 code2
= TREE_CODE (t2
);
7134 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
7136 if (CONVERT_EXPR_CODE_P (code2
)
7137 || code2
== NON_LVALUE_EXPR
)
7138 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7140 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
7143 else if (CONVERT_EXPR_CODE_P (code2
)
7144 || code2
== NON_LVALUE_EXPR
)
7145 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
7153 return wi::to_widest (t1
) == wi::to_widest (t2
);
7156 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1
), TREE_REAL_CST (t2
));
7159 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
7162 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
7163 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
7164 TREE_STRING_LENGTH (t1
)));
7168 unsigned HOST_WIDE_INT idx
;
7169 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
7170 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
7172 if (vec_safe_length (v1
) != vec_safe_length (v2
))
7175 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
7176 /* ??? Should we handle also fields here? */
7177 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
7183 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7186 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
7189 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
7192 const_tree arg1
, arg2
;
7193 const_call_expr_arg_iterator iter1
, iter2
;
7194 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
7195 arg2
= first_const_call_expr_arg (t2
, &iter2
);
7197 arg1
= next_const_call_expr_arg (&iter1
),
7198 arg2
= next_const_call_expr_arg (&iter2
))
7200 cmp
= simple_cst_equal (arg1
, arg2
);
7204 return arg1
== arg2
;
7208 /* Special case: if either target is an unallocated VAR_DECL,
7209 it means that it's going to be unified with whatever the
7210 TARGET_EXPR is really supposed to initialize, so treat it
7211 as being equivalent to anything. */
7212 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
7213 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
7214 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
7215 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
7216 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
7217 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
7220 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7225 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
7227 case WITH_CLEANUP_EXPR
:
7228 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7232 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
7235 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
7236 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7250 /* This general rule works for most tree codes. All exceptions should be
7251 handled above. If this is a language-specific tree code, we can't
7252 trust what might be in the operand, so say we don't know
7254 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
7257 switch (TREE_CODE_CLASS (code1
))
7261 case tcc_comparison
:
7262 case tcc_expression
:
7266 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
7268 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
7280 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7281 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7282 than U, respectively. */
7285 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
7287 if (tree_int_cst_sgn (t
) < 0)
7289 else if (!tree_fits_uhwi_p (t
))
7291 else if (TREE_INT_CST_LOW (t
) == u
)
7293 else if (TREE_INT_CST_LOW (t
) < u
)
7299 /* Return true if SIZE represents a constant size that is in bounds of
7300 what the middle-end and the backend accepts (covering not more than
7301 half of the address-space). */
7304 valid_constant_size_p (const_tree size
)
7306 if (! tree_fits_uhwi_p (size
)
7307 || TREE_OVERFLOW (size
)
7308 || tree_int_cst_sign_bit (size
) != 0)
7313 /* Return the precision of the type, or for a complex or vector type the
7314 precision of the type of its elements. */
7317 element_precision (const_tree type
)
7319 enum tree_code code
= TREE_CODE (type
);
7320 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
7321 type
= TREE_TYPE (type
);
7323 return TYPE_PRECISION (type
);
7326 /* Return true if CODE represents an associative tree code. Otherwise
7329 associative_tree_code (enum tree_code code
)
7348 /* Return true if CODE represents a commutative tree code. Otherwise
7351 commutative_tree_code (enum tree_code code
)
7357 case MULT_HIGHPART_EXPR
:
7365 case UNORDERED_EXPR
:
7369 case TRUTH_AND_EXPR
:
7370 case TRUTH_XOR_EXPR
:
7372 case WIDEN_MULT_EXPR
:
7373 case VEC_WIDEN_MULT_HI_EXPR
:
7374 case VEC_WIDEN_MULT_LO_EXPR
:
7375 case VEC_WIDEN_MULT_EVEN_EXPR
:
7376 case VEC_WIDEN_MULT_ODD_EXPR
:
7385 /* Return true if CODE represents a ternary tree code for which the
7386 first two operands are commutative. Otherwise return false. */
7388 commutative_ternary_tree_code (enum tree_code code
)
7392 case WIDEN_MULT_PLUS_EXPR
:
7393 case WIDEN_MULT_MINUS_EXPR
:
7407 /* Generate a hash value for an expression. This can be used iteratively
7408 by passing a previous result as the HSTATE argument.
7410 This function is intended to produce the same hash for expressions which
7411 would compare equal using operand_equal_p. */
7413 add_expr (const_tree t
, inchash::hash
&hstate
)
7416 enum tree_code code
;
7417 enum tree_code_class tclass
;
7421 hstate
.merge_hash (0);
7425 code
= TREE_CODE (t
);
7429 /* Alas, constants aren't shared, so we can't rely on pointer
7432 hstate
.merge_hash (0);
7435 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
7436 hstate
.add_wide_int (TREE_INT_CST_ELT (t
, i
));
7440 unsigned int val2
= real_hash (TREE_REAL_CST_PTR (t
));
7441 hstate
.merge_hash (val2
);
7446 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
7447 hstate
.merge_hash (val2
);
7451 hstate
.add ((const void *) TREE_STRING_POINTER (t
), TREE_STRING_LENGTH (t
));
7454 inchash::add_expr (TREE_REALPART (t
), hstate
);
7455 inchash::add_expr (TREE_IMAGPART (t
), hstate
);
7460 for (i
= 0; i
< VECTOR_CST_NELTS (t
); ++i
)
7461 inchash::add_expr (VECTOR_CST_ELT (t
, i
), hstate
);
7465 /* We can just compare by pointer. */
7466 hstate
.add_wide_int (SSA_NAME_VERSION (t
));
7468 case PLACEHOLDER_EXPR
:
7469 /* The node itself doesn't matter. */
7472 /* A list of expressions, for a CALL_EXPR or as the elements of a
7474 for (; t
; t
= TREE_CHAIN (t
))
7475 inchash::add_expr (TREE_VALUE (t
), hstate
);
7479 unsigned HOST_WIDE_INT idx
;
7481 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
7483 inchash::add_expr (field
, hstate
);
7484 inchash::add_expr (value
, hstate
);
7489 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7490 Otherwise nodes that compare equal according to operand_equal_p might
7491 get different hash codes. However, don't do this for machine specific
7492 or front end builtins, since the function code is overloaded in those
7494 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
7495 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
7497 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
7498 code
= TREE_CODE (t
);
7502 tclass
= TREE_CODE_CLASS (code
);
7504 if (tclass
== tcc_declaration
)
7506 /* DECL's have a unique ID */
7507 hstate
.add_wide_int (DECL_UID (t
));
7511 gcc_assert (IS_EXPR_CODE_CLASS (tclass
));
7513 hstate
.add_object (code
);
7515 /* Don't hash the type, that can lead to having nodes which
7516 compare equal according to operand_equal_p, but which
7517 have different hash codes. */
7518 if (CONVERT_EXPR_CODE_P (code
)
7519 || code
== NON_LVALUE_EXPR
)
7521 /* Make sure to include signness in the hash computation. */
7522 hstate
.add_int (TYPE_UNSIGNED (TREE_TYPE (t
)));
7523 inchash::add_expr (TREE_OPERAND (t
, 0), hstate
);
7526 else if (commutative_tree_code (code
))
7528 /* It's a commutative expression. We want to hash it the same
7529 however it appears. We do this by first hashing both operands
7530 and then rehashing based on the order of their independent
7532 inchash::hash one
, two
;
7533 inchash::add_expr (TREE_OPERAND (t
, 0), one
);
7534 inchash::add_expr (TREE_OPERAND (t
, 1), two
);
7535 hstate
.add_commutative (one
, two
);
7538 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
7539 inchash::add_expr (TREE_OPERAND (t
, i
), hstate
);
7547 /* Constructors for pointer, array and function types.
7548 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7549 constructed by language-dependent code, not here.) */
7551 /* Construct, lay out and return the type of pointers to TO_TYPE with
7552 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7553 reference all of memory. If such a type has already been
7554 constructed, reuse it. */
7557 build_pointer_type_for_mode (tree to_type
, machine_mode mode
,
7562 if (to_type
== error_mark_node
)
7563 return error_mark_node
;
7565 /* If the pointed-to type has the may_alias attribute set, force
7566 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7567 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7568 can_alias_all
= true;
7570 /* In some cases, languages will have things that aren't a POINTER_TYPE
7571 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7572 In that case, return that type without regard to the rest of our
7575 ??? This is a kludge, but consistent with the way this function has
7576 always operated and there doesn't seem to be a good way to avoid this
7578 if (TYPE_POINTER_TO (to_type
) != 0
7579 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
7580 return TYPE_POINTER_TO (to_type
);
7582 /* First, if we already have a type for pointers to TO_TYPE and it's
7583 the proper mode, use it. */
7584 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
7585 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7588 t
= make_node (POINTER_TYPE
);
7590 TREE_TYPE (t
) = to_type
;
7591 SET_TYPE_MODE (t
, mode
);
7592 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7593 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
7594 TYPE_POINTER_TO (to_type
) = t
;
7596 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
))
7597 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7598 else if (TYPE_CANONICAL (to_type
) != to_type
)
7600 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
7601 mode
, can_alias_all
);
7603 /* Lay out the type. This function has many callers that are concerned
7604 with expression-construction, and this simplifies them all. */
7610 /* By default build pointers in ptr_mode. */
7613 build_pointer_type (tree to_type
)
7615 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7616 : TYPE_ADDR_SPACE (to_type
);
7617 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7618 return build_pointer_type_for_mode (to_type
, pointer_mode
, false);
7621 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7624 build_reference_type_for_mode (tree to_type
, machine_mode mode
,
7629 if (to_type
== error_mark_node
)
7630 return error_mark_node
;
7632 /* If the pointed-to type has the may_alias attribute set, force
7633 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7634 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7635 can_alias_all
= true;
7637 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7638 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7639 In that case, return that type without regard to the rest of our
7642 ??? This is a kludge, but consistent with the way this function has
7643 always operated and there doesn't seem to be a good way to avoid this
7645 if (TYPE_REFERENCE_TO (to_type
) != 0
7646 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
7647 return TYPE_REFERENCE_TO (to_type
);
7649 /* First, if we already have a type for pointers to TO_TYPE and it's
7650 the proper mode, use it. */
7651 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
7652 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7655 t
= make_node (REFERENCE_TYPE
);
7657 TREE_TYPE (t
) = to_type
;
7658 SET_TYPE_MODE (t
, mode
);
7659 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7660 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
7661 TYPE_REFERENCE_TO (to_type
) = t
;
7663 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
))
7664 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7665 else if (TYPE_CANONICAL (to_type
) != to_type
)
7667 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
7668 mode
, can_alias_all
);
7676 /* Build the node for the type of references-to-TO_TYPE by default
7680 build_reference_type (tree to_type
)
7682 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7683 : TYPE_ADDR_SPACE (to_type
);
7684 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7685 return build_reference_type_for_mode (to_type
, pointer_mode
, false);
7688 #define MAX_INT_CACHED_PREC \
7689 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7690 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
7692 /* Builds a signed or unsigned integer type of precision PRECISION.
7693 Used for C bitfields whose precision does not match that of
7694 built-in target types. */
7696 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
7702 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
7704 if (precision
<= MAX_INT_CACHED_PREC
)
7706 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
7711 itype
= make_node (INTEGER_TYPE
);
7712 TYPE_PRECISION (itype
) = precision
;
7715 fixup_unsigned_type (itype
);
7717 fixup_signed_type (itype
);
7720 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype
)))
7721 ret
= type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype
)), itype
);
7722 if (precision
<= MAX_INT_CACHED_PREC
)
7723 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
7728 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7729 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7730 is true, reuse such a type that has already been constructed. */
7733 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7735 tree itype
= make_node (INTEGER_TYPE
);
7736 inchash::hash hstate
;
7738 TREE_TYPE (itype
) = type
;
7740 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7741 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7743 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7744 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7745 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7746 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7747 TYPE_ALIGN (itype
) = TYPE_ALIGN (type
);
7748 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7753 if ((TYPE_MIN_VALUE (itype
)
7754 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7755 || (TYPE_MAX_VALUE (itype
)
7756 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7758 /* Since we cannot reliably merge this type, we need to compare it using
7759 structural equality checks. */
7760 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7764 inchash::add_expr (TYPE_MIN_VALUE (itype
), hstate
);
7765 inchash::add_expr (TYPE_MAX_VALUE (itype
), hstate
);
7766 hstate
.merge_hash (TYPE_HASH (type
));
7767 itype
= type_hash_canon (hstate
.end (), itype
);
7772 /* Wrapper around build_range_type_1 with SHARED set to true. */
7775 build_range_type (tree type
, tree lowval
, tree highval
)
7777 return build_range_type_1 (type
, lowval
, highval
, true);
7780 /* Wrapper around build_range_type_1 with SHARED set to false. */
7783 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7785 return build_range_type_1 (type
, lowval
, highval
, false);
7788 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7789 MAXVAL should be the maximum value in the domain
7790 (one less than the length of the array).
7792 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7793 We don't enforce this limit, that is up to caller (e.g. language front end).
7794 The limit exists because the result is a signed type and we don't handle
7795 sizes that use more than one HOST_WIDE_INT. */
7798 build_index_type (tree maxval
)
7800 return build_range_type (sizetype
, size_zero_node
, maxval
);
7803 /* Return true if the debug information for TYPE, a subtype, should be emitted
7804 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7805 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7806 debug info and doesn't reflect the source code. */
7809 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7811 tree base_type
= TREE_TYPE (type
), low
, high
;
7813 /* Subrange types have a base type which is an integral type. */
7814 if (!INTEGRAL_TYPE_P (base_type
))
7817 /* Get the real bounds of the subtype. */
7818 if (lang_hooks
.types
.get_subrange_bounds
)
7819 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7822 low
= TYPE_MIN_VALUE (type
);
7823 high
= TYPE_MAX_VALUE (type
);
7826 /* If the type and its base type have the same representation and the same
7827 name, then the type is not a subrange but a copy of the base type. */
7828 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7829 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7830 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7831 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7832 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7833 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7843 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7844 and number of elements specified by the range of values of INDEX_TYPE.
7845 If SHARED is true, reuse such a type that has already been constructed. */
7848 build_array_type_1 (tree elt_type
, tree index_type
, bool shared
)
7852 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7854 error ("arrays of functions are not meaningful");
7855 elt_type
= integer_type_node
;
7858 t
= make_node (ARRAY_TYPE
);
7859 TREE_TYPE (t
) = elt_type
;
7860 TYPE_DOMAIN (t
) = index_type
;
7861 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7864 /* If the element type is incomplete at this point we get marked for
7865 structural equality. Do not record these types in the canonical
7867 if (TYPE_STRUCTURAL_EQUALITY_P (t
))
7872 inchash::hash hstate
;
7873 hstate
.add_object (TYPE_HASH (elt_type
));
7875 hstate
.add_object (TYPE_HASH (index_type
));
7876 t
= type_hash_canon (hstate
.end (), t
);
7879 if (TYPE_CANONICAL (t
) == t
)
7881 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7882 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
)))
7883 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7884 else if (TYPE_CANONICAL (elt_type
) != elt_type
7885 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7887 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7889 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7896 /* Wrapper around build_array_type_1 with SHARED set to true. */
7899 build_array_type (tree elt_type
, tree index_type
)
7901 return build_array_type_1 (elt_type
, index_type
, true);
7904 /* Wrapper around build_array_type_1 with SHARED set to false. */
7907 build_nonshared_array_type (tree elt_type
, tree index_type
)
7909 return build_array_type_1 (elt_type
, index_type
, false);
7912 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7916 build_array_type_nelts (tree elt_type
, unsigned HOST_WIDE_INT nelts
)
7918 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
7921 /* Recursively examines the array elements of TYPE, until a non-array
7922 element type is found. */
7925 strip_array_types (tree type
)
7927 while (TREE_CODE (type
) == ARRAY_TYPE
)
7928 type
= TREE_TYPE (type
);
7933 /* Computes the canonical argument types from the argument type list
7936 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7937 on entry to this function, or if any of the ARGTYPES are
7940 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7941 true on entry to this function, or if any of the ARGTYPES are
7944 Returns a canonical argument list, which may be ARGTYPES when the
7945 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7946 true) or would not differ from ARGTYPES. */
7949 maybe_canonicalize_argtypes (tree argtypes
,
7950 bool *any_structural_p
,
7951 bool *any_noncanonical_p
)
7954 bool any_noncanonical_argtypes_p
= false;
7956 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
7958 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
7959 /* Fail gracefully by stating that the type is structural. */
7960 *any_structural_p
= true;
7961 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
7962 *any_structural_p
= true;
7963 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
7964 || TREE_PURPOSE (arg
))
7965 /* If the argument has a default argument, we consider it
7966 non-canonical even though the type itself is canonical.
7967 That way, different variants of function and method types
7968 with default arguments will all point to the variant with
7969 no defaults as their canonical type. */
7970 any_noncanonical_argtypes_p
= true;
7973 if (*any_structural_p
)
7976 if (any_noncanonical_argtypes_p
)
7978 /* Build the canonical list of argument types. */
7979 tree canon_argtypes
= NULL_TREE
;
7980 bool is_void
= false;
7982 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
7984 if (arg
== void_list_node
)
7987 canon_argtypes
= tree_cons (NULL_TREE
,
7988 TYPE_CANONICAL (TREE_VALUE (arg
)),
7992 canon_argtypes
= nreverse (canon_argtypes
);
7994 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
7996 /* There is a non-canonical type. */
7997 *any_noncanonical_p
= true;
7998 return canon_argtypes
;
8001 /* The canonical argument types are the same as ARGTYPES. */
8005 /* Construct, lay out and return
8006 the type of functions returning type VALUE_TYPE
8007 given arguments of types ARG_TYPES.
8008 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8009 are data type nodes for the arguments of the function.
8010 If such a type has already been constructed, reuse it. */
8013 build_function_type (tree value_type
, tree arg_types
)
8016 inchash::hash hstate
;
8017 bool any_structural_p
, any_noncanonical_p
;
8018 tree canon_argtypes
;
8020 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
8022 error ("function return type cannot be function");
8023 value_type
= integer_type_node
;
8026 /* Make a node of the sort we want. */
8027 t
= make_node (FUNCTION_TYPE
);
8028 TREE_TYPE (t
) = value_type
;
8029 TYPE_ARG_TYPES (t
) = arg_types
;
8031 /* If we already have such a type, use the old one. */
8032 hstate
.add_object (TYPE_HASH (value_type
));
8033 type_hash_list (arg_types
, hstate
);
8034 t
= type_hash_canon (hstate
.end (), t
);
8036 /* Set up the canonical type. */
8037 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
8038 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
8039 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
8041 &any_noncanonical_p
);
8042 if (any_structural_p
)
8043 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8044 else if (any_noncanonical_p
)
8045 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
8048 if (!COMPLETE_TYPE_P (t
))
8053 /* Build a function type. The RETURN_TYPE is the type returned by the
8054 function. If VAARGS is set, no void_type_node is appended to the
8055 the list. ARGP must be always be terminated be a NULL_TREE. */
8058 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
8062 t
= va_arg (argp
, tree
);
8063 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
8064 args
= tree_cons (NULL_TREE
, t
, args
);
8069 if (args
!= NULL_TREE
)
8070 args
= nreverse (args
);
8071 gcc_assert (last
!= void_list_node
);
8073 else if (args
== NULL_TREE
)
8074 args
= void_list_node
;
8078 args
= nreverse (args
);
8079 TREE_CHAIN (last
) = void_list_node
;
8081 args
= build_function_type (return_type
, args
);
8086 /* Build a function type. The RETURN_TYPE is the type returned by the
8087 function. If additional arguments are provided, they are
8088 additional argument types. The list of argument types must always
8089 be terminated by NULL_TREE. */
8092 build_function_type_list (tree return_type
, ...)
8097 va_start (p
, return_type
);
8098 args
= build_function_type_list_1 (false, return_type
, p
);
8103 /* Build a variable argument function type. The RETURN_TYPE is the
8104 type returned by the function. If additional arguments are provided,
8105 they are additional argument types. The list of argument types must
8106 always be terminated by NULL_TREE. */
8109 build_varargs_function_type_list (tree return_type
, ...)
8114 va_start (p
, return_type
);
8115 args
= build_function_type_list_1 (true, return_type
, p
);
8121 /* Build a function type. RETURN_TYPE is the type returned by the
8122 function; VAARGS indicates whether the function takes varargs. The
8123 function takes N named arguments, the types of which are provided in
8127 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
8131 tree t
= vaargs
? NULL_TREE
: void_list_node
;
8133 for (i
= n
- 1; i
>= 0; i
--)
8134 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
8136 return build_function_type (return_type
, t
);
8139 /* Build a function type. RETURN_TYPE is the type returned by the
8140 function. The function takes N named arguments, the types of which
8141 are provided in ARG_TYPES. */
8144 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8146 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
8149 /* Build a variable argument function type. RETURN_TYPE is the type
8150 returned by the function. The function takes N named arguments, the
8151 types of which are provided in ARG_TYPES. */
8154 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8156 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
8159 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8160 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8161 for the method. An implicit additional parameter (of type
8162 pointer-to-BASETYPE) is added to the ARGTYPES. */
8165 build_method_type_directly (tree basetype
,
8171 inchash::hash hstate
;
8172 bool any_structural_p
, any_noncanonical_p
;
8173 tree canon_argtypes
;
8175 /* Make a node of the sort we want. */
8176 t
= make_node (METHOD_TYPE
);
8178 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8179 TREE_TYPE (t
) = rettype
;
8180 ptype
= build_pointer_type (basetype
);
8182 /* The actual arglist for this function includes a "hidden" argument
8183 which is "this". Put it into the list of argument types. */
8184 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
8185 TYPE_ARG_TYPES (t
) = argtypes
;
8187 /* If we already have such a type, use the old one. */
8188 hstate
.add_object (TYPE_HASH (basetype
));
8189 hstate
.add_object (TYPE_HASH (rettype
));
8190 type_hash_list (argtypes
, hstate
);
8191 t
= type_hash_canon (hstate
.end (), t
);
8193 /* Set up the canonical type. */
8195 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8196 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
8198 = (TYPE_CANONICAL (basetype
) != basetype
8199 || TYPE_CANONICAL (rettype
) != rettype
);
8200 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
8202 &any_noncanonical_p
);
8203 if (any_structural_p
)
8204 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8205 else if (any_noncanonical_p
)
8207 = build_method_type_directly (TYPE_CANONICAL (basetype
),
8208 TYPE_CANONICAL (rettype
),
8210 if (!COMPLETE_TYPE_P (t
))
8216 /* Construct, lay out and return the type of methods belonging to class
8217 BASETYPE and whose arguments and values are described by TYPE.
8218 If that type exists already, reuse it.
8219 TYPE must be a FUNCTION_TYPE node. */
8222 build_method_type (tree basetype
, tree type
)
8224 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
8226 return build_method_type_directly (basetype
,
8228 TYPE_ARG_TYPES (type
));
8231 /* Construct, lay out and return the type of offsets to a value
8232 of type TYPE, within an object of type BASETYPE.
8233 If a suitable offset type exists already, reuse it. */
8236 build_offset_type (tree basetype
, tree type
)
8239 inchash::hash hstate
;
8241 /* Make a node of the sort we want. */
8242 t
= make_node (OFFSET_TYPE
);
8244 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8245 TREE_TYPE (t
) = type
;
8247 /* If we already have such a type, use the old one. */
8248 hstate
.add_object (TYPE_HASH (basetype
));
8249 hstate
.add_object (TYPE_HASH (type
));
8250 t
= type_hash_canon (hstate
.end (), t
);
8252 if (!COMPLETE_TYPE_P (t
))
8255 if (TYPE_CANONICAL (t
) == t
)
8257 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8258 || TYPE_STRUCTURAL_EQUALITY_P (type
))
8259 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8260 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
8261 || TYPE_CANONICAL (type
) != type
)
8263 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
8264 TYPE_CANONICAL (type
));
8270 /* Create a complex type whose components are COMPONENT_TYPE. */
8273 build_complex_type (tree component_type
)
8276 inchash::hash hstate
;
8278 gcc_assert (INTEGRAL_TYPE_P (component_type
)
8279 || SCALAR_FLOAT_TYPE_P (component_type
)
8280 || FIXED_POINT_TYPE_P (component_type
));
8282 /* Make a node of the sort we want. */
8283 t
= make_node (COMPLEX_TYPE
);
8285 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (component_type
);
8287 /* If we already have such a type, use the old one. */
8288 hstate
.add_object (TYPE_HASH (component_type
));
8289 t
= type_hash_canon (hstate
.end (), t
);
8291 if (!COMPLETE_TYPE_P (t
))
8294 if (TYPE_CANONICAL (t
) == t
)
8296 if (TYPE_STRUCTURAL_EQUALITY_P (component_type
))
8297 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8298 else if (TYPE_CANONICAL (component_type
) != component_type
)
8300 = build_complex_type (TYPE_CANONICAL (component_type
));
8303 /* We need to create a name, since complex is a fundamental type. */
8304 if (! TYPE_NAME (t
))
8307 if (component_type
== char_type_node
)
8308 name
= "complex char";
8309 else if (component_type
== signed_char_type_node
)
8310 name
= "complex signed char";
8311 else if (component_type
== unsigned_char_type_node
)
8312 name
= "complex unsigned char";
8313 else if (component_type
== short_integer_type_node
)
8314 name
= "complex short int";
8315 else if (component_type
== short_unsigned_type_node
)
8316 name
= "complex short unsigned int";
8317 else if (component_type
== integer_type_node
)
8318 name
= "complex int";
8319 else if (component_type
== unsigned_type_node
)
8320 name
= "complex unsigned int";
8321 else if (component_type
== long_integer_type_node
)
8322 name
= "complex long int";
8323 else if (component_type
== long_unsigned_type_node
)
8324 name
= "complex long unsigned int";
8325 else if (component_type
== long_long_integer_type_node
)
8326 name
= "complex long long int";
8327 else if (component_type
== long_long_unsigned_type_node
)
8328 name
= "complex long long unsigned int";
8333 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
8334 get_identifier (name
), t
);
8337 return build_qualified_type (t
, TYPE_QUALS (component_type
));
8340 /* If TYPE is a real or complex floating-point type and the target
8341 does not directly support arithmetic on TYPE then return the wider
8342 type to be used for arithmetic on TYPE. Otherwise, return
8346 excess_precision_type (tree type
)
8348 if (flag_excess_precision
!= EXCESS_PRECISION_FAST
)
8350 int flt_eval_method
= TARGET_FLT_EVAL_METHOD
;
8351 switch (TREE_CODE (type
))
8354 switch (flt_eval_method
)
8357 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
))
8358 return double_type_node
;
8361 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
)
8362 || TYPE_MODE (type
) == TYPE_MODE (double_type_node
))
8363 return long_double_type_node
;
8370 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
8372 switch (flt_eval_method
)
8375 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
))
8376 return complex_double_type_node
;
8379 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
)
8380 || (TYPE_MODE (TREE_TYPE (type
))
8381 == TYPE_MODE (double_type_node
)))
8382 return complex_long_double_type_node
;
8395 /* Return OP, stripped of any conversions to wider types as much as is safe.
8396 Converting the value back to OP's type makes a value equivalent to OP.
8398 If FOR_TYPE is nonzero, we return a value which, if converted to
8399 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8401 OP must have integer, real or enumeral type. Pointers are not allowed!
8403 There are some cases where the obvious value we could return
8404 would regenerate to OP if converted to OP's type,
8405 but would not extend like OP to wider types.
8406 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8407 For example, if OP is (unsigned short)(signed char)-1,
8408 we avoid returning (signed char)-1 if FOR_TYPE is int,
8409 even though extending that to an unsigned short would regenerate OP,
8410 since the result of extending (signed char)-1 to (int)
8411 is different from (int) OP. */
8414 get_unwidened (tree op
, tree for_type
)
8416 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8417 tree type
= TREE_TYPE (op
);
8419 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
8421 = (for_type
!= 0 && for_type
!= type
8422 && final_prec
> TYPE_PRECISION (type
)
8423 && TYPE_UNSIGNED (type
));
8426 while (CONVERT_EXPR_P (op
))
8430 /* TYPE_PRECISION on vector types has different meaning
8431 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8432 so avoid them here. */
8433 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
8436 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
8437 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
8439 /* Truncations are many-one so cannot be removed.
8440 Unless we are later going to truncate down even farther. */
8442 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
8445 /* See what's inside this conversion. If we decide to strip it,
8447 op
= TREE_OPERAND (op
, 0);
8449 /* If we have not stripped any zero-extensions (uns is 0),
8450 we can strip any kind of extension.
8451 If we have previously stripped a zero-extension,
8452 only zero-extensions can safely be stripped.
8453 Any extension can be stripped if the bits it would produce
8454 are all going to be discarded later by truncating to FOR_TYPE. */
8458 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
8460 /* TYPE_UNSIGNED says whether this is a zero-extension.
8461 Let's avoid computing it if it does not affect WIN
8462 and if UNS will not be needed again. */
8464 || CONVERT_EXPR_P (op
))
8465 && TYPE_UNSIGNED (TREE_TYPE (op
)))
8473 /* If we finally reach a constant see if it fits in for_type and
8474 in that case convert it. */
8476 && TREE_CODE (win
) == INTEGER_CST
8477 && TREE_TYPE (win
) != for_type
8478 && int_fits_type_p (win
, for_type
))
8479 win
= fold_convert (for_type
, win
);
8484 /* Return OP or a simpler expression for a narrower value
8485 which can be sign-extended or zero-extended to give back OP.
8486 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8487 or 0 if the value should be sign-extended. */
8490 get_narrower (tree op
, int *unsignedp_ptr
)
8495 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
8497 while (TREE_CODE (op
) == NOP_EXPR
)
8500 = (TYPE_PRECISION (TREE_TYPE (op
))
8501 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
8503 /* Truncations are many-one so cannot be removed. */
8507 /* See what's inside this conversion. If we decide to strip it,
8512 op
= TREE_OPERAND (op
, 0);
8513 /* An extension: the outermost one can be stripped,
8514 but remember whether it is zero or sign extension. */
8516 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8517 /* Otherwise, if a sign extension has been stripped,
8518 only sign extensions can now be stripped;
8519 if a zero extension has been stripped, only zero-extensions. */
8520 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
8524 else /* bitschange == 0 */
8526 /* A change in nominal type can always be stripped, but we must
8527 preserve the unsignedness. */
8529 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8531 op
= TREE_OPERAND (op
, 0);
8532 /* Keep trying to narrow, but don't assign op to win if it
8533 would turn an integral type into something else. */
8534 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8541 if (TREE_CODE (op
) == COMPONENT_REF
8542 /* Since type_for_size always gives an integer type. */
8543 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8544 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8545 /* Ensure field is laid out already. */
8546 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8547 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8549 unsigned HOST_WIDE_INT innerprec
8550 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8551 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8552 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8553 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8555 /* We can get this structure field in a narrower type that fits it,
8556 but the resulting extension to its nominal type (a fullword type)
8557 must satisfy the same conditions as for other extensions.
8559 Do this only for fields that are aligned (not bit-fields),
8560 because when bit-field insns will be used there is no
8561 advantage in doing this. */
8563 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8564 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8565 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8569 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8570 win
= fold_convert (type
, op
);
8574 *unsignedp_ptr
= uns
;
8578 /* Returns true if integer constant C has a value that is permissible
8579 for type TYPE (an INTEGER_TYPE). */
8582 int_fits_type_p (const_tree c
, const_tree type
)
8584 tree type_low_bound
, type_high_bound
;
8585 bool ok_for_low_bound
, ok_for_high_bound
;
8586 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8589 type_low_bound
= TYPE_MIN_VALUE (type
);
8590 type_high_bound
= TYPE_MAX_VALUE (type
);
8592 /* If at least one bound of the type is a constant integer, we can check
8593 ourselves and maybe make a decision. If no such decision is possible, but
8594 this type is a subtype, try checking against that. Otherwise, use
8595 fits_to_tree_p, which checks against the precision.
8597 Compute the status for each possibly constant bound, and return if we see
8598 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8599 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8600 for "constant known to fit". */
8602 /* Check if c >= type_low_bound. */
8603 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8605 if (tree_int_cst_lt (c
, type_low_bound
))
8607 ok_for_low_bound
= true;
8610 ok_for_low_bound
= false;
8612 /* Check if c <= type_high_bound. */
8613 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
8615 if (tree_int_cst_lt (type_high_bound
, c
))
8617 ok_for_high_bound
= true;
8620 ok_for_high_bound
= false;
8622 /* If the constant fits both bounds, the result is known. */
8623 if (ok_for_low_bound
&& ok_for_high_bound
)
8626 /* Perform some generic filtering which may allow making a decision
8627 even if the bounds are not constant. First, negative integers
8628 never fit in unsigned types, */
8629 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (c
))
8632 /* Second, narrower types always fit in wider ones. */
8633 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8636 /* Third, unsigned integers with top bit set never fit signed types. */
8637 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8639 int prec
= GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c
))) - 1;
8640 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8642 /* When a tree_cst is converted to a wide-int, the precision
8643 is taken from the type. However, if the precision of the
8644 mode underneath the type is smaller than that, it is
8645 possible that the value will not fit. The test below
8646 fails if any bit is set between the sign bit of the
8647 underlying mode and the top bit of the type. */
8648 if (wi::ne_p (wi::zext (c
, prec
- 1), c
))
8651 else if (wi::neg_p (c
))
8655 /* If we haven't been able to decide at this point, there nothing more we
8656 can check ourselves here. Look at the base type if we have one and it
8657 has the same precision. */
8658 if (TREE_CODE (type
) == INTEGER_TYPE
8659 && TREE_TYPE (type
) != 0
8660 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8662 type
= TREE_TYPE (type
);
8666 /* Or to fits_to_tree_p, if nothing else. */
8667 return wi::fits_to_tree_p (c
, type
);
8670 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8671 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8672 represented (assuming two's-complement arithmetic) within the bit
8673 precision of the type are returned instead. */
8676 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8678 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8679 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8680 wi::to_mpz (TYPE_MIN_VALUE (type
), min
, TYPE_SIGN (type
));
8683 if (TYPE_UNSIGNED (type
))
8684 mpz_set_ui (min
, 0);
8687 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8688 wi::to_mpz (mn
, min
, SIGNED
);
8692 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8693 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8694 wi::to_mpz (TYPE_MAX_VALUE (type
), max
, TYPE_SIGN (type
));
8697 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8698 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
8702 /* Return true if VAR is an automatic variable defined in function FN. */
8705 auto_var_in_fn_p (const_tree var
, const_tree fn
)
8707 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
8708 && ((((TREE_CODE (var
) == VAR_DECL
&& ! DECL_EXTERNAL (var
))
8709 || TREE_CODE (var
) == PARM_DECL
)
8710 && ! TREE_STATIC (var
))
8711 || TREE_CODE (var
) == LABEL_DECL
8712 || TREE_CODE (var
) == RESULT_DECL
));
8715 /* Subprogram of following function. Called by walk_tree.
8717 Return *TP if it is an automatic variable or parameter of the
8718 function passed in as DATA. */
8721 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8723 tree fn
= (tree
) data
;
8728 else if (DECL_P (*tp
)
8729 && auto_var_in_fn_p (*tp
, fn
))
8735 /* Returns true if T is, contains, or refers to a type with variable
8736 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8737 arguments, but not the return type. If FN is nonzero, only return
8738 true if a modifier of the type or position of FN is a variable or
8739 parameter inside FN.
8741 This concept is more general than that of C99 'variably modified types':
8742 in C99, a struct type is never variably modified because a VLA may not
8743 appear as a structure member. However, in GNU C code like:
8745 struct S { int i[f()]; };
8747 is valid, and other languages may define similar constructs. */
8750 variably_modified_type_p (tree type
, tree fn
)
8754 /* Test if T is either variable (if FN is zero) or an expression containing
8755 a variable in FN. If TYPE isn't gimplified, return true also if
8756 gimplify_one_sizepos would gimplify the expression into a local
8758 #define RETURN_TRUE_IF_VAR(T) \
8759 do { tree _t = (T); \
8760 if (_t != NULL_TREE \
8761 && _t != error_mark_node \
8762 && TREE_CODE (_t) != INTEGER_CST \
8763 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8765 || (!TYPE_SIZES_GIMPLIFIED (type) \
8766 && !is_gimple_sizepos (_t)) \
8767 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8768 return true; } while (0)
8770 if (type
== error_mark_node
)
8773 /* If TYPE itself has variable size, it is variably modified. */
8774 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8775 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8777 switch (TREE_CODE (type
))
8780 case REFERENCE_TYPE
:
8782 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8788 /* If TYPE is a function type, it is variably modified if the
8789 return type is variably modified. */
8790 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8796 case FIXED_POINT_TYPE
:
8799 /* Scalar types are variably modified if their end points
8801 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8802 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8807 case QUAL_UNION_TYPE
:
8808 /* We can't see if any of the fields are variably-modified by the
8809 definition we normally use, since that would produce infinite
8810 recursion via pointers. */
8811 /* This is variably modified if some field's type is. */
8812 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8813 if (TREE_CODE (t
) == FIELD_DECL
)
8815 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8816 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8817 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8819 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8820 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8825 /* Do not call ourselves to avoid infinite recursion. This is
8826 variably modified if the element type is. */
8827 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8828 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8835 /* The current language may have other cases to check, but in general,
8836 all other types are not variably modified. */
8837 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8839 #undef RETURN_TRUE_IF_VAR
8842 /* Given a DECL or TYPE, return the scope in which it was declared, or
8843 NULL_TREE if there is no containing scope. */
8846 get_containing_scope (const_tree t
)
8848 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
8851 /* Return the innermost context enclosing DECL that is
8852 a FUNCTION_DECL, or zero if none. */
8855 decl_function_context (const_tree decl
)
8859 if (TREE_CODE (decl
) == ERROR_MARK
)
8862 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8863 where we look up the function at runtime. Such functions always take
8864 a first argument of type 'pointer to real context'.
8866 C++ should really be fixed to use DECL_CONTEXT for the real context,
8867 and use something else for the "virtual context". */
8868 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VINDEX (decl
))
8871 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8873 context
= DECL_CONTEXT (decl
);
8875 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8877 if (TREE_CODE (context
) == BLOCK
)
8878 context
= BLOCK_SUPERCONTEXT (context
);
8880 context
= get_containing_scope (context
);
8886 /* Return the innermost context enclosing DECL that is
8887 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8888 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8891 decl_type_context (const_tree decl
)
8893 tree context
= DECL_CONTEXT (decl
);
8896 switch (TREE_CODE (context
))
8898 case NAMESPACE_DECL
:
8899 case TRANSLATION_UNIT_DECL
:
8904 case QUAL_UNION_TYPE
:
8909 context
= DECL_CONTEXT (context
);
8913 context
= BLOCK_SUPERCONTEXT (context
);
8923 /* CALL is a CALL_EXPR. Return the declaration for the function
8924 called, or NULL_TREE if the called function cannot be
8928 get_callee_fndecl (const_tree call
)
8932 if (call
== error_mark_node
)
8933 return error_mark_node
;
8935 /* It's invalid to call this function with anything but a
8937 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8939 /* The first operand to the CALL is the address of the function
8941 addr
= CALL_EXPR_FN (call
);
8943 /* If there is no function, return early. */
8944 if (addr
== NULL_TREE
)
8949 /* If this is a readonly function pointer, extract its initial value. */
8950 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
8951 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
8952 && DECL_INITIAL (addr
))
8953 addr
= DECL_INITIAL (addr
);
8955 /* If the address is just `&f' for some function `f', then we know
8956 that `f' is being called. */
8957 if (TREE_CODE (addr
) == ADDR_EXPR
8958 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
8959 return TREE_OPERAND (addr
, 0);
8961 /* We couldn't figure out what was being called. */
8965 /* Print debugging information about tree nodes generated during the compile,
8966 and any language-specific information. */
8969 dump_tree_statistics (void)
8971 if (GATHER_STATISTICS
)
8974 int total_nodes
, total_bytes
;
8975 fprintf (stderr
, "Kind Nodes Bytes\n");
8976 fprintf (stderr
, "---------------------------------------\n");
8977 total_nodes
= total_bytes
= 0;
8978 for (i
= 0; i
< (int) all_kinds
; i
++)
8980 fprintf (stderr
, "%-20s %7d %10d\n", tree_node_kind_names
[i
],
8981 tree_node_counts
[i
], tree_node_sizes
[i
]);
8982 total_nodes
+= tree_node_counts
[i
];
8983 total_bytes
+= tree_node_sizes
[i
];
8985 fprintf (stderr
, "---------------------------------------\n");
8986 fprintf (stderr
, "%-20s %7d %10d\n", "Total", total_nodes
, total_bytes
);
8987 fprintf (stderr
, "---------------------------------------\n");
8988 fprintf (stderr
, "Code Nodes\n");
8989 fprintf (stderr
, "----------------------------\n");
8990 for (i
= 0; i
< (int) MAX_TREE_CODES
; i
++)
8991 fprintf (stderr
, "%-20s %7d\n", get_tree_code_name ((enum tree_code
) i
),
8992 tree_code_counts
[i
]);
8993 fprintf (stderr
, "----------------------------\n");
8994 ssanames_print_statistics ();
8995 phinodes_print_statistics ();
8998 fprintf (stderr
, "(No per-node statistics)\n");
9000 print_type_hash_statistics ();
9001 print_debug_expr_statistics ();
9002 print_value_expr_statistics ();
9003 lang_hooks
.print_statistics ();
9006 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
/* Fold the high-order BITS bits of VALUE into the running CRC-32
   CHKSUM, one bit at a time, using the IEEE 802.3 polynomial
   0x04c11db7.  Returns the updated checksum.  */

static unsigned
crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
{
  unsigned remaining;

  for (remaining = bits; remaining--; value <<= 1)
    {
      /* Feed back the polynomial whenever the shifted-out bit of the
	 checksum differs from the incoming data bit.  */
      unsigned feedback
	= (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;

      chksum <<= 1;
      chksum ^= feedback;
    }
  return chksum;
}
9026 /* Generate a crc32 of a 32-bit unsigned. */
9029 crc32_unsigned (unsigned chksum
, unsigned value
)
9031 return crc32_unsigned_bits (chksum
, value
, 32);
9034 /* Generate a crc32 of a byte. */
9037 crc32_byte (unsigned chksum
, char byte
)
9039 return crc32_unsigned_bits (chksum
, (unsigned) byte
<< 24, 8);
9042 /* Generate a crc32 of a string. */
9045 crc32_string (unsigned chksum
, const char *string
)
9049 chksum
= crc32_byte (chksum
, *string
);
9055 /* P is a string that will be used in a symbol. Mask out any characters
9056 that are not valid in that context. */
9059 clean_symbol_name (char *p
)
9063 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9066 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9073 /* Generate a name for a special-purpose function.
9074 The generated name may need to be unique across the whole link.
9075 Changes to this function may also require corresponding changes to
9076 xstrdup_mask_random.
9077 TYPE is some string to identify the purpose of this function to the
9078 linker or collect2; it must start with an uppercase letter,
9080 I - for constructors
9082 N - for C++ anonymous namespaces
9083 F - for DWARF unwind frame information. */
9086 get_file_function_name (const char *type
)
9092 /* If we already have a name we know to be unique, just use that. */
9093 if (first_global_object_name
)
9094 p
= q
= ASTRDUP (first_global_object_name
);
9095 /* If the target is handling the constructors/destructors, they
9096 will be local to this file and the name is only necessary for
9098 We also assign sub_I and sub_D sufixes to constructors called from
9099 the global static constructors. These are always local. */
9100 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
9101 || (strncmp (type
, "sub_", 4) == 0
9102 && (type
[4] == 'I' || type
[4] == 'D')))
9104 const char *file
= main_input_filename
;
9106 file
= LOCATION_FILE (input_location
);
9107 /* Just use the file's basename, because the full pathname
9108 might be quite long. */
9109 p
= q
= ASTRDUP (lbasename (file
));
9113 /* Otherwise, the name must be unique across the entire link.
9114 We don't have anything that we know to be unique to this translation
9115 unit, so use what we do have and throw in some randomness. */
9117 const char *name
= weak_global_object_name
;
9118 const char *file
= main_input_filename
;
9123 file
= LOCATION_FILE (input_location
);
9125 len
= strlen (file
);
9126 q
= (char *) alloca (9 + 17 + len
+ 1);
9127 memcpy (q
, file
, len
+ 1);
9129 snprintf (q
+ len
, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
9130 crc32_string (0, name
), get_random_seed (false));
9135 clean_symbol_name (q
);
9136 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
9139 /* Set up the name of the file-level functions we may need.
9140 Use a global object (which is already required to be unique over
9141 the program) rather than the file name (which imposes extra
9143 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
9145 return get_identifier (buf
);
9148 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9150 /* Complain that the tree code of NODE does not match the expected 0
9151 terminated list of trailing codes. The trailing code list can be
9152 empty, for a more vague error message. FILE, LINE, and FUNCTION
9153 are of the caller. */
9156 tree_check_failed (const_tree node
, const char *file
,
9157 int line
, const char *function
, ...)
9161 unsigned length
= 0;
9162 enum tree_code code
;
9164 va_start (args
, function
);
9165 while ((code
= (enum tree_code
) va_arg (args
, int)))
9166 length
+= 4 + strlen (get_tree_code_name (code
));
9171 va_start (args
, function
);
9172 length
+= strlen ("expected ");
9173 buffer
= tmp
= (char *) alloca (length
);
9175 while ((code
= (enum tree_code
) va_arg (args
, int)))
9177 const char *prefix
= length
? " or " : "expected ";
9179 strcpy (tmp
+ length
, prefix
);
9180 length
+= strlen (prefix
);
9181 strcpy (tmp
+ length
, get_tree_code_name (code
));
9182 length
+= strlen (get_tree_code_name (code
));
9187 buffer
= "unexpected node";
9189 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9190 buffer
, get_tree_code_name (TREE_CODE (node
)),
9191 function
, trim_filename (file
), line
);
9194 /* Complain that the tree code of NODE does match the expected 0
9195 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9199 tree_not_check_failed (const_tree node
, const char *file
,
9200 int line
, const char *function
, ...)
9204 unsigned length
= 0;
9205 enum tree_code code
;
9207 va_start (args
, function
);
9208 while ((code
= (enum tree_code
) va_arg (args
, int)))
9209 length
+= 4 + strlen (get_tree_code_name (code
));
9211 va_start (args
, function
);
9212 buffer
= (char *) alloca (length
);
9214 while ((code
= (enum tree_code
) va_arg (args
, int)))
9218 strcpy (buffer
+ length
, " or ");
9221 strcpy (buffer
+ length
, get_tree_code_name (code
));
9222 length
+= strlen (get_tree_code_name (code
));
9226 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9227 buffer
, get_tree_code_name (TREE_CODE (node
)),
9228 function
, trim_filename (file
), line
);
9231 /* Similar to tree_check_failed, except that we check for a class of tree
9232 code, given in CL. */
9235 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9236 const char *file
, int line
, const char *function
)
9239 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9240 TREE_CODE_CLASS_STRING (cl
),
9241 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9242 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9245 /* Similar to tree_check_failed, except that instead of specifying a
9246 dozen codes, use the knowledge that they're all sequential. */
9249 tree_range_check_failed (const_tree node
, const char *file
, int line
,
9250 const char *function
, enum tree_code c1
,
9254 unsigned length
= 0;
9257 for (c
= c1
; c
<= c2
; ++c
)
9258 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
9260 length
+= strlen ("expected ");
9261 buffer
= (char *) alloca (length
);
9264 for (c
= c1
; c
<= c2
; ++c
)
9266 const char *prefix
= length
? " or " : "expected ";
9268 strcpy (buffer
+ length
, prefix
);
9269 length
+= strlen (prefix
);
9270 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
9271 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
9274 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9275 buffer
, get_tree_code_name (TREE_CODE (node
)),
9276 function
, trim_filename (file
), line
);
9280 /* Similar to tree_check_failed, except that we check that a tree does
9281 not have the specified code, given in CL. */
9284 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9285 const char *file
, int line
, const char *function
)
9288 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9289 TREE_CODE_CLASS_STRING (cl
),
9290 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9291 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9295 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9298 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
9299 const char *function
, enum omp_clause_code code
)
9301 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9302 omp_clause_code_name
[code
], get_tree_code_name (TREE_CODE (node
)),
9303 function
, trim_filename (file
), line
);
9307 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9310 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
9311 const char *function
, enum omp_clause_code c1
,
9312 enum omp_clause_code c2
)
9315 unsigned length
= 0;
9318 for (c
= c1
; c
<= c2
; ++c
)
9319 length
+= 4 + strlen (omp_clause_code_name
[c
]);
9321 length
+= strlen ("expected ");
9322 buffer
= (char *) alloca (length
);
9325 for (c
= c1
; c
<= c2
; ++c
)
9327 const char *prefix
= length
? " or " : "expected ";
9329 strcpy (buffer
+ length
, prefix
);
9330 length
+= strlen (prefix
);
9331 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
9332 length
+= strlen (omp_clause_code_name
[c
]);
9335 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9336 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
9337 function
, trim_filename (file
), line
);
9341 #undef DEFTREESTRUCT
9342 #define DEFTREESTRUCT(VAL, NAME) NAME,
9344 static const char *ts_enum_names
[] = {
9345 #include "treestruct.def"
9347 #undef DEFTREESTRUCT
9349 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9351 /* Similar to tree_class_check_failed, except that we check for
9352 whether CODE contains the tree structure identified by EN. */
9355 tree_contains_struct_check_failed (const_tree node
,
9356 const enum tree_node_structure_enum en
,
9357 const char *file
, int line
,
9358 const char *function
)
9361 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9363 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9367 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9368 (dynamically sized) vector. */
9371 tree_int_cst_elt_check_failed (int idx
, int len
, const char *file
, int line
,
9372 const char *function
)
9375 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9376 idx
+ 1, len
, function
, trim_filename (file
), line
);
9379 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9380 (dynamically sized) vector. */
9383 tree_vec_elt_check_failed (int idx
, int len
, const char *file
, int line
,
9384 const char *function
)
9387 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9388 idx
+ 1, len
, function
, trim_filename (file
), line
);
9391 /* Similar to above, except that the check is for the bounds of the operand
9392 vector of an expression node EXP. */
9395 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
9396 int line
, const char *function
)
9398 enum tree_code code
= TREE_CODE (exp
);
9400 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9401 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
9402 function
, trim_filename (file
), line
);
9405 /* Similar to above, except that the check is for the number of
9406 operands of an OMP_CLAUSE node. */
9409 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
9410 int line
, const char *function
)
9413 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9414 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
9415 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
9416 trim_filename (file
), line
);
9418 #endif /* ENABLE_TREE_CHECKING */
9420 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9421 and mapped to the machine mode MODE. Initialize its fields and build
9422 the information necessary for debugging output. */
9425 make_vector_type (tree innertype
, int nunits
, machine_mode mode
)
9428 inchash::hash hstate
;
9430 t
= make_node (VECTOR_TYPE
);
9431 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (innertype
);
9432 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
9433 SET_TYPE_MODE (t
, mode
);
9435 if (TYPE_STRUCTURAL_EQUALITY_P (innertype
))
9436 SET_TYPE_STRUCTURAL_EQUALITY (t
);
9437 else if (TYPE_CANONICAL (innertype
) != innertype
9438 || mode
!= VOIDmode
)
9440 = make_vector_type (TYPE_CANONICAL (innertype
), nunits
, VOIDmode
);
9444 hstate
.add_wide_int (VECTOR_TYPE
);
9445 hstate
.add_wide_int (nunits
);
9446 hstate
.add_wide_int (mode
);
9447 hstate
.add_object (TYPE_HASH (TREE_TYPE (t
)));
9448 t
= type_hash_canon (hstate
.end (), t
);
9450 /* We have built a main variant, based on the main variant of the
9451 inner type. Use it to build the variant we return. */
9452 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
9453 && TREE_TYPE (t
) != innertype
)
9454 return build_type_attribute_qual_variant (t
,
9455 TYPE_ATTRIBUTES (innertype
),
9456 TYPE_QUALS (innertype
));
9462 make_or_reuse_type (unsigned size
, int unsignedp
)
9466 if (size
== INT_TYPE_SIZE
)
9467 return unsignedp
? unsigned_type_node
: integer_type_node
;
9468 if (size
== CHAR_TYPE_SIZE
)
9469 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
9470 if (size
== SHORT_TYPE_SIZE
)
9471 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
9472 if (size
== LONG_TYPE_SIZE
)
9473 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
9474 if (size
== LONG_LONG_TYPE_SIZE
)
9475 return (unsignedp
? long_long_unsigned_type_node
9476 : long_long_integer_type_node
);
9478 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9479 if (size
== int_n_data
[i
].bitsize
9480 && int_n_enabled_p
[i
])
9481 return (unsignedp
? int_n_trees
[i
].unsigned_type
9482 : int_n_trees
[i
].signed_type
);
9485 return make_unsigned_type (size
);
9487 return make_signed_type (size
);
9490 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9493 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
9497 if (size
== SHORT_FRACT_TYPE_SIZE
)
9498 return unsignedp
? sat_unsigned_short_fract_type_node
9499 : sat_short_fract_type_node
;
9500 if (size
== FRACT_TYPE_SIZE
)
9501 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9502 if (size
== LONG_FRACT_TYPE_SIZE
)
9503 return unsignedp
? sat_unsigned_long_fract_type_node
9504 : sat_long_fract_type_node
;
9505 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9506 return unsignedp
? sat_unsigned_long_long_fract_type_node
9507 : sat_long_long_fract_type_node
;
9511 if (size
== SHORT_FRACT_TYPE_SIZE
)
9512 return unsignedp
? unsigned_short_fract_type_node
9513 : short_fract_type_node
;
9514 if (size
== FRACT_TYPE_SIZE
)
9515 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9516 if (size
== LONG_FRACT_TYPE_SIZE
)
9517 return unsignedp
? unsigned_long_fract_type_node
9518 : long_fract_type_node
;
9519 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9520 return unsignedp
? unsigned_long_long_fract_type_node
9521 : long_long_fract_type_node
;
9524 return make_fract_type (size
, unsignedp
, satp
);
9527 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9530 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9534 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9535 return unsignedp
? sat_unsigned_short_accum_type_node
9536 : sat_short_accum_type_node
;
9537 if (size
== ACCUM_TYPE_SIZE
)
9538 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9539 if (size
== LONG_ACCUM_TYPE_SIZE
)
9540 return unsignedp
? sat_unsigned_long_accum_type_node
9541 : sat_long_accum_type_node
;
9542 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9543 return unsignedp
? sat_unsigned_long_long_accum_type_node
9544 : sat_long_long_accum_type_node
;
9548 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9549 return unsignedp
? unsigned_short_accum_type_node
9550 : short_accum_type_node
;
9551 if (size
== ACCUM_TYPE_SIZE
)
9552 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9553 if (size
== LONG_ACCUM_TYPE_SIZE
)
9554 return unsignedp
? unsigned_long_accum_type_node
9555 : long_accum_type_node
;
9556 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9557 return unsignedp
? unsigned_long_long_accum_type_node
9558 : long_long_accum_type_node
;
9561 return make_accum_type (size
, unsignedp
, satp
);
9565 /* Create an atomic variant node for TYPE. This routine is called
9566 during initialization of data types to create the 5 basic atomic
9567 types. The generic build_variant_type function requires these to
9568 already be set up in order to function properly, so cannot be
9569 called from there. If ALIGN is non-zero, then ensure alignment is
9570 overridden to this value. */
9573 build_atomic_base (tree type
, unsigned int align
)
9577 /* Make sure its not already registered. */
9578 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9581 t
= build_variant_type_copy (type
);
9582 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
9585 TYPE_ALIGN (t
) = align
;
9590 /* Create nodes for all integer types (and error_mark_node) using the sizes
9591 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9592 SHORT_DOUBLE specifies whether double should be of the same precision
9596 build_common_tree_nodes (bool signed_char
, bool short_double
)
9600 error_mark_node
= make_node (ERROR_MARK
);
9601 TREE_TYPE (error_mark_node
) = error_mark_node
;
9603 initialize_sizetypes ();
9605 /* Define both `signed char' and `unsigned char'. */
9606 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9607 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9608 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9609 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9611 /* Define `char', which is like either `signed char' or `unsigned char'
9612 but not the same as either. */
9615 ? make_signed_type (CHAR_TYPE_SIZE
)
9616 : make_unsigned_type (CHAR_TYPE_SIZE
));
9617 TYPE_STRING_FLAG (char_type_node
) = 1;
9619 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9620 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9621 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9622 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9623 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9624 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9625 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9626 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9628 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9630 int_n_trees
[i
].signed_type
= make_signed_type (int_n_data
[i
].bitsize
);
9631 int_n_trees
[i
].unsigned_type
= make_unsigned_type (int_n_data
[i
].bitsize
);
9632 TYPE_SIZE (int_n_trees
[i
].signed_type
) = bitsize_int (int_n_data
[i
].bitsize
);
9633 TYPE_SIZE (int_n_trees
[i
].unsigned_type
) = bitsize_int (int_n_data
[i
].bitsize
);
9635 if (int_n_data
[i
].bitsize
> LONG_LONG_TYPE_SIZE
9636 && int_n_enabled_p
[i
])
9638 integer_types
[itk_intN_0
+ i
* 2] = int_n_trees
[i
].signed_type
;
9639 integer_types
[itk_unsigned_intN_0
+ i
* 2] = int_n_trees
[i
].unsigned_type
;
9643 /* Define a boolean type. This type only represents boolean values but
9644 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9645 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9646 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9647 TYPE_PRECISION (boolean_type_node
) = 1;
9648 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9650 /* Define what type to use for size_t. */
9651 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9652 size_type_node
= unsigned_type_node
;
9653 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9654 size_type_node
= long_unsigned_type_node
;
9655 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9656 size_type_node
= long_long_unsigned_type_node
;
9657 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9658 size_type_node
= short_unsigned_type_node
;
9663 size_type_node
= NULL_TREE
;
9664 for (i
= 0; i
< NUM_INT_N_ENTS
; i
++)
9665 if (int_n_enabled_p
[i
])
9668 sprintf (name
, "__int%d unsigned", int_n_data
[i
].bitsize
);
9670 if (strcmp (name
, SIZE_TYPE
) == 0)
9672 size_type_node
= int_n_trees
[i
].unsigned_type
;
9675 if (size_type_node
== NULL_TREE
)
9679 /* Fill in the rest of the sized types. Reuse existing type nodes
9681 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9682 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9683 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9684 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9685 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9687 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9688 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9689 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9690 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9691 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9693 /* Don't call build_qualified type for atomics. That routine does
9694 special processing for atomics, and until they are initialized
9695 it's better not to make that call.
9697 Check to see if there is a target override for atomic types. */
9699 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9700 targetm
.atomic_align_for_mode (QImode
));
9701 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9702 targetm
.atomic_align_for_mode (HImode
));
9703 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9704 targetm
.atomic_align_for_mode (SImode
));
9705 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9706 targetm
.atomic_align_for_mode (DImode
));
9707 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9708 targetm
.atomic_align_for_mode (TImode
));
9710 access_public_node
= get_identifier ("public");
9711 access_protected_node
= get_identifier ("protected");
9712 access_private_node
= get_identifier ("private");
9714 /* Define these next since types below may used them. */
9715 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9716 integer_one_node
= build_int_cst (integer_type_node
, 1);
9717 integer_three_node
= build_int_cst (integer_type_node
, 3);
9718 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9720 size_zero_node
= size_int (0);
9721 size_one_node
= size_int (1);
9722 bitsize_zero_node
= bitsize_int (0);
9723 bitsize_one_node
= bitsize_int (1);
9724 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9726 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9727 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9729 void_type_node
= make_node (VOID_TYPE
);
9730 layout_type (void_type_node
);
9732 /* We are not going to have real types in C with less than byte alignment,
9733 so we might as well not have any types that claim to have it. */
9734 TYPE_ALIGN (void_type_node
) = BITS_PER_UNIT
;
9735 TYPE_USER_ALIGN (void_type_node
) = 0;
9737 void_node
= make_node (VOID_CST
);
9738 TREE_TYPE (void_node
) = void_type_node
;
9740 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9741 layout_type (TREE_TYPE (null_pointer_node
));
9743 ptr_type_node
= build_pointer_type (void_type_node
);
9745 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9746 fileptr_type_node
= ptr_type_node
;
9748 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9750 float_type_node
= make_node (REAL_TYPE
);
9751 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9752 layout_type (float_type_node
);
9754 double_type_node
= make_node (REAL_TYPE
);
9756 TYPE_PRECISION (double_type_node
) = FLOAT_TYPE_SIZE
;
9758 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9759 layout_type (double_type_node
);
9761 long_double_type_node
= make_node (REAL_TYPE
);
9762 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9763 layout_type (long_double_type_node
);
9765 float_ptr_type_node
= build_pointer_type (float_type_node
);
9766 double_ptr_type_node
= build_pointer_type (double_type_node
);
9767 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9768 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9770 /* Fixed size integer types. */
9771 uint16_type_node
= make_or_reuse_type (16, 1);
9772 uint32_type_node
= make_or_reuse_type (32, 1);
9773 uint64_type_node
= make_or_reuse_type (64, 1);
9775 /* Decimal float types. */
9776 dfloat32_type_node
= make_node (REAL_TYPE
);
9777 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9778 layout_type (dfloat32_type_node
);
9779 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9780 dfloat32_ptr_type_node
= build_pointer_type (dfloat32_type_node
);
9782 dfloat64_type_node
= make_node (REAL_TYPE
);
9783 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9784 layout_type (dfloat64_type_node
);
9785 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9786 dfloat64_ptr_type_node
= build_pointer_type (dfloat64_type_node
);
9788 dfloat128_type_node
= make_node (REAL_TYPE
);
9789 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9790 layout_type (dfloat128_type_node
);
9791 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9792 dfloat128_ptr_type_node
= build_pointer_type (dfloat128_type_node
);
9794 complex_integer_type_node
= build_complex_type (integer_type_node
);
9795 complex_float_type_node
= build_complex_type (float_type_node
);
9796 complex_double_type_node
= build_complex_type (double_type_node
);
9797 complex_long_double_type_node
= build_complex_type (long_double_type_node
);
9799 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9800 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9801 sat_ ## KIND ## _type_node = \
9802 make_sat_signed_ ## KIND ## _type (SIZE); \
9803 sat_unsigned_ ## KIND ## _type_node = \
9804 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9805 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9806 unsigned_ ## KIND ## _type_node = \
9807 make_unsigned_ ## KIND ## _type (SIZE);
9809 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9810 sat_ ## WIDTH ## KIND ## _type_node = \
9811 make_sat_signed_ ## KIND ## _type (SIZE); \
9812 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9813 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9814 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9815 unsigned_ ## WIDTH ## KIND ## _type_node = \
9816 make_unsigned_ ## KIND ## _type (SIZE);
9818 /* Make fixed-point type nodes based on four different widths. */
9819 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9820 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9821 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9822 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9823 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9825 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9826 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9827 NAME ## _type_node = \
9828 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9829 u ## NAME ## _type_node = \
9830 make_or_reuse_unsigned_ ## KIND ## _type \
9831 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9832 sat_ ## NAME ## _type_node = \
9833 make_or_reuse_sat_signed_ ## KIND ## _type \
9834 (GET_MODE_BITSIZE (MODE ## mode)); \
9835 sat_u ## NAME ## _type_node = \
9836 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9837 (GET_MODE_BITSIZE (U ## MODE ## mode));
9839 /* Fixed-point type and mode nodes. */
9840 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9841 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9842 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9843 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9844 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9845 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9846 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9847 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9848 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9849 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9850 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9853 tree t
= targetm
.build_builtin_va_list ();
9855 /* Many back-ends define record types without setting TYPE_NAME.
9856 If we copied the record type here, we'd keep the original
9857 record type without a name. This breaks name mangling. So,
9858 don't copy record types and let c_common_nodes_and_builtins()
9859 declare the type to be __builtin_va_list. */
9860 if (TREE_CODE (t
) != RECORD_TYPE
)
9861 t
= build_variant_type_copy (t
);
9863 va_list_type_node
= t
;
9867 /* Modify DECL for given flags.
9868 TM_PURE attribute is set only on types, so the function will modify
9869 DECL's type when ECF_TM_PURE is used. */
9872 set_call_expr_flags (tree decl
, int flags
)
9874 if (flags
& ECF_NOTHROW
)
9875 TREE_NOTHROW (decl
) = 1;
9876 if (flags
& ECF_CONST
)
9877 TREE_READONLY (decl
) = 1;
9878 if (flags
& ECF_PURE
)
9879 DECL_PURE_P (decl
) = 1;
9880 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9881 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9882 if (flags
& ECF_NOVOPS
)
9883 DECL_IS_NOVOPS (decl
) = 1;
9884 if (flags
& ECF_NORETURN
)
9885 TREE_THIS_VOLATILE (decl
) = 1;
9886 if (flags
& ECF_MALLOC
)
9887 DECL_IS_MALLOC (decl
) = 1;
9888 if (flags
& ECF_RETURNS_TWICE
)
9889 DECL_IS_RETURNS_TWICE (decl
) = 1;
9890 if (flags
& ECF_LEAF
)
9891 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9892 NULL
, DECL_ATTRIBUTES (decl
));
9893 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9894 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9895 /* Looping const or pure is implied by noreturn.
9896 There is currently no way to declare looping const or looping pure alone. */
9897 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9898 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9902 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9905 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9906 const char *library_name
, int ecf_flags
)
9910 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9911 library_name
, NULL_TREE
);
9912 set_call_expr_flags (decl
, ecf_flags
);
9914 set_builtin_decl (code
, decl
, true);
9917 /* Call this function after instantiating all builtins that the language
9918 front end cares about. This will build the rest of the builtins
9919 and internal functions that are relied upon by the tree optimizers and
9923 build_common_builtin_nodes (void)
9928 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9930 ftype
= build_function_type (void_type_node
, void_list_node
);
9931 local_define_builtin ("__builtin_unreachable", ftype
, BUILT_IN_UNREACHABLE
,
9932 "__builtin_unreachable",
9933 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9934 | ECF_CONST
| ECF_LEAF
);
9937 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9938 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9940 ftype
= build_function_type_list (ptr_type_node
,
9941 ptr_type_node
, const_ptr_type_node
,
9942 size_type_node
, NULL_TREE
);
9944 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9945 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9946 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
9947 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9948 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9949 "memmove", ECF_NOTHROW
| ECF_LEAF
);
9952 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9954 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9955 const_ptr_type_node
, size_type_node
,
9957 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9958 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9961 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9963 ftype
= build_function_type_list (ptr_type_node
,
9964 ptr_type_node
, integer_type_node
,
9965 size_type_node
, NULL_TREE
);
9966 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9967 "memset", ECF_NOTHROW
| ECF_LEAF
);
9970 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9972 ftype
= build_function_type_list (ptr_type_node
,
9973 size_type_node
, NULL_TREE
);
9974 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9975 "alloca", ECF_MALLOC
| ECF_NOTHROW
| ECF_LEAF
);
9978 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9979 size_type_node
, NULL_TREE
);
9980 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9981 BUILT_IN_ALLOCA_WITH_ALIGN
, "alloca",
9982 ECF_MALLOC
| ECF_NOTHROW
| ECF_LEAF
);
9984 /* If we're checking the stack, `alloca' can throw. */
9985 if (flag_stack_check
)
9987 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA
)) = 0;
9988 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
)) = 0;
9991 ftype
= build_function_type_list (void_type_node
,
9992 ptr_type_node
, ptr_type_node
,
9993 ptr_type_node
, NULL_TREE
);
9994 local_define_builtin ("__builtin_init_trampoline", ftype
,
9995 BUILT_IN_INIT_TRAMPOLINE
,
9996 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
9997 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
9998 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
9999 "__builtin_init_heap_trampoline",
10000 ECF_NOTHROW
| ECF_LEAF
);
10002 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
10003 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
10004 BUILT_IN_ADJUST_TRAMPOLINE
,
10005 "__builtin_adjust_trampoline",
10006 ECF_CONST
| ECF_NOTHROW
);
10008 ftype
= build_function_type_list (void_type_node
,
10009 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10010 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
10011 BUILT_IN_NONLOCAL_GOTO
,
10012 "__builtin_nonlocal_goto",
10013 ECF_NORETURN
| ECF_NOTHROW
);
10015 ftype
= build_function_type_list (void_type_node
,
10016 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10017 local_define_builtin ("__builtin_setjmp_setup", ftype
,
10018 BUILT_IN_SETJMP_SETUP
,
10019 "__builtin_setjmp_setup", ECF_NOTHROW
);
10021 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10022 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
10023 BUILT_IN_SETJMP_RECEIVER
,
10024 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
10026 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
10027 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
10028 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
10030 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10031 local_define_builtin ("__builtin_stack_restore", ftype
,
10032 BUILT_IN_STACK_RESTORE
,
10033 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
10035 /* If there's a possibility that we might use the ARM EABI, build the
10036 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10037 if (targetm
.arm_eabi_unwinder
)
10039 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
10040 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
10041 BUILT_IN_CXA_END_CLEANUP
,
10042 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
10045 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10046 local_define_builtin ("__builtin_unwind_resume", ftype
,
10047 BUILT_IN_UNWIND_RESUME
,
10048 ((targetm_common
.except_unwind_info (&global_options
)
10050 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10053 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
10055 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
10057 local_define_builtin ("__builtin_return_address", ftype
,
10058 BUILT_IN_RETURN_ADDRESS
,
10059 "__builtin_return_address",
10063 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
10064 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10066 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
10067 ptr_type_node
, NULL_TREE
);
10068 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
10069 local_define_builtin ("__cyg_profile_func_enter", ftype
,
10070 BUILT_IN_PROFILE_FUNC_ENTER
,
10071 "__cyg_profile_func_enter", 0);
10072 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10073 local_define_builtin ("__cyg_profile_func_exit", ftype
,
10074 BUILT_IN_PROFILE_FUNC_EXIT
,
10075 "__cyg_profile_func_exit", 0);
10078 /* The exception object and filter values from the runtime. The argument
10079 must be zero before exception lowering, i.e. from the front end. After
10080 exception lowering, it will be the region number for the exception
10081 landing pad. These functions are PURE instead of CONST to prevent
10082 them from being hoisted past the exception edge that will initialize
10083 its value in the landing pad. */
10084 ftype
= build_function_type_list (ptr_type_node
,
10085 integer_type_node
, NULL_TREE
);
10086 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
10087 /* Only use TM_PURE if we we have TM language support. */
10088 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
10089 ecf_flags
|= ECF_TM_PURE
;
10090 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
10091 "__builtin_eh_pointer", ecf_flags
);
10093 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
10094 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
10095 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
10096 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10098 ftype
= build_function_type_list (void_type_node
,
10099 integer_type_node
, integer_type_node
,
10101 local_define_builtin ("__builtin_eh_copy_values", ftype
,
10102 BUILT_IN_EH_COPY_VALUES
,
10103 "__builtin_eh_copy_values", ECF_NOTHROW
);
10105 /* Complex multiplication and division. These are handled as builtins
10106 rather than optabs because emit_library_call_value doesn't support
10107 complex. Further, we can do slightly better with folding these
10108 beasties if the real and complex parts of the arguments are separate. */
10112 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
10114 char mode_name_buf
[4], *q
;
10116 enum built_in_function mcode
, dcode
;
10117 tree type
, inner_type
;
10118 const char *prefix
= "__";
10120 if (targetm
.libfunc_gnu_prefix
)
10123 type
= lang_hooks
.types
.type_for_mode ((machine_mode
) mode
, 0);
10126 inner_type
= TREE_TYPE (type
);
10128 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
10129 inner_type
, inner_type
, NULL_TREE
);
10131 mcode
= ((enum built_in_function
)
10132 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10133 dcode
= ((enum built_in_function
)
10134 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10136 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
10140 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
10142 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
10143 built_in_names
[mcode
],
10144 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10146 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
10148 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
10149 built_in_names
[dcode
],
10150 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10154 init_internal_fns ();
10157 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10160 If we requested a pointer to a vector, build up the pointers that
10161 we stripped off while looking for the inner type. Similarly for
10162 return values from functions.
10164 The argument TYPE is the top of the chain, and BOTTOM is the
10165 new type which we will point to. */
10168 reconstruct_complex_type (tree type
, tree bottom
)
10172 if (TREE_CODE (type
) == POINTER_TYPE
)
10174 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10175 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
10176 TYPE_REF_CAN_ALIAS_ALL (type
));
10178 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
10180 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10181 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
10182 TYPE_REF_CAN_ALIAS_ALL (type
));
10184 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10186 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10187 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
10189 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
10191 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10192 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
));
10194 else if (TREE_CODE (type
) == METHOD_TYPE
)
10196 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10197 /* The build_method_type_directly() routine prepends 'this' to argument list,
10198 so we must compensate by getting rid of it. */
10200 = build_method_type_directly
10201 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
10203 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
10205 else if (TREE_CODE (type
) == OFFSET_TYPE
)
10207 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10208 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
10213 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
10214 TYPE_QUALS (type
));
10217 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10220 build_vector_type_for_mode (tree innertype
, machine_mode mode
)
10224 switch (GET_MODE_CLASS (mode
))
10226 case MODE_VECTOR_INT
:
10227 case MODE_VECTOR_FLOAT
:
10228 case MODE_VECTOR_FRACT
:
10229 case MODE_VECTOR_UFRACT
:
10230 case MODE_VECTOR_ACCUM
:
10231 case MODE_VECTOR_UACCUM
:
10232 nunits
= GET_MODE_NUNITS (mode
);
10236 /* Check that there are no leftover bits. */
10237 gcc_assert (GET_MODE_BITSIZE (mode
)
10238 % TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
10240 nunits
= GET_MODE_BITSIZE (mode
)
10241 / TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
10245 gcc_unreachable ();
10248 return make_vector_type (innertype
, nunits
, mode
);
10251 /* Similarly, but takes the inner type and number of units, which must be
10255 build_vector_type (tree innertype
, int nunits
)
10257 return make_vector_type (innertype
, nunits
, VOIDmode
);
10260 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10263 build_opaque_vector_type (tree innertype
, int nunits
)
10265 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10267 /* We always build the non-opaque variant before the opaque one,
10268 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10269 cand
= TYPE_NEXT_VARIANT (t
);
10271 && TYPE_VECTOR_OPAQUE (cand
)
10272 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
10274 /* Othewise build a variant type and make sure to queue it after
10275 the non-opaque type. */
10276 cand
= build_distinct_type_copy (t
);
10277 TYPE_VECTOR_OPAQUE (cand
) = true;
10278 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10279 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10280 TYPE_NEXT_VARIANT (t
) = cand
;
10281 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
10286 /* Given an initializer INIT, return TRUE if INIT is zero or some
10287 aggregate of zeros. Otherwise return FALSE. */
10289 initializer_zerop (const_tree init
)
10295 switch (TREE_CODE (init
))
10298 return integer_zerop (init
);
10301 /* ??? Note that this is not correct for C4X float formats. There,
10302 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10303 negative exponent. */
10304 return real_zerop (init
)
10305 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
));
10308 return fixed_zerop (init
);
10311 return integer_zerop (init
)
10312 || (real_zerop (init
)
10313 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10314 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
))));
10319 for (i
= 0; i
< VECTOR_CST_NELTS (init
); ++i
)
10320 if (!initializer_zerop (VECTOR_CST_ELT (init
, i
)))
10327 unsigned HOST_WIDE_INT idx
;
10329 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10330 if (!initializer_zerop (elt
))
10339 /* We need to loop through all elements to handle cases like
10340 "\0" and "\0foobar". */
10341 for (i
= 0; i
< TREE_STRING_LENGTH (init
); ++i
)
10342 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10353 /* Check if vector VEC consists of all the equal elements and
10354 that the number of elements corresponds to the type of VEC.
10355 The function returns first element of the vector
10356 or NULL_TREE if the vector is not uniform. */
10358 uniform_vector_p (const_tree vec
)
10363 if (vec
== NULL_TREE
)
10366 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10368 if (TREE_CODE (vec
) == VECTOR_CST
)
10370 first
= VECTOR_CST_ELT (vec
, 0);
10371 for (i
= 1; i
< VECTOR_CST_NELTS (vec
); ++i
)
10372 if (!operand_equal_p (first
, VECTOR_CST_ELT (vec
, i
), 0))
10378 else if (TREE_CODE (vec
) == CONSTRUCTOR
)
10380 first
= error_mark_node
;
10382 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10389 if (!operand_equal_p (first
, t
, 0))
10392 if (i
!= TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)))
10401 /* Build an empty statement at location LOC. */
10404 build_empty_stmt (location_t loc
)
10406 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
10407 SET_EXPR_LOCATION (t
, loc
);
10412 /* Build an OpenMP clause with code CODE. LOC is the location of the
10416 build_omp_clause (location_t loc
, enum omp_clause_code code
)
10421 length
= omp_clause_num_ops
[code
];
10422 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
10424 record_node_allocation_statistics (OMP_CLAUSE
, size
);
10426 t
= (tree
) ggc_internal_alloc (size
);
10427 memset (t
, 0, size
);
10428 TREE_SET_CODE (t
, OMP_CLAUSE
);
10429 OMP_CLAUSE_SET_CODE (t
, code
);
10430 OMP_CLAUSE_LOCATION (t
) = loc
;
10435 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10436 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10437 Except for the CODE and operand count field, other storage for the
10438 object is initialized to zeros. */
10441 build_vl_exp_stat (enum tree_code code
, int len MEM_STAT_DECL
)
10444 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
10446 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
10447 gcc_assert (len
>= 1);
10449 record_node_allocation_statistics (code
, length
);
10451 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
10453 TREE_SET_CODE (t
, code
);
10455 /* Can't use TREE_OPERAND to store the length because if checking is
10456 enabled, it will try to check the length before we store it. :-P */
10457 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
10462 /* Helper function for build_call_* functions; build a CALL_EXPR with
10463 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10464 the argument slots. */
10467 build_call_1 (tree return_type
, tree fn
, int nargs
)
10471 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10472 TREE_TYPE (t
) = return_type
;
10473 CALL_EXPR_FN (t
) = fn
;
10474 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10479 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10480 FN and a null static chain slot. NARGS is the number of call arguments
10481 which are specified as "..." arguments. */
10484 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10488 va_start (args
, nargs
);
10489 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10494 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10495 FN and a null static chain slot. NARGS is the number of call arguments
10496 which are specified as a va_list ARGS. */
10499 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10504 t
= build_call_1 (return_type
, fn
, nargs
);
10505 for (i
= 0; i
< nargs
; i
++)
10506 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10507 process_call_operands (t
);
10511 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10512 FN and a null static chain slot. NARGS is the number of call arguments
10513 which are specified as a tree array ARGS. */
10516 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10517 int nargs
, const tree
*args
)
10522 t
= build_call_1 (return_type
, fn
, nargs
);
10523 for (i
= 0; i
< nargs
; i
++)
10524 CALL_EXPR_ARG (t
, i
) = args
[i
];
10525 process_call_operands (t
);
10526 SET_EXPR_LOCATION (t
, loc
);
10530 /* Like build_call_array, but takes a vec. */
10533 build_call_vec (tree return_type
, tree fn
, vec
<tree
, va_gc
> *args
)
10538 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10539 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10540 CALL_EXPR_ARG (ret
, ix
) = t
;
10541 process_call_operands (ret
);
10545 /* Conveniently construct a function call expression. FNDECL names the
10546 function to be called and N arguments are passed in the array
10550 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10552 tree fntype
= TREE_TYPE (fndecl
);
10553 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10555 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10558 /* Conveniently construct a function call expression. FNDECL names the
10559 function to be called and the arguments are passed in the vector
10563 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10565 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10566 vec_safe_address (vec
));
10570 /* Conveniently construct a function call expression. FNDECL names the
10571 function to be called, N is the number of arguments, and the "..."
10572 parameters are the argument expressions. */
10575 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10578 tree
*argarray
= XALLOCAVEC (tree
, n
);
10582 for (i
= 0; i
< n
; i
++)
10583 argarray
[i
] = va_arg (ap
, tree
);
10585 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10588 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10589 varargs macros aren't supported by all bootstrap compilers. */
10592 build_call_expr (tree fndecl
, int n
, ...)
10595 tree
*argarray
= XALLOCAVEC (tree
, n
);
10599 for (i
= 0; i
< n
; i
++)
10600 argarray
[i
] = va_arg (ap
, tree
);
10602 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10605 /* Build internal call expression. This is just like CALL_EXPR, except
10606 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10607 internal function. */
10610 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
10611 tree type
, int n
, ...)
10616 tree fn
= build_call_1 (type
, NULL_TREE
, n
);
10618 for (i
= 0; i
< n
; i
++)
10619 CALL_EXPR_ARG (fn
, i
) = va_arg (ap
, tree
);
10621 SET_EXPR_LOCATION (fn
, loc
);
10622 CALL_EXPR_IFN (fn
) = ifn
;
10626 /* Create a new constant string literal and return a char* pointer to it.
10627 The STRING_CST value is the LEN characters at STR. */
10629 build_string_literal (int len
, const char *str
)
10631 tree t
, elem
, index
, type
;
10633 t
= build_string (len
, str
);
10634 elem
= build_type_variant (char_type_node
, 1, 0);
10635 index
= build_index_type (size_int (len
- 1));
10636 type
= build_array_type (elem
, index
);
10637 TREE_TYPE (t
) = type
;
10638 TREE_CONSTANT (t
) = 1;
10639 TREE_READONLY (t
) = 1;
10640 TREE_STATIC (t
) = 1;
10642 type
= build_pointer_type (elem
);
10643 t
= build1 (ADDR_EXPR
, type
,
10644 build4 (ARRAY_REF
, elem
,
10645 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
10651 /* Return true if T (assumed to be a DECL) must be assigned a memory
10655 needs_to_live_in_memory (const_tree t
)
10657 return (TREE_ADDRESSABLE (t
)
10658 || is_global_var (t
)
10659 || (TREE_CODE (t
) == RESULT_DECL
10660 && !DECL_BY_REFERENCE (t
)
10661 && aggregate_value_p (t
, current_function_decl
)));
10664 /* Return value of a constant X and sign-extend it. */
10667 int_cst_value (const_tree x
)
10669 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10670 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
10672 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10673 gcc_assert (cst_and_fits_in_hwi (x
));
10675 if (bits
< HOST_BITS_PER_WIDE_INT
)
10677 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10679 val
|= (~(unsigned HOST_WIDE_INT
) 0) << (bits
- 1) << 1;
10681 val
&= ~((~(unsigned HOST_WIDE_INT
) 0) << (bits
- 1) << 1);
10687 /* If TYPE is an integral or pointer type, return an integer type with
10688 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10689 if TYPE is already an integer type of signedness UNSIGNEDP. */
10692 signed_or_unsigned_type_for (int unsignedp
, tree type
)
10694 if (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
) == unsignedp
)
10697 if (TREE_CODE (type
) == VECTOR_TYPE
)
10699 tree inner
= TREE_TYPE (type
);
10700 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10703 if (inner
== inner2
)
10705 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
10708 if (!INTEGRAL_TYPE_P (type
)
10709 && !POINTER_TYPE_P (type
)
10710 && TREE_CODE (type
) != OFFSET_TYPE
)
10713 return build_nonstandard_integer_type (TYPE_PRECISION (type
), unsignedp
);
10716 /* If TYPE is an integral or pointer type, return an integer type with
10717 the same precision which is unsigned, or itself if TYPE is already an
10718 unsigned integer type. */
10721 unsigned_type_for (tree type
)
10723 return signed_or_unsigned_type_for (1, type
);
10726 /* If TYPE is an integral or pointer type, return an integer type with
10727 the same precision which is signed, or itself if TYPE is already a
10728 signed integer type. */
10731 signed_type_for (tree type
)
10733 return signed_or_unsigned_type_for (0, type
);
10736 /* If TYPE is a vector type, return a signed integer vector type with the
10737 same width and number of subparts. Otherwise return boolean_type_node. */
10740 truth_type_for (tree type
)
10742 if (TREE_CODE (type
) == VECTOR_TYPE
)
10744 tree elem
= lang_hooks
.types
.type_for_size
10745 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))), 0);
10746 return build_opaque_vector_type (elem
, TYPE_VECTOR_SUBPARTS (type
));
10749 return boolean_type_node
;
10752 /* Returns the largest value obtainable by casting something in INNER type to
10756 upper_bound_in_type (tree outer
, tree inner
)
10758 unsigned int det
= 0;
10759 unsigned oprec
= TYPE_PRECISION (outer
);
10760 unsigned iprec
= TYPE_PRECISION (inner
);
10763 /* Compute a unique number for every combination. */
10764 det
|= (oprec
> iprec
) ? 4 : 0;
10765 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
10766 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
10768 /* Determine the exponent to use. */
10773 /* oprec <= iprec, outer: signed, inner: don't care. */
10778 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10782 /* oprec > iprec, outer: signed, inner: signed. */
10786 /* oprec > iprec, outer: signed, inner: unsigned. */
10790 /* oprec > iprec, outer: unsigned, inner: signed. */
10794 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10798 gcc_unreachable ();
10801 return wide_int_to_tree (outer
,
10802 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
10805 /* Returns the smallest value obtainable by casting something in INNER type to
10809 lower_bound_in_type (tree outer
, tree inner
)
10811 unsigned oprec
= TYPE_PRECISION (outer
);
10812 unsigned iprec
= TYPE_PRECISION (inner
);
10814 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10816 if (TYPE_UNSIGNED (outer
)
10817 /* If we are widening something of an unsigned type, OUTER type
10818 contains all values of INNER type. In particular, both INNER
10819 and OUTER types have zero in common. */
10820 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
10821 return build_int_cst (outer
, 0);
10824 /* If we are widening a signed type to another signed type, we
10825 want to obtain -2^^(iprec-1). If we are keeping the
10826 precision or narrowing to a signed type, we want to obtain
10828 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
10829 return wide_int_to_tree (outer
,
10830 wi::mask (prec
- 1, true,
10831 TYPE_PRECISION (outer
)));
10835 /* Return nonzero if two operands that are suitable for PHI nodes are
10836 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10837 SSA_NAME or invariant. Note that this is strictly an optimization.
10838 That is, callers of this function can directly call operand_equal_p
10839 and get the same result, only slower. */
10842 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
10846 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
10848 return operand_equal_p (arg0
, arg1
, 0);
10851 /* Returns number of zeros at the end of binary representation of X. */
10854 num_ending_zeros (const_tree x
)
10856 return build_int_cst (TREE_TYPE (x
), wi::ctz (x
));
10860 #define WALK_SUBTREE(NODE) \
10863 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10869 /* This is a subroutine of walk_tree that walks field of TYPE that are to
10870 be walked whenever a type is seen in the tree. Rest of operands and return
10871 value are as for walk_tree. */
10874 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
10875 hash_set
<tree
> *pset
, walk_tree_lh lh
)
10877 tree result
= NULL_TREE
;
10879 switch (TREE_CODE (type
))
10882 case REFERENCE_TYPE
:
10884 /* We have to worry about mutually recursive pointers. These can't
10885 be written in C. They can in Ada. It's pathological, but
10886 there's an ACATS test (c38102a) that checks it. Deal with this
10887 by checking if we're pointing to another pointer, that one
10888 points to another pointer, that one does too, and we have no htab.
10889 If so, get a hash table. We check three levels deep to avoid
10890 the cost of the hash table if we don't need one. */
10891 if (POINTER_TYPE_P (TREE_TYPE (type
))
10892 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
10893 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
10896 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
10904 /* ... fall through ... */
10907 WALK_SUBTREE (TREE_TYPE (type
));
10911 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
10913 /* Fall through. */
10915 case FUNCTION_TYPE
:
10916 WALK_SUBTREE (TREE_TYPE (type
));
10920 /* We never want to walk into default arguments. */
10921 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
10922 WALK_SUBTREE (TREE_VALUE (arg
));
10927 /* Don't follow this nodes's type if a pointer for fear that
10928 we'll have infinite recursion. If we have a PSET, then we
10931 || (!POINTER_TYPE_P (TREE_TYPE (type
))
10932 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
10933 WALK_SUBTREE (TREE_TYPE (type
));
10934 WALK_SUBTREE (TYPE_DOMAIN (type
));
10938 WALK_SUBTREE (TREE_TYPE (type
));
10939 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
10949 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10950 called with the DATA and the address of each sub-tree. If FUNC returns a
10951 non-NULL value, the traversal is stopped, and the value returned by FUNC
10952 is returned. If PSET is non-NULL it is used to record the nodes visited,
10953 and to avoid visiting a node more than once. */
10956 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
10957 hash_set
<tree
> *pset
, walk_tree_lh lh
)
10959 enum tree_code code
;
10963 #define WALK_SUBTREE_TAIL(NODE) \
10967 goto tail_recurse; \
10972 /* Skip empty subtrees. */
10976 /* Don't walk the same tree twice, if the user has requested
10977 that we avoid doing so. */
10978 if (pset
&& pset
->add (*tp
))
10981 /* Call the function. */
10983 result
= (*func
) (tp
, &walk_subtrees
, data
);
10985 /* If we found something, return it. */
10989 code
= TREE_CODE (*tp
);
10991 /* Even if we didn't, FUNC may have decided that there was nothing
10992 interesting below this point in the tree. */
10993 if (!walk_subtrees
)
10995 /* But we still need to check our siblings. */
10996 if (code
== TREE_LIST
)
10997 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
10998 else if (code
== OMP_CLAUSE
)
10999 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11006 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11007 if (result
|| !walk_subtrees
)
11014 case IDENTIFIER_NODE
:
11021 case PLACEHOLDER_EXPR
:
11025 /* None of these have subtrees other than those already walked
11030 WALK_SUBTREE (TREE_VALUE (*tp
));
11031 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11036 int len
= TREE_VEC_LENGTH (*tp
);
11041 /* Walk all elements but the first. */
11043 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11045 /* Now walk the first one as a tail call. */
11046 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11050 WALK_SUBTREE (TREE_REALPART (*tp
));
11051 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11055 unsigned HOST_WIDE_INT idx
;
11056 constructor_elt
*ce
;
11058 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11060 WALK_SUBTREE (ce
->value
);
11065 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11070 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11072 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11073 into declarations that are just mentioned, rather than
11074 declared; they don't really belong to this part of the tree.
11075 And, we can see cycles: the initializer for a declaration
11076 can refer to the declaration itself. */
11077 WALK_SUBTREE (DECL_INITIAL (decl
));
11078 WALK_SUBTREE (DECL_SIZE (decl
));
11079 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11081 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11084 case STATEMENT_LIST
:
11086 tree_stmt_iterator i
;
11087 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11088 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11093 switch (OMP_CLAUSE_CODE (*tp
))
11095 case OMP_CLAUSE_PRIVATE
:
11096 case OMP_CLAUSE_SHARED
:
11097 case OMP_CLAUSE_FIRSTPRIVATE
:
11098 case OMP_CLAUSE_COPYIN
:
11099 case OMP_CLAUSE_COPYPRIVATE
:
11100 case OMP_CLAUSE_FINAL
:
11101 case OMP_CLAUSE_IF
:
11102 case OMP_CLAUSE_NUM_THREADS
:
11103 case OMP_CLAUSE_SCHEDULE
:
11104 case OMP_CLAUSE_UNIFORM
:
11105 case OMP_CLAUSE_DEPEND
:
11106 case OMP_CLAUSE_NUM_TEAMS
:
11107 case OMP_CLAUSE_THREAD_LIMIT
:
11108 case OMP_CLAUSE_DEVICE
:
11109 case OMP_CLAUSE_DIST_SCHEDULE
:
11110 case OMP_CLAUSE_SAFELEN
:
11111 case OMP_CLAUSE_SIMDLEN
:
11112 case OMP_CLAUSE__LOOPTEMP_
:
11113 case OMP_CLAUSE__SIMDUID_
:
11114 case OMP_CLAUSE__CILK_FOR_COUNT_
:
11115 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
11118 case OMP_CLAUSE_NOWAIT
:
11119 case OMP_CLAUSE_ORDERED
:
11120 case OMP_CLAUSE_DEFAULT
:
11121 case OMP_CLAUSE_UNTIED
:
11122 case OMP_CLAUSE_MERGEABLE
:
11123 case OMP_CLAUSE_PROC_BIND
:
11124 case OMP_CLAUSE_INBRANCH
:
11125 case OMP_CLAUSE_NOTINBRANCH
:
11126 case OMP_CLAUSE_FOR
:
11127 case OMP_CLAUSE_PARALLEL
:
11128 case OMP_CLAUSE_SECTIONS
:
11129 case OMP_CLAUSE_TASKGROUP
:
11130 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11132 case OMP_CLAUSE_LASTPRIVATE
:
11133 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11134 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
11135 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11137 case OMP_CLAUSE_COLLAPSE
:
11140 for (i
= 0; i
< 3; i
++)
11141 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11142 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11145 case OMP_CLAUSE_LINEAR
:
11146 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11147 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
11148 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
11149 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11151 case OMP_CLAUSE_ALIGNED
:
11152 case OMP_CLAUSE_FROM
:
11153 case OMP_CLAUSE_TO
:
11154 case OMP_CLAUSE_MAP
:
11155 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11156 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11157 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11159 case OMP_CLAUSE_REDUCTION
:
11162 for (i
= 0; i
< 4; i
++)
11163 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11164 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11168 gcc_unreachable ();
11176 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11177 But, we only want to walk once. */
11178 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11179 for (i
= 0; i
< len
; ++i
)
11180 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11181 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11185 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11186 defining. We only want to walk into these fields of a type in this
11187 case and not in the general case of a mere reference to the type.
11189 The criterion is as follows: if the field can be an expression, it
11190 must be walked only here. This should be in keeping with the fields
11191 that are directly gimplified in gimplify_type_sizes in order for the
11192 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11193 variable-sized types.
11195 Note that DECLs get walked as part of processing the BIND_EXPR. */
11196 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11198 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11199 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11202 /* Call the function for the type. See if it returns anything or
11203 doesn't want us to continue. If we are to continue, walk both
11204 the normal fields and those for the declaration case. */
11205 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11206 if (result
|| !walk_subtrees
)
11209 /* But do not walk a pointed-to type since it may itself need to
11210 be walked in the declaration case if it isn't anonymous. */
11211 if (!POINTER_TYPE_P (*type_p
))
11213 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11218 /* If this is a record type, also walk the fields. */
11219 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11223 for (field
= TYPE_FIELDS (*type_p
); field
;
11224 field
= DECL_CHAIN (field
))
11226 /* We'd like to look at the type of the field, but we can
11227 easily get infinite recursion. So assume it's pointed
11228 to elsewhere in the tree. Also, ignore things that
11230 if (TREE_CODE (field
) != FIELD_DECL
)
11233 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11234 WALK_SUBTREE (DECL_SIZE (field
));
11235 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11236 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11237 WALK_SUBTREE (DECL_QUALIFIER (field
));
11241 /* Same for scalar types. */
11242 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11243 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11244 || TREE_CODE (*type_p
) == INTEGER_TYPE
11245 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11246 || TREE_CODE (*type_p
) == REAL_TYPE
)
11248 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11249 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11252 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11253 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11258 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11262 /* Walk over all the sub-trees of this operand. */
11263 len
= TREE_OPERAND_LENGTH (*tp
);
11265 /* Go through the subtrees. We need to do this in forward order so
11266 that the scope of a FOR_EXPR is handled properly. */
11269 for (i
= 0; i
< len
- 1; ++i
)
11270 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11271 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11274 /* If this is a type, walk the needed fields in the type. */
11275 else if (TYPE_P (*tp
))
11276 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11280 /* We didn't find what we were looking for. */
11283 #undef WALK_SUBTREE_TAIL
11285 #undef WALK_SUBTREE
11287 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11290 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11295 hash_set
<tree
> pset
;
11296 result
= walk_tree_1 (tp
, func
, data
, &pset
, lh
);
11302 tree_block (tree t
)
11304 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11306 if (IS_EXPR_CODE_CLASS (c
))
11307 return LOCATION_BLOCK (t
->exp
.locus
);
11308 gcc_unreachable ();
11313 tree_set_block (tree t
, tree b
)
11315 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11317 if (IS_EXPR_CODE_CLASS (c
))
11320 t
->exp
.locus
= COMBINE_LOCATION_DATA (line_table
, t
->exp
.locus
, b
);
11322 t
->exp
.locus
= LOCATION_LOCUS (t
->exp
.locus
);
11325 gcc_unreachable ();
11328 /* Create a nameless artificial label and put it in the current
11329 function context. The label has a location of LOC. Returns the
11330 newly created label. */
11333 create_artificial_label (location_t loc
)
11335 tree lab
= build_decl (loc
,
11336 LABEL_DECL
, NULL_TREE
, void_type_node
);
11338 DECL_ARTIFICIAL (lab
) = 1;
11339 DECL_IGNORED_P (lab
) = 1;
11340 DECL_CONTEXT (lab
) = current_function_decl
;
11344 /* Given a tree, try to return a useful variable name that we can use
11345 to prefix a temporary that is being assigned the value of the tree.
11346 I.E. given <temp> = &A, return A. */
11351 tree stripped_decl
;
11354 STRIP_NOPS (stripped_decl
);
11355 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11356 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11357 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11359 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11362 return IDENTIFIER_POINTER (name
);
11366 switch (TREE_CODE (stripped_decl
))
11369 return get_name (TREE_OPERAND (stripped_decl
, 0));
11376 /* Return true if TYPE has a variable argument list. */
11379 stdarg_p (const_tree fntype
)
11381 function_args_iterator args_iter
;
11382 tree n
= NULL_TREE
, t
;
11387 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
11392 return n
!= NULL_TREE
&& n
!= void_type_node
;
11395 /* Return true if TYPE has a prototype. */
11398 prototype_p (tree fntype
)
11402 gcc_assert (fntype
!= NULL_TREE
);
11404 t
= TYPE_ARG_TYPES (fntype
);
11405 return (t
!= NULL_TREE
);
11408 /* If BLOCK is inlined from an __attribute__((__artificial__))
11409 routine, return pointer to location from where it has been
11412 block_nonartificial_location (tree block
)
11414 location_t
*ret
= NULL
;
11416 while (block
&& TREE_CODE (block
) == BLOCK
11417 && BLOCK_ABSTRACT_ORIGIN (block
))
11419 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11421 while (TREE_CODE (ao
) == BLOCK
11422 && BLOCK_ABSTRACT_ORIGIN (ao
)
11423 && BLOCK_ABSTRACT_ORIGIN (ao
) != ao
)
11424 ao
= BLOCK_ABSTRACT_ORIGIN (ao
);
11426 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11428 /* If AO is an artificial inline, point RET to the
11429 call site locus at which it has been inlined and continue
11430 the loop, in case AO's caller is also an artificial
11432 if (DECL_DECLARED_INLINE_P (ao
)
11433 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11434 ret
= &BLOCK_SOURCE_LOCATION (block
);
11438 else if (TREE_CODE (ao
) != BLOCK
)
11441 block
= BLOCK_SUPERCONTEXT (block
);
11447 /* If EXP is inlined from an __attribute__((__artificial__))
11448 function, return the location of the original call expression. */
11451 tree_nonartificial_location (tree exp
)
11453 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11458 return EXPR_LOCATION (exp
);
11462 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
11465 /* Return the hash code code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11468 cl_option_hash_hash (const void *x
)
11470 const_tree
const t
= (const_tree
) x
;
11474 hashval_t hash
= 0;
11476 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11478 p
= (const char *)TREE_OPTIMIZATION (t
);
11479 len
= sizeof (struct cl_optimization
);
11482 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11484 p
= (const char *)TREE_TARGET_OPTION (t
);
11485 len
= sizeof (struct cl_target_option
);
11489 gcc_unreachable ();
11491 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11493 for (i
= 0; i
< len
; i
++)
11495 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
11500 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11501 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11505 cl_option_hash_eq (const void *x
, const void *y
)
11507 const_tree
const xt
= (const_tree
) x
;
11508 const_tree
const yt
= (const_tree
) y
;
11513 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11516 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11518 xp
= (const char *)TREE_OPTIMIZATION (xt
);
11519 yp
= (const char *)TREE_OPTIMIZATION (yt
);
11520 len
= sizeof (struct cl_optimization
);
11523 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11525 xp
= (const char *)TREE_TARGET_OPTION (xt
);
11526 yp
= (const char *)TREE_TARGET_OPTION (yt
);
11527 len
= sizeof (struct cl_target_option
);
11531 gcc_unreachable ();
11533 return (memcmp (xp
, yp
, len
) == 0);
11536 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11539 build_optimization_node (struct gcc_options
*opts
)
11544 /* Use the cache of optimization nodes. */
11546 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11549 slot
= htab_find_slot (cl_option_hash_table
, cl_optimization_node
, INSERT
);
11553 /* Insert this one into the hash table. */
11554 t
= cl_optimization_node
;
11557 /* Make a new node for next time round. */
11558 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11564 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11567 build_target_option_node (struct gcc_options
*opts
)
11572 /* Use the cache of optimization nodes. */
11574 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11577 slot
= htab_find_slot (cl_option_hash_table
, cl_target_option_node
, INSERT
);
11581 /* Insert this one into the hash table. */
11582 t
= cl_target_option_node
;
11585 /* Make a new node for next time round. */
11586 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11592 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11593 Called through htab_traverse. */
11596 prepare_target_option_node_for_pch (void **slot
, void *)
11598 tree node
= (tree
) *slot
;
11599 if (TREE_CODE (node
) == TARGET_OPTION_NODE
)
11600 TREE_TARGET_GLOBALS (node
) = NULL
;
11604 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11605 so that they aren't saved during PCH writing. */
11608 prepare_target_option_nodes_for_pch (void)
11610 htab_traverse (cl_option_hash_table
, prepare_target_option_node_for_pch
,
11614 /* Determine the "ultimate origin" of a block. The block may be an inlined
11615 instance of an inlined instance of a block which is local to an inline
11616 function, so we have to trace all of the way back through the origin chain
11617 to find out what sort of node actually served as the original seed for the
11621 block_ultimate_origin (const_tree block
)
11623 tree immediate_origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11625 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11626 we're trying to output the abstract instance of this function. */
11627 if (BLOCK_ABSTRACT (block
) && immediate_origin
== block
)
11630 if (immediate_origin
== NULL_TREE
)
11635 tree lookahead
= immediate_origin
;
11639 ret_val
= lookahead
;
11640 lookahead
= (TREE_CODE (ret_val
) == BLOCK
11641 ? BLOCK_ABSTRACT_ORIGIN (ret_val
) : NULL
);
11643 while (lookahead
!= NULL
&& lookahead
!= ret_val
);
11645 /* The block's abstract origin chain may not be the *ultimate* origin of
11646 the block. It could lead to a DECL that has an abstract origin set.
11647 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11648 will give us if it has one). Note that DECL's abstract origins are
11649 supposed to be the most distant ancestor (or so decl_ultimate_origin
11650 claims), so we don't need to loop following the DECL origins. */
11651 if (DECL_P (ret_val
))
11652 return DECL_ORIGIN (ret_val
);
11658 /* Return true iff conversion in EXP generates no instruction. Mark
11659 it inline so that we fully inline into the stripping functions even
11660 though we have two uses of this function. */
11663 tree_nop_conversion (const_tree exp
)
11665 tree outer_type
, inner_type
;
11667 if (!CONVERT_EXPR_P (exp
)
11668 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
11670 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
11673 outer_type
= TREE_TYPE (exp
);
11674 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11679 /* Use precision rather then machine mode when we can, which gives
11680 the correct answer even for submode (bit-field) types. */
11681 if ((INTEGRAL_TYPE_P (outer_type
)
11682 || POINTER_TYPE_P (outer_type
)
11683 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
11684 && (INTEGRAL_TYPE_P (inner_type
)
11685 || POINTER_TYPE_P (inner_type
)
11686 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
11687 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
11689 /* Otherwise fall back on comparing machine modes (e.g. for
11690 aggregate types, floats). */
11691 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
11694 /* Return true iff conversion in EXP generates no instruction. Don't
11695 consider conversions changing the signedness. */
11698 tree_sign_nop_conversion (const_tree exp
)
11700 tree outer_type
, inner_type
;
11702 if (!tree_nop_conversion (exp
))
11705 outer_type
= TREE_TYPE (exp
);
11706 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11708 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
11709 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
11712 /* Strip conversions from EXP according to tree_nop_conversion and
11713 return the resulting expression. */
11716 tree_strip_nop_conversions (tree exp
)
11718 while (tree_nop_conversion (exp
))
11719 exp
= TREE_OPERAND (exp
, 0);
11723 /* Strip conversions from EXP according to tree_sign_nop_conversion
11724 and return the resulting expression. */
11727 tree_strip_sign_nop_conversions (tree exp
)
11729 while (tree_sign_nop_conversion (exp
))
11730 exp
= TREE_OPERAND (exp
, 0);
11734 /* Avoid any floating point extensions from EXP. */
11736 strip_float_extensions (tree exp
)
11738 tree sub
, expt
, subt
;
11740 /* For floating point constant look up the narrowest type that can hold
11741 it properly and handle it like (type)(narrowest_type)constant.
11742 This way we can optimize for instance a=a*2.0 where "a" is float
11743 but 2.0 is double constant. */
11744 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
11746 REAL_VALUE_TYPE orig
;
11749 orig
= TREE_REAL_CST (exp
);
11750 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
11751 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
11752 type
= float_type_node
;
11753 else if (TYPE_PRECISION (TREE_TYPE (exp
))
11754 > TYPE_PRECISION (double_type_node
)
11755 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
11756 type
= double_type_node
;
11758 return build_real (type
, real_value_truncate (TYPE_MODE (type
), orig
));
11761 if (!CONVERT_EXPR_P (exp
))
11764 sub
= TREE_OPERAND (exp
, 0);
11765 subt
= TREE_TYPE (sub
);
11766 expt
= TREE_TYPE (exp
);
11768 if (!FLOAT_TYPE_P (subt
))
11771 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
11774 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
11777 return strip_float_extensions (sub
);
11780 /* Strip out all handled components that produce invariant
11784 strip_invariant_refs (const_tree op
)
11786 while (handled_component_p (op
))
11788 switch (TREE_CODE (op
))
11791 case ARRAY_RANGE_REF
:
11792 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
11793 || TREE_OPERAND (op
, 2) != NULL_TREE
11794 || TREE_OPERAND (op
, 3) != NULL_TREE
)
11798 case COMPONENT_REF
:
11799 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
11805 op
= TREE_OPERAND (op
, 0);
11811 static GTY(()) tree gcc_eh_personality_decl
;
11813 /* Return the GCC personality function decl. */
11816 lhd_gcc_personality (void)
11818 if (!gcc_eh_personality_decl
)
11819 gcc_eh_personality_decl
= build_personality_function ("gcc");
11820 return gcc_eh_personality_decl
;
11823 /* TARGET is a call target of GIMPLE call statement
11824 (obtained by gimple_call_fn). Return true if it is
11825 OBJ_TYPE_REF representing an virtual call of C++ method.
11826 (As opposed to OBJ_TYPE_REF representing objc calls
11827 through a cast where middle-end devirtualization machinery
11831 virtual_method_call_p (tree target
)
11833 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
11835 target
= TREE_TYPE (target
);
11836 gcc_checking_assert (TREE_CODE (target
) == POINTER_TYPE
);
11837 target
= TREE_TYPE (target
);
11838 if (TREE_CODE (target
) == FUNCTION_TYPE
)
11840 gcc_checking_assert (TREE_CODE (target
) == METHOD_TYPE
);
11844 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11847 obj_type_ref_class (tree ref
)
11849 gcc_checking_assert (TREE_CODE (ref
) == OBJ_TYPE_REF
);
11850 ref
= TREE_TYPE (ref
);
11851 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
11852 ref
= TREE_TYPE (ref
);
11853 /* We look for type THIS points to. ObjC also builds
11854 OBJ_TYPE_REF with non-method calls, Their first parameter
11855 ID however also corresponds to class type. */
11856 gcc_checking_assert (TREE_CODE (ref
) == METHOD_TYPE
11857 || TREE_CODE (ref
) == FUNCTION_TYPE
);
11858 ref
= TREE_VALUE (TYPE_ARG_TYPES (ref
));
11859 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
11860 return TREE_TYPE (ref
);
11863 /* Return true if T is in anonymous namespace. */
11866 type_in_anonymous_namespace_p (const_tree t
)
11868 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11869 bulitin types; those have CONTEXT NULL. */
11870 if (!TYPE_CONTEXT (t
))
11872 return (TYPE_STUB_DECL (t
) && !TREE_PUBLIC (TYPE_STUB_DECL (t
)));
11875 /* Try to find a base info of BINFO that would have its field decl at offset
11876 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11877 found, return, otherwise return NULL_TREE. */
11880 get_binfo_at_offset (tree binfo
, HOST_WIDE_INT offset
, tree expected_type
)
11882 tree type
= BINFO_TYPE (binfo
);
11886 HOST_WIDE_INT pos
, size
;
11890 if (types_same_for_odr (type
, expected_type
))
11895 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
11897 if (TREE_CODE (fld
) != FIELD_DECL
)
11900 pos
= int_bit_position (fld
);
11901 size
= tree_to_uhwi (DECL_SIZE (fld
));
11902 if (pos
<= offset
&& (pos
+ size
) > offset
)
11905 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
11908 if (!DECL_ARTIFICIAL (fld
))
11910 binfo
= TYPE_BINFO (TREE_TYPE (fld
));
11914 /* Offset 0 indicates the primary base, whose vtable contents are
11915 represented in the binfo for the derived class. */
11916 else if (offset
!= 0)
11918 tree base_binfo
, binfo2
= binfo
;
11920 /* Find BINFO corresponding to FLD. This is bit harder
11921 by a fact that in virtual inheritance we may need to walk down
11922 the non-virtual inheritance chain. */
11925 tree containing_binfo
= NULL
, found_binfo
= NULL
;
11926 for (i
= 0; BINFO_BASE_ITERATE (binfo2
, i
, base_binfo
); i
++)
11927 if (types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
11929 found_binfo
= base_binfo
;
11933 if ((tree_to_shwi (BINFO_OFFSET (base_binfo
))
11934 - tree_to_shwi (BINFO_OFFSET (binfo
)))
11935 * BITS_PER_UNIT
< pos
11936 /* Rule out types with no virtual methods or we can get confused
11937 here by zero sized bases. */
11938 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo
)))
11939 && (!containing_binfo
11940 || (tree_to_shwi (BINFO_OFFSET (containing_binfo
))
11941 < tree_to_shwi (BINFO_OFFSET (base_binfo
)))))
11942 containing_binfo
= base_binfo
;
11945 binfo
= found_binfo
;
11948 if (!containing_binfo
)
11950 binfo2
= containing_binfo
;
11954 type
= TREE_TYPE (fld
);
11959 /* Returns true if X is a typedef decl. */
11962 is_typedef_decl (tree x
)
11964 return (x
&& TREE_CODE (x
) == TYPE_DECL
11965 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
11968 /* Returns true iff TYPE is a type variant created for a typedef. */
11971 typedef_variant_p (tree type
)
11973 return is_typedef_decl (TYPE_NAME (type
));
11976 /* Warn about a use of an identifier which was marked deprecated. */
11978 warn_deprecated_use (tree node
, tree attr
)
11982 if (node
== 0 || !warn_deprecated_decl
)
11988 attr
= DECL_ATTRIBUTES (node
);
11989 else if (TYPE_P (node
))
11991 tree decl
= TYPE_STUB_DECL (node
);
11993 attr
= lookup_attribute ("deprecated",
11994 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
11999 attr
= lookup_attribute ("deprecated", attr
);
12002 msg
= TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
)));
12008 expanded_location xloc
= expand_location (DECL_SOURCE_LOCATION (node
));
12010 warning (OPT_Wdeprecated_declarations
,
12011 "%qD is deprecated (declared at %r%s:%d%R): %s",
12012 node
, "locus", xloc
.file
, xloc
.line
, msg
);
12014 warning (OPT_Wdeprecated_declarations
,
12015 "%qD is deprecated (declared at %r%s:%d%R)",
12016 node
, "locus", xloc
.file
, xloc
.line
);
12018 else if (TYPE_P (node
))
12020 tree what
= NULL_TREE
;
12021 tree decl
= TYPE_STUB_DECL (node
);
12023 if (TYPE_NAME (node
))
12025 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12026 what
= TYPE_NAME (node
);
12027 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12028 && DECL_NAME (TYPE_NAME (node
)))
12029 what
= DECL_NAME (TYPE_NAME (node
));
12034 expanded_location xloc
12035 = expand_location (DECL_SOURCE_LOCATION (decl
));
12039 warning (OPT_Wdeprecated_declarations
,
12040 "%qE is deprecated (declared at %r%s:%d%R): %s",
12041 what
, "locus", xloc
.file
, xloc
.line
, msg
);
12043 warning (OPT_Wdeprecated_declarations
,
12044 "%qE is deprecated (declared at %r%s:%d%R)",
12045 what
, "locus", xloc
.file
, xloc
.line
);
12050 warning (OPT_Wdeprecated_declarations
,
12051 "type is deprecated (declared at %r%s:%d%R): %s",
12052 "locus", xloc
.file
, xloc
.line
, msg
);
12054 warning (OPT_Wdeprecated_declarations
,
12055 "type is deprecated (declared at %r%s:%d%R)",
12056 "locus", xloc
.file
, xloc
.line
);
12064 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated: %s",
12067 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated", what
);
12072 warning (OPT_Wdeprecated_declarations
, "type is deprecated: %s",
12075 warning (OPT_Wdeprecated_declarations
, "type is deprecated");
12081 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12082 somewhere in it. */
12085 contains_bitfld_component_ref_p (const_tree ref
)
12087 while (handled_component_p (ref
))
12089 if (TREE_CODE (ref
) == COMPONENT_REF
12090 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12092 ref
= TREE_OPERAND (ref
, 0);
12098 /* Try to determine whether a TRY_CATCH expression can fall through.
12099 This is a subroutine of block_may_fallthru. */
12102 try_catch_may_fallthru (const_tree stmt
)
12104 tree_stmt_iterator i
;
12106 /* If the TRY block can fall through, the whole TRY_CATCH can
12108 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12111 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12112 switch (TREE_CODE (tsi_stmt (i
)))
12115 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12116 catch expression and a body. The whole TRY_CATCH may fall
12117 through iff any of the catch bodies falls through. */
12118 for (; !tsi_end_p (i
); tsi_next (&i
))
12120 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12125 case EH_FILTER_EXPR
:
12126 /* The exception filter expression only matters if there is an
12127 exception. If the exception does not match EH_FILTER_TYPES,
12128 we will execute EH_FILTER_FAILURE, and we will fall through
12129 if that falls through. If the exception does match
12130 EH_FILTER_TYPES, the stack unwinder will continue up the
12131 stack, so we will not fall through. We don't know whether we
12132 will throw an exception which matches EH_FILTER_TYPES or not,
12133 so we just ignore EH_FILTER_TYPES and assume that we might
12134 throw an exception which doesn't match. */
12135 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12138 /* This case represents statements to be executed when an
12139 exception occurs. Those statements are implicitly followed
12140 by a RESX statement to resume execution after the exception.
12141 So in this case the TRY_CATCH never falls through. */
12146 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12147 need not be 100% accurate; simply be conservative and return true if we
12148 don't know. This is used only to avoid stupidly generating extra code.
12149 If we're wrong, we'll just delete the extra code later. */
12152 block_may_fallthru (const_tree block
)
12154 /* This CONST_CAST is okay because expr_last returns its argument
12155 unmodified and we assign it to a const_tree. */
12156 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12158 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12162 /* Easy cases. If the last statement of the block implies
12163 control transfer, then we can't fall through. */
12167 /* If SWITCH_LABELS is set, this is lowered, and represents a
12168 branch to a selected label and hence can not fall through.
12169 Otherwise SWITCH_BODY is set, and the switch can fall
12171 return SWITCH_LABELS (stmt
) == NULL_TREE
;
12174 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12176 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12179 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12181 case TRY_CATCH_EXPR
:
12182 return try_catch_may_fallthru (stmt
);
12184 case TRY_FINALLY_EXPR
:
12185 /* The finally clause is always executed after the try clause,
12186 so if it does not fall through, then the try-finally will not
12187 fall through. Otherwise, if the try clause does not fall
12188 through, then when the finally clause falls through it will
12189 resume execution wherever the try clause was going. So the
12190 whole try-finally will only fall through if both the try
12191 clause and the finally clause fall through. */
12192 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12193 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12196 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12197 stmt
= TREE_OPERAND (stmt
, 1);
12203 /* Functions that do not return do not fall through. */
12204 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12206 case CLEANUP_POINT_EXPR
:
12207 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12210 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12216 return lang_hooks
.block_may_fallthru (stmt
);
/* True if we are using EH to handle cleanups.  */
static bool using_eh_for_cleanups_flag = false;

/* This routine is called from front ends to indicate eh should be used for
   cleanups.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_flag = true;
}

/* Query whether EH is used for cleanups.  */

bool
using_eh_for_cleanups_p (void)
{
  return using_eh_for_cleanups_flag;
}
12238 /* Wrapper for tree_code_name to ensure that tree code is valid */
12240 get_tree_code_name (enum tree_code code
)
12242 const char *invalid
= "<invalid tree code>";
12244 if (code
>= MAX_TREE_CODES
)
12247 return tree_code_name
[code
];
12250 /* Drops the TREE_OVERFLOW flag from T. */
12253 drop_tree_overflow (tree t
)
12255 gcc_checking_assert (TREE_OVERFLOW (t
));
12257 /* For tree codes with a sharing machinery re-build the result. */
12258 if (TREE_CODE (t
) == INTEGER_CST
)
12259 return wide_int_to_tree (TREE_TYPE (t
), t
);
12261 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12262 and drop the flag. */
12264 TREE_OVERFLOW (t
) = 0;
12268 /* Given a memory reference expression T, return its base address.
12269 The base address of a memory reference expression is the main
12270 object being referenced. For instance, the base address for
12271 'array[i].fld[j]' is 'array'. You can think of this as stripping
12272 away the offset part from a memory address.
12274 This function calls handled_component_p to strip away all the inner
12275 parts of the memory reference until it reaches the base object. */
12278 get_base_address (tree t
)
12280 while (handled_component_p (t
))
12281 t
= TREE_OPERAND (t
, 0);
12283 if ((TREE_CODE (t
) == MEM_REF
12284 || TREE_CODE (t
) == TARGET_MEM_REF
)
12285 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
12286 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
12288 /* ??? Either the alias oracle or all callers need to properly deal
12289 with WITH_SIZE_EXPRs before we can look through those. */
12290 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
12296 #include "gt-tree.h"