1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
32 #include "coretypes.h"
36 #include "stor-layout.h"
43 #include "toplev.h" /* get_random_seed */
45 #include "filenames.h"
48 #include "common/common-target.h"
49 #include "langhooks.h"
50 #include "tree-inline.h"
51 #include "tree-iterator.h"
52 #include "basic-block.h"
54 #include "pointer-set.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
60 #include "gimple-iterator.h"
62 #include "gimple-ssa.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
80 /* Tree code classes. */
/* Table indexed by tree code giving the tree_code_class (tcc_*) of each
   code.  Built by expanding all-tree.def with DEFTREECODE reduced to its
   TYPE field; the base-codes sentinel expands to tcc_exceptional.
   NOTE(review): this chunk is a lossy extraction -- the closing "};" and
   the matching "#undef DEFTREECODE" are not visible here; confirm
   against the full file.  */
82 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
83 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85 const enum tree_code_class tree_code_type
[] = {
86 #include "all-tree.def"
90 #undef END_OF_BASE_TREE_CODES
92 /* Table indexed by tree code giving number of expression
93 operands beyond the fixed part of the node structure.
94 Not used for types or decls. */
/* Table indexed by tree code giving the number of expression operands
   beyond the fixed part of the node (the LENGTH field of DEFTREECODE);
   the base-codes sentinel contributes 0.
   NOTE(review): lossy extraction -- the closing "};" and the matching
   "#undef DEFTREECODE" are not visible here; confirm against the full
   file.  */
96 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
97 #define END_OF_BASE_TREE_CODES 0,
99 const unsigned char tree_code_length
[] = {
100 #include "all-tree.def"
104 #undef END_OF_BASE_TREE_CODES
106 /* Names of tree components.
107 Used for printing out the tree and error messages. */
/* Table indexed by tree code giving the printable NAME of each code,
   used for dumping trees and for diagnostics; the base-codes sentinel
   gets the placeholder string "@dummy".
   NOTE(review): lossy extraction -- the closing "};" and the matching
   "#undef DEFTREECODE" are not visible here; confirm against the full
   file.  */
108 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
109 #define END_OF_BASE_TREE_CODES "@dummy",
111 static const char *const tree_code_name
[] = {
112 #include "all-tree.def"
116 #undef END_OF_BASE_TREE_CODES
118 /* Each tree code class has an associated string representation.
119 These must correspond to the tree_code_class entries. */
121 const char *const tree_code_class_strings
[] =
136 /* obstack.[ch] explicitly declined to prototype this. */
137 extern int _obstack_allocated_p (struct obstack
*h
, void *obj
);
139 /* Statistics-gathering stuff. */
/* Per-tree-code allocation counter (file-local; updated by
   record_node_allocation_statistics).  */
141 static int tree_code_counts
[MAX_TREE_CODES
];
/* Per-kind node count, indexed by tree_node_kind.  */
142 int tree_node_counts
[(int) all_kinds
];
/* Per-kind cumulative size in bytes, indexed by tree_node_kind.  */
143 int tree_node_sizes
[(int) all_kinds
];
145 /* Keep in sync with tree.h:enum tree_node_kind. */
146 static const char * const tree_node_kind_names
[] = {
165 /* Unique id for next decl created.  GTY(()) registers the counter as a
   garbage-collector root so it survives collections.  */
166 static GTY(()) int next_decl_uid
;
167 /* Unique id for next type created.  NOTE(review): starts at 1 rather
   than 0 -- presumably so that 0 is never a valid TYPE_UID; confirm
   against the full file.  */
168 static GTY(()) int next_type_uid
= 1;
169 /* Unique id for next debug decl created. Use negative numbers,
170 to catch erroneous uses. */
171 static GTY(()) int next_debug_decl_uid
;
173 /* Since we cannot rehash a type after it is in the table, we have to
174 keep the hash code. */
176 struct GTY(()) type_hash
{
181 /* Initial size of the hash table (rounded to next prime). */
182 #define TYPE_HASH_INITIAL_SIZE 1000
184 /* Now here is the hash table. When recording a type, it is added to
185 the slot whose index is the hash code. Note that the hash table is
186 used for several kinds of types (function types, array types and
187 array index range types, for now). While all these live in the
188 same table, they are completely independent, and the hash code is
189 computed differently for each of these. */
191 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash
)))
192 htab_t type_hash_table
;
194 /* Hash table and temporary node for larger integer const values. */
195 static GTY (()) tree int_cst_node
;
196 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
197 htab_t int_cst_hash_table
;
199 /* Hash table for optimization flags and target option flags. Use the same
200 hash table for both sets of options. Nodes for building the current
201 optimization and target option nodes. The assumption is most of the time
202 the options created will already be in the hash table, so we avoid
203 allocating and freeing up a node repeatably. */
204 static GTY (()) tree cl_optimization_node
;
205 static GTY (()) tree cl_target_option_node
;
206 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
207 htab_t cl_option_hash_table
;
209 /* General tree->tree mapping structure for use in hash tables. */
212 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map
)))
213 htab_t debug_expr_for_decl
;
215 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map
)))
216 htab_t value_expr_for_decl
;
218 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map
)))
219 htab_t debug_args_for_decl
;
221 static GTY ((if_marked ("tree_priority_map_marked_p"),
222 param_is (struct tree_priority_map
)))
223 htab_t init_priority_for_decl
;
225 static void set_type_quals (tree
, int);
226 static int type_hash_eq (const void *, const void *);
227 static hashval_t
type_hash_hash (const void *);
228 static hashval_t
int_cst_hash_hash (const void *);
229 static int int_cst_hash_eq (const void *, const void *);
230 static hashval_t
cl_option_hash_hash (const void *);
231 static int cl_option_hash_eq (const void *, const void *);
232 static void print_type_hash_statistics (void);
233 static void print_debug_expr_statistics (void);
234 static void print_value_expr_statistics (void);
235 static int type_hash_marked_p (const void *);
236 static unsigned int type_hash_list (const_tree
, hashval_t
);
237 static unsigned int attribute_hash_list (const_tree
, hashval_t
);
238 static bool decls_same_for_odr (tree decl1
, tree decl2
);
240 tree global_trees
[TI_MAX
];
241 tree integer_types
[itk_none
];
243 unsigned char tree_contains_struct
[MAX_TREE_CODES
][64];
245 /* Number of operands for each OpenMP clause. */
246 unsigned const char omp_clause_num_ops
[] =
248 0, /* OMP_CLAUSE_ERROR */
249 1, /* OMP_CLAUSE_PRIVATE */
250 1, /* OMP_CLAUSE_SHARED */
251 1, /* OMP_CLAUSE_FIRSTPRIVATE */
252 2, /* OMP_CLAUSE_LASTPRIVATE */
253 4, /* OMP_CLAUSE_REDUCTION */
254 1, /* OMP_CLAUSE_COPYIN */
255 1, /* OMP_CLAUSE_COPYPRIVATE */
256 3, /* OMP_CLAUSE_LINEAR */
257 2, /* OMP_CLAUSE_ALIGNED */
258 1, /* OMP_CLAUSE_DEPEND */
259 1, /* OMP_CLAUSE_UNIFORM */
260 2, /* OMP_CLAUSE_FROM */
261 2, /* OMP_CLAUSE_TO */
262 2, /* OMP_CLAUSE_MAP */
263 1, /* OMP_CLAUSE__LOOPTEMP_ */
264 1, /* OMP_CLAUSE_IF */
265 1, /* OMP_CLAUSE_NUM_THREADS */
266 1, /* OMP_CLAUSE_SCHEDULE */
267 0, /* OMP_CLAUSE_NOWAIT */
268 0, /* OMP_CLAUSE_ORDERED */
269 0, /* OMP_CLAUSE_DEFAULT */
270 3, /* OMP_CLAUSE_COLLAPSE */
271 0, /* OMP_CLAUSE_UNTIED */
272 1, /* OMP_CLAUSE_FINAL */
273 0, /* OMP_CLAUSE_MERGEABLE */
274 1, /* OMP_CLAUSE_DEVICE */
275 1, /* OMP_CLAUSE_DIST_SCHEDULE */
276 0, /* OMP_CLAUSE_INBRANCH */
277 0, /* OMP_CLAUSE_NOTINBRANCH */
278 1, /* OMP_CLAUSE_NUM_TEAMS */
279 1, /* OMP_CLAUSE_THREAD_LIMIT */
280 0, /* OMP_CLAUSE_PROC_BIND */
281 1, /* OMP_CLAUSE_SAFELEN */
282 1, /* OMP_CLAUSE_SIMDLEN */
283 0, /* OMP_CLAUSE_FOR */
284 0, /* OMP_CLAUSE_PARALLEL */
285 0, /* OMP_CLAUSE_SECTIONS */
286 0, /* OMP_CLAUSE_TASKGROUP */
287 1, /* OMP_CLAUSE__SIMDUID_ */
290 const char * const omp_clause_code_name
[] =
335 /* Return the tree node structure used by tree code CODE. */
337 static inline enum tree_node_structure_enum
338 tree_node_structure_for_code (enum tree_code code
)
340 switch (TREE_CODE_CLASS (code
))
342 case tcc_declaration
:
347 return TS_FIELD_DECL
;
353 return TS_LABEL_DECL
;
355 return TS_RESULT_DECL
;
356 case DEBUG_EXPR_DECL
:
359 return TS_CONST_DECL
;
363 return TS_FUNCTION_DECL
;
364 case TRANSLATION_UNIT_DECL
:
365 return TS_TRANSLATION_UNIT_DECL
;
367 return TS_DECL_NON_COMMON
;
371 return TS_TYPE_NON_COMMON
;
380 default: /* tcc_constant and tcc_exceptional */
385 /* tcc_constant cases. */
386 case VOID_CST
: return TS_TYPED
;
387 case INTEGER_CST
: return TS_INT_CST
;
388 case REAL_CST
: return TS_REAL_CST
;
389 case FIXED_CST
: return TS_FIXED_CST
;
390 case COMPLEX_CST
: return TS_COMPLEX
;
391 case VECTOR_CST
: return TS_VECTOR
;
392 case STRING_CST
: return TS_STRING
;
393 /* tcc_exceptional cases. */
394 case ERROR_MARK
: return TS_COMMON
;
395 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
396 case TREE_LIST
: return TS_LIST
;
397 case TREE_VEC
: return TS_VEC
;
398 case SSA_NAME
: return TS_SSA_NAME
;
399 case PLACEHOLDER_EXPR
: return TS_COMMON
;
400 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
401 case BLOCK
: return TS_BLOCK
;
402 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
403 case TREE_BINFO
: return TS_BINFO
;
404 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
405 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
406 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
414 /* Initialize tree_contains_struct to describe the hierarchy of tree
418 initialize_tree_contains_struct (void)
422 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
425 enum tree_node_structure_enum ts_code
;
427 code
= (enum tree_code
) i
;
428 ts_code
= tree_node_structure_for_code (code
);
430 /* Mark the TS structure itself. */
431 tree_contains_struct
[code
][ts_code
] = 1;
433 /* Mark all the structures that TS is derived from. */
451 case TS_STATEMENT_LIST
:
452 MARK_TS_TYPED (code
);
456 case TS_DECL_MINIMAL
:
462 case TS_OPTIMIZATION
:
463 case TS_TARGET_OPTION
:
464 MARK_TS_COMMON (code
);
467 case TS_TYPE_WITH_LANG_SPECIFIC
:
468 MARK_TS_TYPE_COMMON (code
);
471 case TS_TYPE_NON_COMMON
:
472 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
476 MARK_TS_DECL_MINIMAL (code
);
481 MARK_TS_DECL_COMMON (code
);
484 case TS_DECL_NON_COMMON
:
485 MARK_TS_DECL_WITH_VIS (code
);
488 case TS_DECL_WITH_VIS
:
492 MARK_TS_DECL_WRTL (code
);
496 MARK_TS_DECL_COMMON (code
);
500 MARK_TS_DECL_WITH_VIS (code
);
504 case TS_FUNCTION_DECL
:
505 MARK_TS_DECL_NON_COMMON (code
);
508 case TS_TRANSLATION_UNIT_DECL
:
509 MARK_TS_DECL_COMMON (code
);
517 /* Basic consistency checks for attributes used in fold. */
518 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
519 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
520 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
521 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
522 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
523 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
524 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
525 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
526 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
527 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
528 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
529 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
530 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
531 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
532 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
533 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
534 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
535 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
536 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
537 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
538 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
539 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
540 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
541 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
542 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
543 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
544 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
545 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
546 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
547 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
548 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
549 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
550 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
551 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
552 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
553 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
554 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
555 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
556 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
557 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
566 /* Initialize the hash table of types. */
567 type_hash_table
= htab_create_ggc (TYPE_HASH_INITIAL_SIZE
, type_hash_hash
,
570 debug_expr_for_decl
= htab_create_ggc (512, tree_decl_map_hash
,
571 tree_decl_map_eq
, 0);
573 value_expr_for_decl
= htab_create_ggc (512, tree_decl_map_hash
,
574 tree_decl_map_eq
, 0);
575 init_priority_for_decl
= htab_create_ggc (512, tree_priority_map_hash
,
576 tree_priority_map_eq
, 0);
578 int_cst_hash_table
= htab_create_ggc (1024, int_cst_hash_hash
,
579 int_cst_hash_eq
, NULL
);
581 int_cst_node
= make_int_cst (1, 1);
583 cl_option_hash_table
= htab_create_ggc (64, cl_option_hash_hash
,
584 cl_option_hash_eq
, NULL
);
586 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
587 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
589 /* Initialize the tree_contains_struct array. */
590 initialize_tree_contains_struct ();
591 lang_hooks
.init_ts ();
595 /* The name of the object as the assembler will see it (but before any
596 translations made by ASM_OUTPUT_LABELREF). Often this is the same
597 as DECL_NAME. It is an IDENTIFIER_NODE. */
599 decl_assembler_name (tree decl
)
601 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
602 lang_hooks
.set_decl_assembler_name (decl
);
603 return DECL_WITH_VIS_CHECK (decl
)->decl_with_vis
.assembler_name
;
606 /* Compute the number of bytes occupied by a tree with code CODE.
607 This function cannot be used for nodes that have variable sizes,
608 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
610 tree_code_size (enum tree_code code
)
612 switch (TREE_CODE_CLASS (code
))
614 case tcc_declaration
: /* A decl node */
619 return sizeof (struct tree_field_decl
);
621 return sizeof (struct tree_parm_decl
);
623 return sizeof (struct tree_var_decl
);
625 return sizeof (struct tree_label_decl
);
627 return sizeof (struct tree_result_decl
);
629 return sizeof (struct tree_const_decl
);
631 return sizeof (struct tree_type_decl
);
633 return sizeof (struct tree_function_decl
);
634 case DEBUG_EXPR_DECL
:
635 return sizeof (struct tree_decl_with_rtl
);
637 return sizeof (struct tree_decl_non_common
);
641 case tcc_type
: /* a type node */
642 return sizeof (struct tree_type_non_common
);
644 case tcc_reference
: /* a reference */
645 case tcc_expression
: /* an expression */
646 case tcc_statement
: /* an expression with side effects */
647 case tcc_comparison
: /* a comparison expression */
648 case tcc_unary
: /* a unary arithmetic expression */
649 case tcc_binary
: /* a binary arithmetic expression */
650 return (sizeof (struct tree_exp
)
651 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
653 case tcc_constant
: /* a constant */
656 case VOID_CST
: return sizeof (struct tree_typed
);
657 case INTEGER_CST
: gcc_unreachable ();
658 case REAL_CST
: return sizeof (struct tree_real_cst
);
659 case FIXED_CST
: return sizeof (struct tree_fixed_cst
);
660 case COMPLEX_CST
: return sizeof (struct tree_complex
);
661 case VECTOR_CST
: return sizeof (struct tree_vector
);
662 case STRING_CST
: gcc_unreachable ();
664 return lang_hooks
.tree_size (code
);
667 case tcc_exceptional
: /* something random, like an identifier. */
670 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
671 case TREE_LIST
: return sizeof (struct tree_list
);
674 case PLACEHOLDER_EXPR
: return sizeof (struct tree_common
);
677 case OMP_CLAUSE
: gcc_unreachable ();
679 case SSA_NAME
: return sizeof (struct tree_ssa_name
);
681 case STATEMENT_LIST
: return sizeof (struct tree_statement_list
);
682 case BLOCK
: return sizeof (struct tree_block
);
683 case CONSTRUCTOR
: return sizeof (struct tree_constructor
);
684 case OPTIMIZATION_NODE
: return sizeof (struct tree_optimization_option
);
685 case TARGET_OPTION_NODE
: return sizeof (struct tree_target_option
);
688 return lang_hooks
.tree_size (code
);
696 /* Compute the number of bytes occupied by NODE. This routine only
697 looks at TREE_CODE, except for those nodes that have variable sizes. */
699 tree_size (const_tree node
)
701 const enum tree_code code
= TREE_CODE (node
);
705 return (sizeof (struct tree_int_cst
)
706 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
709 return (offsetof (struct tree_binfo
, base_binfos
)
711 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
714 return (sizeof (struct tree_vec
)
715 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
718 return (sizeof (struct tree_vector
)
719 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node
)) - 1) * sizeof (tree
));
722 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
725 return (sizeof (struct tree_omp_clause
)
726 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
730 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
731 return (sizeof (struct tree_exp
)
732 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
734 return tree_code_size (code
);
738 /* Record interesting allocation statistics for a tree node with CODE
742 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED
,
743 size_t length ATTRIBUTE_UNUSED
)
745 enum tree_code_class type
= TREE_CODE_CLASS (code
);
748 if (!GATHER_STATISTICS
)
753 case tcc_declaration
: /* A decl node */
757 case tcc_type
: /* a type node */
761 case tcc_statement
: /* an expression with side effects */
765 case tcc_reference
: /* a reference */
769 case tcc_expression
: /* an expression */
770 case tcc_comparison
: /* a comparison expression */
771 case tcc_unary
: /* a unary arithmetic expression */
772 case tcc_binary
: /* a binary arithmetic expression */
776 case tcc_constant
: /* a constant */
780 case tcc_exceptional
: /* something random, like an identifier. */
783 case IDENTIFIER_NODE
:
796 kind
= ssa_name_kind
;
808 kind
= omp_clause_kind
;
825 tree_code_counts
[(int) code
]++;
826 tree_node_counts
[(int) kind
]++;
827 tree_node_sizes
[(int) kind
] += length
;
830 /* Allocate and return a new UID from the DECL_UID namespace. */
/* Post-increment: returns the current counter value, then advances it,
   so UIDs are handed out sequentially starting from next_decl_uid's
   initial value.  NOTE(review): the return-type line and the function
   braces are missing from this extraction; confirm against the full
   file.  */
833 allocate_decl_uid (void)
835 return next_decl_uid
++;
838 /* Return a newly allocated node of code CODE. For decl and type
839 nodes, some other fields are initialized. The rest of the node is
840 initialized to zero. This function cannot be used for TREE_VEC,
841 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
844 Achoo! I got a code in the node. */
847 make_node_stat (enum tree_code code MEM_STAT_DECL
)
850 enum tree_code_class type
= TREE_CODE_CLASS (code
);
851 size_t length
= tree_code_size (code
);
853 record_node_allocation_statistics (code
, length
);
855 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
856 TREE_SET_CODE (t
, code
);
861 TREE_SIDE_EFFECTS (t
) = 1;
864 case tcc_declaration
:
865 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
867 if (code
== FUNCTION_DECL
)
869 DECL_ALIGN (t
) = FUNCTION_BOUNDARY
;
870 DECL_MODE (t
) = FUNCTION_MODE
;
875 DECL_SOURCE_LOCATION (t
) = input_location
;
876 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
877 DECL_UID (t
) = --next_debug_decl_uid
;
880 DECL_UID (t
) = allocate_decl_uid ();
881 SET_DECL_PT_UID (t
, -1);
883 if (TREE_CODE (t
) == LABEL_DECL
)
884 LABEL_DECL_UID (t
) = -1;
889 TYPE_UID (t
) = next_type_uid
++;
890 TYPE_ALIGN (t
) = BITS_PER_UNIT
;
891 TYPE_USER_ALIGN (t
) = 0;
892 TYPE_MAIN_VARIANT (t
) = t
;
893 TYPE_CANONICAL (t
) = t
;
895 /* Default to no attributes for type, but let target change that. */
896 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
897 targetm
.set_default_type_attributes (t
);
899 /* We have not yet computed the alias set for this type. */
900 TYPE_ALIAS_SET (t
) = -1;
904 TREE_CONSTANT (t
) = 1;
913 case PREDECREMENT_EXPR
:
914 case PREINCREMENT_EXPR
:
915 case POSTDECREMENT_EXPR
:
916 case POSTINCREMENT_EXPR
:
917 /* All of these have side-effects, no matter what their
919 TREE_SIDE_EFFECTS (t
) = 1;
928 /* Other classes need no special treatment. */
935 /* Return a new node with the same contents as NODE except that its
936 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
939 copy_node_stat (tree node MEM_STAT_DECL
)
942 enum tree_code code
= TREE_CODE (node
);
945 gcc_assert (code
!= STATEMENT_LIST
);
947 length
= tree_size (node
);
948 record_node_allocation_statistics (code
, length
);
949 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
950 memcpy (t
, node
, length
);
952 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
954 TREE_ASM_WRITTEN (t
) = 0;
955 TREE_VISITED (t
) = 0;
957 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
959 if (code
== DEBUG_EXPR_DECL
)
960 DECL_UID (t
) = --next_debug_decl_uid
;
963 DECL_UID (t
) = allocate_decl_uid ();
964 if (DECL_PT_UID_SET_P (node
))
965 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
967 if ((TREE_CODE (node
) == PARM_DECL
|| TREE_CODE (node
) == VAR_DECL
)
968 && DECL_HAS_VALUE_EXPR_P (node
))
970 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
971 DECL_HAS_VALUE_EXPR_P (t
) = 1;
973 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
974 if (TREE_CODE (node
) == VAR_DECL
)
975 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
976 if (TREE_CODE (node
) == VAR_DECL
&& DECL_HAS_INIT_PRIORITY_P (node
))
978 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
979 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
981 if (TREE_CODE (node
) == FUNCTION_DECL
)
982 DECL_STRUCT_FUNCTION (t
) = NULL
;
984 else if (TREE_CODE_CLASS (code
) == tcc_type
)
986 TYPE_UID (t
) = next_type_uid
++;
987 /* The following is so that the debug code for
988 the copy is different from the original type.
989 The two statements usually duplicate each other
990 (because they clear fields of the same union),
991 but the optimizer should catch that. */
992 TYPE_SYMTAB_POINTER (t
) = 0;
993 TYPE_SYMTAB_ADDRESS (t
) = 0;
995 /* Do not copy the values cache. */
996 if (TYPE_CACHED_VALUES_P (t
))
998 TYPE_CACHED_VALUES_P (t
) = 0;
999 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1006 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1007 For example, this can copy a list made of TREE_LIST nodes. */
1010 copy_list (tree list
)
1018 head
= prev
= copy_node (list
);
1019 next
= TREE_CHAIN (list
);
1022 TREE_CHAIN (prev
) = copy_node (next
);
1023 prev
= TREE_CHAIN (prev
);
1024 next
= TREE_CHAIN (next
);
1030 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1031 INTEGER_CST with value CST and type TYPE. */
1034 get_int_cst_ext_nunits (tree type
, const wide_int
&cst
)
1036 gcc_checking_assert (cst
.get_precision () == TYPE_PRECISION (type
));
1037 /* We need an extra zero HWI if CST is an unsigned integer with its
1038 upper bit set, and if CST occupies a whole number of HWIs. */
1039 if (TYPE_UNSIGNED (type
)
1041 && (cst
.get_precision () % HOST_BITS_PER_WIDE_INT
) == 0)
1042 return cst
.get_precision () / HOST_BITS_PER_WIDE_INT
+ 1;
1043 return cst
.get_len ();
1046 /* Return a new INTEGER_CST with value CST and type TYPE. */
1049 build_new_int_cst (tree type
, const wide_int
&cst
)
1051 unsigned int len
= cst
.get_len ();
1052 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1053 tree nt
= make_int_cst (len
, ext_len
);
1058 TREE_INT_CST_ELT (nt
, ext_len
) = 0;
1059 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1060 TREE_INT_CST_ELT (nt
, i
) = -1;
1062 else if (TYPE_UNSIGNED (type
)
1063 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
1066 TREE_INT_CST_ELT (nt
, len
)
1067 = zext_hwi (cst
.elt (len
),
1068 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1071 for (unsigned int i
= 0; i
< len
; i
++)
1072 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1073 TREE_TYPE (nt
) = type
;
1077 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
/* NOTE(review): the return-type line, function braces, and the guard
   condition before the integer_type_node fallback (presumably a
   null-TYPE check) are missing from this extraction; confirm against
   the full file.  The fallback exists so legacy callers may pass a
   null type and get a plain int constant.  */
1080 build_int_cst (tree type
, HOST_WIDE_INT low
)
1082 /* Support legacy code. */
1084 type
= integer_type_node
;
/* wi::shwi builds a signed HOST_WIDE_INT wide_int at TYPE's precision;
   wide_int_to_tree returns the shared INTEGER_CST for it.  */
1086 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
/* Create an INT_CST node with value CST zero extended to TYPE
   (wi::uhwi treats CST as unsigned, unlike build_int_cst's wi::shwi).
   NOTE(review): the return-type line and function braces are missing
   from this extraction; confirm against the full file.  */
1090 build_int_cstu (tree type
, unsigned HOST_WIDE_INT cst
)
1092 return wide_int_to_tree (type
, wi::uhwi (cst
, TYPE_PRECISION (type
)));
1095 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
/* Like build_int_cst, but with no null-TYPE fallback visible here --
   TYPE appears to be required.  NOTE(review): the return-type line and
   function braces are missing from this extraction; confirm against
   the full file.  */
1098 build_int_cst_type (tree type
, HOST_WIDE_INT low
)
1101 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1104 /* Constructs tree in type TYPE with value given by CST. Signedness
1105 of CST is assumed to be the same as the signedness of TYPE. */
/* Bridges the legacy double_int representation to the wide_int path:
   CST is widened per TYPE's signedness, then interned via
   wide_int_to_tree.  NOTE(review): the return-type line and function
   braces are missing from this extraction; confirm against the full
   file.  */
1108 double_int_to_tree (tree type
, double_int cst
)
1110 return wide_int_to_tree (type
, widest_int::from (cst
, TYPE_SIGN (type
)));
1113 /* We force the wide_int CST to the range of the type TYPE by sign or
1114 zero extending it. OVERFLOWABLE indicates if we are interested in
1115 overflow of the value, when >0 we are only interested in signed
1116 overflow, for <0 we are interested in any overflow. OVERFLOWED
1117 indicates whether overflow has already occurred. CONST_OVERFLOWED
1118 indicates whether constant overflow has already occurred. We force
1119 T's value to be within range of T's type (by setting to 0 or 1 all
1120 the bits outside the type's range). We set TREE_OVERFLOWED if,
1121 OVERFLOWED is nonzero,
1122 or OVERFLOWABLE is >0 and signed overflow occurs
1123 or OVERFLOWABLE is <0 and any overflow occurs
1124 We return a new tree node for the extended wide_int. The node
1125 is shared if no overflow flags are set. */
1129 force_fit_type (tree type
, const wide_int_ref
&cst
,
1130 int overflowable
, bool overflowed
)
1132 signop sign
= TYPE_SIGN (type
);
1134 /* If we need to set overflow flags, return a new unshared node. */
1135 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1139 || (overflowable
> 0 && sign
== SIGNED
))
1141 wide_int tmp
= wide_int::from (cst
, TYPE_PRECISION (type
), sign
);
1142 tree t
= build_new_int_cst (type
, tmp
);
1143 TREE_OVERFLOW (t
) = 1;
1148 /* Else build a shared node. */
1149 return wide_int_to_tree (type
, cst
);
1152 /* These are the hash table functions for the hash table of INTEGER_CST
1153 nodes of a sizetype. */
1155 /* Return the hash code code X, an INTEGER_CST. */
1158 int_cst_hash_hash (const void *x
)
1160 const_tree
const t
= (const_tree
) x
;
1161 hashval_t code
= htab_hash_pointer (TREE_TYPE (t
));
1164 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1165 code
^= TREE_INT_CST_ELT (t
, i
);
1170 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1171 is the same as that given by *Y, which is the same. */
1174 int_cst_hash_eq (const void *x
, const void *y
)
1176 const_tree
const xt
= (const_tree
) x
;
1177 const_tree
const yt
= (const_tree
) y
;
1179 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1180 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1181 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1184 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1185 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1191 /* Create an INT_CST node of TYPE and value CST.
1192 The returned node is always shared. For small integers we use a
1193 per-type vector cache, for larger ones we use a single hash table.
1194 The value is extended from its precision according to the sign of
1195 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1196 the upper bits and ensures that hashing and value equality based
1197 upon the underlying HOST_WIDE_INTs works without masking. */
1200 wide_int_to_tree (tree type
, const wide_int_ref
&pcst
)
1207 unsigned int prec
= TYPE_PRECISION (type
);
1208 signop sgn
= TYPE_SIGN (type
);
1210 /* Verify that everything is canonical. */
1211 int l
= pcst
.get_len ();
1214 if (pcst
.elt (l
- 1) == 0)
1215 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1216 if (pcst
.elt (l
- 1) == (HOST_WIDE_INT
) -1)
1217 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1220 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1221 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1225 /* We just need to store a single HOST_WIDE_INT. */
1227 if (TYPE_UNSIGNED (type
))
1228 hwi
= cst
.to_uhwi ();
1230 hwi
= cst
.to_shwi ();
1232 switch (TREE_CODE (type
))
1235 gcc_assert (hwi
== 0);
1239 case REFERENCE_TYPE
:
1240 /* Cache NULL pointer. */
1249 /* Cache false or true. */
1257 if (TYPE_SIGN (type
) == UNSIGNED
)
1260 limit
= INTEGER_SHARE_LIMIT
;
1261 if (IN_RANGE (hwi
, 0, INTEGER_SHARE_LIMIT
- 1))
1266 /* Cache [-1, N). */
1267 limit
= INTEGER_SHARE_LIMIT
+ 1;
1268 if (IN_RANGE (hwi
, -1, INTEGER_SHARE_LIMIT
- 1))
1282 /* Look for it in the type's vector of small shared ints. */
1283 if (!TYPE_CACHED_VALUES_P (type
))
1285 TYPE_CACHED_VALUES_P (type
) = 1;
1286 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1289 t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
);
1291 /* Make sure no one is clobbering the shared constant. */
1292 gcc_checking_assert (TREE_TYPE (t
) == type
1293 && TREE_INT_CST_NUNITS (t
) == 1
1294 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1295 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1296 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1299 /* Create a new shared int. */
1300 t
= build_new_int_cst (type
, cst
);
1301 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1306 /* Use the cache of larger shared ints, using int_cst_node as
1310 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1311 TREE_TYPE (int_cst_node
) = type
;
1313 slot
= htab_find_slot (int_cst_hash_table
, int_cst_node
, INSERT
);
1317 /* Insert this one into the hash table. */
1320 /* Make a new node for next time round. */
1321 int_cst_node
= make_int_cst (1, 1);
1327 /* The value either hashes properly or we drop it on the floor
1328 for the gc to take care of. There will not be enough of them
1332 tree nt
= build_new_int_cst (type
, cst
);
1333 slot
= htab_find_slot (int_cst_hash_table
, nt
, INSERT
);
1337 /* Insert this one into the hash table. */
1347 cache_integer_cst (tree t
)
1349 tree type
= TREE_TYPE (t
);
1352 int prec
= TYPE_PRECISION (type
);
1354 gcc_assert (!TREE_OVERFLOW (t
));
1356 switch (TREE_CODE (type
))
1359 gcc_assert (integer_zerop (t
));
1363 case REFERENCE_TYPE
:
1364 /* Cache NULL pointer. */
1365 if (integer_zerop (t
))
1373 /* Cache false or true. */
1375 if (wi::ltu_p (t
, 2))
1376 ix
= TREE_INT_CST_ELT (t
, 0);
1381 if (TYPE_UNSIGNED (type
))
1384 limit
= INTEGER_SHARE_LIMIT
;
1386 /* This is a little hokie, but if the prec is smaller than
1387 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1388 obvious test will not get the correct answer. */
1389 if (prec
< HOST_BITS_PER_WIDE_INT
)
1391 if (tree_to_uhwi (t
) < (unsigned HOST_WIDE_INT
) INTEGER_SHARE_LIMIT
)
1392 ix
= tree_to_uhwi (t
);
1394 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1395 ix
= tree_to_uhwi (t
);
1400 limit
= INTEGER_SHARE_LIMIT
+ 1;
1402 if (integer_minus_onep (t
))
1404 else if (!wi::neg_p (t
))
1406 if (prec
< HOST_BITS_PER_WIDE_INT
)
1408 if (tree_to_shwi (t
) < INTEGER_SHARE_LIMIT
)
1409 ix
= tree_to_shwi (t
) + 1;
1411 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1412 ix
= tree_to_shwi (t
) + 1;
1426 /* Look for it in the type's vector of small shared ints. */
1427 if (!TYPE_CACHED_VALUES_P (type
))
1429 TYPE_CACHED_VALUES_P (type
) = 1;
1430 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1433 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) == NULL_TREE
);
1434 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1438 /* Use the cache of larger shared ints. */
1441 slot
= htab_find_slot (int_cst_hash_table
, t
, INSERT
);
1442 /* If there is already an entry for the number verify it's the
1445 gcc_assert (wi::eq_p (tree (*slot
), t
));
1447 /* Otherwise insert this one into the hash table. */
1453 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1454 and the rest are zeros. */
1457 build_low_bits_mask (tree type
, unsigned bits
)
1459 gcc_assert (bits
<= TYPE_PRECISION (type
));
1461 return wide_int_to_tree (type
, wi::mask (bits
, false,
1462 TYPE_PRECISION (type
)));
1465 /* Checks that X is integer constant that can be expressed in (unsigned)
1466 HOST_WIDE_INT without loss of precision. */
1469 cst_and_fits_in_hwi (const_tree x
)
1471 if (TREE_CODE (x
) != INTEGER_CST
)
1474 if (TYPE_PRECISION (TREE_TYPE (x
)) > HOST_BITS_PER_WIDE_INT
)
1477 return TREE_INT_CST_NUNITS (x
) == 1;
1480 /* Build a newly constructed TREE_VEC node of length LEN. */
1483 make_vector_stat (unsigned len MEM_STAT_DECL
)
1486 unsigned length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vector
);
1488 record_node_allocation_statistics (VECTOR_CST
, length
);
1490 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1492 TREE_SET_CODE (t
, VECTOR_CST
);
1493 TREE_CONSTANT (t
) = 1;
1498 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1499 are in a list pointed to by VALS. */
1502 build_vector_stat (tree type
, tree
*vals MEM_STAT_DECL
)
1506 tree v
= make_vector (TYPE_VECTOR_SUBPARTS (type
));
1507 TREE_TYPE (v
) = type
;
1509 /* Iterate through elements and check for overflow. */
1510 for (cnt
= 0; cnt
< TYPE_VECTOR_SUBPARTS (type
); ++cnt
)
1512 tree value
= vals
[cnt
];
1514 VECTOR_CST_ELT (v
, cnt
) = value
;
1516 /* Don't crash if we get an address constant. */
1517 if (!CONSTANT_CLASS_P (value
))
1520 over
|= TREE_OVERFLOW (value
);
1523 TREE_OVERFLOW (v
) = over
;
1527 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1528 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1531 build_vector_from_ctor (tree type
, vec
<constructor_elt
, va_gc
> *v
)
1533 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
1534 unsigned HOST_WIDE_INT idx
;
1537 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
1539 for (; idx
< TYPE_VECTOR_SUBPARTS (type
); ++idx
)
1540 vec
[idx
] = build_zero_cst (TREE_TYPE (type
));
1542 return build_vector (type
, vec
);
1545 /* Build a vector of type VECTYPE where all the elements are SCs. */
1547 build_vector_from_val (tree vectype
, tree sc
)
1549 int i
, nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
1551 if (sc
== error_mark_node
)
1554 /* Verify that the vector type is suitable for SC. Note that there
1555 is some inconsistency in the type-system with respect to restrict
1556 qualifications of pointers. Vector types always have a main-variant
1557 element type and the qualification is applied to the vector-type.
1558 So TREE_TYPE (vector-type) does not return a properly qualified
1559 vector element-type. */
1560 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
1561 TREE_TYPE (vectype
)));
1563 if (CONSTANT_CLASS_P (sc
))
1565 tree
*v
= XALLOCAVEC (tree
, nunits
);
1566 for (i
= 0; i
< nunits
; ++i
)
1568 return build_vector (vectype
, v
);
1572 vec
<constructor_elt
, va_gc
> *v
;
1573 vec_alloc (v
, nunits
);
1574 for (i
= 0; i
< nunits
; ++i
)
1575 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
1576 return build_constructor (vectype
, v
);
1580 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1581 are in the vec pointed to by VALS. */
1583 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals
)
1585 tree c
= make_node (CONSTRUCTOR
);
1587 constructor_elt
*elt
;
1588 bool constant_p
= true;
1589 bool side_effects_p
= false;
1591 TREE_TYPE (c
) = type
;
1592 CONSTRUCTOR_ELTS (c
) = vals
;
1594 FOR_EACH_VEC_SAFE_ELT (vals
, i
, elt
)
1596 /* Mostly ctors will have elts that don't have side-effects, so
1597 the usual case is to scan all the elements. Hence a single
1598 loop for both const and side effects, rather than one loop
1599 each (with early outs). */
1600 if (!TREE_CONSTANT (elt
->value
))
1602 if (TREE_SIDE_EFFECTS (elt
->value
))
1603 side_effects_p
= true;
1606 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
1607 TREE_CONSTANT (c
) = constant_p
;
1612 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1615 build_constructor_single (tree type
, tree index
, tree value
)
1617 vec
<constructor_elt
, va_gc
> *v
;
1618 constructor_elt elt
= {index
, value
};
1621 v
->quick_push (elt
);
1623 return build_constructor (type
, v
);
1627 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1628 are in a list pointed to by VALS. */
1630 build_constructor_from_list (tree type
, tree vals
)
1633 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1637 vec_alloc (v
, list_length (vals
));
1638 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
1639 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
1642 return build_constructor (type
, v
);
1645 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1646 of elements, provided as index/value pairs. */
1649 build_constructor_va (tree type
, int nelts
, ...)
1651 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1654 va_start (p
, nelts
);
1655 vec_alloc (v
, nelts
);
1658 tree index
= va_arg (p
, tree
);
1659 tree value
= va_arg (p
, tree
);
1660 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
1663 return build_constructor (type
, v
);
1666 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1669 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
1672 FIXED_VALUE_TYPE
*fp
;
1674 v
= make_node (FIXED_CST
);
1675 fp
= ggc_alloc
<fixed_value
> ();
1676 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
1678 TREE_TYPE (v
) = type
;
1679 TREE_FIXED_CST_PTR (v
) = fp
;
1683 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1686 build_real (tree type
, REAL_VALUE_TYPE d
)
1689 REAL_VALUE_TYPE
*dp
;
1692 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1693 Consider doing it via real_convert now. */
1695 v
= make_node (REAL_CST
);
1696 dp
= ggc_alloc
<real_value
> ();
1697 memcpy (dp
, &d
, sizeof (REAL_VALUE_TYPE
));
1699 TREE_TYPE (v
) = type
;
1700 TREE_REAL_CST_PTR (v
) = dp
;
1701 TREE_OVERFLOW (v
) = overflow
;
1705 /* Return a new REAL_CST node whose type is TYPE
1706 and whose value is the integer value of the INTEGER_CST node I. */
1709 real_value_from_int_cst (const_tree type
, const_tree i
)
1713 /* Clear all bits of the real value type so that we can later do
1714 bitwise comparisons to see if two values are the same. */
1715 memset (&d
, 0, sizeof d
);
1717 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, i
,
1718 TYPE_SIGN (TREE_TYPE (i
)));
1722 /* Given a tree representing an integer constant I, return a tree
1723 representing the same value as a floating-point constant of type TYPE. */
1726 build_real_from_int_cst (tree type
, const_tree i
)
1729 int overflow
= TREE_OVERFLOW (i
);
1731 v
= build_real (type
, real_value_from_int_cst (type
, i
));
1733 TREE_OVERFLOW (v
) |= overflow
;
1737 /* Return a newly constructed STRING_CST node whose value is
1738 the LEN characters at STR.
1739 Note that for a C string literal, LEN should include the trailing NUL.
1740 The TREE_TYPE is not initialized. */
1743 build_string (int len
, const char *str
)
1748 /* Do not waste bytes provided by padding of struct tree_string. */
1749 length
= len
+ offsetof (struct tree_string
, str
) + 1;
1751 record_node_allocation_statistics (STRING_CST
, length
);
1753 s
= (tree
) ggc_internal_alloc (length
);
1755 memset (s
, 0, sizeof (struct tree_typed
));
1756 TREE_SET_CODE (s
, STRING_CST
);
1757 TREE_CONSTANT (s
) = 1;
1758 TREE_STRING_LENGTH (s
) = len
;
1759 memcpy (s
->string
.str
, str
, len
);
1760 s
->string
.str
[len
] = '\0';
1765 /* Return a newly constructed COMPLEX_CST node whose value is
1766 specified by the real and imaginary parts REAL and IMAG.
1767 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1768 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1771 build_complex (tree type
, tree real
, tree imag
)
1773 tree t
= make_node (COMPLEX_CST
);
1775 TREE_REALPART (t
) = real
;
1776 TREE_IMAGPART (t
) = imag
;
1777 TREE_TYPE (t
) = type
? type
: build_complex_type (TREE_TYPE (real
));
1778 TREE_OVERFLOW (t
) = TREE_OVERFLOW (real
) | TREE_OVERFLOW (imag
);
1782 /* Return a constant of arithmetic type TYPE which is the
1783 multiplicative identity of the set TYPE. */
1786 build_one_cst (tree type
)
1788 switch (TREE_CODE (type
))
1790 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1791 case POINTER_TYPE
: case REFERENCE_TYPE
:
1793 return build_int_cst (type
, 1);
1796 return build_real (type
, dconst1
);
1798 case FIXED_POINT_TYPE
:
1799 /* We can only generate 1 for accum types. */
1800 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
1801 return build_fixed (type
, FCONST1 (TYPE_MODE (type
)));
1805 tree scalar
= build_one_cst (TREE_TYPE (type
));
1807 return build_vector_from_val (type
, scalar
);
1811 return build_complex (type
,
1812 build_one_cst (TREE_TYPE (type
)),
1813 build_zero_cst (TREE_TYPE (type
)));
1820 /* Return an integer of type TYPE containing all 1's in as much precision as
1821 it contains, or a complex or vector whose subparts are such integers. */
1824 build_all_ones_cst (tree type
)
1826 if (TREE_CODE (type
) == COMPLEX_TYPE
)
1828 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
1829 return build_complex (type
, scalar
, scalar
);
1832 return build_minus_one_cst (type
);
1835 /* Return a constant of arithmetic type TYPE which is the
1836 opposite of the multiplicative identity of the set TYPE. */
1839 build_minus_one_cst (tree type
)
1841 switch (TREE_CODE (type
))
1843 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1844 case POINTER_TYPE
: case REFERENCE_TYPE
:
1846 return build_int_cst (type
, -1);
1849 return build_real (type
, dconstm1
);
1851 case FIXED_POINT_TYPE
:
1852 /* We can only generate 1 for accum types. */
1853 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
1854 return build_fixed (type
, fixed_from_double_int (double_int_minus_one
,
1859 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
1861 return build_vector_from_val (type
, scalar
);
1865 return build_complex (type
,
1866 build_minus_one_cst (TREE_TYPE (type
)),
1867 build_zero_cst (TREE_TYPE (type
)));
1874 /* Build 0 constant of type TYPE. This is used by constructor folding
1875 and thus the constant should be represented in memory by
1879 build_zero_cst (tree type
)
1881 switch (TREE_CODE (type
))
1883 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1884 case POINTER_TYPE
: case REFERENCE_TYPE
:
1885 case OFFSET_TYPE
: case NULLPTR_TYPE
:
1886 return build_int_cst (type
, 0);
1889 return build_real (type
, dconst0
);
1891 case FIXED_POINT_TYPE
:
1892 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
1896 tree scalar
= build_zero_cst (TREE_TYPE (type
));
1898 return build_vector_from_val (type
, scalar
);
1903 tree zero
= build_zero_cst (TREE_TYPE (type
));
1905 return build_complex (type
, zero
, zero
);
1909 if (!AGGREGATE_TYPE_P (type
))
1910 return fold_convert (type
, integer_zero_node
);
1911 return build_constructor (type
, NULL
);
1916 /* Build a BINFO with LEN language slots. */
1919 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL
)
1922 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
1923 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
1925 record_node_allocation_statistics (TREE_BINFO
, length
);
1927 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1929 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
1931 TREE_SET_CODE (t
, TREE_BINFO
);
1933 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
1938 /* Create a CASE_LABEL_EXPR tree node and return it. */
1941 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
1943 tree t
= make_node (CASE_LABEL_EXPR
);
1945 TREE_TYPE (t
) = void_type_node
;
1946 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
1948 CASE_LOW (t
) = low_value
;
1949 CASE_HIGH (t
) = high_value
;
1950 CASE_LABEL (t
) = label_decl
;
1951 CASE_CHAIN (t
) = NULL_TREE
;
1956 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
1957 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
1958 The latter determines the length of the HOST_WIDE_INT vector. */
1961 make_int_cst_stat (int len
, int ext_len MEM_STAT_DECL
)
1964 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
1965 + sizeof (struct tree_int_cst
));
1968 record_node_allocation_statistics (INTEGER_CST
, length
);
1970 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1972 TREE_SET_CODE (t
, INTEGER_CST
);
1973 TREE_INT_CST_NUNITS (t
) = len
;
1974 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
1975 /* to_offset can only be applied to trees that are offset_int-sized
1976 or smaller. EXT_LEN is correct if it fits, otherwise the constant
1977 must be exactly the precision of offset_int and so LEN is correct. */
1978 if (ext_len
<= OFFSET_INT_ELTS
)
1979 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
1981 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
1983 TREE_CONSTANT (t
) = 1;
1988 /* Build a newly constructed TREE_VEC node of length LEN. */
1991 make_tree_vec_stat (int len MEM_STAT_DECL
)
1994 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
1996 record_node_allocation_statistics (TREE_VEC
, length
);
1998 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2000 TREE_SET_CODE (t
, TREE_VEC
);
2001 TREE_VEC_LENGTH (t
) = len
;
2006 /* Grow a TREE_VEC node to new length LEN. */
2009 grow_tree_vec_stat (tree v
, int len MEM_STAT_DECL
)
2011 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2013 int oldlen
= TREE_VEC_LENGTH (v
);
2014 gcc_assert (len
> oldlen
);
2016 int oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2017 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2019 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2021 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2023 TREE_VEC_LENGTH (v
) = len
;
2028 /* Return 1 if EXPR is the integer constant zero or a complex constant
2032 integer_zerop (const_tree expr
)
2036 switch (TREE_CODE (expr
))
2039 return wi::eq_p (expr
, 0);
2041 return (integer_zerop (TREE_REALPART (expr
))
2042 && integer_zerop (TREE_IMAGPART (expr
)));
2046 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2047 if (!integer_zerop (VECTOR_CST_ELT (expr
, i
)))
2056 /* Return 1 if EXPR is the integer constant one or the corresponding
2057 complex constant. */
2060 integer_onep (const_tree expr
)
2064 switch (TREE_CODE (expr
))
2067 return wi::eq_p (wi::to_widest (expr
), 1);
2069 return (integer_onep (TREE_REALPART (expr
))
2070 && integer_zerop (TREE_IMAGPART (expr
)));
2074 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2075 if (!integer_onep (VECTOR_CST_ELT (expr
, i
)))
2084 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2085 it contains, or a complex or vector whose subparts are such integers. */
2088 integer_all_onesp (const_tree expr
)
2092 if (TREE_CODE (expr
) == COMPLEX_CST
2093 && integer_all_onesp (TREE_REALPART (expr
))
2094 && integer_all_onesp (TREE_IMAGPART (expr
)))
2097 else if (TREE_CODE (expr
) == VECTOR_CST
)
2100 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2101 if (!integer_all_onesp (VECTOR_CST_ELT (expr
, i
)))
2106 else if (TREE_CODE (expr
) != INTEGER_CST
)
2109 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
) == expr
;
2112 /* Return 1 if EXPR is the integer constant minus one. */
2115 integer_minus_onep (const_tree expr
)
2119 if (TREE_CODE (expr
) == COMPLEX_CST
)
2120 return (integer_all_onesp (TREE_REALPART (expr
))
2121 && integer_zerop (TREE_IMAGPART (expr
)));
2123 return integer_all_onesp (expr
);
2126 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2130 integer_pow2p (const_tree expr
)
2134 if (TREE_CODE (expr
) == COMPLEX_CST
2135 && integer_pow2p (TREE_REALPART (expr
))
2136 && integer_zerop (TREE_IMAGPART (expr
)))
2139 if (TREE_CODE (expr
) != INTEGER_CST
)
2142 return wi::popcount (expr
) == 1;
2145 /* Return 1 if EXPR is an integer constant other than zero or a
2146 complex constant other than zero. */
2149 integer_nonzerop (const_tree expr
)
2153 return ((TREE_CODE (expr
) == INTEGER_CST
2154 && !wi::eq_p (expr
, 0))
2155 || (TREE_CODE (expr
) == COMPLEX_CST
2156 && (integer_nonzerop (TREE_REALPART (expr
))
2157 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2160 /* Return 1 if EXPR is the fixed-point constant zero. */
2163 fixed_zerop (const_tree expr
)
2165 return (TREE_CODE (expr
) == FIXED_CST
2166 && TREE_FIXED_CST (expr
).data
.is_zero ());
2169 /* Return the power of two represented by a tree node known to be a
2173 tree_log2 (const_tree expr
)
2177 if (TREE_CODE (expr
) == COMPLEX_CST
)
2178 return tree_log2 (TREE_REALPART (expr
));
2180 return wi::exact_log2 (expr
);
2183 /* Similar, but return the largest integer Y such that 2 ** Y is less
2184 than or equal to EXPR. */
2187 tree_floor_log2 (const_tree expr
)
2191 if (TREE_CODE (expr
) == COMPLEX_CST
)
2192 return tree_log2 (TREE_REALPART (expr
));
2194 return wi::floor_log2 (expr
);
2197 /* Return number of known trailing zero bits in EXPR, or, if the value of
2198 EXPR is known to be zero, the precision of it's type. */
2201 tree_ctz (const_tree expr
)
2203 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2204 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2207 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2208 switch (TREE_CODE (expr
))
2211 ret1
= wi::ctz (expr
);
2212 return MIN (ret1
, prec
);
2214 ret1
= wi::ctz (get_nonzero_bits (expr
));
2215 return MIN (ret1
, prec
);
2222 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2225 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2226 return MIN (ret1
, ret2
);
2227 case POINTER_PLUS_EXPR
:
2228 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2229 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2230 /* Second operand is sizetype, which could be in theory
2231 wider than pointer's precision. Make sure we never
2232 return more than prec. */
2233 ret2
= MIN (ret2
, prec
);
2234 return MIN (ret1
, ret2
);
2236 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2237 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2238 return MAX (ret1
, ret2
);
2240 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2241 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2242 return MIN (ret1
+ ret2
, prec
);
2244 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2245 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2246 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2248 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2249 return MIN (ret1
+ ret2
, prec
);
2253 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2254 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2256 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2257 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2262 case TRUNC_DIV_EXPR
:
2264 case FLOOR_DIV_EXPR
:
2265 case ROUND_DIV_EXPR
:
2266 case EXACT_DIV_EXPR
:
2267 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
2268 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
2270 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
2273 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2281 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2282 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
2284 return MIN (ret1
, prec
);
2286 return tree_ctz (TREE_OPERAND (expr
, 0));
2288 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
2291 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
2292 return MIN (ret1
, ret2
);
2294 return tree_ctz (TREE_OPERAND (expr
, 1));
2296 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
2297 if (ret1
> BITS_PER_UNIT
)
2299 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
2300 return MIN (ret1
, prec
);
2308 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2309 decimal float constants, so don't return 1 for them. */
2312 real_zerop (const_tree expr
)
2316 switch (TREE_CODE (expr
))
2319 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconst0
)
2320 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2322 return real_zerop (TREE_REALPART (expr
))
2323 && real_zerop (TREE_IMAGPART (expr
));
2327 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2328 if (!real_zerop (VECTOR_CST_ELT (expr
, i
)))
2337 /* Return 1 if EXPR is the real constant one in real or complex form.
2338 Trailing zeroes matter for decimal float constants, so don't return
2342 real_onep (const_tree expr
)
2346 switch (TREE_CODE (expr
))
2349 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconst1
)
2350 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2352 return real_onep (TREE_REALPART (expr
))
2353 && real_zerop (TREE_IMAGPART (expr
));
2357 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2358 if (!real_onep (VECTOR_CST_ELT (expr
, i
)))
2367 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2368 matter for decimal float constants, so don't return 1 for them. */
2371 real_minus_onep (const_tree expr
)
2375 switch (TREE_CODE (expr
))
2378 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconstm1
)
2379 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2381 return real_minus_onep (TREE_REALPART (expr
))
2382 && real_zerop (TREE_IMAGPART (expr
));
2386 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2387 if (!real_minus_onep (VECTOR_CST_ELT (expr
, i
)))
2396 /* Nonzero if EXP is a constant or a cast of a constant. */
2399 really_constant_p (const_tree exp
)
2401 /* This is not quite the same as STRIP_NOPS. It does more. */
2402 while (CONVERT_EXPR_P (exp
)
2403 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
2404 exp
= TREE_OPERAND (exp
, 0);
2405 return TREE_CONSTANT (exp
);
2408 /* Return first list element whose TREE_VALUE is ELEM.
2409 Return 0 if ELEM is not in LIST. */
2412 value_member (tree elem
, tree list
)
2416 if (elem
== TREE_VALUE (list
))
2418 list
= TREE_CHAIN (list
);
2423 /* Return first list element whose TREE_PURPOSE is ELEM.
2424 Return 0 if ELEM is not in LIST. */
2427 purpose_member (const_tree elem
, tree list
)
2431 if (elem
== TREE_PURPOSE (list
))
2433 list
= TREE_CHAIN (list
);
2438 /* Return true if ELEM is in V. */
2441 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
2445 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
2451 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2455 chain_index (int idx
, tree chain
)
2457 for (; chain
&& idx
> 0; --idx
)
2458 chain
= TREE_CHAIN (chain
);
2462 /* Return nonzero if ELEM is part of the chain CHAIN. */
2465 chain_member (const_tree elem
, const_tree chain
)
2471 chain
= DECL_CHAIN (chain
);
2477 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2478 We expect a null pointer to mark the end of the chain.
2479 This is the Lisp primitive `length'. */
2482 list_length (const_tree t
)
2485 #ifdef ENABLE_TREE_CHECKING
2493 #ifdef ENABLE_TREE_CHECKING
2496 gcc_assert (p
!= q
);
2504 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2505 UNION_TYPE TYPE, or NULL_TREE if none. */
2508 first_field (const_tree type
)
2510 tree t
= TYPE_FIELDS (type
);
2511 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
2516 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2517 by modifying the last node in chain 1 to point to chain 2.
2518 This is the Lisp primitive `nconc'. */
2521 chainon (tree op1
, tree op2
)
2530 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
2532 TREE_CHAIN (t1
) = op2
;
2534 #ifdef ENABLE_TREE_CHECKING
2537 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
2538 gcc_assert (t2
!= t1
);
2545 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2548 tree_last (tree chain
)
2552 while ((next
= TREE_CHAIN (chain
)))
2557 /* Reverse the order of elements in the chain T,
2558 and return the new head of the chain (old last element). */
2563 tree prev
= 0, decl
, next
;
2564 for (decl
= t
; decl
; decl
= next
)
2566 /* We shouldn't be using this function to reverse BLOCK chains; we
2567 have blocks_nreverse for that. */
2568 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
2569 next
= TREE_CHAIN (decl
);
2570 TREE_CHAIN (decl
) = prev
;
2576 /* Return a newly created TREE_LIST node whose
2577 purpose and value fields are PARM and VALUE. */
2580 build_tree_list_stat (tree parm
, tree value MEM_STAT_DECL
)
2582 tree t
= make_node_stat (TREE_LIST PASS_MEM_STAT
);
2583 TREE_PURPOSE (t
) = parm
;
2584 TREE_VALUE (t
) = value
;
2588 /* Build a chain of TREE_LIST nodes from a vector. */
2591 build_tree_list_vec_stat (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
2593 tree ret
= NULL_TREE
;
2597 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
2599 *pp
= build_tree_list_stat (NULL
, t PASS_MEM_STAT
);
2600 pp
= &TREE_CHAIN (*pp
);
2605 /* Return a newly created TREE_LIST node whose
2606 purpose and value fields are PURPOSE and VALUE
2607 and whose TREE_CHAIN is CHAIN. */
2610 tree_cons_stat (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
2614 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
2615 memset (node
, 0, sizeof (struct tree_common
));
2617 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
2619 TREE_SET_CODE (node
, TREE_LIST
);
2620 TREE_CHAIN (node
) = chain
;
2621 TREE_PURPOSE (node
) = purpose
;
2622 TREE_VALUE (node
) = value
;
2626 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2630 ctor_to_vec (tree ctor
)
2632 vec
<tree
, va_gc
> *vec
;
2633 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
2637 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
2638 vec
->quick_push (val
);
2643 /* Return the size nominally occupied by an object of type TYPE
2644 when it resides in memory. The value is measured in units of bytes,
2645 and its data type is that normally used for type sizes
2646 (which is the first type created by make_signed_type or
2647 make_unsigned_type). */
2650 size_in_bytes (const_tree type
)
2654 if (type
== error_mark_node
)
2655 return integer_zero_node
;
2657 type
= TYPE_MAIN_VARIANT (type
);
2658 t
= TYPE_SIZE_UNIT (type
);
2662 lang_hooks
.types
.incomplete_type_error (NULL_TREE
, type
);
2663 return size_zero_node
;
2669 /* Return the size of TYPE (in bytes) as a wide integer
2670 or return -1 if the size can vary or is larger than an integer. */
2673 int_size_in_bytes (const_tree type
)
2677 if (type
== error_mark_node
)
2680 type
= TYPE_MAIN_VARIANT (type
);
2681 t
= TYPE_SIZE_UNIT (type
);
2683 if (t
&& tree_fits_uhwi_p (t
))
2684 return TREE_INT_CST_LOW (t
);
2689 /* Return the maximum size of TYPE (in bytes) as a wide integer
2690 or return -1 if the size can vary or is larger than an integer. */
2693 max_int_size_in_bytes (const_tree type
)
2695 HOST_WIDE_INT size
= -1;
2698 /* If this is an array type, check for a possible MAX_SIZE attached. */
2700 if (TREE_CODE (type
) == ARRAY_TYPE
)
2702 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
2704 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2705 size
= tree_to_uhwi (size_tree
);
2708 /* If we still haven't been able to get a size, see if the language
2709 can compute a maximum size. */
2713 size_tree
= lang_hooks
.types
.max_size (type
);
2715 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2716 size
= tree_to_uhwi (size_tree
);
2722 /* Return the bit position of FIELD, in bits from the start of the record.
2723 This is a tree of type bitsizetype. */
2726 bit_position (const_tree field
)
2728 return bit_from_pos (DECL_FIELD_OFFSET (field
),
2729 DECL_FIELD_BIT_OFFSET (field
));
2732 /* Likewise, but return as an integer. It must be representable in
2733 that way (since it could be a signed value, we don't have the
2734 option of returning -1 like int_size_in_byte can. */
2737 int_bit_position (const_tree field
)
2739 return tree_to_shwi (bit_position (field
));
2742 /* Return the byte position of FIELD, in bytes from the start of the record.
2743 This is a tree of type sizetype. */
2746 byte_position (const_tree field
)
2748 return byte_from_pos (DECL_FIELD_OFFSET (field
),
2749 DECL_FIELD_BIT_OFFSET (field
));
2752 /* Likewise, but return as an integer. It must be representable in
2753 that way (since it could be a signed value, we don't have the
2754 option of returning -1 like int_size_in_byte can. */
2757 int_byte_position (const_tree field
)
2759 return tree_to_shwi (byte_position (field
));
2762 /* Return the strictest alignment, in bits, that T is known to have. */
2765 expr_align (const_tree t
)
2767 unsigned int align0
, align1
;
2769 switch (TREE_CODE (t
))
2771 CASE_CONVERT
: case NON_LVALUE_EXPR
:
2772 /* If we have conversions, we know that the alignment of the
2773 object must meet each of the alignments of the types. */
2774 align0
= expr_align (TREE_OPERAND (t
, 0));
2775 align1
= TYPE_ALIGN (TREE_TYPE (t
));
2776 return MAX (align0
, align1
);
2778 case SAVE_EXPR
: case COMPOUND_EXPR
: case MODIFY_EXPR
:
2779 case INIT_EXPR
: case TARGET_EXPR
: case WITH_CLEANUP_EXPR
:
2780 case CLEANUP_POINT_EXPR
:
2781 /* These don't change the alignment of an object. */
2782 return expr_align (TREE_OPERAND (t
, 0));
2785 /* The best we can do is say that the alignment is the least aligned
2787 align0
= expr_align (TREE_OPERAND (t
, 1));
2788 align1
= expr_align (TREE_OPERAND (t
, 2));
2789 return MIN (align0
, align1
);
2791 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2792 meaningfully, it's always 1. */
2793 case LABEL_DECL
: case CONST_DECL
:
2794 case VAR_DECL
: case PARM_DECL
: case RESULT_DECL
:
2796 gcc_assert (DECL_ALIGN (t
) != 0);
2797 return DECL_ALIGN (t
);
2803 /* Otherwise take the alignment from that of the type. */
2804 return TYPE_ALIGN (TREE_TYPE (t
));
2807 /* Return, as a tree node, the number of elements for TYPE (which is an
2808 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2811 array_type_nelts (const_tree type
)
2813 tree index_type
, min
, max
;
2815 /* If they did it with unspecified bounds, then we should have already
2816 given an error about it before we got here. */
2817 if (! TYPE_DOMAIN (type
))
2818 return error_mark_node
;
2820 index_type
= TYPE_DOMAIN (type
);
2821 min
= TYPE_MIN_VALUE (index_type
);
2822 max
= TYPE_MAX_VALUE (index_type
);
2824 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2826 return error_mark_node
;
2828 return (integer_zerop (min
)
2830 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
2833 /* If arg is static -- a reference to an object in static storage -- then
2834 return the object. This is not the same as the C meaning of `static'.
2835 If arg isn't static, return NULL. */
2840 switch (TREE_CODE (arg
))
2843 /* Nested functions are static, even though taking their address will
2844 involve a trampoline as we unnest the nested function and create
2845 the trampoline on the tree level. */
2849 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
2850 && ! DECL_THREAD_LOCAL_P (arg
)
2851 && ! DECL_DLLIMPORT_P (arg
)
2855 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
2859 return TREE_STATIC (arg
) ? arg
: NULL
;
2866 /* If the thing being referenced is not a field, then it is
2867 something language specific. */
2868 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
2870 /* If we are referencing a bitfield, we can't evaluate an
2871 ADDR_EXPR at compile time and so it isn't a constant. */
2872 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
2875 return staticp (TREE_OPERAND (arg
, 0));
2881 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
2884 case ARRAY_RANGE_REF
:
2885 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
2886 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
2887 return staticp (TREE_OPERAND (arg
, 0));
2891 case COMPOUND_LITERAL_EXPR
:
2892 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
2902 /* Return whether OP is a DECL whose address is function-invariant. */
2905 decl_address_invariant_p (const_tree op
)
2907 /* The conditions below are slightly less strict than the one in
2910 switch (TREE_CODE (op
))
2919 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
2920 || DECL_THREAD_LOCAL_P (op
)
2921 || DECL_CONTEXT (op
) == current_function_decl
2922 || decl_function_context (op
) == current_function_decl
)
2927 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
2928 || decl_function_context (op
) == current_function_decl
)
2939 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
2942 decl_address_ip_invariant_p (const_tree op
)
2944 /* The conditions below are slightly less strict than the one in
2947 switch (TREE_CODE (op
))
2955 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
2956 && !DECL_DLLIMPORT_P (op
))
2957 || DECL_THREAD_LOCAL_P (op
))
2962 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
2974 /* Return true if T is function-invariant (internal function, does
2975 not handle arithmetic; that's handled in skip_simple_arithmetic and
2976 tree_invariant_p). */
2978 static bool tree_invariant_p (tree t
);
2981 tree_invariant_p_1 (tree t
)
2985 if (TREE_CONSTANT (t
)
2986 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
2989 switch (TREE_CODE (t
))
2995 op
= TREE_OPERAND (t
, 0);
2996 while (handled_component_p (op
))
2998 switch (TREE_CODE (op
))
3001 case ARRAY_RANGE_REF
:
3002 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3003 || TREE_OPERAND (op
, 2) != NULL_TREE
3004 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3009 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3015 op
= TREE_OPERAND (op
, 0);
3018 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3027 /* Return true if T is function-invariant. */
3030 tree_invariant_p (tree t
)
3032 tree inner
= skip_simple_arithmetic (t
);
3033 return tree_invariant_p_1 (inner
);
3036 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3037 Do this to any expression which may be used in more than one place,
3038 but must be evaluated only once.
3040 Normally, expand_expr would reevaluate the expression each time.
3041 Calling save_expr produces something that is evaluated and recorded
3042 the first time expand_expr is called on it. Subsequent calls to
3043 expand_expr just reuse the recorded value.
3045 The call to expand_expr that generates code that actually computes
3046 the value is the first call *at compile time*. Subsequent calls
3047 *at compile time* generate code to use the saved value.
3048 This produces correct result provided that *at run time* control
3049 always flows through the insns made by the first expand_expr
3050 before reaching the other places where the save_expr was evaluated.
3051 You, the caller of save_expr, must make sure this is so.
3053 Constants, and certain read-only nodes, are returned with no
3054 SAVE_EXPR because that is safe. Expressions containing placeholders
3055 are not touched; see tree.def for an explanation of what these
3059 save_expr (tree expr
)
3061 tree t
= fold (expr
);
3064 /* If the tree evaluates to a constant, then we don't want to hide that
3065 fact (i.e. this allows further folding, and direct checks for constants).
3066 However, a read-only object that has side effects cannot be bypassed.
3067 Since it is no problem to reevaluate literals, we just return the
3069 inner
= skip_simple_arithmetic (t
);
3070 if (TREE_CODE (inner
) == ERROR_MARK
)
3073 if (tree_invariant_p_1 (inner
))
3076 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3077 it means that the size or offset of some field of an object depends on
3078 the value within another field.
3080 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3081 and some variable since it would then need to be both evaluated once and
3082 evaluated more than once. Front-ends must assure this case cannot
3083 happen by surrounding any such subexpressions in their own SAVE_EXPR
3084 and forcing evaluation at the proper time. */
3085 if (contains_placeholder_p (inner
))
3088 t
= build1 (SAVE_EXPR
, TREE_TYPE (expr
), t
);
3089 SET_EXPR_LOCATION (t
, EXPR_LOCATION (expr
));
3091 /* This expression might be placed ahead of a jump to ensure that the
3092 value was computed on both sides of the jump. So make sure it isn't
3093 eliminated as dead. */
3094 TREE_SIDE_EFFECTS (t
) = 1;
3098 /* Look inside EXPR into any simple arithmetic operations. Return the
3099 outermost non-arithmetic or non-invariant node. */
3102 skip_simple_arithmetic (tree expr
)
3104 /* We don't care about whether this can be used as an lvalue in this
3106 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3107 expr
= TREE_OPERAND (expr
, 0);
3109 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3110 a constant, it will be more efficient to not make another SAVE_EXPR since
3111 it will allow better simplification and GCSE will be able to merge the
3112 computations if they actually occur. */
3115 if (UNARY_CLASS_P (expr
))
3116 expr
= TREE_OPERAND (expr
, 0);
3117 else if (BINARY_CLASS_P (expr
))
3119 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3120 expr
= TREE_OPERAND (expr
, 0);
3121 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3122 expr
= TREE_OPERAND (expr
, 1);
3133 /* Look inside EXPR into simple arithmetic operations involving constants.
3134 Return the outermost non-arithmetic or non-constant node. */
3137 skip_simple_constant_arithmetic (tree expr
)
3139 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3140 expr
= TREE_OPERAND (expr
, 0);
3144 if (UNARY_CLASS_P (expr
))
3145 expr
= TREE_OPERAND (expr
, 0);
3146 else if (BINARY_CLASS_P (expr
))
3148 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3149 expr
= TREE_OPERAND (expr
, 0);
3150 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3151 expr
= TREE_OPERAND (expr
, 1);
3162 /* Return which tree structure is used by T. */
3164 enum tree_node_structure_enum
3165 tree_node_structure (const_tree t
)
3167 const enum tree_code code
= TREE_CODE (t
);
3168 return tree_node_structure_for_code (code
);
3171 /* Set various status flags when building a CALL_EXPR object T. */
3174 process_call_operands (tree t
)
3176 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3177 bool read_only
= false;
3178 int i
= call_expr_flags (t
);
3180 /* Calls have side-effects, except those to const or pure functions. */
3181 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3182 side_effects
= true;
3183 /* Propagate TREE_READONLY of arguments for const functions. */
3187 if (!side_effects
|| read_only
)
3188 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3190 tree op
= TREE_OPERAND (t
, i
);
3191 if (op
&& TREE_SIDE_EFFECTS (op
))
3192 side_effects
= true;
3193 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3197 TREE_SIDE_EFFECTS (t
) = side_effects
;
3198 TREE_READONLY (t
) = read_only
;
3201 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3202 size or offset that depends on a field within a record. */
3205 contains_placeholder_p (const_tree exp
)
3207 enum tree_code code
;
3212 code
= TREE_CODE (exp
);
3213 if (code
== PLACEHOLDER_EXPR
)
3216 switch (TREE_CODE_CLASS (code
))
3219 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3220 position computations since they will be converted into a
3221 WITH_RECORD_EXPR involving the reference, which will assume
3222 here will be valid. */
3223 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3225 case tcc_exceptional
:
3226 if (code
== TREE_LIST
)
3227 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
3228 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
3233 case tcc_comparison
:
3234 case tcc_expression
:
3238 /* Ignoring the first operand isn't quite right, but works best. */
3239 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
3242 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3243 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
3244 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
3247 /* The save_expr function never wraps anything containing
3248 a PLACEHOLDER_EXPR. */
3255 switch (TREE_CODE_LENGTH (code
))
3258 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3260 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3261 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
3272 const_call_expr_arg_iterator iter
;
3273 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
3274 if (CONTAINS_PLACEHOLDER_P (arg
))
3288 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3289 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3293 type_contains_placeholder_1 (const_tree type
)
3295 /* If the size contains a placeholder or the parent type (component type in
3296 the case of arrays) type involves a placeholder, this type does. */
3297 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
3298 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
3299 || (!POINTER_TYPE_P (type
)
3301 && type_contains_placeholder_p (TREE_TYPE (type
))))
3304 /* Now do type-specific checks. Note that the last part of the check above
3305 greatly limits what we have to do below. */
3306 switch (TREE_CODE (type
))
3314 case REFERENCE_TYPE
:
3323 case FIXED_POINT_TYPE
:
3324 /* Here we just check the bounds. */
3325 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
3326 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
3329 /* We have already checked the component type above, so just check the
3331 return type_contains_placeholder_p (TYPE_DOMAIN (type
));
3335 case QUAL_UNION_TYPE
:
3339 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3340 if (TREE_CODE (field
) == FIELD_DECL
3341 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
3342 || (TREE_CODE (type
) == QUAL_UNION_TYPE
3343 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
3344 || type_contains_placeholder_p (TREE_TYPE (field
))))
3355 /* Wrapper around above function used to cache its result. */
3358 type_contains_placeholder_p (tree type
)
3362 /* If the contains_placeholder_bits field has been initialized,
3363 then we know the answer. */
3364 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
3365 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
3367 /* Indicate that we've seen this type node, and the answer is false.
3368 This is what we want to return if we run into recursion via fields. */
3369 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
3371 /* Compute the real value. */
3372 result
= type_contains_placeholder_1 (type
);
3374 /* Store the real value. */
3375 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
3380 /* Push tree EXP onto vector QUEUE if it is not already present. */
3383 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
3388 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
3389 if (simple_cst_equal (iter
, exp
) == 1)
3393 queue
->safe_push (exp
);
3396 /* Given a tree EXP, find all occurrences of references to fields
3397 in a PLACEHOLDER_EXPR and place them in vector REFS without
3398 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3399 we assume here that EXP contains only arithmetic expressions
3400 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3404 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
3406 enum tree_code code
= TREE_CODE (exp
);
3410 /* We handle TREE_LIST and COMPONENT_REF separately. */
3411 if (code
== TREE_LIST
)
3413 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
3414 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
3416 else if (code
== COMPONENT_REF
)
3418 for (inner
= TREE_OPERAND (exp
, 0);
3419 REFERENCE_CLASS_P (inner
);
3420 inner
= TREE_OPERAND (inner
, 0))
3423 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3424 push_without_duplicates (exp
, refs
);
3426 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
3429 switch (TREE_CODE_CLASS (code
))
3434 case tcc_declaration
:
3435 /* Variables allocated to static storage can stay. */
3436 if (!TREE_STATIC (exp
))
3437 push_without_duplicates (exp
, refs
);
3440 case tcc_expression
:
3441 /* This is the pattern built in ada/make_aligning_type. */
3442 if (code
== ADDR_EXPR
3443 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
3445 push_without_duplicates (exp
, refs
);
3449 /* Fall through... */
3451 case tcc_exceptional
:
3454 case tcc_comparison
:
3456 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
3457 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3461 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3462 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3470 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3471 return a tree with all occurrences of references to F in a
3472 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3473 CONST_DECLs. Note that we assume here that EXP contains only
3474 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3475 occurring only in their argument list. */
3478 substitute_in_expr (tree exp
, tree f
, tree r
)
3480 enum tree_code code
= TREE_CODE (exp
);
3481 tree op0
, op1
, op2
, op3
;
3484 /* We handle TREE_LIST and COMPONENT_REF separately. */
3485 if (code
== TREE_LIST
)
3487 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
3488 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
3489 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3492 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3494 else if (code
== COMPONENT_REF
)
3498 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3499 and it is the right field, replace it with R. */
3500 for (inner
= TREE_OPERAND (exp
, 0);
3501 REFERENCE_CLASS_P (inner
);
3502 inner
= TREE_OPERAND (inner
, 0))
3506 op1
= TREE_OPERAND (exp
, 1);
3508 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
3511 /* If this expression hasn't been completed let, leave it alone. */
3512 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
3515 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3516 if (op0
== TREE_OPERAND (exp
, 0))
3520 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
3523 switch (TREE_CODE_CLASS (code
))
3528 case tcc_declaration
:
3534 case tcc_expression
:
3538 /* Fall through... */
3540 case tcc_exceptional
:
3543 case tcc_comparison
:
3545 switch (TREE_CODE_LENGTH (code
))
3551 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3552 if (op0
== TREE_OPERAND (exp
, 0))
3555 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3559 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3560 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3562 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3565 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3569 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3570 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3571 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3573 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3574 && op2
== TREE_OPERAND (exp
, 2))
3577 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3581 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3582 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3583 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3584 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
3586 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3587 && op2
== TREE_OPERAND (exp
, 2)
3588 && op3
== TREE_OPERAND (exp
, 3))
3592 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3604 new_tree
= NULL_TREE
;
3606 /* If we are trying to replace F with a constant, inline back
3607 functions which do nothing else than computing a value from
3608 the arguments they are passed. This makes it possible to
3609 fold partially or entirely the replacement expression. */
3610 if (CONSTANT_CLASS_P (r
) && code
== CALL_EXPR
)
3612 tree t
= maybe_inline_call_in_expr (exp
);
3614 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
3617 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3619 tree op
= TREE_OPERAND (exp
, i
);
3620 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
3624 new_tree
= copy_node (exp
);
3625 TREE_OPERAND (new_tree
, i
) = new_op
;
3631 new_tree
= fold (new_tree
);
3632 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3633 process_call_operands (new_tree
);
3644 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3646 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3647 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3652 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3653 for it within OBJ, a tree that is an object or a chain of references. */
3656 substitute_placeholder_in_expr (tree exp
, tree obj
)
3658 enum tree_code code
= TREE_CODE (exp
);
3659 tree op0
, op1
, op2
, op3
;
3662 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3663 in the chain of OBJ. */
3664 if (code
== PLACEHOLDER_EXPR
)
3666 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
3669 for (elt
= obj
; elt
!= 0;
3670 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3671 || TREE_CODE (elt
) == COND_EXPR
)
3672 ? TREE_OPERAND (elt
, 1)
3673 : (REFERENCE_CLASS_P (elt
)
3674 || UNARY_CLASS_P (elt
)
3675 || BINARY_CLASS_P (elt
)
3676 || VL_EXP_CLASS_P (elt
)
3677 || EXPRESSION_CLASS_P (elt
))
3678 ? TREE_OPERAND (elt
, 0) : 0))
3679 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
3682 for (elt
= obj
; elt
!= 0;
3683 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3684 || TREE_CODE (elt
) == COND_EXPR
)
3685 ? TREE_OPERAND (elt
, 1)
3686 : (REFERENCE_CLASS_P (elt
)
3687 || UNARY_CLASS_P (elt
)
3688 || BINARY_CLASS_P (elt
)
3689 || VL_EXP_CLASS_P (elt
)
3690 || EXPRESSION_CLASS_P (elt
))
3691 ? TREE_OPERAND (elt
, 0) : 0))
3692 if (POINTER_TYPE_P (TREE_TYPE (elt
))
3693 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
3695 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
3697 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3698 survives until RTL generation, there will be an error. */
3702 /* TREE_LIST is special because we need to look at TREE_VALUE
3703 and TREE_CHAIN, not TREE_OPERANDS. */
3704 else if (code
== TREE_LIST
)
3706 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
3707 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
3708 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3711 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3714 switch (TREE_CODE_CLASS (code
))
3717 case tcc_declaration
:
3720 case tcc_exceptional
:
3723 case tcc_comparison
:
3724 case tcc_expression
:
3727 switch (TREE_CODE_LENGTH (code
))
3733 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3734 if (op0
== TREE_OPERAND (exp
, 0))
3737 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3741 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3742 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3744 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3747 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3751 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3752 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3753 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
3755 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3756 && op2
== TREE_OPERAND (exp
, 2))
3759 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3763 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3764 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3765 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
3766 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
3768 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3769 && op2
== TREE_OPERAND (exp
, 2)
3770 && op3
== TREE_OPERAND (exp
, 3))
3774 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3786 new_tree
= NULL_TREE
;
3788 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3790 tree op
= TREE_OPERAND (exp
, i
);
3791 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
3795 new_tree
= copy_node (exp
);
3796 TREE_OPERAND (new_tree
, i
) = new_op
;
3802 new_tree
= fold (new_tree
);
3803 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3804 process_call_operands (new_tree
);
3815 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3817 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3818 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3824 /* Subroutine of stabilize_reference; this is called for subtrees of
3825 references. Any expression with side-effects must be put in a SAVE_EXPR
3826 to ensure that it is only evaluated once.
3828 We don't put SAVE_EXPR nodes around everything, because assigning very
3829 simple expressions to temporaries causes us to miss good opportunities
3830 for optimizations. Among other things, the opportunity to fold in the
3831 addition of a constant into an addressing mode often gets lost, e.g.
3832 "y[i+1] += x;". In general, we take the approach that we should not make
3833 an assignment unless we are forced into it - i.e., that any non-side effect
3834 operator should be allowed, and that cse should take care of coalescing
3835 multiple utterances of the same expression should that prove fruitful. */
3838 stabilize_reference_1 (tree e
)
3841 enum tree_code code
= TREE_CODE (e
);
3843 /* We cannot ignore const expressions because it might be a reference
3844 to a const array but whose index contains side-effects. But we can
3845 ignore things that are actual constant or that already have been
3846 handled by this function. */
3848 if (tree_invariant_p (e
))
3851 switch (TREE_CODE_CLASS (code
))
3853 case tcc_exceptional
:
3855 case tcc_declaration
:
3856 case tcc_comparison
:
3858 case tcc_expression
:
3861 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3862 so that it will only be evaluated once. */
3863 /* The reference (r) and comparison (<) classes could be handled as
3864 below, but it is generally faster to only evaluate them once. */
3865 if (TREE_SIDE_EFFECTS (e
))
3866 return save_expr (e
);
3870 /* Constants need no processing. In fact, we should never reach
3875 /* Division is slow and tends to be compiled with jumps,
3876 especially the division by powers of 2 that is often
3877 found inside of an array reference. So do it just once. */
3878 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
3879 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
3880 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
3881 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
3882 return save_expr (e
);
3883 /* Recursively stabilize each operand. */
3884 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
3885 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
3889 /* Recursively stabilize each operand. */
3890 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
3897 TREE_TYPE (result
) = TREE_TYPE (e
);
3898 TREE_READONLY (result
) = TREE_READONLY (e
);
3899 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
3900 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
3905 /* Stabilize a reference so that we can use it any number of times
3906 without causing its operands to be evaluated more than once.
3907 Returns the stabilized reference. This works by means of save_expr,
3908 so see the caveats in the comments about save_expr.
3910 Also allows conversion expressions whose operands are references.
3911 Any other kind of expression is returned unchanged. */
3914 stabilize_reference (tree ref
)
3917 enum tree_code code
= TREE_CODE (ref
);
3924 /* No action is needed in this case. */
3929 case FIX_TRUNC_EXPR
:
3930 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
3934 result
= build_nt (INDIRECT_REF
,
3935 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
3939 result
= build_nt (COMPONENT_REF
,
3940 stabilize_reference (TREE_OPERAND (ref
, 0)),
3941 TREE_OPERAND (ref
, 1), NULL_TREE
);
3945 result
= build_nt (BIT_FIELD_REF
,
3946 stabilize_reference (TREE_OPERAND (ref
, 0)),
3947 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
3951 result
= build_nt (ARRAY_REF
,
3952 stabilize_reference (TREE_OPERAND (ref
, 0)),
3953 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
3954 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
3957 case ARRAY_RANGE_REF
:
3958 result
= build_nt (ARRAY_RANGE_REF
,
3959 stabilize_reference (TREE_OPERAND (ref
, 0)),
3960 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
3961 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
3965 /* We cannot wrap the first expression in a SAVE_EXPR, as then
3966 it wouldn't be ignored. This matters when dealing with
3968 return stabilize_reference_1 (ref
);
3970 /* If arg isn't a kind of lvalue we recognize, make no change.
3971 Caller should recognize the error for an invalid lvalue. */
3976 return error_mark_node
;
3979 TREE_TYPE (result
) = TREE_TYPE (ref
);
3980 TREE_READONLY (result
) = TREE_READONLY (ref
);
3981 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
3982 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
3987 /* Low-level constructors for expressions. */
3989 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
3990 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
3993 recompute_tree_invariant_for_addr_expr (tree t
)
3996 bool tc
= true, se
= false;
3998 /* We started out assuming this address is both invariant and constant, but
3999 does not have side effects. Now go down any handled components and see if
4000 any of them involve offsets that are either non-constant or non-invariant.
4001 Also check for side-effects.
4003 ??? Note that this code makes no attempt to deal with the case where
4004 taking the address of something causes a copy due to misalignment. */
4006 #define UPDATE_FLAGS(NODE) \
4007 do { tree _node = (NODE); \
4008 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4009 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4011 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4012 node
= TREE_OPERAND (node
, 0))
4014 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4015 array reference (probably made temporarily by the G++ front end),
4016 so ignore all the operands. */
4017 if ((TREE_CODE (node
) == ARRAY_REF
4018 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4019 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4021 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4022 if (TREE_OPERAND (node
, 2))
4023 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4024 if (TREE_OPERAND (node
, 3))
4025 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4027 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4028 FIELD_DECL, apparently. The G++ front end can put something else
4029 there, at least temporarily. */
4030 else if (TREE_CODE (node
) == COMPONENT_REF
4031 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4033 if (TREE_OPERAND (node
, 2))
4034 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4038 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4040 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4041 the address, since &(*a)->b is a form of addition. If it's a constant, the
4042 address is constant too. If it's a decl, its address is constant if the
4043 decl is static. Everything else is not constant and, furthermore,
4044 taking the address of a volatile variable is not volatile. */
4045 if (TREE_CODE (node
) == INDIRECT_REF
4046 || TREE_CODE (node
) == MEM_REF
)
4047 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4048 else if (CONSTANT_CLASS_P (node
))
4050 else if (DECL_P (node
))
4051 tc
&= (staticp (node
) != NULL_TREE
);
4055 se
|= TREE_SIDE_EFFECTS (node
);
4059 TREE_CONSTANT (t
) = tc
;
4060 TREE_SIDE_EFFECTS (t
) = se
;
4064 /* Build an expression of code CODE, data type TYPE, and operands as
4065 specified. Expressions and reference nodes can be created this way.
4066 Constants, decls, types and misc nodes cannot be.
4068 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4069 enough for all extant tree codes. */
4072 build0_stat (enum tree_code code
, tree tt MEM_STAT_DECL
)
4076 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4078 t
= make_node_stat (code PASS_MEM_STAT
);
4085 build1_stat (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4087 int length
= sizeof (struct tree_exp
);
4090 record_node_allocation_statistics (code
, length
);
4092 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4094 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4096 memset (t
, 0, sizeof (struct tree_common
));
4098 TREE_SET_CODE (t
, code
);
4100 TREE_TYPE (t
) = type
;
4101 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4102 TREE_OPERAND (t
, 0) = node
;
4103 if (node
&& !TYPE_P (node
))
4105 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4106 TREE_READONLY (t
) = TREE_READONLY (node
);
4109 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4110 TREE_SIDE_EFFECTS (t
) = 1;
4114 /* All of these have side-effects, no matter what their
4116 TREE_SIDE_EFFECTS (t
) = 1;
4117 TREE_READONLY (t
) = 0;
4121 /* Whether a dereference is readonly has nothing to do with whether
4122 its operand is readonly. */
4123 TREE_READONLY (t
) = 0;
4128 recompute_tree_invariant_for_addr_expr (t
);
4132 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4133 && node
&& !TYPE_P (node
)
4134 && TREE_CONSTANT (node
))
4135 TREE_CONSTANT (t
) = 1;
4136 if (TREE_CODE_CLASS (code
) == tcc_reference
4137 && node
&& TREE_THIS_VOLATILE (node
))
4138 TREE_THIS_VOLATILE (t
) = 1;
4145 #define PROCESS_ARG(N) \
4147 TREE_OPERAND (t, N) = arg##N; \
4148 if (arg##N &&!TYPE_P (arg##N)) \
4150 if (TREE_SIDE_EFFECTS (arg##N)) \
4152 if (!TREE_READONLY (arg##N) \
4153 && !CONSTANT_CLASS_P (arg##N)) \
4154 (void) (read_only = 0); \
4155 if (!TREE_CONSTANT (arg##N)) \
4156 (void) (constant = 0); \
4161 build2_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4163 bool constant
, read_only
, side_effects
;
4166 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4168 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4169 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4170 /* When sizetype precision doesn't match that of pointers
4171 we need to be able to build explicit extensions or truncations
4172 of the offset argument. */
4173 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4174 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4175 && TREE_CODE (arg1
) == INTEGER_CST
);
4177 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4178 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4179 && ptrofftype_p (TREE_TYPE (arg1
)));
4181 t
= make_node_stat (code PASS_MEM_STAT
);
4184 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4185 result based on those same flags for the arguments. But if the
4186 arguments aren't really even `tree' expressions, we shouldn't be trying
4189 /* Expressions without side effects may be constant if their
4190 arguments are as well. */
4191 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4192 || TREE_CODE_CLASS (code
) == tcc_binary
);
4194 side_effects
= TREE_SIDE_EFFECTS (t
);
4199 TREE_READONLY (t
) = read_only
;
4200 TREE_CONSTANT (t
) = constant
;
4201 TREE_SIDE_EFFECTS (t
) = side_effects
;
4202 TREE_THIS_VOLATILE (t
)
4203 = (TREE_CODE_CLASS (code
) == tcc_reference
4204 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4211 build3_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4212 tree arg2 MEM_STAT_DECL
)
4214 bool constant
, read_only
, side_effects
;
4217 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
4218 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4220 t
= make_node_stat (code PASS_MEM_STAT
);
4225 /* As a special exception, if COND_EXPR has NULL branches, we
4226 assume that it is a gimple statement and always consider
4227 it to have side effects. */
4228 if (code
== COND_EXPR
4229 && tt
== void_type_node
4230 && arg1
== NULL_TREE
4231 && arg2
== NULL_TREE
)
4232 side_effects
= true;
4234 side_effects
= TREE_SIDE_EFFECTS (t
);
4240 if (code
== COND_EXPR
)
4241 TREE_READONLY (t
) = read_only
;
4243 TREE_SIDE_EFFECTS (t
) = side_effects
;
4244 TREE_THIS_VOLATILE (t
)
4245 = (TREE_CODE_CLASS (code
) == tcc_reference
4246 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4252 build4_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4253 tree arg2
, tree arg3 MEM_STAT_DECL
)
4255 bool constant
, read_only
, side_effects
;
4258 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
4260 t
= make_node_stat (code PASS_MEM_STAT
);
4263 side_effects
= TREE_SIDE_EFFECTS (t
);
4270 TREE_SIDE_EFFECTS (t
) = side_effects
;
4271 TREE_THIS_VOLATILE (t
)
4272 = (TREE_CODE_CLASS (code
) == tcc_reference
4273 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4279 build5_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4280 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
4282 bool constant
, read_only
, side_effects
;
4285 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
4287 t
= make_node_stat (code PASS_MEM_STAT
);
4290 side_effects
= TREE_SIDE_EFFECTS (t
);
4298 TREE_SIDE_EFFECTS (t
) = side_effects
;
4299 TREE_THIS_VOLATILE (t
)
4300 = (TREE_CODE_CLASS (code
) == tcc_reference
4301 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4306 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4307 on the pointer PTR. */
4310 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
4312 HOST_WIDE_INT offset
= 0;
4313 tree ptype
= TREE_TYPE (ptr
);
4315 /* For convenience allow addresses that collapse to a simple base
4317 if (TREE_CODE (ptr
) == ADDR_EXPR
4318 && (handled_component_p (TREE_OPERAND (ptr
, 0))
4319 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
4321 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
4323 ptr
= build_fold_addr_expr (ptr
);
4324 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
4326 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
4327 ptr
, build_int_cst (ptype
, offset
));
4328 SET_EXPR_LOCATION (tem
, loc
);
4332 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4335 mem_ref_offset (const_tree t
)
4337 return offset_int::from (TREE_OPERAND (t
, 1), SIGNED
);
4340 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4341 offsetted by OFFSET units. */
4344 build_invariant_address (tree type
, tree base
, HOST_WIDE_INT offset
)
4346 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
4347 build_fold_addr_expr (base
),
4348 build_int_cst (ptr_type_node
, offset
));
4349 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
4350 recompute_tree_invariant_for_addr_expr (addr
);
4354 /* Similar except don't specify the TREE_TYPE
4355 and leave the TREE_SIDE_EFFECTS as 0.
4356 It is permissible for arguments to be null,
4357 or even garbage if their values do not matter. */
4360 build_nt (enum tree_code code
, ...)
4367 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4371 t
= make_node (code
);
4372 length
= TREE_CODE_LENGTH (code
);
4374 for (i
= 0; i
< length
; i
++)
4375 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
4381 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4385 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
4390 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
4391 CALL_EXPR_FN (ret
) = fn
;
4392 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
4393 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
4394 CALL_EXPR_ARG (ret
, ix
) = t
;
4398 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4399 We do NOT enter this node in any sort of symbol table.
4401 LOC is the location of the decl.
4403 layout_decl is used to set up the decl's storage layout.
4404 Other slots are initialized to 0 or null pointers. */
4407 build_decl_stat (location_t loc
, enum tree_code code
, tree name
,
4408 tree type MEM_STAT_DECL
)
4412 t
= make_node_stat (code PASS_MEM_STAT
);
4413 DECL_SOURCE_LOCATION (t
) = loc
;
4415 /* if (type == error_mark_node)
4416 type = integer_type_node; */
4417 /* That is not done, deliberately, so that having error_mark_node
4418 as the type can suppress useless errors in the use of this variable. */
4420 DECL_NAME (t
) = name
;
4421 TREE_TYPE (t
) = type
;
4423 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
4429 /* Builds and returns function declaration with NAME and TYPE. */
4432 build_fn_decl (const char *name
, tree type
)
4434 tree id
= get_identifier (name
);
4435 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
4437 DECL_EXTERNAL (decl
) = 1;
4438 TREE_PUBLIC (decl
) = 1;
4439 DECL_ARTIFICIAL (decl
) = 1;
4440 TREE_NOTHROW (decl
) = 1;
4445 vec
<tree
, va_gc
> *all_translation_units
;
4447 /* Builds a new translation-unit decl with name NAME, queues it in the
4448 global list of translation-unit decls and returns it. */
4451 build_translation_unit_decl (tree name
)
4453 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
4455 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
4456 vec_safe_push (all_translation_units
, tu
);
4461 /* BLOCK nodes are used to represent the structure of binding contours
4462 and declarations, once those contours have been exited and their contents
4463 compiled. This information is used for outputting debugging info. */
4466 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
4468 tree block
= make_node (BLOCK
);
4470 BLOCK_VARS (block
) = vars
;
4471 BLOCK_SUBBLOCKS (block
) = subblocks
;
4472 BLOCK_SUPERCONTEXT (block
) = supercontext
;
4473 BLOCK_CHAIN (block
) = chain
;
4478 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4480 LOC is the location to use in tree T. */
4483 protected_set_expr_location (tree t
, location_t loc
)
4485 if (t
&& CAN_HAVE_LOCATION_P (t
))
4486 SET_EXPR_LOCATION (t
, loc
);
4489 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4493 build_decl_attribute_variant (tree ddecl
, tree attribute
)
4495 DECL_ATTRIBUTES (ddecl
) = attribute
;
4499 /* Borrowed from hashtab.c iterative_hash implementation. */
4500 #define mix(a,b,c) \
4502 a -= b; a -= c; a ^= (c>>13); \
4503 b -= c; b -= a; b ^= (a<< 8); \
4504 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4505 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4506 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4507 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4508 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4509 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4510 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4514 /* Produce good hash value combining VAL and VAL2. */
4516 iterative_hash_hashval_t (hashval_t val
, hashval_t val2
)
4518 /* the golden ratio; an arbitrary value. */
4519 hashval_t a
= 0x9e3779b9;
4525 /* Produce good hash value combining VAL and VAL2. */
4527 iterative_hash_host_wide_int (HOST_WIDE_INT val
, hashval_t val2
)
4529 if (sizeof (HOST_WIDE_INT
) == sizeof (hashval_t
))
4530 return iterative_hash_hashval_t (val
, val2
);
4533 hashval_t a
= (hashval_t
) val
;
4534 /* Avoid warnings about shifting of more than the width of the type on
4535 hosts that won't execute this path. */
4537 hashval_t b
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 8 + zero
));
4539 if (sizeof (HOST_WIDE_INT
) > 2 * sizeof (hashval_t
))
4541 hashval_t a
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 16 + zero
));
4542 hashval_t b
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 24 + zero
));
4549 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4550 is ATTRIBUTE and its qualifiers are QUALS.
4552 Record such modified types already made so we don't make duplicates. */
4555 build_type_attribute_qual_variant (tree ttype
, tree attribute
, int quals
)
4557 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype
), attribute
))
4559 hashval_t hashcode
= 0;
4563 enum tree_code code
= TREE_CODE (ttype
);
4565 /* Building a distinct copy of a tagged type is inappropriate; it
4566 causes breakage in code that expects there to be a one-to-one
4567 relationship between a struct and its fields.
4568 build_duplicate_type is another solution (as used in
4569 handle_transparent_union_attribute), but that doesn't play well
4570 with the stronger C++ type identity model. */
4571 if (TREE_CODE (ttype
) == RECORD_TYPE
4572 || TREE_CODE (ttype
) == UNION_TYPE
4573 || TREE_CODE (ttype
) == QUAL_UNION_TYPE
4574 || TREE_CODE (ttype
) == ENUMERAL_TYPE
)
4576 warning (OPT_Wattributes
,
4577 "ignoring attributes applied to %qT after definition",
4578 TYPE_MAIN_VARIANT (ttype
));
4579 return build_qualified_type (ttype
, quals
);
4582 ttype
= build_qualified_type (ttype
, TYPE_UNQUALIFIED
);
4583 ntype
= build_distinct_type_copy (ttype
);
4585 TYPE_ATTRIBUTES (ntype
) = attribute
;
4587 hashcode
= iterative_hash_object (code
, hashcode
);
4588 if (TREE_TYPE (ntype
))
4589 hashcode
= iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype
)),
4591 hashcode
= attribute_hash_list (attribute
, hashcode
);
4593 switch (TREE_CODE (ntype
))
4596 hashcode
= type_hash_list (TYPE_ARG_TYPES (ntype
), hashcode
);
4599 if (TYPE_DOMAIN (ntype
))
4600 hashcode
= iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype
)),
4604 t
= TYPE_MAX_VALUE (ntype
);
4605 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
4606 hashcode
= iterative_hash_object (TREE_INT_CST_ELT (t
, i
), hashcode
);
4609 case FIXED_POINT_TYPE
:
4611 unsigned int precision
= TYPE_PRECISION (ntype
);
4612 hashcode
= iterative_hash_object (precision
, hashcode
);
4619 ntype
= type_hash_canon (hashcode
, ntype
);
4621 /* If the target-dependent attributes make NTYPE different from
4622 its canonical type, we will need to use structural equality
4623 checks for this type. */
4624 if (TYPE_STRUCTURAL_EQUALITY_P (ttype
)
4625 || !comp_type_attributes (ntype
, ttype
))
4626 SET_TYPE_STRUCTURAL_EQUALITY (ntype
);
4627 else if (TYPE_CANONICAL (ntype
) == ntype
)
4628 TYPE_CANONICAL (ntype
) = TYPE_CANONICAL (ttype
);
4630 ttype
= build_qualified_type (ntype
, quals
);
4632 else if (TYPE_QUALS (ttype
) != quals
)
4633 ttype
= build_qualified_type (ttype
, quals
);
4638 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4642 omp_declare_simd_clauses_equal (tree clauses1
, tree clauses2
)
4645 for (cl1
= clauses1
, cl2
= clauses2
;
4647 cl1
= OMP_CLAUSE_CHAIN (cl1
), cl2
= OMP_CLAUSE_CHAIN (cl2
))
4649 if (OMP_CLAUSE_CODE (cl1
) != OMP_CLAUSE_CODE (cl2
))
4651 if (OMP_CLAUSE_CODE (cl1
) != OMP_CLAUSE_SIMDLEN
)
4653 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1
),
4654 OMP_CLAUSE_DECL (cl2
)) != 1)
4657 switch (OMP_CLAUSE_CODE (cl1
))
4659 case OMP_CLAUSE_ALIGNED
:
4660 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1
),
4661 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2
)) != 1)
4664 case OMP_CLAUSE_LINEAR
:
4665 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1
),
4666 OMP_CLAUSE_LINEAR_STEP (cl2
)) != 1)
4669 case OMP_CLAUSE_SIMDLEN
:
4670 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1
),
4671 OMP_CLAUSE_SIMDLEN_EXPR (cl2
)) != 1)
4680 /* Compare two constructor-element-type constants. Return 1 if the lists
4681 are known to be equal; otherwise return 0. */
4684 simple_cst_list_equal (const_tree l1
, const_tree l2
)
4686 while (l1
!= NULL_TREE
&& l2
!= NULL_TREE
)
4688 if (simple_cst_equal (TREE_VALUE (l1
), TREE_VALUE (l2
)) != 1)
4691 l1
= TREE_CHAIN (l1
);
4692 l2
= TREE_CHAIN (l2
);
4698 /* Compare two attributes for their value identity. Return true if the
4699 attribute values are known to be equal; otherwise return false.
4703 attribute_value_equal (const_tree attr1
, const_tree attr2
)
4705 if (TREE_VALUE (attr1
) == TREE_VALUE (attr2
))
4708 if (TREE_VALUE (attr1
) != NULL_TREE
4709 && TREE_CODE (TREE_VALUE (attr1
)) == TREE_LIST
4710 && TREE_VALUE (attr2
) != NULL
4711 && TREE_CODE (TREE_VALUE (attr2
)) == TREE_LIST
)
4712 return (simple_cst_list_equal (TREE_VALUE (attr1
),
4713 TREE_VALUE (attr2
)) == 1);
4715 if ((flag_openmp
|| flag_openmp_simd
)
4716 && TREE_VALUE (attr1
) && TREE_VALUE (attr2
)
4717 && TREE_CODE (TREE_VALUE (attr1
)) == OMP_CLAUSE
4718 && TREE_CODE (TREE_VALUE (attr2
)) == OMP_CLAUSE
)
4719 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1
),
4720 TREE_VALUE (attr2
));
4722 return (simple_cst_equal (TREE_VALUE (attr1
), TREE_VALUE (attr2
)) == 1);
4725 /* Return 0 if the attributes for two types are incompatible, 1 if they
4726 are compatible, and 2 if they are nearly compatible (which causes a
4727 warning to be generated). */
4729 comp_type_attributes (const_tree type1
, const_tree type2
)
4731 const_tree a1
= TYPE_ATTRIBUTES (type1
);
4732 const_tree a2
= TYPE_ATTRIBUTES (type2
);
4737 for (a
= a1
; a
!= NULL_TREE
; a
= TREE_CHAIN (a
))
4739 const struct attribute_spec
*as
;
4742 as
= lookup_attribute_spec (get_attribute_name (a
));
4743 if (!as
|| as
->affects_type_identity
== false)
4746 attr
= lookup_attribute (as
->name
, CONST_CAST_TREE (a2
));
4747 if (!attr
|| !attribute_value_equal (a
, attr
))
4752 for (a
= a2
; a
!= NULL_TREE
; a
= TREE_CHAIN (a
))
4754 const struct attribute_spec
*as
;
4756 as
= lookup_attribute_spec (get_attribute_name (a
));
4757 if (!as
|| as
->affects_type_identity
== false)
4760 if (!lookup_attribute (as
->name
, CONST_CAST_TREE (a1
)))
4762 /* We don't need to compare trees again, as we did this
4763 already in first loop. */
4765 /* All types - affecting identity - are equal, so
4766 there is no need to call target hook for comparison. */
4770 /* As some type combinations - like default calling-convention - might
4771 be compatible, we have to call the target hook to get the final result. */
4772 return targetm
.comp_type_attributes (type1
, type2
);
4775 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4778 Record such modified types already made so we don't make duplicates. */
4781 build_type_attribute_variant (tree ttype
, tree attribute
)
4783 return build_type_attribute_qual_variant (ttype
, attribute
,
4784 TYPE_QUALS (ttype
));
4788 /* Reset the expression *EXPR_P, a size or position.
4790 ??? We could reset all non-constant sizes or positions. But it's cheap
4791 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4793 We need to reset self-referential sizes or positions because they cannot
4794 be gimplified and thus can contain a CALL_EXPR after the gimplification
4795 is finished, which will run afoul of LTO streaming. And they need to be
4796 reset to something essentially dummy but not constant, so as to preserve
4797 the properties of the object they are attached to. */
4800 free_lang_data_in_one_sizepos (tree
*expr_p
)
4802 tree expr
= *expr_p
;
4803 if (CONTAINS_PLACEHOLDER_P (expr
))
4804 *expr_p
= build0 (PLACEHOLDER_EXPR
, TREE_TYPE (expr
));
4808 /* Reset all the fields in a binfo node BINFO. We only keep
4809 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4812 free_lang_data_in_binfo (tree binfo
)
4817 gcc_assert (TREE_CODE (binfo
) == TREE_BINFO
);
4819 BINFO_VIRTUALS (binfo
) = NULL_TREE
;
4820 BINFO_BASE_ACCESSES (binfo
) = NULL
;
4821 BINFO_INHERITANCE_CHAIN (binfo
) = NULL_TREE
;
4822 BINFO_SUBVTT_INDEX (binfo
) = NULL_TREE
;
4824 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo
), i
, t
)
4825 free_lang_data_in_binfo (t
);
4829 /* Reset all language specific information still present in TYPE. */
4832 free_lang_data_in_type (tree type
)
4834 gcc_assert (TYPE_P (type
));
4836 /* Give the FE a chance to remove its own data first. */
4837 lang_hooks
.free_lang_data (type
);
4839 TREE_LANG_FLAG_0 (type
) = 0;
4840 TREE_LANG_FLAG_1 (type
) = 0;
4841 TREE_LANG_FLAG_2 (type
) = 0;
4842 TREE_LANG_FLAG_3 (type
) = 0;
4843 TREE_LANG_FLAG_4 (type
) = 0;
4844 TREE_LANG_FLAG_5 (type
) = 0;
4845 TREE_LANG_FLAG_6 (type
) = 0;
4847 if (TREE_CODE (type
) == FUNCTION_TYPE
)
4849 /* Remove the const and volatile qualifiers from arguments. The
4850 C++ front end removes them, but the C front end does not,
4851 leading to false ODR violation errors when merging two
4852 instances of the same function signature compiled by
4853 different front ends. */
4856 for (p
= TYPE_ARG_TYPES (type
); p
; p
= TREE_CHAIN (p
))
4858 tree arg_type
= TREE_VALUE (p
);
4860 if (TYPE_READONLY (arg_type
) || TYPE_VOLATILE (arg_type
))
4862 int quals
= TYPE_QUALS (arg_type
)
4864 & ~TYPE_QUAL_VOLATILE
;
4865 TREE_VALUE (p
) = build_qualified_type (arg_type
, quals
);
4866 free_lang_data_in_type (TREE_VALUE (p
));
4871 /* Remove members that are not actually FIELD_DECLs from the field
4872 list of an aggregate. These occur in C++. */
4873 if (RECORD_OR_UNION_TYPE_P (type
))
4877 /* Note that TYPE_FIELDS can be shared across distinct
4878 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4879 to be removed, we cannot set its TREE_CHAIN to NULL.
4880 Otherwise, we would not be able to find all the other fields
4881 in the other instances of this TREE_TYPE.
4883 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4885 member
= TYPE_FIELDS (type
);
4888 if (TREE_CODE (member
) == FIELD_DECL
4889 || TREE_CODE (member
) == TYPE_DECL
)
4892 TREE_CHAIN (prev
) = member
;
4894 TYPE_FIELDS (type
) = member
;
4898 member
= TREE_CHAIN (member
);
4902 TREE_CHAIN (prev
) = NULL_TREE
;
4904 TYPE_FIELDS (type
) = NULL_TREE
;
4906 TYPE_METHODS (type
) = NULL_TREE
;
4907 if (TYPE_BINFO (type
))
4908 free_lang_data_in_binfo (TYPE_BINFO (type
));
4912 /* For non-aggregate types, clear out the language slot (which
4913 overloads TYPE_BINFO). */
4914 TYPE_LANG_SLOT_1 (type
) = NULL_TREE
;
4916 if (INTEGRAL_TYPE_P (type
)
4917 || SCALAR_FLOAT_TYPE_P (type
)
4918 || FIXED_POINT_TYPE_P (type
))
4920 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type
));
4921 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type
));
4925 free_lang_data_in_one_sizepos (&TYPE_SIZE (type
));
4926 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type
));
4928 if (TYPE_CONTEXT (type
)
4929 && TREE_CODE (TYPE_CONTEXT (type
)) == BLOCK
)
4931 tree ctx
= TYPE_CONTEXT (type
);
4934 ctx
= BLOCK_SUPERCONTEXT (ctx
);
4936 while (ctx
&& TREE_CODE (ctx
) == BLOCK
);
4937 TYPE_CONTEXT (type
) = ctx
;
4942 /* Return true if DECL may need an assembler name to be set. */
4945 need_assembler_name_p (tree decl
)
4947 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4948 if (TREE_CODE (decl
) != FUNCTION_DECL
4949 && TREE_CODE (decl
) != VAR_DECL
)
4952 /* If DECL already has its assembler name set, it does not need a
4954 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
4955 || DECL_ASSEMBLER_NAME_SET_P (decl
))
4958 /* Abstract decls do not need an assembler name. */
4959 if (DECL_ABSTRACT (decl
))
4962 /* For VAR_DECLs, only static, public and external symbols need an
4964 if (TREE_CODE (decl
) == VAR_DECL
4965 && !TREE_STATIC (decl
)
4966 && !TREE_PUBLIC (decl
)
4967 && !DECL_EXTERNAL (decl
))
4970 if (TREE_CODE (decl
) == FUNCTION_DECL
)
4972 /* Do not set assembler name on builtins. Allow RTL expansion to
4973 decide whether to expand inline or via a regular call. */
4974 if (DECL_BUILT_IN (decl
)
4975 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
4978 /* Functions represented in the callgraph need an assembler name. */
4979 if (cgraph_get_node (decl
) != NULL
)
4982 /* Unused and not public functions don't need an assembler name. */
4983 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
4991 /* Reset all language specific information still present in symbol
4995 free_lang_data_in_decl (tree decl
)
4997 gcc_assert (DECL_P (decl
));
4999 /* Give the FE a chance to remove its own data first. */
5000 lang_hooks
.free_lang_data (decl
);
5002 TREE_LANG_FLAG_0 (decl
) = 0;
5003 TREE_LANG_FLAG_1 (decl
) = 0;
5004 TREE_LANG_FLAG_2 (decl
) = 0;
5005 TREE_LANG_FLAG_3 (decl
) = 0;
5006 TREE_LANG_FLAG_4 (decl
) = 0;
5007 TREE_LANG_FLAG_5 (decl
) = 0;
5008 TREE_LANG_FLAG_6 (decl
) = 0;
5010 free_lang_data_in_one_sizepos (&DECL_SIZE (decl
));
5011 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl
));
5012 if (TREE_CODE (decl
) == FIELD_DECL
)
5014 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl
));
5015 if (TREE_CODE (DECL_CONTEXT (decl
)) == QUAL_UNION_TYPE
)
5016 DECL_QUALIFIER (decl
) = NULL_TREE
;
5019 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5021 struct cgraph_node
*node
;
5022 if (!(node
= cgraph_get_node (decl
))
5023 || (!node
->definition
&& !node
->clones
))
5026 cgraph_release_function_body (node
);
5029 release_function_body (decl
);
5030 DECL_ARGUMENTS (decl
) = NULL
;
5031 DECL_RESULT (decl
) = NULL
;
5032 DECL_INITIAL (decl
) = error_mark_node
;
5035 if (gimple_has_body_p (decl
))
5039 /* If DECL has a gimple body, then the context for its
5040 arguments must be DECL. Otherwise, it doesn't really
5041 matter, as we will not be emitting any code for DECL. In
5042 general, there may be other instances of DECL created by
5043 the front end and since PARM_DECLs are generally shared,
5044 their DECL_CONTEXT changes as the replicas of DECL are
5045 created. The only time where DECL_CONTEXT is important
5046 is for the FUNCTION_DECLs that have a gimple body (since
5047 the PARM_DECL will be used in the function's body). */
5048 for (t
= DECL_ARGUMENTS (decl
); t
; t
= TREE_CHAIN (t
))
5049 DECL_CONTEXT (t
) = decl
;
5052 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5053 At this point, it is not needed anymore. */
5054 DECL_SAVED_TREE (decl
) = NULL_TREE
;
5056 /* Clear the abstract origin if it refers to a method. Otherwise
5057 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5058 origin will not be output correctly. */
5059 if (DECL_ABSTRACT_ORIGIN (decl
)
5060 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))
5061 && RECORD_OR_UNION_TYPE_P
5062 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))))
5063 DECL_ABSTRACT_ORIGIN (decl
) = NULL_TREE
;
5065 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5066 DECL_VINDEX referring to itself into a vtable slot number as it
5067 should. Happens with functions that are copied and then forgotten
5068 about. Just clear it, it won't matter anymore. */
5069 if (DECL_VINDEX (decl
) && !tree_fits_shwi_p (DECL_VINDEX (decl
)))
5070 DECL_VINDEX (decl
) = NULL_TREE
;
5072 else if (TREE_CODE (decl
) == VAR_DECL
)
5074 if ((DECL_EXTERNAL (decl
)
5075 && (!TREE_STATIC (decl
) || !TREE_READONLY (decl
)))
5076 || (decl_function_context (decl
) && !TREE_STATIC (decl
)))
5077 DECL_INITIAL (decl
) = NULL_TREE
;
5079 else if (TREE_CODE (decl
) == TYPE_DECL
5080 || TREE_CODE (decl
) == FIELD_DECL
)
5081 DECL_INITIAL (decl
) = NULL_TREE
;
5082 else if (TREE_CODE (decl
) == TRANSLATION_UNIT_DECL
5083 && DECL_INITIAL (decl
)
5084 && TREE_CODE (DECL_INITIAL (decl
)) == BLOCK
)
5086 /* Strip builtins from the translation-unit BLOCK. We still have targets
5087 without builtin_decl_explicit support and also builtins are shared
5088 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5089 tree
*nextp
= &BLOCK_VARS (DECL_INITIAL (decl
));
5093 if (TREE_CODE (var
) == FUNCTION_DECL
5094 && DECL_BUILT_IN (var
))
5095 *nextp
= TREE_CHAIN (var
);
5097 nextp
= &TREE_CHAIN (var
);
5103 /* Data used when collecting DECLs and TYPEs for language data removal. */
5105 struct free_lang_data_d
5107 /* Worklist to avoid excessive recursion. */
5110 /* Set of traversed objects. Used to avoid duplicate visits. */
5111 struct pointer_set_t
*pset
;
5113 /* Array of symbols to process with free_lang_data_in_decl. */
5116 /* Array of types to process with free_lang_data_in_type. */
5121 /* Save all language fields needed to generate proper debug information
5122 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5125 save_debug_info_for_decl (tree t
)
5127 /*struct saved_debug_info_d *sdi;*/
5129 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& DECL_P (t
));
5131 /* FIXME. Partial implementation for saving debug info removed. */
5135 /* Save all language fields needed to generate proper debug information
5136 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5139 save_debug_info_for_type (tree t
)
5141 /*struct saved_debug_info_d *sdi;*/
5143 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& TYPE_P (t
));
5145 /* FIXME. Partial implementation for saving debug info removed. */
5149 /* Add type or decl T to one of the list of tree nodes that need their
5150 language data removed. The lists are held inside FLD. */
5153 add_tree_to_fld_list (tree t
, struct free_lang_data_d
*fld
)
5157 fld
->decls
.safe_push (t
);
5158 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5159 save_debug_info_for_decl (t
);
5161 else if (TYPE_P (t
))
5163 fld
->types
.safe_push (t
);
5164 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5165 save_debug_info_for_type (t
);
5171 /* Push tree node T into FLD->WORKLIST. */
5174 fld_worklist_push (tree t
, struct free_lang_data_d
*fld
)
5176 if (t
&& !is_lang_specific (t
) && !pointer_set_contains (fld
->pset
, t
))
5177 fld
->worklist
.safe_push ((t
));
5181 /* Operand callback helper for free_lang_data_in_node. *TP is the
5182 subtree operand being considered. */
5185 find_decls_types_r (tree
*tp
, int *ws
, void *data
)
5188 struct free_lang_data_d
*fld
= (struct free_lang_data_d
*) data
;
5190 if (TREE_CODE (t
) == TREE_LIST
)
5193 /* Language specific nodes will be removed, so there is no need
5194 to gather anything under them. */
5195 if (is_lang_specific (t
))
5203 /* Note that walk_tree does not traverse every possible field in
5204 decls, so we have to do our own traversals here. */
5205 add_tree_to_fld_list (t
, fld
);
5207 fld_worklist_push (DECL_NAME (t
), fld
);
5208 fld_worklist_push (DECL_CONTEXT (t
), fld
);
5209 fld_worklist_push (DECL_SIZE (t
), fld
);
5210 fld_worklist_push (DECL_SIZE_UNIT (t
), fld
);
5212 /* We are going to remove everything under DECL_INITIAL for
5213 TYPE_DECLs. No point walking them. */
5214 if (TREE_CODE (t
) != TYPE_DECL
)
5215 fld_worklist_push (DECL_INITIAL (t
), fld
);
5217 fld_worklist_push (DECL_ATTRIBUTES (t
), fld
);
5218 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t
), fld
);
5220 if (TREE_CODE (t
) == FUNCTION_DECL
)
5222 fld_worklist_push (DECL_ARGUMENTS (t
), fld
);
5223 fld_worklist_push (DECL_RESULT (t
), fld
);
5225 else if (TREE_CODE (t
) == TYPE_DECL
)
5227 fld_worklist_push (DECL_ARGUMENT_FLD (t
), fld
);
5228 fld_worklist_push (DECL_VINDEX (t
), fld
);
5229 fld_worklist_push (DECL_ORIGINAL_TYPE (t
), fld
);
5231 else if (TREE_CODE (t
) == FIELD_DECL
)
5233 fld_worklist_push (DECL_FIELD_OFFSET (t
), fld
);
5234 fld_worklist_push (DECL_BIT_FIELD_TYPE (t
), fld
);
5235 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t
), fld
);
5236 fld_worklist_push (DECL_FCONTEXT (t
), fld
);
5238 else if (TREE_CODE (t
) == VAR_DECL
)
5240 fld_worklist_push (DECL_SECTION_NAME (t
), fld
);
5241 fld_worklist_push (DECL_COMDAT_GROUP (t
), fld
);
5244 if ((TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
5245 && DECL_HAS_VALUE_EXPR_P (t
))
5246 fld_worklist_push (DECL_VALUE_EXPR (t
), fld
);
5248 if (TREE_CODE (t
) != FIELD_DECL
5249 && TREE_CODE (t
) != TYPE_DECL
)
5250 fld_worklist_push (TREE_CHAIN (t
), fld
);
5253 else if (TYPE_P (t
))
5255 /* Note that walk_tree does not traverse every possible field in
5256 types, so we have to do our own traversals here. */
5257 add_tree_to_fld_list (t
, fld
);
5259 if (!RECORD_OR_UNION_TYPE_P (t
))
5260 fld_worklist_push (TYPE_CACHED_VALUES (t
), fld
);
5261 fld_worklist_push (TYPE_SIZE (t
), fld
);
5262 fld_worklist_push (TYPE_SIZE_UNIT (t
), fld
);
5263 fld_worklist_push (TYPE_ATTRIBUTES (t
), fld
);
5264 fld_worklist_push (TYPE_POINTER_TO (t
), fld
);
5265 fld_worklist_push (TYPE_REFERENCE_TO (t
), fld
);
5266 fld_worklist_push (TYPE_NAME (t
), fld
);
5267 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5268 them and thus do not and want not to reach unused pointer types
5270 if (!POINTER_TYPE_P (t
))
5271 fld_worklist_push (TYPE_MINVAL (t
), fld
);
5272 if (!RECORD_OR_UNION_TYPE_P (t
))
5273 fld_worklist_push (TYPE_MAXVAL (t
), fld
);
5274 fld_worklist_push (TYPE_MAIN_VARIANT (t
), fld
);
5275 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5276 do not and want not to reach unused variants this way. */
5277 if (TYPE_CONTEXT (t
))
5279 tree ctx
= TYPE_CONTEXT (t
);
5280 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5281 So push that instead. */
5282 while (ctx
&& TREE_CODE (ctx
) == BLOCK
)
5283 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5284 fld_worklist_push (ctx
, fld
);
5286 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5287 and want not to reach unused types this way. */
5289 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
))
5293 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t
)), i
, tem
)
5294 fld_worklist_push (TREE_TYPE (tem
), fld
);
5295 tem
= BINFO_VIRTUALS (TYPE_BINFO (t
));
5297 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5298 && TREE_CODE (tem
) == TREE_LIST
)
5301 fld_worklist_push (TREE_VALUE (tem
), fld
);
5302 tem
= TREE_CHAIN (tem
);
5306 if (RECORD_OR_UNION_TYPE_P (t
))
5309 /* Push all TYPE_FIELDS - there can be interleaving interesting
5310 and non-interesting things. */
5311 tem
= TYPE_FIELDS (t
);
5314 if (TREE_CODE (tem
) == FIELD_DECL
5315 || TREE_CODE (tem
) == TYPE_DECL
)
5316 fld_worklist_push (tem
, fld
);
5317 tem
= TREE_CHAIN (tem
);
5321 fld_worklist_push (TYPE_STUB_DECL (t
), fld
);
5324 else if (TREE_CODE (t
) == BLOCK
)
5327 for (tem
= BLOCK_VARS (t
); tem
; tem
= TREE_CHAIN (tem
))
5328 fld_worklist_push (tem
, fld
);
5329 for (tem
= BLOCK_SUBBLOCKS (t
); tem
; tem
= BLOCK_CHAIN (tem
))
5330 fld_worklist_push (tem
, fld
);
5331 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t
), fld
);
5334 if (TREE_CODE (t
) != IDENTIFIER_NODE
5335 && CODE_CONTAINS_STRUCT (TREE_CODE (t
), TS_TYPED
))
5336 fld_worklist_push (TREE_TYPE (t
), fld
);
5342 /* Find decls and types in T. */
5345 find_decls_types (tree t
, struct free_lang_data_d
*fld
)
5349 if (!pointer_set_contains (fld
->pset
, t
))
5350 walk_tree (&t
, find_decls_types_r
, fld
, fld
->pset
);
5351 if (fld
->worklist
.is_empty ())
5353 t
= fld
->worklist
.pop ();
5357 /* Translate all the types in LIST with the corresponding runtime
5361 get_eh_types_for_runtime (tree list
)
5365 if (list
== NULL_TREE
)
5368 head
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5370 list
= TREE_CHAIN (list
);
5373 tree n
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5374 TREE_CHAIN (prev
) = n
;
5375 prev
= TREE_CHAIN (prev
);
5376 list
= TREE_CHAIN (list
);
5383 /* Find decls and types referenced in EH region R and store them in
5384 FLD->DECLS and FLD->TYPES. */
5387 find_decls_types_in_eh_region (eh_region r
, struct free_lang_data_d
*fld
)
5398 /* The types referenced in each catch must first be changed to the
5399 EH types used at runtime. This removes references to FE types
5401 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
5403 c
->type_list
= get_eh_types_for_runtime (c
->type_list
);
5404 walk_tree (&c
->type_list
, find_decls_types_r
, fld
, fld
->pset
);
5409 case ERT_ALLOWED_EXCEPTIONS
:
5410 r
->u
.allowed
.type_list
5411 = get_eh_types_for_runtime (r
->u
.allowed
.type_list
);
5412 walk_tree (&r
->u
.allowed
.type_list
, find_decls_types_r
, fld
, fld
->pset
);
5415 case ERT_MUST_NOT_THROW
:
5416 walk_tree (&r
->u
.must_not_throw
.failure_decl
,
5417 find_decls_types_r
, fld
, fld
->pset
);
5423 /* Find decls and types referenced in cgraph node N and store them in
5424 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5425 look for *every* kind of DECL and TYPE node reachable from N,
5426 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5427 NAMESPACE_DECLs, etc). */
5430 find_decls_types_in_node (struct cgraph_node
*n
, struct free_lang_data_d
*fld
)
5433 struct function
*fn
;
5437 find_decls_types (n
->decl
, fld
);
5439 if (!gimple_has_body_p (n
->decl
))
5442 gcc_assert (current_function_decl
== NULL_TREE
&& cfun
== NULL
);
5444 fn
= DECL_STRUCT_FUNCTION (n
->decl
);
5446 /* Traverse locals. */
5447 FOR_EACH_LOCAL_DECL (fn
, ix
, t
)
5448 find_decls_types (t
, fld
);
5450 /* Traverse EH regions in FN. */
5453 FOR_ALL_EH_REGION_FN (r
, fn
)
5454 find_decls_types_in_eh_region (r
, fld
);
5457 /* Traverse every statement in FN. */
5458 FOR_EACH_BB_FN (bb
, fn
)
5460 gimple_stmt_iterator si
;
5463 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
5465 gimple phi
= gsi_stmt (si
);
5467 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5469 tree
*arg_p
= gimple_phi_arg_def_ptr (phi
, i
);
5470 find_decls_types (*arg_p
, fld
);
5474 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
5476 gimple stmt
= gsi_stmt (si
);
5478 if (is_gimple_call (stmt
))
5479 find_decls_types (gimple_call_fntype (stmt
), fld
);
5481 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
5483 tree arg
= gimple_op (stmt
, i
);
5484 find_decls_types (arg
, fld
);
5491 /* Find decls and types referenced in varpool node N and store them in
5492 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5493 look for *every* kind of DECL and TYPE node reachable from N,
5494 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5495 NAMESPACE_DECLs, etc). */
5498 find_decls_types_in_var (varpool_node
*v
, struct free_lang_data_d
*fld
)
5500 find_decls_types (v
->decl
, fld
);
5503 /* If T needs an assembler name, have one created for it. */
5506 assign_assembler_name_if_neeeded (tree t
)
5508 if (need_assembler_name_p (t
))
5510 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5511 diagnostics that use input_location to show locus
5512 information. The problem here is that, at this point,
5513 input_location is generally anchored to the end of the file
5514 (since the parser is long gone), so we don't have a good
5515 position to pin it to.
5517 To alleviate this problem, this uses the location of T's
5518 declaration. Examples of this are
5519 testsuite/g++.dg/template/cond2.C and
5520 testsuite/g++.dg/template/pr35240.C. */
5521 location_t saved_location
= input_location
;
5522 input_location
= DECL_SOURCE_LOCATION (t
);
5524 decl_assembler_name (t
);
5526 input_location
= saved_location
;
5531 /* Free language specific information for every operand and expression
5532 in every node of the call graph. This process operates in three stages:
5534 1- Every callgraph node and varpool node is traversed looking for
5535 decls and types embedded in them. This is a more exhaustive
5536 search than that done by find_referenced_vars, because it will
5537 also collect individual fields, decls embedded in types, etc.
5539 2- All the decls found are sent to free_lang_data_in_decl.
5541 3- All the types found are sent to free_lang_data_in_type.
5543 The ordering between decls and types is important because
5544 free_lang_data_in_decl sets assembler names, which includes
5545 mangling. So types cannot be freed up until assembler names have
5549 free_lang_data_in_cgraph (void)
5551 struct cgraph_node
*n
;
5553 struct free_lang_data_d fld
;
5558 /* Initialize sets and arrays to store referenced decls and types. */
5559 fld
.pset
= pointer_set_create ();
5560 fld
.worklist
.create (0);
5561 fld
.decls
.create (100);
5562 fld
.types
.create (100);
5564 /* Find decls and types in the body of every function in the callgraph. */
5565 FOR_EACH_FUNCTION (n
)
5566 find_decls_types_in_node (n
, &fld
);
5568 FOR_EACH_VEC_SAFE_ELT (alias_pairs
, i
, p
)
5569 find_decls_types (p
->decl
, &fld
);
5571 /* Find decls and types in every varpool symbol. */
5572 FOR_EACH_VARIABLE (v
)
5573 find_decls_types_in_var (v
, &fld
);
5575 /* Set the assembler name on every decl found. We need to do this
5576 now because free_lang_data_in_decl will invalidate data needed
5577 for mangling. This breaks mangling on interdependent decls. */
5578 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5579 assign_assembler_name_if_neeeded (t
);
5581 /* Traverse every decl found freeing its language data. */
5582 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5583 free_lang_data_in_decl (t
);
5585 /* Traverse every type found freeing its language data. */
5586 FOR_EACH_VEC_ELT (fld
.types
, i
, t
)
5587 free_lang_data_in_type (t
);
5589 pointer_set_destroy (fld
.pset
);
5590 fld
.worklist
.release ();
5591 fld
.decls
.release ();
5592 fld
.types
.release ();
5596 /* Free resources that are used by FE but are not needed once they are done. */
5599 free_lang_data (void)
5603 /* If we are the LTO frontend we have freed lang-specific data already. */
5605 || !flag_generate_lto
)
5608 /* Allocate and assign alias sets to the standard integer types
5609 while the slots are still in the way the frontends generated them. */
5610 for (i
= 0; i
< itk_none
; ++i
)
5611 if (integer_types
[i
])
5612 TYPE_ALIAS_SET (integer_types
[i
]) = get_alias_set (integer_types
[i
]);
5614 /* Traverse the IL resetting language specific information for
5615 operands, expressions, etc. */
5616 free_lang_data_in_cgraph ();
5618 /* Create gimple variants for common types. */
5619 ptrdiff_type_node
= integer_type_node
;
5620 fileptr_type_node
= ptr_type_node
;
5622 /* Reset some langhooks. Do not reset types_compatible_p, it may
5623 still be used indirectly via the get_alias_set langhook. */
5624 lang_hooks
.dwarf_name
= lhd_dwarf_name
;
5625 lang_hooks
.decl_printable_name
= gimple_decl_printable_name
;
5626 /* We do not want the default decl_assembler_name implementation,
5627 rather if we have fixed everything we want a wrapper around it
5628 asserting that all non-local symbols already got their assembler
5629 name and only produce assembler names for local symbols. Or rather
5630 make sure we never call decl_assembler_name on local symbols and
5631 devise a separate, middle-end private scheme for it. */
5633 /* Reset diagnostic machinery. */
5634 tree_diagnostics_defaults (global_dc
);
5642 const pass_data pass_data_ipa_free_lang_data
=
5644 SIMPLE_IPA_PASS
, /* type */
5645 "*free_lang_data", /* name */
5646 OPTGROUP_NONE
, /* optinfo_flags */
5647 true, /* has_execute */
5648 TV_IPA_FREE_LANG_DATA
, /* tv_id */
5649 0, /* properties_required */
5650 0, /* properties_provided */
5651 0, /* properties_destroyed */
5652 0, /* todo_flags_start */
5653 0, /* todo_flags_finish */
5656 class pass_ipa_free_lang_data
: public simple_ipa_opt_pass
5659 pass_ipa_free_lang_data (gcc::context
*ctxt
)
5660 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data
, ctxt
)
5663 /* opt_pass methods: */
5664 virtual unsigned int execute (function
*) { return free_lang_data (); }
5666 }; // class pass_ipa_free_lang_data
5670 simple_ipa_opt_pass
*
5671 make_pass_ipa_free_lang_data (gcc::context
*ctxt
)
5673 return new pass_ipa_free_lang_data (ctxt
);
5676 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5677 ATTR_NAME. Also used internally by remove_attribute(). */
5679 private_is_attribute_p (const char *attr_name
, size_t attr_len
, const_tree ident
)
5681 size_t ident_len
= IDENTIFIER_LENGTH (ident
);
5683 if (ident_len
== attr_len
)
5685 if (strcmp (attr_name
, IDENTIFIER_POINTER (ident
)) == 0)
5688 else if (ident_len
== attr_len
+ 4)
5690 /* There is the possibility that ATTR is 'text' and IDENT is
5692 const char *p
= IDENTIFIER_POINTER (ident
);
5693 if (p
[0] == '_' && p
[1] == '_'
5694 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5695 && strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5702 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5703 of ATTR_NAME, and LIST is not NULL_TREE. */
5705 private_lookup_attribute (const char *attr_name
, size_t attr_len
, tree list
)
5709 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5711 if (ident_len
== attr_len
)
5713 if (!strcmp (attr_name
,
5714 IDENTIFIER_POINTER (get_attribute_name (list
))))
5717 /* TODO: If we made sure that attributes were stored in the
5718 canonical form without '__...__' (ie, as in 'text' as opposed
5719 to '__text__') then we could avoid the following case. */
5720 else if (ident_len
== attr_len
+ 4)
5722 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5723 if (p
[0] == '_' && p
[1] == '_'
5724 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5725 && strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5728 list
= TREE_CHAIN (list
);
5734 /* A variant of lookup_attribute() that can be used with an identifier
5735 as the first argument, and where the identifier can be either
5736 'text' or '__text__'.
5738 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5739 return a pointer to the attribute's list element if the attribute
5740 is part of the list, or NULL_TREE if not found. If the attribute
5741 appears more than once, this only returns the first occurrence; the
5742 TREE_CHAIN of the return value should be passed back in if further
5743 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5744 can be in the form 'text' or '__text__'. */
5746 lookup_ident_attribute (tree attr_identifier
, tree list
)
5748 gcc_checking_assert (TREE_CODE (attr_identifier
) == IDENTIFIER_NODE
);
5752 gcc_checking_assert (TREE_CODE (get_attribute_name (list
))
5753 == IDENTIFIER_NODE
);
5755 /* Identifiers can be compared directly for equality. */
5756 if (attr_identifier
== get_attribute_name (list
))
5759 /* If they are not equal, they may still be one in the form
5760 'text' while the other one is in the form '__text__'. TODO:
5761 If we were storing attributes in normalized 'text' form, then
5762 this could all go away and we could take full advantage of
5763 the fact that we're comparing identifiers. :-) */
5765 size_t attr_len
= IDENTIFIER_LENGTH (attr_identifier
);
5766 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5768 if (ident_len
== attr_len
+ 4)
5770 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5771 const char *q
= IDENTIFIER_POINTER (attr_identifier
);
5772 if (p
[0] == '_' && p
[1] == '_'
5773 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5774 && strncmp (q
, p
+ 2, attr_len
) == 0)
5777 else if (ident_len
+ 4 == attr_len
)
5779 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5780 const char *q
= IDENTIFIER_POINTER (attr_identifier
);
5781 if (q
[0] == '_' && q
[1] == '_'
5782 && q
[attr_len
- 2] == '_' && q
[attr_len
- 1] == '_'
5783 && strncmp (q
+ 2, p
, ident_len
) == 0)
5787 list
= TREE_CHAIN (list
);
5793 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5797 remove_attribute (const char *attr_name
, tree list
)
5800 size_t attr_len
= strlen (attr_name
);
5802 gcc_checking_assert (attr_name
[0] != '_');
5804 for (p
= &list
; *p
; )
5807 /* TODO: If we were storing attributes in normalized form, here
5808 we could use a simple strcmp(). */
5809 if (private_is_attribute_p (attr_name
, attr_len
, get_attribute_name (l
)))
5810 *p
= TREE_CHAIN (l
);
5812 p
= &TREE_CHAIN (l
);
5818 /* Return an attribute list that is the union of a1 and a2. */
5821 merge_attributes (tree a1
, tree a2
)
5825 /* Either one unset? Take the set one. */
5827 if ((attributes
= a1
) == 0)
5830 /* One that completely contains the other? Take it. */
5832 else if (a2
!= 0 && ! attribute_list_contained (a1
, a2
))
5834 if (attribute_list_contained (a2
, a1
))
5838 /* Pick the longest list, and hang on the other list. */
5840 if (list_length (a1
) < list_length (a2
))
5841 attributes
= a2
, a2
= a1
;
5843 for (; a2
!= 0; a2
= TREE_CHAIN (a2
))
5846 for (a
= lookup_ident_attribute (get_attribute_name (a2
),
5848 a
!= NULL_TREE
&& !attribute_value_equal (a
, a2
);
5849 a
= lookup_ident_attribute (get_attribute_name (a2
),
5854 a1
= copy_node (a2
);
5855 TREE_CHAIN (a1
) = attributes
;
5864 /* Given types T1 and T2, merge their attributes and return
5868 merge_type_attributes (tree t1
, tree t2
)
5870 return merge_attributes (TYPE_ATTRIBUTES (t1
),
5871 TYPE_ATTRIBUTES (t2
));
5874 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5878 merge_decl_attributes (tree olddecl
, tree newdecl
)
5880 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
5881 DECL_ATTRIBUTES (newdecl
));
5884 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5886 /* Specialization of merge_decl_attributes for various Windows targets.
5888 This handles the following situation:
5890 __declspec (dllimport) int foo;
5893 The second instance of `foo' nullifies the dllimport. */
5896 merge_dllimport_decl_attributes (tree old
, tree new_tree
)
5899 int delete_dllimport_p
= 1;
5901 /* What we need to do here is remove from `old' dllimport if it doesn't
5902 appear in `new'. dllimport behaves like extern: if a declaration is
5903 marked dllimport and a definition appears later, then the object
5904 is not dllimport'd. We also remove a `new' dllimport if the old list
5905 contains dllexport: dllexport always overrides dllimport, regardless
5906 of the order of declaration. */
5907 if (!VAR_OR_FUNCTION_DECL_P (new_tree
))
5908 delete_dllimport_p
= 0;
5909 else if (DECL_DLLIMPORT_P (new_tree
)
5910 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old
)))
5912 DECL_DLLIMPORT_P (new_tree
) = 0;
5913 warning (OPT_Wattributes
, "%q+D already declared with dllexport attribute: "
5914 "dllimport ignored", new_tree
);
5916 else if (DECL_DLLIMPORT_P (old
) && !DECL_DLLIMPORT_P (new_tree
))
5918 /* Warn about overriding a symbol that has already been used, e.g.:
5919 extern int __attribute__ ((dllimport)) foo;
5920 int* bar () {return &foo;}
5923 if (TREE_USED (old
))
5925 warning (0, "%q+D redeclared without dllimport attribute "
5926 "after being referenced with dll linkage", new_tree
);
5927 /* If we have used a variable's address with dllimport linkage,
5928 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5929 decl may already have had TREE_CONSTANT computed.
5930 We still remove the attribute so that assembler code refers
5931 to '&foo rather than '_imp__foo'. */
5932 if (TREE_CODE (old
) == VAR_DECL
&& TREE_ADDRESSABLE (old
))
5933 DECL_DLLIMPORT_P (new_tree
) = 1;
5936 /* Let an inline definition silently override the external reference,
5937 but otherwise warn about attribute inconsistency. */
5938 else if (TREE_CODE (new_tree
) == VAR_DECL
5939 || !DECL_DECLARED_INLINE_P (new_tree
))
5940 warning (OPT_Wattributes
, "%q+D redeclared without dllimport attribute: "
5941 "previous dllimport ignored", new_tree
);
5944 delete_dllimport_p
= 0;
5946 a
= merge_attributes (DECL_ATTRIBUTES (old
), DECL_ATTRIBUTES (new_tree
));
5948 if (delete_dllimport_p
)
5949 a
= remove_attribute ("dllimport", a
);
5954 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
5955 struct attribute_spec.handler. */
5958 handle_dll_attribute (tree
* pnode
, tree name
, tree args
, int flags
,
5964 /* These attributes may apply to structure and union types being created,
5965 but otherwise should pass to the declaration involved. */
5968 if (flags
& ((int) ATTR_FLAG_DECL_NEXT
| (int) ATTR_FLAG_FUNCTION_NEXT
5969 | (int) ATTR_FLAG_ARRAY_NEXT
))
5971 *no_add_attrs
= true;
5972 return tree_cons (name
, args
, NULL_TREE
);
5974 if (TREE_CODE (node
) == RECORD_TYPE
5975 || TREE_CODE (node
) == UNION_TYPE
)
5977 node
= TYPE_NAME (node
);
5983 warning (OPT_Wattributes
, "%qE attribute ignored",
5985 *no_add_attrs
= true;
5990 if (TREE_CODE (node
) != FUNCTION_DECL
5991 && TREE_CODE (node
) != VAR_DECL
5992 && TREE_CODE (node
) != TYPE_DECL
)
5994 *no_add_attrs
= true;
5995 warning (OPT_Wattributes
, "%qE attribute ignored",
6000 if (TREE_CODE (node
) == TYPE_DECL
6001 && TREE_CODE (TREE_TYPE (node
)) != RECORD_TYPE
6002 && TREE_CODE (TREE_TYPE (node
)) != UNION_TYPE
)
6004 *no_add_attrs
= true;
6005 warning (OPT_Wattributes
, "%qE attribute ignored",
6010 is_dllimport
= is_attribute_p ("dllimport", name
);
6012 /* Report error on dllimport ambiguities seen now before they cause
6016 /* Honor any target-specific overrides. */
6017 if (!targetm
.valid_dllimport_attribute_p (node
))
6018 *no_add_attrs
= true;
6020 else if (TREE_CODE (node
) == FUNCTION_DECL
6021 && DECL_DECLARED_INLINE_P (node
))
6023 warning (OPT_Wattributes
, "inline function %q+D declared as "
6024 " dllimport: attribute ignored", node
);
6025 *no_add_attrs
= true;
6027 /* Like MS, treat definition of dllimported variables and
6028 non-inlined functions on declaration as syntax errors. */
6029 else if (TREE_CODE (node
) == FUNCTION_DECL
&& DECL_INITIAL (node
))
6031 error ("function %q+D definition is marked dllimport", node
);
6032 *no_add_attrs
= true;
6035 else if (TREE_CODE (node
) == VAR_DECL
)
6037 if (DECL_INITIAL (node
))
6039 error ("variable %q+D definition is marked dllimport",
6041 *no_add_attrs
= true;
6044 /* `extern' needn't be specified with dllimport.
6045 Specify `extern' now and hope for the best. Sigh. */
6046 DECL_EXTERNAL (node
) = 1;
6047 /* Also, implicitly give dllimport'd variables declared within
6048 a function global scope, unless declared static. */
6049 if (current_function_decl
!= NULL_TREE
&& !TREE_STATIC (node
))
6050 TREE_PUBLIC (node
) = 1;
6053 if (*no_add_attrs
== false)
6054 DECL_DLLIMPORT_P (node
) = 1;
6056 else if (TREE_CODE (node
) == FUNCTION_DECL
6057 && DECL_DECLARED_INLINE_P (node
)
6058 && flag_keep_inline_dllexport
)
6059 /* An exported function, even if inline, must be emitted. */
6060 DECL_EXTERNAL (node
) = 0;
6062 /* Report error if symbol is not accessible at global scope. */
6063 if (!TREE_PUBLIC (node
)
6064 && (TREE_CODE (node
) == VAR_DECL
6065 || TREE_CODE (node
) == FUNCTION_DECL
))
6067 error ("external linkage required for symbol %q+D because of "
6068 "%qE attribute", node
, name
);
6069 *no_add_attrs
= true;
6072 /* A dllexport'd entity must have default visibility so that other
6073 program units (shared libraries or the main executable) can see
6074 it. A dllimport'd entity must have default visibility so that
6075 the linker knows that undefined references within this program
6076 unit can be resolved by the dynamic linker. */
6079 if (DECL_VISIBILITY_SPECIFIED (node
)
6080 && DECL_VISIBILITY (node
) != VISIBILITY_DEFAULT
)
6081 error ("%qE implies default visibility, but %qD has already "
6082 "been declared with a different visibility",
6084 DECL_VISIBILITY (node
) = VISIBILITY_DEFAULT
;
6085 DECL_VISIBILITY_SPECIFIED (node
) = 1;
6091 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6093 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6094 of the various TYPE_QUAL values. */
6097 set_type_quals (tree type
, int type_quals
)
6099 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
6100 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
6101 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
6102 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
6103 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
6106 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6109 check_qualified_type (const_tree cand
, const_tree base
, int type_quals
)
6111 return (TYPE_QUALS (cand
) == type_quals
6112 && TYPE_NAME (cand
) == TYPE_NAME (base
)
6113 /* Apparently this is needed for Objective-C. */
6114 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6115 /* Check alignment. */
6116 && TYPE_ALIGN (cand
) == TYPE_ALIGN (base
)
6117 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6118 TYPE_ATTRIBUTES (base
)));
6121 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6124 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
6126 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
6127 && TYPE_NAME (cand
) == TYPE_NAME (base
)
6128 /* Apparently this is needed for Objective-C. */
6129 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6130 /* Check alignment. */
6131 && TYPE_ALIGN (cand
) == align
6132 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6133 TYPE_ATTRIBUTES (base
)));
6136 /* This function checks to see if TYPE matches the size one of the built-in
6137 atomic types, and returns that core atomic type. */
6140 find_atomic_core_type (tree type
)
6142 tree base_atomic_type
;
6144 /* Only handle complete types. */
6145 if (TYPE_SIZE (type
) == NULL_TREE
)
6148 HOST_WIDE_INT type_size
= tree_to_uhwi (TYPE_SIZE (type
));
6152 base_atomic_type
= atomicQI_type_node
;
6156 base_atomic_type
= atomicHI_type_node
;
6160 base_atomic_type
= atomicSI_type_node
;
6164 base_atomic_type
= atomicDI_type_node
;
6168 base_atomic_type
= atomicTI_type_node
;
6172 base_atomic_type
= NULL_TREE
;
6175 return base_atomic_type
;
6178 /* Return a version of the TYPE, qualified as indicated by the
6179 TYPE_QUALS, if one exists. If no qualified version exists yet,
6180 return NULL_TREE. */
6183 get_qualified_type (tree type
, int type_quals
)
6187 if (TYPE_QUALS (type
) == type_quals
)
6190 /* Search the chain of variants to see if there is already one there just
6191 like the one we need to have. If so, use that existing one. We must
6192 preserve the TYPE_NAME, since there is code that depends on this. */
6193 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6194 if (check_qualified_type (t
, type
, type_quals
))
6200 /* Like get_qualified_type, but creates the type if it does not
6201 exist. This function never returns NULL_TREE. */
6204 build_qualified_type (tree type
, int type_quals
)
6208 /* See if we already have the appropriate qualified variant. */
6209 t
= get_qualified_type (type
, type_quals
);
6211 /* If not, build it. */
6214 t
= build_variant_type_copy (type
);
6215 set_type_quals (t
, type_quals
);
6217 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
6219 /* See if this object can map to a basic atomic type. */
6220 tree atomic_type
= find_atomic_core_type (type
);
6223 /* Ensure the alignment of this type is compatible with
6224 the required alignment of the atomic type. */
6225 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
6226 TYPE_ALIGN (t
) = TYPE_ALIGN (atomic_type
);
6230 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6231 /* Propagate structural equality. */
6232 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6233 else if (TYPE_CANONICAL (type
) != type
)
6234 /* Build the underlying canonical type, since it is different
6236 TYPE_CANONICAL (t
) = build_qualified_type (TYPE_CANONICAL (type
),
6239 /* T is its own canonical type. */
6240 TYPE_CANONICAL (t
) = t
;
6247 /* Create a variant of type T with alignment ALIGN. */
6250 build_aligned_type (tree type
, unsigned int align
)
6254 if (TYPE_PACKED (type
)
6255 || TYPE_ALIGN (type
) == align
)
6258 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6259 if (check_aligned_type (t
, type
, align
))
6262 t
= build_variant_type_copy (type
);
6263 TYPE_ALIGN (t
) = align
;
6268 /* Create a new distinct copy of TYPE. The new type is made its own
6269 MAIN_VARIANT. If TYPE requires structural equality checks, the
6270 resulting type requires structural equality checks; otherwise, its
6271 TYPE_CANONICAL points to itself. */
6274 build_distinct_type_copy (tree type
)
6276 tree t
= copy_node (type
);
6278 TYPE_POINTER_TO (t
) = 0;
6279 TYPE_REFERENCE_TO (t
) = 0;
6281 /* Set the canonical type either to a new equivalence class, or
6282 propagate the need for structural equality checks. */
6283 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6284 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6286 TYPE_CANONICAL (t
) = t
;
6288 /* Make it its own variant. */
6289 TYPE_MAIN_VARIANT (t
) = t
;
6290 TYPE_NEXT_VARIANT (t
) = 0;
6292 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6293 whose TREE_TYPE is not t. This can also happen in the Ada
6294 frontend when using subtypes. */
6299 /* Create a new variant of TYPE, equivalent but distinct. This is so
6300 the caller can modify it. TYPE_CANONICAL for the return type will
6301 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6302 are considered equal by the language itself (or that both types
6303 require structural equality checks). */
6306 build_variant_type_copy (tree type
)
6308 tree t
, m
= TYPE_MAIN_VARIANT (type
);
6310 t
= build_distinct_type_copy (type
);
6312 /* Since we're building a variant, assume that it is a non-semantic
6313 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6314 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
6316 /* Add the new type to the chain of variants of TYPE. */
6317 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
6318 TYPE_NEXT_VARIANT (m
) = t
;
6319 TYPE_MAIN_VARIANT (t
) = m
;
6324 /* Return true if the from tree in both tree maps are equal. */
6327 tree_map_base_eq (const void *va
, const void *vb
)
6329 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
6330 *const b
= (const struct tree_map_base
*) vb
;
6331 return (a
->from
== b
->from
);
6334 /* Hash a from tree in a tree_base_map. */
6337 tree_map_base_hash (const void *item
)
6339 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
6342 /* Return true if this tree map structure is marked for garbage collection
6343 purposes. We simply return true if the from tree is marked, so that this
6344 structure goes away when the from tree goes away. */
6347 tree_map_base_marked_p (const void *p
)
6349 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
6352 /* Hash a from tree in a tree_map. */
6355 tree_map_hash (const void *item
)
6357 return (((const struct tree_map
*) item
)->hash
);
6360 /* Hash a from tree in a tree_decl_map. */
6363 tree_decl_map_hash (const void *item
)
6365 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
6368 /* Return the initialization priority for DECL. */
6371 decl_init_priority_lookup (tree decl
)
6373 struct tree_priority_map
*h
;
6374 struct tree_map_base in
;
6376 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl
));
6378 h
= (struct tree_priority_map
*) htab_find (init_priority_for_decl
, &in
);
6379 return h
? h
->init
: DEFAULT_INIT_PRIORITY
;
6382 /* Return the finalization priority for DECL. */
6385 decl_fini_priority_lookup (tree decl
)
6387 struct tree_priority_map
*h
;
6388 struct tree_map_base in
;
6390 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
6392 h
= (struct tree_priority_map
*) htab_find (init_priority_for_decl
, &in
);
6393 return h
? h
->fini
: DEFAULT_INIT_PRIORITY
;
6396 /* Return the initialization and finalization priority information for
6397 DECL. If there is no previous priority information, a freshly
6398 allocated structure is returned. */
6400 static struct tree_priority_map
*
6401 decl_priority_info (tree decl
)
6403 struct tree_priority_map in
;
6404 struct tree_priority_map
*h
;
6407 in
.base
.from
= decl
;
6408 loc
= htab_find_slot (init_priority_for_decl
, &in
, INSERT
);
6409 h
= (struct tree_priority_map
*) *loc
;
6412 h
= ggc_cleared_alloc
<tree_priority_map
> ();
6414 h
->base
.from
= decl
;
6415 h
->init
= DEFAULT_INIT_PRIORITY
;
6416 h
->fini
= DEFAULT_INIT_PRIORITY
;
6422 /* Set the initialization priority for DECL to PRIORITY. */
6425 decl_init_priority_insert (tree decl
, priority_type priority
)
6427 struct tree_priority_map
*h
;
6429 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl
));
6430 if (priority
== DEFAULT_INIT_PRIORITY
)
6432 h
= decl_priority_info (decl
);
6436 /* Set the finalization priority for DECL to PRIORITY. */
6439 decl_fini_priority_insert (tree decl
, priority_type priority
)
6441 struct tree_priority_map
*h
;
6443 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
6444 if (priority
== DEFAULT_INIT_PRIORITY
)
6446 h
= decl_priority_info (decl
);
6450 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6453 print_debug_expr_statistics (void)
6455 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6456 (long) htab_size (debug_expr_for_decl
),
6457 (long) htab_elements (debug_expr_for_decl
),
6458 htab_collisions (debug_expr_for_decl
));
6461 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6464 print_value_expr_statistics (void)
6466 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6467 (long) htab_size (value_expr_for_decl
),
6468 (long) htab_elements (value_expr_for_decl
),
6469 htab_collisions (value_expr_for_decl
));
6472 /* Lookup a debug expression for FROM, and return it if we find one. */
6475 decl_debug_expr_lookup (tree from
)
6477 struct tree_decl_map
*h
, in
;
6478 in
.base
.from
= from
;
6480 h
= (struct tree_decl_map
*)
6481 htab_find_with_hash (debug_expr_for_decl
, &in
, DECL_UID (from
));
6487 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6490 decl_debug_expr_insert (tree from
, tree to
)
6492 struct tree_decl_map
*h
;
6495 h
= ggc_alloc
<tree_decl_map
> ();
6496 h
->base
.from
= from
;
6498 loc
= htab_find_slot_with_hash (debug_expr_for_decl
, h
, DECL_UID (from
),
6500 *(struct tree_decl_map
**) loc
= h
;
6503 /* Lookup a value expression for FROM, and return it if we find one. */
6506 decl_value_expr_lookup (tree from
)
6508 struct tree_decl_map
*h
, in
;
6509 in
.base
.from
= from
;
6511 h
= (struct tree_decl_map
*)
6512 htab_find_with_hash (value_expr_for_decl
, &in
, DECL_UID (from
));
6518 /* Insert a mapping FROM->TO in the value expression hashtable. */
6521 decl_value_expr_insert (tree from
, tree to
)
6523 struct tree_decl_map
*h
;
6526 h
= ggc_alloc
<tree_decl_map
> ();
6527 h
->base
.from
= from
;
6529 loc
= htab_find_slot_with_hash (value_expr_for_decl
, h
, DECL_UID (from
),
6531 *(struct tree_decl_map
**) loc
= h
;
6534 /* Lookup a vector of debug arguments for FROM, and return it if we
6538 decl_debug_args_lookup (tree from
)
6540 struct tree_vec_map
*h
, in
;
6542 if (!DECL_HAS_DEBUG_ARGS_P (from
))
6544 gcc_checking_assert (debug_args_for_decl
!= NULL
);
6545 in
.base
.from
= from
;
6546 h
= (struct tree_vec_map
*)
6547 htab_find_with_hash (debug_args_for_decl
, &in
, DECL_UID (from
));
6553 /* Insert a mapping FROM->empty vector of debug arguments in the value
6554 expression hashtable. */
6557 decl_debug_args_insert (tree from
)
6559 struct tree_vec_map
*h
;
6562 if (DECL_HAS_DEBUG_ARGS_P (from
))
6563 return decl_debug_args_lookup (from
);
6564 if (debug_args_for_decl
== NULL
)
6565 debug_args_for_decl
= htab_create_ggc (64, tree_vec_map_hash
,
6566 tree_vec_map_eq
, 0);
6567 h
= ggc_alloc
<tree_vec_map
> ();
6568 h
->base
.from
= from
;
6570 loc
= htab_find_slot_with_hash (debug_args_for_decl
, h
, DECL_UID (from
),
6572 *(struct tree_vec_map
**) loc
= h
;
6573 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
6577 /* Hashing of types so that we don't make duplicates.
6578 The entry point is `type_hash_canon'. */
6580 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6581 with types in the TREE_VALUE slots), by adding the hash codes
6582 of the individual types. */
6585 type_hash_list (const_tree list
, hashval_t hashcode
)
6589 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
6590 if (TREE_VALUE (tail
) != error_mark_node
)
6591 hashcode
= iterative_hash_object (TYPE_HASH (TREE_VALUE (tail
)),
6597 /* These are the Hashtable callback functions. */
6599 /* Returns true iff the types are equivalent. */
6602 type_hash_eq (const void *va
, const void *vb
)
6604 const struct type_hash
*const a
= (const struct type_hash
*) va
,
6605 *const b
= (const struct type_hash
*) vb
;
6607 /* First test the things that are the same for all types. */
6608 if (a
->hash
!= b
->hash
6609 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
6610 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
6611 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
6612 TYPE_ATTRIBUTES (b
->type
))
6613 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
6614 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
6617 /* Be careful about comparing arrays before and after the element type
6618 has been completed; don't compare TYPE_ALIGN unless both types are
6620 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
6621 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
6622 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
6625 switch (TREE_CODE (a
->type
))
6630 case REFERENCE_TYPE
:
6635 return TYPE_VECTOR_SUBPARTS (a
->type
) == TYPE_VECTOR_SUBPARTS (b
->type
);
6638 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
6639 && !(TYPE_VALUES (a
->type
)
6640 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
6641 && TYPE_VALUES (b
->type
)
6642 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
6643 && type_list_equal (TYPE_VALUES (a
->type
),
6644 TYPE_VALUES (b
->type
))))
6647 /* ... fall through ... */
6652 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
6654 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
6655 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
6656 TYPE_MAX_VALUE (b
->type
)))
6657 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6658 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6659 TYPE_MIN_VALUE (b
->type
))));
6661 case FIXED_POINT_TYPE
:
6662 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6665 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6668 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6669 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6670 || (TYPE_ARG_TYPES (a
->type
)
6671 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6672 && TYPE_ARG_TYPES (b
->type
)
6673 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6674 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6675 TYPE_ARG_TYPES (b
->type
)))))
6679 return TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
);
6683 case QUAL_UNION_TYPE
:
6684 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6685 || (TYPE_FIELDS (a
->type
)
6686 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6687 && TYPE_FIELDS (b
->type
)
6688 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6689 && type_list_equal (TYPE_FIELDS (a
->type
),
6690 TYPE_FIELDS (b
->type
))));
6693 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6694 || (TYPE_ARG_TYPES (a
->type
)
6695 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6696 && TYPE_ARG_TYPES (b
->type
)
6697 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6698 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6699 TYPE_ARG_TYPES (b
->type
))))
6707 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6708 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6713 /* Return the cached hash value. */
6716 type_hash_hash (const void *item
)
6718 return ((const struct type_hash
*) item
)->hash
;
6721 /* Look in the type hash table for a type isomorphic to TYPE.
6722 If one is found, return it. Otherwise return 0. */
6725 type_hash_lookup (hashval_t hashcode
, tree type
)
6727 struct type_hash
*h
, in
;
6729 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6730 must call that routine before comparing TYPE_ALIGNs. */
6736 h
= (struct type_hash
*) htab_find_with_hash (type_hash_table
, &in
,
6743 /* Add an entry to the type-hash-table
6744 for a type TYPE whose hash code is HASHCODE. */
6747 type_hash_add (hashval_t hashcode
, tree type
)
6749 struct type_hash
*h
;
6752 h
= ggc_alloc
<type_hash
> ();
6755 loc
= htab_find_slot_with_hash (type_hash_table
, h
, hashcode
, INSERT
);
6759 /* Given TYPE, and HASHCODE its hash code, return the canonical
6760 object for an identical type if one already exists.
6761 Otherwise, return TYPE, and record it as the canonical object.
6763 To use this function, first create a type of the sort you want.
6764 Then compute its hash code from the fields of the type that
6765 make it different from other similar types.
6766 Then call this function and use the value. */
6769 type_hash_canon (unsigned int hashcode
, tree type
)
6773 /* The hash table only contains main variants, so ensure that's what we're
6775 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6777 /* See if the type is in the hash table already. If so, return it.
6778 Otherwise, add the type. */
6779 t1
= type_hash_lookup (hashcode
, type
);
6782 if (GATHER_STATISTICS
)
6784 tree_code_counts
[(int) TREE_CODE (type
)]--;
6785 tree_node_counts
[(int) t_kind
]--;
6786 tree_node_sizes
[(int) t_kind
] -= sizeof (struct tree_type_non_common
);
6792 type_hash_add (hashcode
, type
);
6797 /* See if the data pointed to by the type hash table is marked. We consider
6798 it marked if the type is marked or if a debug type number or symbol
6799 table entry has been made for the type. */
6802 type_hash_marked_p (const void *p
)
6804 const_tree
const type
= ((const struct type_hash
*) p
)->type
;
6806 return ggc_marked_p (type
);
6810 print_type_hash_statistics (void)
6812 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6813 (long) htab_size (type_hash_table
),
6814 (long) htab_elements (type_hash_table
),
6815 htab_collisions (type_hash_table
));
6818 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6819 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6820 by adding the hash codes of the individual attributes. */
6823 attribute_hash_list (const_tree list
, hashval_t hashcode
)
6827 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
6828 /* ??? Do we want to add in TREE_VALUE too? */
6829 hashcode
= iterative_hash_object
6830 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail
)), hashcode
);
6834 /* Given two lists of attributes, return true if list l2 is
6835 equivalent to l1. */
6838 attribute_list_equal (const_tree l1
, const_tree l2
)
6843 return attribute_list_contained (l1
, l2
)
6844 && attribute_list_contained (l2
, l1
);
6847 /* Given two lists of attributes, return true if list L2 is
6848 completely contained within L1. */
6849 /* ??? This would be faster if attribute names were stored in a canonicalized
6850 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6851 must be used to show these elements are equivalent (which they are). */
6852 /* ??? It's not clear that attributes with arguments will always be handled
6856 attribute_list_contained (const_tree l1
, const_tree l2
)
6860 /* First check the obvious, maybe the lists are identical. */
6864 /* Maybe the lists are similar. */
6865 for (t1
= l1
, t2
= l2
;
6867 && get_attribute_name (t1
) == get_attribute_name (t2
)
6868 && TREE_VALUE (t1
) == TREE_VALUE (t2
);
6869 t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6872 /* Maybe the lists are equal. */
6873 if (t1
== 0 && t2
== 0)
6876 for (; t2
!= 0; t2
= TREE_CHAIN (t2
))
6879 /* This CONST_CAST is okay because lookup_attribute does not
6880 modify its argument and the return value is assigned to a
6882 for (attr
= lookup_ident_attribute (get_attribute_name (t2
),
6883 CONST_CAST_TREE (l1
));
6884 attr
!= NULL_TREE
&& !attribute_value_equal (t2
, attr
);
6885 attr
= lookup_ident_attribute (get_attribute_name (t2
),
6889 if (attr
== NULL_TREE
)
6896 /* Given two lists of types
6897 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6898 return 1 if the lists contain the same types in the same order.
6899 Also, the TREE_PURPOSEs must match. */
6902 type_list_equal (const_tree l1
, const_tree l2
)
6906 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6907 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
6908 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
6909 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
6910 && (TREE_TYPE (TREE_PURPOSE (t1
))
6911 == TREE_TYPE (TREE_PURPOSE (t2
))))))
6917 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6918 given by TYPE. If the argument list accepts variable arguments,
6919 then this function counts only the ordinary arguments. */
6922 type_num_arguments (const_tree type
)
6927 for (t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
6928 /* If the function does not take a variable number of arguments,
6929 the last element in the list will have type `void'. */
6930 if (VOID_TYPE_P (TREE_VALUE (t
)))
6938 /* Nonzero if integer constants T1 and T2
6939 represent the same constant value. */
6942 tree_int_cst_equal (const_tree t1
, const_tree t2
)
6947 if (t1
== 0 || t2
== 0)
6950 if (TREE_CODE (t1
) == INTEGER_CST
6951 && TREE_CODE (t2
) == INTEGER_CST
6952 && wi::to_widest (t1
) == wi::to_widest (t2
))
6958 /* Return true if T is an INTEGER_CST whose numerical value (extended
6959 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6962 tree_fits_shwi_p (const_tree t
)
6964 return (t
!= NULL_TREE
6965 && TREE_CODE (t
) == INTEGER_CST
6966 && wi::fits_shwi_p (wi::to_widest (t
)));
6969 /* Return true if T is an INTEGER_CST whose numerical value (extended
6970 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6973 tree_fits_uhwi_p (const_tree t
)
6975 return (t
!= NULL_TREE
6976 && TREE_CODE (t
) == INTEGER_CST
6977 && wi::fits_uhwi_p (wi::to_widest (t
)));
6980 /* T is an INTEGER_CST whose numerical value (extended according to
6981 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
6985 tree_to_shwi (const_tree t
)
6987 gcc_assert (tree_fits_shwi_p (t
));
6988 return TREE_INT_CST_LOW (t
);
6991 /* T is an INTEGER_CST whose numerical value (extended according to
6992 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
6995 unsigned HOST_WIDE_INT
6996 tree_to_uhwi (const_tree t
)
6998 gcc_assert (tree_fits_uhwi_p (t
));
6999 return TREE_INT_CST_LOW (t
);
7002 /* Return the most significant (sign) bit of T. */
7005 tree_int_cst_sign_bit (const_tree t
)
7007 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
7009 return wi::extract_uhwi (t
, bitno
, 1);
7012 /* Return an indication of the sign of the integer constant T.
7013 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7014 Note that -1 will never be returned if T's type is unsigned. */
7017 tree_int_cst_sgn (const_tree t
)
7019 if (wi::eq_p (t
, 0))
7021 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
7023 else if (wi::neg_p (t
))
7029 /* Return the minimum number of bits needed to represent VALUE in a
7030 signed or unsigned type, UNSIGNEDP says which. */
7033 tree_int_cst_min_precision (tree value
, signop sgn
)
7035 /* If the value is negative, compute its negative minus 1. The latter
7036 adjustment is because the absolute value of the largest negative value
7037 is one larger than the largest positive value. This is equivalent to
7038 a bit-wise negation, so use that operation instead. */
7040 if (tree_int_cst_sgn (value
) < 0)
7041 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
7043 /* Return the number of bits needed, taking into account the fact
7044 that we need one more bit for a signed than unsigned type.
7045 If value is 0 or -1, the minimum precision is 1 no matter
7046 whether unsignedp is true or false. */
7048 if (integer_zerop (value
))
7051 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
7054 /* Return truthvalue of whether T1 is the same tree structure as T2.
7055 Return 1 if they are the same.
7056 Return 0 if they are understandably different.
7057 Return -1 if either contains tree structure not understood by
7061 simple_cst_equal (const_tree t1
, const_tree t2
)
7063 enum tree_code code1
, code2
;
7069 if (t1
== 0 || t2
== 0)
7072 code1
= TREE_CODE (t1
);
7073 code2
= TREE_CODE (t2
);
7075 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
7077 if (CONVERT_EXPR_CODE_P (code2
)
7078 || code2
== NON_LVALUE_EXPR
)
7079 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7081 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
7084 else if (CONVERT_EXPR_CODE_P (code2
)
7085 || code2
== NON_LVALUE_EXPR
)
7086 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
7094 return wi::to_widest (t1
) == wi::to_widest (t2
);
7097 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1
), TREE_REAL_CST (t2
));
7100 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
7103 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
7104 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
7105 TREE_STRING_LENGTH (t1
)));
7109 unsigned HOST_WIDE_INT idx
;
7110 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
7111 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
7113 if (vec_safe_length (v1
) != vec_safe_length (v2
))
7116 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
7117 /* ??? Should we handle also fields here? */
7118 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
7124 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7127 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
7130 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
7133 const_tree arg1
, arg2
;
7134 const_call_expr_arg_iterator iter1
, iter2
;
7135 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
7136 arg2
= first_const_call_expr_arg (t2
, &iter2
);
7138 arg1
= next_const_call_expr_arg (&iter1
),
7139 arg2
= next_const_call_expr_arg (&iter2
))
7141 cmp
= simple_cst_equal (arg1
, arg2
);
7145 return arg1
== arg2
;
7149 /* Special case: if either target is an unallocated VAR_DECL,
7150 it means that it's going to be unified with whatever the
7151 TARGET_EXPR is really supposed to initialize, so treat it
7152 as being equivalent to anything. */
7153 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
7154 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
7155 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
7156 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
7157 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
7158 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
7161 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7166 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
7168 case WITH_CLEANUP_EXPR
:
7169 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7173 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
7176 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
7177 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7191 /* This general rule works for most tree codes. All exceptions should be
7192 handled above. If this is a language-specific tree code, we can't
7193 trust what might be in the operand, so say we don't know
7195 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
7198 switch (TREE_CODE_CLASS (code1
))
7202 case tcc_comparison
:
7203 case tcc_expression
:
7207 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
7209 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
7221 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7222 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7223 than U, respectively. */
7226 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
7228 if (tree_int_cst_sgn (t
) < 0)
7230 else if (!tree_fits_uhwi_p (t
))
7232 else if (TREE_INT_CST_LOW (t
) == u
)
7234 else if (TREE_INT_CST_LOW (t
) < u
)
7240 /* Return true if SIZE represents a constant size that is in bounds of
7241 what the middle-end and the backend accepts (covering not more than
7242 half of the address-space). */
7245 valid_constant_size_p (const_tree size
)
7247 if (! tree_fits_uhwi_p (size
)
7248 || TREE_OVERFLOW (size
)
7249 || tree_int_cst_sign_bit (size
) != 0)
7254 /* Return the precision of the type, or for a complex or vector type the
7255 precision of the type of its elements. */
7258 element_precision (const_tree type
)
7260 enum tree_code code
= TREE_CODE (type
);
7261 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
7262 type
= TREE_TYPE (type
);
7264 return TYPE_PRECISION (type
);
7267 /* Return true if CODE represents an associative tree code. Otherwise
7270 associative_tree_code (enum tree_code code
)
7289 /* Return true if CODE represents a commutative tree code. Otherwise
7292 commutative_tree_code (enum tree_code code
)
7298 case MULT_HIGHPART_EXPR
:
7306 case UNORDERED_EXPR
:
7310 case TRUTH_AND_EXPR
:
7311 case TRUTH_XOR_EXPR
:
7313 case WIDEN_MULT_EXPR
:
7314 case VEC_WIDEN_MULT_HI_EXPR
:
7315 case VEC_WIDEN_MULT_LO_EXPR
:
7316 case VEC_WIDEN_MULT_EVEN_EXPR
:
7317 case VEC_WIDEN_MULT_ODD_EXPR
:
7326 /* Return true if CODE represents a ternary tree code for which the
7327 first two operands are commutative. Otherwise return false. */
7329 commutative_ternary_tree_code (enum tree_code code
)
7333 case WIDEN_MULT_PLUS_EXPR
:
7334 case WIDEN_MULT_MINUS_EXPR
:
7343 /* Generate a hash value for an expression. This can be used iteratively
7344 by passing a previous result as the VAL argument.
7346 This function is intended to produce the same hash for expressions which
7347 would compare equal using operand_equal_p. */
7350 iterative_hash_expr (const_tree t
, hashval_t val
)
7353 enum tree_code code
;
7354 enum tree_code_class tclass
;
7357 return iterative_hash_hashval_t (0, val
);
7359 code
= TREE_CODE (t
);
7363 /* Alas, constants aren't shared, so we can't rely on pointer
7366 return iterative_hash_hashval_t (0, val
);
7368 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
7369 val
= iterative_hash_host_wide_int (TREE_INT_CST_ELT (t
, i
), val
);
7373 unsigned int val2
= real_hash (TREE_REAL_CST_PTR (t
));
7375 return iterative_hash_hashval_t (val2
, val
);
7379 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
7381 return iterative_hash_hashval_t (val2
, val
);
7384 return iterative_hash (TREE_STRING_POINTER (t
),
7385 TREE_STRING_LENGTH (t
), val
);
7387 val
= iterative_hash_expr (TREE_REALPART (t
), val
);
7388 return iterative_hash_expr (TREE_IMAGPART (t
), val
);
7392 for (i
= 0; i
< VECTOR_CST_NELTS (t
); ++i
)
7393 val
= iterative_hash_expr (VECTOR_CST_ELT (t
, i
), val
);
7397 /* We can just compare by pointer. */
7398 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t
), val
);
7399 case PLACEHOLDER_EXPR
:
7400 /* The node itself doesn't matter. */
7403 /* A list of expressions, for a CALL_EXPR or as the elements of a
7405 for (; t
; t
= TREE_CHAIN (t
))
7406 val
= iterative_hash_expr (TREE_VALUE (t
), val
);
7410 unsigned HOST_WIDE_INT idx
;
7412 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
7414 val
= iterative_hash_expr (field
, val
);
7415 val
= iterative_hash_expr (value
, val
);
7420 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7421 Otherwise nodes that compare equal according to operand_equal_p might
7422 get different hash codes. However, don't do this for machine specific
7423 or front end builtins, since the function code is overloaded in those
7425 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
7426 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
7428 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
7429 code
= TREE_CODE (t
);
7433 tclass
= TREE_CODE_CLASS (code
);
7435 if (tclass
== tcc_declaration
)
7437 /* DECL's have a unique ID */
7438 val
= iterative_hash_host_wide_int (DECL_UID (t
), val
);
7442 gcc_assert (IS_EXPR_CODE_CLASS (tclass
));
7444 val
= iterative_hash_object (code
, val
);
7446 /* Don't hash the type, that can lead to having nodes which
7447 compare equal according to operand_equal_p, but which
7448 have different hash codes. */
7449 if (CONVERT_EXPR_CODE_P (code
)
7450 || code
== NON_LVALUE_EXPR
)
7452 /* Make sure to include signness in the hash computation. */
7453 val
+= TYPE_UNSIGNED (TREE_TYPE (t
));
7454 val
= iterative_hash_expr (TREE_OPERAND (t
, 0), val
);
7457 else if (commutative_tree_code (code
))
7459 /* It's a commutative expression. We want to hash it the same
7460 however it appears. We do this by first hashing both operands
7461 and then rehashing based on the order of their independent
7463 hashval_t one
= iterative_hash_expr (TREE_OPERAND (t
, 0), 0);
7464 hashval_t two
= iterative_hash_expr (TREE_OPERAND (t
, 1), 0);
7468 t
= one
, one
= two
, two
= t
;
7470 val
= iterative_hash_hashval_t (one
, val
);
7471 val
= iterative_hash_hashval_t (two
, val
);
7474 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
7475 val
= iterative_hash_expr (TREE_OPERAND (t
, i
), val
);
7481 /* Constructors for pointer, array and function types.
7482 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7483 constructed by language-dependent code, not here.) */
7485 /* Construct, lay out and return the type of pointers to TO_TYPE with
7486 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7487 reference all of memory. If such a type has already been
7488 constructed, reuse it. */
7491 build_pointer_type_for_mode (tree to_type
, enum machine_mode mode
,
7496 if (to_type
== error_mark_node
)
7497 return error_mark_node
;
7499 /* If the pointed-to type has the may_alias attribute set, force
7500 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7501 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7502 can_alias_all
= true;
7504 /* In some cases, languages will have things that aren't a POINTER_TYPE
7505 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7506 In that case, return that type without regard to the rest of our
7509 ??? This is a kludge, but consistent with the way this function has
7510 always operated and there doesn't seem to be a good way to avoid this
7512 if (TYPE_POINTER_TO (to_type
) != 0
7513 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
7514 return TYPE_POINTER_TO (to_type
);
7516 /* First, if we already have a type for pointers to TO_TYPE and it's
7517 the proper mode, use it. */
7518 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
7519 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7522 t
= make_node (POINTER_TYPE
);
7524 TREE_TYPE (t
) = to_type
;
7525 SET_TYPE_MODE (t
, mode
);
7526 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7527 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
7528 TYPE_POINTER_TO (to_type
) = t
;
7530 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
))
7531 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7532 else if (TYPE_CANONICAL (to_type
) != to_type
)
7534 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
7535 mode
, can_alias_all
);
7537 /* Lay out the type. This function has many callers that are concerned
7538 with expression-construction, and this simplifies them all. */
7544 /* By default build pointers in ptr_mode. */
7547 build_pointer_type (tree to_type
)
7549 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7550 : TYPE_ADDR_SPACE (to_type
);
7551 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7552 return build_pointer_type_for_mode (to_type
, pointer_mode
, false);
7555 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7558 build_reference_type_for_mode (tree to_type
, enum machine_mode mode
,
7563 if (to_type
== error_mark_node
)
7564 return error_mark_node
;
7566 /* If the pointed-to type has the may_alias attribute set, force
7567 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7568 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7569 can_alias_all
= true;
7571 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7572 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7573 In that case, return that type without regard to the rest of our
7576 ??? This is a kludge, but consistent with the way this function has
7577 always operated and there doesn't seem to be a good way to avoid this
7579 if (TYPE_REFERENCE_TO (to_type
) != 0
7580 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
7581 return TYPE_REFERENCE_TO (to_type
);
7583 /* First, if we already have a type for pointers to TO_TYPE and it's
7584 the proper mode, use it. */
7585 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
7586 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7589 t
= make_node (REFERENCE_TYPE
);
7591 TREE_TYPE (t
) = to_type
;
7592 SET_TYPE_MODE (t
, mode
);
7593 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7594 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
7595 TYPE_REFERENCE_TO (to_type
) = t
;
7597 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
))
7598 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7599 else if (TYPE_CANONICAL (to_type
) != to_type
)
7601 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
7602 mode
, can_alias_all
);
7610 /* Build the node for the type of references-to-TO_TYPE by default
7614 build_reference_type (tree to_type
)
7616 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7617 : TYPE_ADDR_SPACE (to_type
);
7618 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7619 return build_reference_type_for_mode (to_type
, pointer_mode
, false);
7622 #define MAX_INT_CACHED_PREC \
7623 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7624 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
7626 /* Builds a signed or unsigned integer type of precision PRECISION.
7627 Used for C bitfields whose precision does not match that of
7628 built-in target types. */
7630 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
7636 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
7638 if (precision
<= MAX_INT_CACHED_PREC
)
7640 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
7645 itype
= make_node (INTEGER_TYPE
);
7646 TYPE_PRECISION (itype
) = precision
;
7649 fixup_unsigned_type (itype
);
7651 fixup_signed_type (itype
);
7654 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype
)))
7655 ret
= type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype
)), itype
);
7656 if (precision
<= MAX_INT_CACHED_PREC
)
7657 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
7662 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7663 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7664 is true, reuse such a type that has already been constructed. */
7667 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7669 tree itype
= make_node (INTEGER_TYPE
);
7670 hashval_t hashcode
= 0;
7672 TREE_TYPE (itype
) = type
;
7674 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7675 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7677 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7678 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7679 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7680 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7681 TYPE_ALIGN (itype
) = TYPE_ALIGN (type
);
7682 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7687 if ((TYPE_MIN_VALUE (itype
)
7688 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7689 || (TYPE_MAX_VALUE (itype
)
7690 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7692 /* Since we cannot reliably merge this type, we need to compare it using
7693 structural equality checks. */
7694 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7698 hashcode
= iterative_hash_expr (TYPE_MIN_VALUE (itype
), hashcode
);
7699 hashcode
= iterative_hash_expr (TYPE_MAX_VALUE (itype
), hashcode
);
7700 hashcode
= iterative_hash_hashval_t (TYPE_HASH (type
), hashcode
);
7701 itype
= type_hash_canon (hashcode
, itype
);
7706 /* Wrapper around build_range_type_1 with SHARED set to true. */
7709 build_range_type (tree type
, tree lowval
, tree highval
)
7711 return build_range_type_1 (type
, lowval
, highval
, true);
7714 /* Wrapper around build_range_type_1 with SHARED set to false. */
7717 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7719 return build_range_type_1 (type
, lowval
, highval
, false);
7722 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7723 MAXVAL should be the maximum value in the domain
7724 (one less than the length of the array).
7726 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7727 We don't enforce this limit, that is up to caller (e.g. language front end).
7728 The limit exists because the result is a signed type and we don't handle
7729 sizes that use more than one HOST_WIDE_INT. */
7732 build_index_type (tree maxval
)
7734 return build_range_type (sizetype
, size_zero_node
, maxval
);
7737 /* Return true if the debug information for TYPE, a subtype, should be emitted
7738 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7739 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7740 debug info and doesn't reflect the source code. */
7743 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7745 tree base_type
= TREE_TYPE (type
), low
, high
;
7747 /* Subrange types have a base type which is an integral type. */
7748 if (!INTEGRAL_TYPE_P (base_type
))
7751 /* Get the real bounds of the subtype. */
7752 if (lang_hooks
.types
.get_subrange_bounds
)
7753 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7756 low
= TYPE_MIN_VALUE (type
);
7757 high
= TYPE_MAX_VALUE (type
);
7760 /* If the type and its base type have the same representation and the same
7761 name, then the type is not a subrange but a copy of the base type. */
7762 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7763 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7764 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7765 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7766 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7767 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7777 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7778 and number of elements specified by the range of values of INDEX_TYPE.
7779 If SHARED is true, reuse such a type that has already been constructed. */
7782 build_array_type_1 (tree elt_type
, tree index_type
, bool shared
)
7786 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7788 error ("arrays of functions are not meaningful");
7789 elt_type
= integer_type_node
;
7792 t
= make_node (ARRAY_TYPE
);
7793 TREE_TYPE (t
) = elt_type
;
7794 TYPE_DOMAIN (t
) = index_type
;
7795 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7798 /* If the element type is incomplete at this point we get marked for
7799 structural equality. Do not record these types in the canonical
7801 if (TYPE_STRUCTURAL_EQUALITY_P (t
))
7806 hashval_t hashcode
= iterative_hash_object (TYPE_HASH (elt_type
), 0);
7808 hashcode
= iterative_hash_object (TYPE_HASH (index_type
), hashcode
);
7809 t
= type_hash_canon (hashcode
, t
);
7812 if (TYPE_CANONICAL (t
) == t
)
7814 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7815 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
)))
7816 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7817 else if (TYPE_CANONICAL (elt_type
) != elt_type
7818 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7820 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7822 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7829 /* Wrapper around build_array_type_1 with SHARED set to true. */
7832 build_array_type (tree elt_type
, tree index_type
)
7834 return build_array_type_1 (elt_type
, index_type
, true);
7837 /* Wrapper around build_array_type_1 with SHARED set to false. */
7840 build_nonshared_array_type (tree elt_type
, tree index_type
)
7842 return build_array_type_1 (elt_type
, index_type
, false);
7845 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7849 build_array_type_nelts (tree elt_type
, unsigned HOST_WIDE_INT nelts
)
7851 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
7854 /* Recursively examines the array elements of TYPE, until a non-array
7855 element type is found. */
7858 strip_array_types (tree type
)
7860 while (TREE_CODE (type
) == ARRAY_TYPE
)
7861 type
= TREE_TYPE (type
);
7866 /* Computes the canonical argument types from the argument type list
7869 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7870 on entry to this function, or if any of the ARGTYPES are
7873 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7874 true on entry to this function, or if any of the ARGTYPES are
7877 Returns a canonical argument list, which may be ARGTYPES when the
7878 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7879 true) or would not differ from ARGTYPES. */
7882 maybe_canonicalize_argtypes (tree argtypes
,
7883 bool *any_structural_p
,
7884 bool *any_noncanonical_p
)
7887 bool any_noncanonical_argtypes_p
= false;
7889 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
7891 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
7892 /* Fail gracefully by stating that the type is structural. */
7893 *any_structural_p
= true;
7894 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
7895 *any_structural_p
= true;
7896 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
7897 || TREE_PURPOSE (arg
))
7898 /* If the argument has a default argument, we consider it
7899 non-canonical even though the type itself is canonical.
7900 That way, different variants of function and method types
7901 with default arguments will all point to the variant with
7902 no defaults as their canonical type. */
7903 any_noncanonical_argtypes_p
= true;
7906 if (*any_structural_p
)
7909 if (any_noncanonical_argtypes_p
)
7911 /* Build the canonical list of argument types. */
7912 tree canon_argtypes
= NULL_TREE
;
7913 bool is_void
= false;
7915 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
7917 if (arg
== void_list_node
)
7920 canon_argtypes
= tree_cons (NULL_TREE
,
7921 TYPE_CANONICAL (TREE_VALUE (arg
)),
7925 canon_argtypes
= nreverse (canon_argtypes
);
7927 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
7929 /* There is a non-canonical type. */
7930 *any_noncanonical_p
= true;
7931 return canon_argtypes
;
7934 /* The canonical argument types are the same as ARGTYPES. */
7938 /* Construct, lay out and return
7939 the type of functions returning type VALUE_TYPE
7940 given arguments of types ARG_TYPES.
7941 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7942 are data type nodes for the arguments of the function.
7943 If such a type has already been constructed, reuse it. */
7946 build_function_type (tree value_type
, tree arg_types
)
7949 hashval_t hashcode
= 0;
7950 bool any_structural_p
, any_noncanonical_p
;
7951 tree canon_argtypes
;
7953 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
7955 error ("function return type cannot be function");
7956 value_type
= integer_type_node
;
7959 /* Make a node of the sort we want. */
7960 t
= make_node (FUNCTION_TYPE
);
7961 TREE_TYPE (t
) = value_type
;
7962 TYPE_ARG_TYPES (t
) = arg_types
;
7964 /* If we already have such a type, use the old one. */
7965 hashcode
= iterative_hash_object (TYPE_HASH (value_type
), hashcode
);
7966 hashcode
= type_hash_list (arg_types
, hashcode
);
7967 t
= type_hash_canon (hashcode
, t
);
7969 /* Set up the canonical type. */
7970 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
7971 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
7972 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
7974 &any_noncanonical_p
);
7975 if (any_structural_p
)
7976 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7977 else if (any_noncanonical_p
)
7978 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
7981 if (!COMPLETE_TYPE_P (t
))
7986 /* Build a function type. The RETURN_TYPE is the type returned by the
7987 function. If VAARGS is set, no void_type_node is appended to the
7988 the list. ARGP must be always be terminated be a NULL_TREE. */
7991 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
7995 t
= va_arg (argp
, tree
);
7996 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
7997 args
= tree_cons (NULL_TREE
, t
, args
);
8002 if (args
!= NULL_TREE
)
8003 args
= nreverse (args
);
8004 gcc_assert (last
!= void_list_node
);
8006 else if (args
== NULL_TREE
)
8007 args
= void_list_node
;
8011 args
= nreverse (args
);
8012 TREE_CHAIN (last
) = void_list_node
;
8014 args
= build_function_type (return_type
, args
);
8019 /* Build a function type. The RETURN_TYPE is the type returned by the
8020 function. If additional arguments are provided, they are
8021 additional argument types. The list of argument types must always
8022 be terminated by NULL_TREE. */
8025 build_function_type_list (tree return_type
, ...)
8030 va_start (p
, return_type
);
8031 args
= build_function_type_list_1 (false, return_type
, p
);
8036 /* Build a variable argument function type. The RETURN_TYPE is the
8037 type returned by the function. If additional arguments are provided,
8038 they are additional argument types. The list of argument types must
8039 always be terminated by NULL_TREE. */
8042 build_varargs_function_type_list (tree return_type
, ...)
8047 va_start (p
, return_type
);
8048 args
= build_function_type_list_1 (true, return_type
, p
);
8054 /* Build a function type. RETURN_TYPE is the type returned by the
8055 function; VAARGS indicates whether the function takes varargs. The
8056 function takes N named arguments, the types of which are provided in
8060 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
8064 tree t
= vaargs
? NULL_TREE
: void_list_node
;
8066 for (i
= n
- 1; i
>= 0; i
--)
8067 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
8069 return build_function_type (return_type
, t
);
8072 /* Build a function type. RETURN_TYPE is the type returned by the
8073 function. The function takes N named arguments, the types of which
8074 are provided in ARG_TYPES. */
8077 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8079 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
8082 /* Build a variable argument function type. RETURN_TYPE is the type
8083 returned by the function. The function takes N named arguments, the
8084 types of which are provided in ARG_TYPES. */
8087 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8089 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
8092 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8093 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8094 for the method. An implicit additional parameter (of type
8095 pointer-to-BASETYPE) is added to the ARGTYPES. */
8098 build_method_type_directly (tree basetype
,
8105 bool any_structural_p
, any_noncanonical_p
;
8106 tree canon_argtypes
;
8108 /* Make a node of the sort we want. */
8109 t
= make_node (METHOD_TYPE
);
8111 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8112 TREE_TYPE (t
) = rettype
;
8113 ptype
= build_pointer_type (basetype
);
8115 /* The actual arglist for this function includes a "hidden" argument
8116 which is "this". Put it into the list of argument types. */
8117 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
8118 TYPE_ARG_TYPES (t
) = argtypes
;
8120 /* If we already have such a type, use the old one. */
8121 hashcode
= iterative_hash_object (TYPE_HASH (basetype
), hashcode
);
8122 hashcode
= iterative_hash_object (TYPE_HASH (rettype
), hashcode
);
8123 hashcode
= type_hash_list (argtypes
, hashcode
);
8124 t
= type_hash_canon (hashcode
, t
);
8126 /* Set up the canonical type. */
8128 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8129 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
8131 = (TYPE_CANONICAL (basetype
) != basetype
8132 || TYPE_CANONICAL (rettype
) != rettype
);
8133 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
8135 &any_noncanonical_p
);
8136 if (any_structural_p
)
8137 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8138 else if (any_noncanonical_p
)
8140 = build_method_type_directly (TYPE_CANONICAL (basetype
),
8141 TYPE_CANONICAL (rettype
),
8143 if (!COMPLETE_TYPE_P (t
))
8149 /* Construct, lay out and return the type of methods belonging to class
8150 BASETYPE and whose arguments and values are described by TYPE.
8151 If that type exists already, reuse it.
8152 TYPE must be a FUNCTION_TYPE node. */
8155 build_method_type (tree basetype
, tree type
)
8157 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
8159 return build_method_type_directly (basetype
,
8161 TYPE_ARG_TYPES (type
));
8164 /* Construct, lay out and return the type of offsets to a value
8165 of type TYPE, within an object of type BASETYPE.
8166 If a suitable offset type exists already, reuse it. */
8169 build_offset_type (tree basetype
, tree type
)
8172 hashval_t hashcode
= 0;
8174 /* Make a node of the sort we want. */
8175 t
= make_node (OFFSET_TYPE
);
8177 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8178 TREE_TYPE (t
) = type
;
8180 /* If we already have such a type, use the old one. */
8181 hashcode
= iterative_hash_object (TYPE_HASH (basetype
), hashcode
);
8182 hashcode
= iterative_hash_object (TYPE_HASH (type
), hashcode
);
8183 t
= type_hash_canon (hashcode
, t
);
8185 if (!COMPLETE_TYPE_P (t
))
8188 if (TYPE_CANONICAL (t
) == t
)
8190 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8191 || TYPE_STRUCTURAL_EQUALITY_P (type
))
8192 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8193 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
8194 || TYPE_CANONICAL (type
) != type
)
8196 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
8197 TYPE_CANONICAL (type
));
8203 /* Create a complex type whose components are COMPONENT_TYPE. */
8206 build_complex_type (tree component_type
)
8211 gcc_assert (INTEGRAL_TYPE_P (component_type
)
8212 || SCALAR_FLOAT_TYPE_P (component_type
)
8213 || FIXED_POINT_TYPE_P (component_type
));
8215 /* Make a node of the sort we want. */
8216 t
= make_node (COMPLEX_TYPE
);
8218 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (component_type
);
8220 /* If we already have such a type, use the old one. */
8221 hashcode
= iterative_hash_object (TYPE_HASH (component_type
), 0);
8222 t
= type_hash_canon (hashcode
, t
);
8224 if (!COMPLETE_TYPE_P (t
))
8227 if (TYPE_CANONICAL (t
) == t
)
8229 if (TYPE_STRUCTURAL_EQUALITY_P (component_type
))
8230 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8231 else if (TYPE_CANONICAL (component_type
) != component_type
)
8233 = build_complex_type (TYPE_CANONICAL (component_type
));
8236 /* We need to create a name, since complex is a fundamental type. */
8237 if (! TYPE_NAME (t
))
8240 if (component_type
== char_type_node
)
8241 name
= "complex char";
8242 else if (component_type
== signed_char_type_node
)
8243 name
= "complex signed char";
8244 else if (component_type
== unsigned_char_type_node
)
8245 name
= "complex unsigned char";
8246 else if (component_type
== short_integer_type_node
)
8247 name
= "complex short int";
8248 else if (component_type
== short_unsigned_type_node
)
8249 name
= "complex short unsigned int";
8250 else if (component_type
== integer_type_node
)
8251 name
= "complex int";
8252 else if (component_type
== unsigned_type_node
)
8253 name
= "complex unsigned int";
8254 else if (component_type
== long_integer_type_node
)
8255 name
= "complex long int";
8256 else if (component_type
== long_unsigned_type_node
)
8257 name
= "complex long unsigned int";
8258 else if (component_type
== long_long_integer_type_node
)
8259 name
= "complex long long int";
8260 else if (component_type
== long_long_unsigned_type_node
)
8261 name
= "complex long long unsigned int";
8266 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
8267 get_identifier (name
), t
);
8270 return build_qualified_type (t
, TYPE_QUALS (component_type
));
8273 /* If TYPE is a real or complex floating-point type and the target
8274 does not directly support arithmetic on TYPE then return the wider
8275 type to be used for arithmetic on TYPE. Otherwise, return
8279 excess_precision_type (tree type
)
8281 if (flag_excess_precision
!= EXCESS_PRECISION_FAST
)
8283 int flt_eval_method
= TARGET_FLT_EVAL_METHOD
;
8284 switch (TREE_CODE (type
))
8287 switch (flt_eval_method
)
8290 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
))
8291 return double_type_node
;
8294 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
)
8295 || TYPE_MODE (type
) == TYPE_MODE (double_type_node
))
8296 return long_double_type_node
;
8303 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
8305 switch (flt_eval_method
)
8308 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
))
8309 return complex_double_type_node
;
8312 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
)
8313 || (TYPE_MODE (TREE_TYPE (type
))
8314 == TYPE_MODE (double_type_node
)))
8315 return complex_long_double_type_node
;
8328 /* Return OP, stripped of any conversions to wider types as much as is safe.
8329 Converting the value back to OP's type makes a value equivalent to OP.
8331 If FOR_TYPE is nonzero, we return a value which, if converted to
8332 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8334 OP must have integer, real or enumeral type. Pointers are not allowed!
8336 There are some cases where the obvious value we could return
8337 would regenerate to OP if converted to OP's type,
8338 but would not extend like OP to wider types.
8339 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8340 For example, if OP is (unsigned short)(signed char)-1,
8341 we avoid returning (signed char)-1 if FOR_TYPE is int,
8342 even though extending that to an unsigned short would regenerate OP,
8343 since the result of extending (signed char)-1 to (int)
8344 is different from (int) OP. */
8347 get_unwidened (tree op
, tree for_type
)
8349 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8350 tree type
= TREE_TYPE (op
);
8352 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
8354 = (for_type
!= 0 && for_type
!= type
8355 && final_prec
> TYPE_PRECISION (type
)
8356 && TYPE_UNSIGNED (type
));
8359 while (CONVERT_EXPR_P (op
))
8363 /* TYPE_PRECISION on vector types has different meaning
8364 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8365 so avoid them here. */
8366 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
8369 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
8370 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
8372 /* Truncations are many-one so cannot be removed.
8373 Unless we are later going to truncate down even farther. */
8375 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
8378 /* See what's inside this conversion. If we decide to strip it,
8380 op
= TREE_OPERAND (op
, 0);
8382 /* If we have not stripped any zero-extensions (uns is 0),
8383 we can strip any kind of extension.
8384 If we have previously stripped a zero-extension,
8385 only zero-extensions can safely be stripped.
8386 Any extension can be stripped if the bits it would produce
8387 are all going to be discarded later by truncating to FOR_TYPE. */
8391 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
8393 /* TYPE_UNSIGNED says whether this is a zero-extension.
8394 Let's avoid computing it if it does not affect WIN
8395 and if UNS will not be needed again. */
8397 || CONVERT_EXPR_P (op
))
8398 && TYPE_UNSIGNED (TREE_TYPE (op
)))
8406 /* If we finally reach a constant see if it fits in for_type and
8407 in that case convert it. */
8409 && TREE_CODE (win
) == INTEGER_CST
8410 && TREE_TYPE (win
) != for_type
8411 && int_fits_type_p (win
, for_type
))
8412 win
= fold_convert (for_type
, win
);
8417 /* Return OP or a simpler expression for a narrower value
8418 which can be sign-extended or zero-extended to give back OP.
8419 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8420 or 0 if the value should be sign-extended. */
8423 get_narrower (tree op
, int *unsignedp_ptr
)
8428 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
8430 while (TREE_CODE (op
) == NOP_EXPR
)
8433 = (TYPE_PRECISION (TREE_TYPE (op
))
8434 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
8436 /* Truncations are many-one so cannot be removed. */
8440 /* See what's inside this conversion. If we decide to strip it,
8445 op
= TREE_OPERAND (op
, 0);
8446 /* An extension: the outermost one can be stripped,
8447 but remember whether it is zero or sign extension. */
8449 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8450 /* Otherwise, if a sign extension has been stripped,
8451 only sign extensions can now be stripped;
8452 if a zero extension has been stripped, only zero-extensions. */
8453 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
8457 else /* bitschange == 0 */
8459 /* A change in nominal type can always be stripped, but we must
8460 preserve the unsignedness. */
8462 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8464 op
= TREE_OPERAND (op
, 0);
8465 /* Keep trying to narrow, but don't assign op to win if it
8466 would turn an integral type into something else. */
8467 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8474 if (TREE_CODE (op
) == COMPONENT_REF
8475 /* Since type_for_size always gives an integer type. */
8476 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8477 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8478 /* Ensure field is laid out already. */
8479 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8480 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8482 unsigned HOST_WIDE_INT innerprec
8483 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8484 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8485 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8486 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8488 /* We can get this structure field in a narrower type that fits it,
8489 but the resulting extension to its nominal type (a fullword type)
8490 must satisfy the same conditions as for other extensions.
8492 Do this only for fields that are aligned (not bit-fields),
8493 because when bit-field insns will be used there is no
8494 advantage in doing this. */
8496 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8497 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8498 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8502 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8503 win
= fold_convert (type
, op
);
8507 *unsignedp_ptr
= uns
;
8511 /* Returns true if integer constant C has a value that is permissible
8512 for type TYPE (an INTEGER_TYPE). */
8515 int_fits_type_p (const_tree c
, const_tree type
)
8517 tree type_low_bound
, type_high_bound
;
8518 bool ok_for_low_bound
, ok_for_high_bound
;
8519 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8522 type_low_bound
= TYPE_MIN_VALUE (type
);
8523 type_high_bound
= TYPE_MAX_VALUE (type
);
8525 /* If at least one bound of the type is a constant integer, we can check
8526 ourselves and maybe make a decision. If no such decision is possible, but
8527 this type is a subtype, try checking against that. Otherwise, use
8528 fits_to_tree_p, which checks against the precision.
8530 Compute the status for each possibly constant bound, and return if we see
8531 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8532 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8533 for "constant known to fit". */
8535 /* Check if c >= type_low_bound. */
8536 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8538 if (tree_int_cst_lt (c
, type_low_bound
))
8540 ok_for_low_bound
= true;
8543 ok_for_low_bound
= false;
8545 /* Check if c <= type_high_bound. */
8546 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
8548 if (tree_int_cst_lt (type_high_bound
, c
))
8550 ok_for_high_bound
= true;
8553 ok_for_high_bound
= false;
8555 /* If the constant fits both bounds, the result is known. */
8556 if (ok_for_low_bound
&& ok_for_high_bound
)
8559 /* Perform some generic filtering which may allow making a decision
8560 even if the bounds are not constant. First, negative integers
8561 never fit in unsigned types, */
8562 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (c
))
8565 /* Second, narrower types always fit in wider ones. */
8566 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8569 /* Third, unsigned integers with top bit set never fit signed types. */
8570 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8572 int prec
= GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c
))) - 1;
8573 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8575 /* When a tree_cst is converted to a wide-int, the precision
8576 is taken from the type. However, if the precision of the
8577 mode underneath the type is smaller than that, it is
8578 possible that the value will not fit. The test below
8579 fails if any bit is set between the sign bit of the
8580 underlying mode and the top bit of the type. */
8581 if (wi::ne_p (wi::zext (c
, prec
- 1), c
))
8584 else if (wi::neg_p (c
))
8588 /* If we haven't been able to decide at this point, there nothing more we
8589 can check ourselves here. Look at the base type if we have one and it
8590 has the same precision. */
8591 if (TREE_CODE (type
) == INTEGER_TYPE
8592 && TREE_TYPE (type
) != 0
8593 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8595 type
= TREE_TYPE (type
);
8599 /* Or to fits_to_tree_p, if nothing else. */
8600 return wi::fits_to_tree_p (c
, type
);
8603 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8604 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8605 represented (assuming two's-complement arithmetic) within the bit
8606 precision of the type are returned instead. */
8609 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8611 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8612 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8613 wi::to_mpz (TYPE_MIN_VALUE (type
), min
, TYPE_SIGN (type
));
8616 if (TYPE_UNSIGNED (type
))
8617 mpz_set_ui (min
, 0);
8620 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8621 wi::to_mpz (mn
, min
, SIGNED
);
8625 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8626 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8627 wi::to_mpz (TYPE_MAX_VALUE (type
), max
, TYPE_SIGN (type
));
8630 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8631 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
8635 /* Return true if VAR is an automatic variable defined in function FN. */
8638 auto_var_in_fn_p (const_tree var
, const_tree fn
)
8640 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
8641 && ((((TREE_CODE (var
) == VAR_DECL
&& ! DECL_EXTERNAL (var
))
8642 || TREE_CODE (var
) == PARM_DECL
)
8643 && ! TREE_STATIC (var
))
8644 || TREE_CODE (var
) == LABEL_DECL
8645 || TREE_CODE (var
) == RESULT_DECL
));
8648 /* Subprogram of following function. Called by walk_tree.
8650 Return *TP if it is an automatic variable or parameter of the
8651 function passed in as DATA. */
8654 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8656 tree fn
= (tree
) data
;
8661 else if (DECL_P (*tp
)
8662 && auto_var_in_fn_p (*tp
, fn
))
8668 /* Returns true if T is, contains, or refers to a type with variable
8669 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8670 arguments, but not the return type. If FN is nonzero, only return
8671 true if a modifier of the type or position of FN is a variable or
8672 parameter inside FN.
8674 This concept is more general than that of C99 'variably modified types':
8675 in C99, a struct type is never variably modified because a VLA may not
8676 appear as a structure member. However, in GNU C code like:
8678 struct S { int i[f()]; };
8680 is valid, and other languages may define similar constructs. */
8683 variably_modified_type_p (tree type
, tree fn
)
8687 /* Test if T is either variable (if FN is zero) or an expression containing
8688 a variable in FN. If TYPE isn't gimplified, return true also if
8689 gimplify_one_sizepos would gimplify the expression into a local
8691 #define RETURN_TRUE_IF_VAR(T) \
8692 do { tree _t = (T); \
8693 if (_t != NULL_TREE \
8694 && _t != error_mark_node \
8695 && TREE_CODE (_t) != INTEGER_CST \
8696 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8698 || (!TYPE_SIZES_GIMPLIFIED (type) \
8699 && !is_gimple_sizepos (_t)) \
8700 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8701 return true; } while (0)
8703 if (type
== error_mark_node
)
8706 /* If TYPE itself has variable size, it is variably modified. */
8707 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8708 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8710 switch (TREE_CODE (type
))
8713 case REFERENCE_TYPE
:
8715 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8721 /* If TYPE is a function type, it is variably modified if the
8722 return type is variably modified. */
8723 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8729 case FIXED_POINT_TYPE
:
8732 /* Scalar types are variably modified if their end points
8734 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8735 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8740 case QUAL_UNION_TYPE
:
8741 /* We can't see if any of the fields are variably-modified by the
8742 definition we normally use, since that would produce infinite
8743 recursion via pointers. */
8744 /* This is variably modified if some field's type is. */
8745 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8746 if (TREE_CODE (t
) == FIELD_DECL
)
8748 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8749 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8750 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8752 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8753 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8758 /* Do not call ourselves to avoid infinite recursion. This is
8759 variably modified if the element type is. */
8760 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8761 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8768 /* The current language may have other cases to check, but in general,
8769 all other types are not variably modified. */
8770 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8772 #undef RETURN_TRUE_IF_VAR
8775 /* Given a DECL or TYPE, return the scope in which it was declared, or
8776 NULL_TREE if there is no containing scope. */
8779 get_containing_scope (const_tree t
)
8781 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
8784 /* Return the innermost context enclosing DECL that is
8785 a FUNCTION_DECL, or zero if none. */
8788 decl_function_context (const_tree decl
)
8792 if (TREE_CODE (decl
) == ERROR_MARK
)
8795 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8796 where we look up the function at runtime. Such functions always take
8797 a first argument of type 'pointer to real context'.
8799 C++ should really be fixed to use DECL_CONTEXT for the real context,
8800 and use something else for the "virtual context". */
8801 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VINDEX (decl
))
8804 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8806 context
= DECL_CONTEXT (decl
);
8808 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8810 if (TREE_CODE (context
) == BLOCK
)
8811 context
= BLOCK_SUPERCONTEXT (context
);
8813 context
= get_containing_scope (context
);
8819 /* Return the innermost context enclosing DECL that is
8820 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8821 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8824 decl_type_context (const_tree decl
)
8826 tree context
= DECL_CONTEXT (decl
);
8829 switch (TREE_CODE (context
))
8831 case NAMESPACE_DECL
:
8832 case TRANSLATION_UNIT_DECL
:
8837 case QUAL_UNION_TYPE
:
8842 context
= DECL_CONTEXT (context
);
8846 context
= BLOCK_SUPERCONTEXT (context
);
8856 /* CALL is a CALL_EXPR. Return the declaration for the function
8857 called, or NULL_TREE if the called function cannot be
8861 get_callee_fndecl (const_tree call
)
8865 if (call
== error_mark_node
)
8866 return error_mark_node
;
8868 /* It's invalid to call this function with anything but a
8870 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8872 /* The first operand to the CALL is the address of the function
8874 addr
= CALL_EXPR_FN (call
);
8878 /* If this is a readonly function pointer, extract its initial value. */
8879 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
8880 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
8881 && DECL_INITIAL (addr
))
8882 addr
= DECL_INITIAL (addr
);
8884 /* If the address is just `&f' for some function `f', then we know
8885 that `f' is being called. */
8886 if (TREE_CODE (addr
) == ADDR_EXPR
8887 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
8888 return TREE_OPERAND (addr
, 0);
8890 /* We couldn't figure out what was being called. */
8894 /* Print debugging information about tree nodes generated during the compile,
8895 and any language-specific information. */
8898 dump_tree_statistics (void)
8900 if (GATHER_STATISTICS
)
8903 int total_nodes
, total_bytes
;
8904 fprintf (stderr
, "Kind Nodes Bytes\n");
8905 fprintf (stderr
, "---------------------------------------\n");
8906 total_nodes
= total_bytes
= 0;
8907 for (i
= 0; i
< (int) all_kinds
; i
++)
8909 fprintf (stderr
, "%-20s %7d %10d\n", tree_node_kind_names
[i
],
8910 tree_node_counts
[i
], tree_node_sizes
[i
]);
8911 total_nodes
+= tree_node_counts
[i
];
8912 total_bytes
+= tree_node_sizes
[i
];
8914 fprintf (stderr
, "---------------------------------------\n");
8915 fprintf (stderr
, "%-20s %7d %10d\n", "Total", total_nodes
, total_bytes
);
8916 fprintf (stderr
, "---------------------------------------\n");
8917 fprintf (stderr
, "Code Nodes\n");
8918 fprintf (stderr
, "----------------------------\n");
8919 for (i
= 0; i
< (int) MAX_TREE_CODES
; i
++)
8920 fprintf (stderr
, "%-20s %7d\n", get_tree_code_name ((enum tree_code
) i
),
8921 tree_code_counts
[i
]);
8922 fprintf (stderr
, "----------------------------\n");
8923 ssanames_print_statistics ();
8924 phinodes_print_statistics ();
8927 fprintf (stderr
, "(No per-node statistics)\n");
8929 print_type_hash_statistics ();
8930 print_debug_expr_statistics ();
8931 print_value_expr_statistics ();
8932 lang_hooks
.print_statistics ();
8935 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
/* Fold the high BITS bits of VALUE into the running CRC CHKSUM and
   return the updated CRC.  This is a bitwise, MSB-first CRC-32 using
   the IEEE 802.3 polynomial 0x04c11db7.  (The old comment here said
   "a byte"; that description belongs to crc32_byte below.)  */

unsigned
crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
{
  while (bits-- > 0)
    {
      /* If the bit shifted out of (value ^ chksum) is set, apply the
	 polynomial feedback term.  */
      unsigned feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
      chksum = (chksum << 1) ^ feedback;
      value <<= 1;
    }

  return chksum;
}

/* Generate a crc32 of a 32-bit unsigned.  */

unsigned
crc32_unsigned (unsigned chksum, unsigned value)
{
  return crc32_unsigned_bits (chksum, value, 32);
}

/* Generate a crc32 of a byte.  */

unsigned
crc32_byte (unsigned chksum, char byte)
{
  /* Position the byte in the top 8 bits, matching the MSB-first CRC.  */
  return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
}

/* Generate a crc32 of a string; the terminating NUL is included in
   the checksum.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  do
    chksum = crc32_byte (chksum, *string);
  while (*string++);

  return chksum;
}
8984 /* P is a string that will be used in a symbol. Mask out any characters
8985 that are not valid in that context. */
8988 clean_symbol_name (char *p
)
8992 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
8995 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9002 /* Generate a name for a special-purpose function.
9003 The generated name may need to be unique across the whole link.
9004 Changes to this function may also require corresponding changes to
9005 xstrdup_mask_random.
9006 TYPE is some string to identify the purpose of this function to the
9007 linker or collect2; it must start with an uppercase letter,
9009 I - for constructors
9011 N - for C++ anonymous namespaces
9012 F - for DWARF unwind frame information. */
9015 get_file_function_name (const char *type
)
9021 /* If we already have a name we know to be unique, just use that. */
9022 if (first_global_object_name
)
9023 p
= q
= ASTRDUP (first_global_object_name
);
9024 /* If the target is handling the constructors/destructors, they
9025 will be local to this file and the name is only necessary for
9027 We also assign sub_I and sub_D sufixes to constructors called from
9028 the global static constructors. These are always local. */
9029 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
9030 || (strncmp (type
, "sub_", 4) == 0
9031 && (type
[4] == 'I' || type
[4] == 'D')))
9033 const char *file
= main_input_filename
;
9035 file
= LOCATION_FILE (input_location
);
9036 /* Just use the file's basename, because the full pathname
9037 might be quite long. */
9038 p
= q
= ASTRDUP (lbasename (file
));
9042 /* Otherwise, the name must be unique across the entire link.
9043 We don't have anything that we know to be unique to this translation
9044 unit, so use what we do have and throw in some randomness. */
9046 const char *name
= weak_global_object_name
;
9047 const char *file
= main_input_filename
;
9052 file
= LOCATION_FILE (input_location
);
9054 len
= strlen (file
);
9055 q
= (char *) alloca (9 + 17 + len
+ 1);
9056 memcpy (q
, file
, len
+ 1);
9058 snprintf (q
+ len
, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
9059 crc32_string (0, name
), get_random_seed (false));
9064 clean_symbol_name (q
);
9065 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
9068 /* Set up the name of the file-level functions we may need.
9069 Use a global object (which is already required to be unique over
9070 the program) rather than the file name (which imposes extra
9072 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
9074 return get_identifier (buf
);
9077 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9079 /* Complain that the tree code of NODE does not match the expected 0
9080 terminated list of trailing codes. The trailing code list can be
9081 empty, for a more vague error message. FILE, LINE, and FUNCTION
9082 are of the caller. */
9085 tree_check_failed (const_tree node
, const char *file
,
9086 int line
, const char *function
, ...)
9090 unsigned length
= 0;
9091 enum tree_code code
;
9093 va_start (args
, function
);
9094 while ((code
= (enum tree_code
) va_arg (args
, int)))
9095 length
+= 4 + strlen (get_tree_code_name (code
));
9100 va_start (args
, function
);
9101 length
+= strlen ("expected ");
9102 buffer
= tmp
= (char *) alloca (length
);
9104 while ((code
= (enum tree_code
) va_arg (args
, int)))
9106 const char *prefix
= length
? " or " : "expected ";
9108 strcpy (tmp
+ length
, prefix
);
9109 length
+= strlen (prefix
);
9110 strcpy (tmp
+ length
, get_tree_code_name (code
));
9111 length
+= strlen (get_tree_code_name (code
));
9116 buffer
= "unexpected node";
9118 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9119 buffer
, get_tree_code_name (TREE_CODE (node
)),
9120 function
, trim_filename (file
), line
);
9123 /* Complain that the tree code of NODE does match the expected 0
9124 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9128 tree_not_check_failed (const_tree node
, const char *file
,
9129 int line
, const char *function
, ...)
9133 unsigned length
= 0;
9134 enum tree_code code
;
9136 va_start (args
, function
);
9137 while ((code
= (enum tree_code
) va_arg (args
, int)))
9138 length
+= 4 + strlen (get_tree_code_name (code
));
9140 va_start (args
, function
);
9141 buffer
= (char *) alloca (length
);
9143 while ((code
= (enum tree_code
) va_arg (args
, int)))
9147 strcpy (buffer
+ length
, " or ");
9150 strcpy (buffer
+ length
, get_tree_code_name (code
));
9151 length
+= strlen (get_tree_code_name (code
));
9155 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9156 buffer
, get_tree_code_name (TREE_CODE (node
)),
9157 function
, trim_filename (file
), line
);
9160 /* Similar to tree_check_failed, except that we check for a class of tree
9161 code, given in CL. */
9164 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9165 const char *file
, int line
, const char *function
)
9168 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9169 TREE_CODE_CLASS_STRING (cl
),
9170 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9171 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9174 /* Similar to tree_check_failed, except that instead of specifying a
9175 dozen codes, use the knowledge that they're all sequential. */
9178 tree_range_check_failed (const_tree node
, const char *file
, int line
,
9179 const char *function
, enum tree_code c1
,
9183 unsigned length
= 0;
9186 for (c
= c1
; c
<= c2
; ++c
)
9187 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
9189 length
+= strlen ("expected ");
9190 buffer
= (char *) alloca (length
);
9193 for (c
= c1
; c
<= c2
; ++c
)
9195 const char *prefix
= length
? " or " : "expected ";
9197 strcpy (buffer
+ length
, prefix
);
9198 length
+= strlen (prefix
);
9199 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
9200 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
9203 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9204 buffer
, get_tree_code_name (TREE_CODE (node
)),
9205 function
, trim_filename (file
), line
);
9209 /* Similar to tree_check_failed, except that we check that a tree does
9210 not have the specified code, given in CL. */
9213 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9214 const char *file
, int line
, const char *function
)
9217 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9218 TREE_CODE_CLASS_STRING (cl
),
9219 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9220 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9224 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9227 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
9228 const char *function
, enum omp_clause_code code
)
9230 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9231 omp_clause_code_name
[code
], get_tree_code_name (TREE_CODE (node
)),
9232 function
, trim_filename (file
), line
);
9236 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9239 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
9240 const char *function
, enum omp_clause_code c1
,
9241 enum omp_clause_code c2
)
9244 unsigned length
= 0;
9247 for (c
= c1
; c
<= c2
; ++c
)
9248 length
+= 4 + strlen (omp_clause_code_name
[c
]);
9250 length
+= strlen ("expected ");
9251 buffer
= (char *) alloca (length
);
9254 for (c
= c1
; c
<= c2
; ++c
)
9256 const char *prefix
= length
? " or " : "expected ";
9258 strcpy (buffer
+ length
, prefix
);
9259 length
+= strlen (prefix
);
9260 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
9261 length
+= strlen (omp_clause_code_name
[c
]);
9264 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9265 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
9266 function
, trim_filename (file
), line
);
9270 #undef DEFTREESTRUCT
9271 #define DEFTREESTRUCT(VAL, NAME) NAME,
9273 static const char *ts_enum_names
[] = {
9274 #include "treestruct.def"
9276 #undef DEFTREESTRUCT
9278 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9280 /* Similar to tree_class_check_failed, except that we check for
9281 whether CODE contains the tree structure identified by EN. */
9284 tree_contains_struct_check_failed (const_tree node
,
9285 const enum tree_node_structure_enum en
,
9286 const char *file
, int line
,
9287 const char *function
)
9290 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9292 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) element vector.  (The old comment
   said "TREE_VEC", copied from the tree_vec variant below; the message
   and the function name show this is the tree_int_cst check.)  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9320 /* Similar to above, except that the check is for the bounds of the operand
9321 vector of an expression node EXP. */
9324 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
9325 int line
, const char *function
)
9327 enum tree_code code
= TREE_CODE (exp
);
9329 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9330 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
9331 function
, trim_filename (file
), line
);
9334 /* Similar to above, except that the check is for the number of
9335 operands of an OMP_CLAUSE node. */
9338 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
9339 int line
, const char *function
)
9342 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9343 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
9344 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
9345 trim_filename (file
), line
);
9347 #endif /* ENABLE_TREE_CHECKING */
9349 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9350 and mapped to the machine mode MODE. Initialize its fields and build
9351 the information necessary for debugging output. */
9354 make_vector_type (tree innertype
, int nunits
, enum machine_mode mode
)
9357 hashval_t hashcode
= 0;
9359 t
= make_node (VECTOR_TYPE
);
9360 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (innertype
);
9361 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
9362 SET_TYPE_MODE (t
, mode
);
9364 if (TYPE_STRUCTURAL_EQUALITY_P (innertype
))
9365 SET_TYPE_STRUCTURAL_EQUALITY (t
);
9366 else if (TYPE_CANONICAL (innertype
) != innertype
9367 || mode
!= VOIDmode
)
9369 = make_vector_type (TYPE_CANONICAL (innertype
), nunits
, VOIDmode
);
9373 hashcode
= iterative_hash_host_wide_int (VECTOR_TYPE
, hashcode
);
9374 hashcode
= iterative_hash_host_wide_int (nunits
, hashcode
);
9375 hashcode
= iterative_hash_host_wide_int (mode
, hashcode
);
9376 hashcode
= iterative_hash_object (TYPE_HASH (TREE_TYPE (t
)), hashcode
);
9377 t
= type_hash_canon (hashcode
, t
);
9379 /* We have built a main variant, based on the main variant of the
9380 inner type. Use it to build the variant we return. */
9381 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
9382 && TREE_TYPE (t
) != innertype
)
9383 return build_type_attribute_qual_variant (t
,
9384 TYPE_ATTRIBUTES (innertype
),
9385 TYPE_QUALS (innertype
));
9391 make_or_reuse_type (unsigned size
, int unsignedp
)
9393 if (size
== INT_TYPE_SIZE
)
9394 return unsignedp
? unsigned_type_node
: integer_type_node
;
9395 if (size
== CHAR_TYPE_SIZE
)
9396 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
9397 if (size
== SHORT_TYPE_SIZE
)
9398 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
9399 if (size
== LONG_TYPE_SIZE
)
9400 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
9401 if (size
== LONG_LONG_TYPE_SIZE
)
9402 return (unsignedp
? long_long_unsigned_type_node
9403 : long_long_integer_type_node
);
9404 if (size
== 128 && int128_integer_type_node
)
9405 return (unsignedp
? int128_unsigned_type_node
9406 : int128_integer_type_node
);
9409 return make_unsigned_type (size
);
9411 return make_signed_type (size
);
9414 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9417 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
9421 if (size
== SHORT_FRACT_TYPE_SIZE
)
9422 return unsignedp
? sat_unsigned_short_fract_type_node
9423 : sat_short_fract_type_node
;
9424 if (size
== FRACT_TYPE_SIZE
)
9425 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9426 if (size
== LONG_FRACT_TYPE_SIZE
)
9427 return unsignedp
? sat_unsigned_long_fract_type_node
9428 : sat_long_fract_type_node
;
9429 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9430 return unsignedp
? sat_unsigned_long_long_fract_type_node
9431 : sat_long_long_fract_type_node
;
9435 if (size
== SHORT_FRACT_TYPE_SIZE
)
9436 return unsignedp
? unsigned_short_fract_type_node
9437 : short_fract_type_node
;
9438 if (size
== FRACT_TYPE_SIZE
)
9439 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9440 if (size
== LONG_FRACT_TYPE_SIZE
)
9441 return unsignedp
? unsigned_long_fract_type_node
9442 : long_fract_type_node
;
9443 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9444 return unsignedp
? unsigned_long_long_fract_type_node
9445 : long_long_fract_type_node
;
9448 return make_fract_type (size
, unsignedp
, satp
);
9451 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9454 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9458 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9459 return unsignedp
? sat_unsigned_short_accum_type_node
9460 : sat_short_accum_type_node
;
9461 if (size
== ACCUM_TYPE_SIZE
)
9462 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9463 if (size
== LONG_ACCUM_TYPE_SIZE
)
9464 return unsignedp
? sat_unsigned_long_accum_type_node
9465 : sat_long_accum_type_node
;
9466 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9467 return unsignedp
? sat_unsigned_long_long_accum_type_node
9468 : sat_long_long_accum_type_node
;
9472 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9473 return unsignedp
? unsigned_short_accum_type_node
9474 : short_accum_type_node
;
9475 if (size
== ACCUM_TYPE_SIZE
)
9476 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9477 if (size
== LONG_ACCUM_TYPE_SIZE
)
9478 return unsignedp
? unsigned_long_accum_type_node
9479 : long_accum_type_node
;
9480 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9481 return unsignedp
? unsigned_long_long_accum_type_node
9482 : long_long_accum_type_node
;
9485 return make_accum_type (size
, unsignedp
, satp
);
9489 /* Create an atomic variant node for TYPE. This routine is called
9490 during initialization of data types to create the 5 basic atomic
9491 types. The generic build_variant_type function requires these to
9492 already be set up in order to function properly, so cannot be
9493 called from there. If ALIGN is non-zero, then ensure alignment is
9494 overridden to this value. */
9497 build_atomic_base (tree type
, unsigned int align
)
9501 /* Make sure its not already registered. */
9502 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9505 t
= build_variant_type_copy (type
);
9506 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
9509 TYPE_ALIGN (t
) = align
;
9514 /* Create nodes for all integer types (and error_mark_node) using the sizes
9515 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9516 SHORT_DOUBLE specifies whether double should be of the same precision
9520 build_common_tree_nodes (bool signed_char
, bool short_double
)
9522 error_mark_node
= make_node (ERROR_MARK
);
9523 TREE_TYPE (error_mark_node
) = error_mark_node
;
9525 initialize_sizetypes ();
9527 /* Define both `signed char' and `unsigned char'. */
9528 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9529 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9530 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9531 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9533 /* Define `char', which is like either `signed char' or `unsigned char'
9534 but not the same as either. */
9537 ? make_signed_type (CHAR_TYPE_SIZE
)
9538 : make_unsigned_type (CHAR_TYPE_SIZE
));
9539 TYPE_STRING_FLAG (char_type_node
) = 1;
9541 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9542 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9543 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9544 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9545 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9546 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9547 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9548 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9549 #if HOST_BITS_PER_WIDE_INT >= 64
9550 /* TODO: This isn't correct, but as logic depends at the moment on
9551 host's instead of target's wide-integer.
9552 If there is a target not supporting TImode, but has an 128-bit
9553 integer-scalar register, this target check needs to be adjusted. */
9554 if (targetm
.scalar_mode_supported_p (TImode
))
9556 int128_integer_type_node
= make_signed_type (128);
9557 int128_unsigned_type_node
= make_unsigned_type (128);
9561 /* Define a boolean type. This type only represents boolean values but
9562 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9563 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9564 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9565 TYPE_PRECISION (boolean_type_node
) = 1;
9566 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9568 /* Define what type to use for size_t. */
9569 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9570 size_type_node
= unsigned_type_node
;
9571 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9572 size_type_node
= long_unsigned_type_node
;
9573 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9574 size_type_node
= long_long_unsigned_type_node
;
9575 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9576 size_type_node
= short_unsigned_type_node
;
9580 /* Fill in the rest of the sized types. Reuse existing type nodes
9582 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9583 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9584 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9585 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9586 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9588 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9589 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9590 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9591 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9592 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9594 /* Don't call build_qualified type for atomics. That routine does
9595 special processing for atomics, and until they are initialized
9596 it's better not to make that call.
9598 Check to see if there is a target override for atomic types. */
9600 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9601 targetm
.atomic_align_for_mode (QImode
));
9602 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9603 targetm
.atomic_align_for_mode (HImode
));
9604 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9605 targetm
.atomic_align_for_mode (SImode
));
9606 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9607 targetm
.atomic_align_for_mode (DImode
));
9608 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9609 targetm
.atomic_align_for_mode (TImode
));
9611 access_public_node
= get_identifier ("public");
9612 access_protected_node
= get_identifier ("protected");
9613 access_private_node
= get_identifier ("private");
9615 /* Define these next since types below may used them. */
9616 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9617 integer_one_node
= build_int_cst (integer_type_node
, 1);
9618 integer_three_node
= build_int_cst (integer_type_node
, 3);
9619 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9621 size_zero_node
= size_int (0);
9622 size_one_node
= size_int (1);
9623 bitsize_zero_node
= bitsize_int (0);
9624 bitsize_one_node
= bitsize_int (1);
9625 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9627 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9628 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9630 void_type_node
= make_node (VOID_TYPE
);
9631 layout_type (void_type_node
);
9633 /* We are not going to have real types in C with less than byte alignment,
9634 so we might as well not have any types that claim to have it. */
9635 TYPE_ALIGN (void_type_node
) = BITS_PER_UNIT
;
9636 TYPE_USER_ALIGN (void_type_node
) = 0;
9638 void_node
= make_node (VOID_CST
);
9639 TREE_TYPE (void_node
) = void_type_node
;
9641 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9642 layout_type (TREE_TYPE (null_pointer_node
));
9644 ptr_type_node
= build_pointer_type (void_type_node
);
9646 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9647 fileptr_type_node
= ptr_type_node
;
9649 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9651 float_type_node
= make_node (REAL_TYPE
);
9652 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9653 layout_type (float_type_node
);
9655 double_type_node
= make_node (REAL_TYPE
);
9657 TYPE_PRECISION (double_type_node
) = FLOAT_TYPE_SIZE
;
9659 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9660 layout_type (double_type_node
);
9662 long_double_type_node
= make_node (REAL_TYPE
);
9663 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9664 layout_type (long_double_type_node
);
9666 float_ptr_type_node
= build_pointer_type (float_type_node
);
9667 double_ptr_type_node
= build_pointer_type (double_type_node
);
9668 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9669 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9671 /* Fixed size integer types. */
9672 uint16_type_node
= build_nonstandard_integer_type (16, true);
9673 uint32_type_node
= build_nonstandard_integer_type (32, true);
9674 uint64_type_node
= build_nonstandard_integer_type (64, true);
9676 /* Decimal float types. */
9677 dfloat32_type_node
= make_node (REAL_TYPE
);
9678 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9679 layout_type (dfloat32_type_node
);
9680 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9681 dfloat32_ptr_type_node
= build_pointer_type (dfloat32_type_node
);
9683 dfloat64_type_node
= make_node (REAL_TYPE
);
9684 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9685 layout_type (dfloat64_type_node
);
9686 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9687 dfloat64_ptr_type_node
= build_pointer_type (dfloat64_type_node
);
9689 dfloat128_type_node
= make_node (REAL_TYPE
);
9690 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9691 layout_type (dfloat128_type_node
);
9692 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9693 dfloat128_ptr_type_node
= build_pointer_type (dfloat128_type_node
);
9695 complex_integer_type_node
= build_complex_type (integer_type_node
);
9696 complex_float_type_node
= build_complex_type (float_type_node
);
9697 complex_double_type_node
= build_complex_type (double_type_node
);
9698 complex_long_double_type_node
= build_complex_type (long_double_type_node
);
9700 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9701 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9702 sat_ ## KIND ## _type_node = \
9703 make_sat_signed_ ## KIND ## _type (SIZE); \
9704 sat_unsigned_ ## KIND ## _type_node = \
9705 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9706 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9707 unsigned_ ## KIND ## _type_node = \
9708 make_unsigned_ ## KIND ## _type (SIZE);
9710 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9711 sat_ ## WIDTH ## KIND ## _type_node = \
9712 make_sat_signed_ ## KIND ## _type (SIZE); \
9713 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9714 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9715 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9716 unsigned_ ## WIDTH ## KIND ## _type_node = \
9717 make_unsigned_ ## KIND ## _type (SIZE);
9719 /* Make fixed-point type nodes based on four different widths. */
9720 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9721 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9722 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9723 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9724 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9726 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9727 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9728 NAME ## _type_node = \
9729 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9730 u ## NAME ## _type_node = \
9731 make_or_reuse_unsigned_ ## KIND ## _type \
9732 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9733 sat_ ## NAME ## _type_node = \
9734 make_or_reuse_sat_signed_ ## KIND ## _type \
9735 (GET_MODE_BITSIZE (MODE ## mode)); \
9736 sat_u ## NAME ## _type_node = \
9737 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9738 (GET_MODE_BITSIZE (U ## MODE ## mode));
9740 /* Fixed-point type and mode nodes. */
9741 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9742 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9743 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9744 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9745 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9746 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9747 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9748 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9749 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9750 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9751 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9754 tree t
= targetm
.build_builtin_va_list ();
9756 /* Many back-ends define record types without setting TYPE_NAME.
9757 If we copied the record type here, we'd keep the original
9758 record type without a name. This breaks name mangling. So,
9759 don't copy record types and let c_common_nodes_and_builtins()
9760 declare the type to be __builtin_va_list. */
9761 if (TREE_CODE (t
) != RECORD_TYPE
)
9762 t
= build_variant_type_copy (t
);
9764 va_list_type_node
= t
;
9768 /* Modify DECL for given flags.
9769 TM_PURE attribute is set only on types, so the function will modify
9770 DECL's type when ECF_TM_PURE is used. */
9773 set_call_expr_flags (tree decl
, int flags
)
9775 if (flags
& ECF_NOTHROW
)
9776 TREE_NOTHROW (decl
) = 1;
9777 if (flags
& ECF_CONST
)
9778 TREE_READONLY (decl
) = 1;
9779 if (flags
& ECF_PURE
)
9780 DECL_PURE_P (decl
) = 1;
9781 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9782 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9783 if (flags
& ECF_NOVOPS
)
9784 DECL_IS_NOVOPS (decl
) = 1;
9785 if (flags
& ECF_NORETURN
)
9786 TREE_THIS_VOLATILE (decl
) = 1;
9787 if (flags
& ECF_MALLOC
)
9788 DECL_IS_MALLOC (decl
) = 1;
9789 if (flags
& ECF_RETURNS_TWICE
)
9790 DECL_IS_RETURNS_TWICE (decl
) = 1;
9791 if (flags
& ECF_LEAF
)
9792 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9793 NULL
, DECL_ATTRIBUTES (decl
));
9794 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9795 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9796 /* Looping const or pure is implied by noreturn.
9797 There is currently no way to declare looping const or looping pure alone. */
9798 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9799 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9803 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9806 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9807 const char *library_name
, int ecf_flags
)
9811 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9812 library_name
, NULL_TREE
);
9813 set_call_expr_flags (decl
, ecf_flags
);
9815 set_builtin_decl (code
, decl
, true);
9818 /* Call this function after instantiating all builtins that the language
9819 front end cares about. This will build the rest of the builtins that
9820 are relied upon by the tree optimizers and the middle-end. */
9823 build_common_builtin_nodes (void)
9828 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9830 ftype
= build_function_type (void_type_node
, void_list_node
);
9831 local_define_builtin ("__builtin_unreachable", ftype
, BUILT_IN_UNREACHABLE
,
9832 "__builtin_unreachable",
9833 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9834 | ECF_CONST
| ECF_LEAF
);
9837 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9838 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9840 ftype
= build_function_type_list (ptr_type_node
,
9841 ptr_type_node
, const_ptr_type_node
,
9842 size_type_node
, NULL_TREE
);
9844 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9845 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9846 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
9847 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9848 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9849 "memmove", ECF_NOTHROW
| ECF_LEAF
);
9852 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9854 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9855 const_ptr_type_node
, size_type_node
,
9857 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9858 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9861 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9863 ftype
= build_function_type_list (ptr_type_node
,
9864 ptr_type_node
, integer_type_node
,
9865 size_type_node
, NULL_TREE
);
9866 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9867 "memset", ECF_NOTHROW
| ECF_LEAF
);
9870 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9872 ftype
= build_function_type_list (ptr_type_node
,
9873 size_type_node
, NULL_TREE
);
9874 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9875 "alloca", ECF_MALLOC
| ECF_NOTHROW
| ECF_LEAF
);
9878 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9879 size_type_node
, NULL_TREE
);
9880 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9881 BUILT_IN_ALLOCA_WITH_ALIGN
, "alloca",
9882 ECF_MALLOC
| ECF_NOTHROW
| ECF_LEAF
);
9884 /* If we're checking the stack, `alloca' can throw. */
9885 if (flag_stack_check
)
9887 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA
)) = 0;
9888 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
)) = 0;
9891 ftype
= build_function_type_list (void_type_node
,
9892 ptr_type_node
, ptr_type_node
,
9893 ptr_type_node
, NULL_TREE
);
9894 local_define_builtin ("__builtin_init_trampoline", ftype
,
9895 BUILT_IN_INIT_TRAMPOLINE
,
9896 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
9897 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
9898 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
9899 "__builtin_init_heap_trampoline",
9900 ECF_NOTHROW
| ECF_LEAF
);
9902 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
9903 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
9904 BUILT_IN_ADJUST_TRAMPOLINE
,
9905 "__builtin_adjust_trampoline",
9906 ECF_CONST
| ECF_NOTHROW
);
9908 ftype
= build_function_type_list (void_type_node
,
9909 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9910 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
9911 BUILT_IN_NONLOCAL_GOTO
,
9912 "__builtin_nonlocal_goto",
9913 ECF_NORETURN
| ECF_NOTHROW
);
9915 ftype
= build_function_type_list (void_type_node
,
9916 ptr_type_node
, ptr_type_node
, NULL_TREE
);
9917 local_define_builtin ("__builtin_setjmp_setup", ftype
,
9918 BUILT_IN_SETJMP_SETUP
,
9919 "__builtin_setjmp_setup", ECF_NOTHROW
);
9921 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9922 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
9923 BUILT_IN_SETJMP_RECEIVER
,
9924 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
9926 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
9927 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
9928 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
9930 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9931 local_define_builtin ("__builtin_stack_restore", ftype
,
9932 BUILT_IN_STACK_RESTORE
,
9933 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
9935 /* If there's a possibility that we might use the ARM EABI, build the
9936 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9937 if (targetm
.arm_eabi_unwinder
)
9939 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
9940 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
9941 BUILT_IN_CXA_END_CLEANUP
,
9942 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
9945 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
9946 local_define_builtin ("__builtin_unwind_resume", ftype
,
9947 BUILT_IN_UNWIND_RESUME
,
9948 ((targetm_common
.except_unwind_info (&global_options
)
9950 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9953 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
9955 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
9957 local_define_builtin ("__builtin_return_address", ftype
,
9958 BUILT_IN_RETURN_ADDRESS
,
9959 "__builtin_return_address",
9963 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
9964 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9966 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
9967 ptr_type_node
, NULL_TREE
);
9968 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
9969 local_define_builtin ("__cyg_profile_func_enter", ftype
,
9970 BUILT_IN_PROFILE_FUNC_ENTER
,
9971 "__cyg_profile_func_enter", 0);
9972 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
9973 local_define_builtin ("__cyg_profile_func_exit", ftype
,
9974 BUILT_IN_PROFILE_FUNC_EXIT
,
9975 "__cyg_profile_func_exit", 0);
9978 /* The exception object and filter values from the runtime. The argument
9979 must be zero before exception lowering, i.e. from the front end. After
9980 exception lowering, it will be the region number for the exception
9981 landing pad. These functions are PURE instead of CONST to prevent
9982 them from being hoisted past the exception edge that will initialize
9983 its value in the landing pad. */
9984 ftype
= build_function_type_list (ptr_type_node
,
9985 integer_type_node
, NULL_TREE
);
9986 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
9987 /* Only use TM_PURE if we we have TM language support. */
9988 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
9989 ecf_flags
|= ECF_TM_PURE
;
9990 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
9991 "__builtin_eh_pointer", ecf_flags
);
9993 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
9994 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
9995 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
9996 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9998 ftype
= build_function_type_list (void_type_node
,
9999 integer_type_node
, integer_type_node
,
10001 local_define_builtin ("__builtin_eh_copy_values", ftype
,
10002 BUILT_IN_EH_COPY_VALUES
,
10003 "__builtin_eh_copy_values", ECF_NOTHROW
);
10005 /* Complex multiplication and division. These are handled as builtins
10006 rather than optabs because emit_library_call_value doesn't support
10007 complex. Further, we can do slightly better with folding these
10008 beasties if the real and complex parts of the arguments are separate. */
10012 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
10014 char mode_name_buf
[4], *q
;
10016 enum built_in_function mcode
, dcode
;
10017 tree type
, inner_type
;
10018 const char *prefix
= "__";
10020 if (targetm
.libfunc_gnu_prefix
)
10023 type
= lang_hooks
.types
.type_for_mode ((enum machine_mode
) mode
, 0);
10026 inner_type
= TREE_TYPE (type
);
10028 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
10029 inner_type
, inner_type
, NULL_TREE
);
10031 mcode
= ((enum built_in_function
)
10032 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10033 dcode
= ((enum built_in_function
)
10034 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10036 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
10040 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
10042 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
10043 built_in_names
[mcode
],
10044 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10046 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
10048 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
10049 built_in_names
[dcode
],
10050 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10055 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10058 If we requested a pointer to a vector, build up the pointers that
10059 we stripped off while looking for the inner type. Similarly for
10060 return values from functions.
10062 The argument TYPE is the top of the chain, and BOTTOM is the
10063 new type which we will point to. */
10066 reconstruct_complex_type (tree type
, tree bottom
)
10070 if (TREE_CODE (type
) == POINTER_TYPE
)
10072 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10073 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
10074 TYPE_REF_CAN_ALIAS_ALL (type
));
10076 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
10078 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10079 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
10080 TYPE_REF_CAN_ALIAS_ALL (type
));
10082 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10084 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10085 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
10087 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
10089 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10090 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
));
10092 else if (TREE_CODE (type
) == METHOD_TYPE
)
10094 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10095 /* The build_method_type_directly() routine prepends 'this' to argument list,
10096 so we must compensate by getting rid of it. */
10098 = build_method_type_directly
10099 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
10101 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
10103 else if (TREE_CODE (type
) == OFFSET_TYPE
)
10105 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10106 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
10111 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
10112 TYPE_QUALS (type
));
10115 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10118 build_vector_type_for_mode (tree innertype
, enum machine_mode mode
)
10122 switch (GET_MODE_CLASS (mode
))
10124 case MODE_VECTOR_INT
:
10125 case MODE_VECTOR_FLOAT
:
10126 case MODE_VECTOR_FRACT
:
10127 case MODE_VECTOR_UFRACT
:
10128 case MODE_VECTOR_ACCUM
:
10129 case MODE_VECTOR_UACCUM
:
10130 nunits
= GET_MODE_NUNITS (mode
);
10134 /* Check that there are no leftover bits. */
10135 gcc_assert (GET_MODE_BITSIZE (mode
)
10136 % TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
10138 nunits
= GET_MODE_BITSIZE (mode
)
10139 / TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
10143 gcc_unreachable ();
10146 return make_vector_type (innertype
, nunits
, mode
);
10149 /* Similarly, but takes the inner type and number of units, which must be
10153 build_vector_type (tree innertype
, int nunits
)
10155 return make_vector_type (innertype
, nunits
, VOIDmode
);
10158 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10161 build_opaque_vector_type (tree innertype
, int nunits
)
10163 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10165 /* We always build the non-opaque variant before the opaque one,
10166 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10167 cand
= TYPE_NEXT_VARIANT (t
);
10169 && TYPE_VECTOR_OPAQUE (cand
)
10170 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
10172 /* Othewise build a variant type and make sure to queue it after
10173 the non-opaque type. */
10174 cand
= build_distinct_type_copy (t
);
10175 TYPE_VECTOR_OPAQUE (cand
) = true;
10176 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10177 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10178 TYPE_NEXT_VARIANT (t
) = cand
;
10179 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
10184 /* Given an initializer INIT, return TRUE if INIT is zero or some
10185 aggregate of zeros. Otherwise return FALSE. */
10187 initializer_zerop (const_tree init
)
10193 switch (TREE_CODE (init
))
10196 return integer_zerop (init
);
10199 /* ??? Note that this is not correct for C4X float formats. There,
10200 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10201 negative exponent. */
10202 return real_zerop (init
)
10203 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
));
10206 return fixed_zerop (init
);
10209 return integer_zerop (init
)
10210 || (real_zerop (init
)
10211 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10212 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
))));
10217 for (i
= 0; i
< VECTOR_CST_NELTS (init
); ++i
)
10218 if (!initializer_zerop (VECTOR_CST_ELT (init
, i
)))
10225 unsigned HOST_WIDE_INT idx
;
10227 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10228 if (!initializer_zerop (elt
))
10237 /* We need to loop through all elements to handle cases like
10238 "\0" and "\0foobar". */
10239 for (i
= 0; i
< TREE_STRING_LENGTH (init
); ++i
)
10240 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10251 /* Check if vector VEC consists of all the equal elements and
10252 that the number of elements corresponds to the type of VEC.
10253 The function returns first element of the vector
10254 or NULL_TREE if the vector is not uniform. */
10256 uniform_vector_p (const_tree vec
)
10261 if (vec
== NULL_TREE
)
10264 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10266 if (TREE_CODE (vec
) == VECTOR_CST
)
10268 first
= VECTOR_CST_ELT (vec
, 0);
10269 for (i
= 1; i
< VECTOR_CST_NELTS (vec
); ++i
)
10270 if (!operand_equal_p (first
, VECTOR_CST_ELT (vec
, i
), 0))
10276 else if (TREE_CODE (vec
) == CONSTRUCTOR
)
10278 first
= error_mark_node
;
10280 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10287 if (!operand_equal_p (first
, t
, 0))
10290 if (i
!= TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)))
10299 /* Build an empty statement at location LOC. */
10302 build_empty_stmt (location_t loc
)
10304 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
10305 SET_EXPR_LOCATION (t
, loc
);
10310 /* Build an OpenMP clause with code CODE. LOC is the location of the
10314 build_omp_clause (location_t loc
, enum omp_clause_code code
)
10319 length
= omp_clause_num_ops
[code
];
10320 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
10322 record_node_allocation_statistics (OMP_CLAUSE
, size
);
10324 t
= (tree
) ggc_internal_alloc (size
);
10325 memset (t
, 0, size
);
10326 TREE_SET_CODE (t
, OMP_CLAUSE
);
10327 OMP_CLAUSE_SET_CODE (t
, code
);
10328 OMP_CLAUSE_LOCATION (t
) = loc
;
10333 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10334 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10335 Except for the CODE and operand count field, other storage for the
10336 object is initialized to zeros. */
10339 build_vl_exp_stat (enum tree_code code
, int len MEM_STAT_DECL
)
10342 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
10344 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
10345 gcc_assert (len
>= 1);
10347 record_node_allocation_statistics (code
, length
);
10349 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
10351 TREE_SET_CODE (t
, code
);
10353 /* Can't use TREE_OPERAND to store the length because if checking is
10354 enabled, it will try to check the length before we store it. :-P */
10355 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
10360 /* Helper function for build_call_* functions; build a CALL_EXPR with
10361 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10362 the argument slots. */
10365 build_call_1 (tree return_type
, tree fn
, int nargs
)
10369 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10370 TREE_TYPE (t
) = return_type
;
10371 CALL_EXPR_FN (t
) = fn
;
10372 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10377 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10378 FN and a null static chain slot. NARGS is the number of call arguments
10379 which are specified as "..." arguments. */
10382 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10386 va_start (args
, nargs
);
10387 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10392 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10393 FN and a null static chain slot. NARGS is the number of call arguments
10394 which are specified as a va_list ARGS. */
10397 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10402 t
= build_call_1 (return_type
, fn
, nargs
);
10403 for (i
= 0; i
< nargs
; i
++)
10404 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10405 process_call_operands (t
);
10409 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10410 FN and a null static chain slot. NARGS is the number of call arguments
10411 which are specified as a tree array ARGS. */
10414 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10415 int nargs
, const tree
*args
)
10420 t
= build_call_1 (return_type
, fn
, nargs
);
10421 for (i
= 0; i
< nargs
; i
++)
10422 CALL_EXPR_ARG (t
, i
) = args
[i
];
10423 process_call_operands (t
);
10424 SET_EXPR_LOCATION (t
, loc
);
10428 /* Like build_call_array, but takes a vec. */
10431 build_call_vec (tree return_type
, tree fn
, vec
<tree
, va_gc
> *args
)
10436 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10437 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10438 CALL_EXPR_ARG (ret
, ix
) = t
;
10439 process_call_operands (ret
);
10443 /* Return true if T (assumed to be a DECL) must be assigned a memory
10447 needs_to_live_in_memory (const_tree t
)
10449 return (TREE_ADDRESSABLE (t
)
10450 || is_global_var (t
)
10451 || (TREE_CODE (t
) == RESULT_DECL
10452 && !DECL_BY_REFERENCE (t
)
10453 && aggregate_value_p (t
, current_function_decl
)));
10456 /* Return value of a constant X and sign-extend it. */
10459 int_cst_value (const_tree x
)
10461 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10462 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
10464 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10465 gcc_assert (cst_and_fits_in_hwi (x
));
10467 if (bits
< HOST_BITS_PER_WIDE_INT
)
10469 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10471 val
|= (~(unsigned HOST_WIDE_INT
) 0) << (bits
- 1) << 1;
10473 val
&= ~((~(unsigned HOST_WIDE_INT
) 0) << (bits
- 1) << 1);
10479 /* Return value of a constant X and sign-extend it. */
10482 widest_int_cst_value (const_tree x
)
10484 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10485 unsigned HOST_WIDEST_INT val
= TREE_INT_CST_LOW (x
);
10487 #if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
10488 gcc_assert (HOST_BITS_PER_WIDEST_INT
>= HOST_BITS_PER_DOUBLE_INT
);
10489 gcc_assert (TREE_INT_CST_NUNITS (x
) == 2);
10491 if (TREE_INT_CST_NUNITS (x
) == 1)
10492 val
= HOST_WIDE_INT (val
);
10494 val
|= (((unsigned HOST_WIDEST_INT
) TREE_INT_CST_ELT (x
, 1))
10495 << HOST_BITS_PER_WIDE_INT
);
10497 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10498 gcc_assert (TREE_INT_CST_NUNITS (x
) == 1);
10501 if (bits
< HOST_BITS_PER_WIDEST_INT
)
10503 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10505 val
|= (~(unsigned HOST_WIDEST_INT
) 0) << (bits
- 1) << 1;
10507 val
&= ~((~(unsigned HOST_WIDEST_INT
) 0) << (bits
- 1) << 1);
10513 /* If TYPE is an integral or pointer type, return an integer type with
10514 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10515 if TYPE is already an integer type of signedness UNSIGNEDP. */
10518 signed_or_unsigned_type_for (int unsignedp
, tree type
)
10520 if (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
) == unsignedp
)
10523 if (TREE_CODE (type
) == VECTOR_TYPE
)
10525 tree inner
= TREE_TYPE (type
);
10526 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10529 if (inner
== inner2
)
10531 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
10534 if (!INTEGRAL_TYPE_P (type
)
10535 && !POINTER_TYPE_P (type
)
10536 && TREE_CODE (type
) != OFFSET_TYPE
)
10539 return build_nonstandard_integer_type (TYPE_PRECISION (type
), unsignedp
);
10542 /* If TYPE is an integral or pointer type, return an integer type with
10543 the same precision which is unsigned, or itself if TYPE is already an
10544 unsigned integer type. */
10547 unsigned_type_for (tree type
)
10549 return signed_or_unsigned_type_for (1, type
);
10552 /* If TYPE is an integral or pointer type, return an integer type with
10553 the same precision which is signed, or itself if TYPE is already a
10554 signed integer type. */
10557 signed_type_for (tree type
)
10559 return signed_or_unsigned_type_for (0, type
);
10562 /* If TYPE is a vector type, return a signed integer vector type with the
10563 same width and number of subparts. Otherwise return boolean_type_node. */
10566 truth_type_for (tree type
)
10568 if (TREE_CODE (type
) == VECTOR_TYPE
)
10570 tree elem
= lang_hooks
.types
.type_for_size
10571 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))), 0);
10572 return build_opaque_vector_type (elem
, TYPE_VECTOR_SUBPARTS (type
));
10575 return boolean_type_node
;
10578 /* Returns the largest value obtainable by casting something in INNER type to
10582 upper_bound_in_type (tree outer
, tree inner
)
10584 unsigned int det
= 0;
10585 unsigned oprec
= TYPE_PRECISION (outer
);
10586 unsigned iprec
= TYPE_PRECISION (inner
);
10589 /* Compute a unique number for every combination. */
10590 det
|= (oprec
> iprec
) ? 4 : 0;
10591 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
10592 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
10594 /* Determine the exponent to use. */
10599 /* oprec <= iprec, outer: signed, inner: don't care. */
10604 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10608 /* oprec > iprec, outer: signed, inner: signed. */
10612 /* oprec > iprec, outer: signed, inner: unsigned. */
10616 /* oprec > iprec, outer: unsigned, inner: signed. */
10620 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10624 gcc_unreachable ();
10627 return wide_int_to_tree (outer
,
10628 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
10631 /* Returns the smallest value obtainable by casting something in INNER type to
10635 lower_bound_in_type (tree outer
, tree inner
)
10637 unsigned oprec
= TYPE_PRECISION (outer
);
10638 unsigned iprec
= TYPE_PRECISION (inner
);
10640 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10642 if (TYPE_UNSIGNED (outer
)
10643 /* If we are widening something of an unsigned type, OUTER type
10644 contains all values of INNER type. In particular, both INNER
10645 and OUTER types have zero in common. */
10646 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
10647 return build_int_cst (outer
, 0);
10650 /* If we are widening a signed type to another signed type, we
10651 want to obtain -2^^(iprec-1). If we are keeping the
10652 precision or narrowing to a signed type, we want to obtain
10654 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
10655 return wide_int_to_tree (outer
,
10656 wi::mask (prec
- 1, true,
10657 TYPE_PRECISION (outer
)));
10661 /* Return nonzero if two operands that are suitable for PHI nodes are
10662 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10663 SSA_NAME or invariant. Note that this is strictly an optimization.
10664 That is, callers of this function can directly call operand_equal_p
10665 and get the same result, only slower. */
10668 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
10672 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
10674 return operand_equal_p (arg0
, arg1
, 0);
10677 /* Returns number of zeros at the end of binary representation of X. */
10680 num_ending_zeros (const_tree x
)
10682 return build_int_cst (TREE_TYPE (x
), wi::ctz (x
));
10686 #define WALK_SUBTREE(NODE) \
10689 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10695 /* This is a subroutine of walk_tree that walks field of TYPE that are to
10696 be walked whenever a type is seen in the tree. Rest of operands and return
10697 value are as for walk_tree. */
10700 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
10701 struct pointer_set_t
*pset
, walk_tree_lh lh
)
10703 tree result
= NULL_TREE
;
10705 switch (TREE_CODE (type
))
10708 case REFERENCE_TYPE
:
10710 /* We have to worry about mutually recursive pointers. These can't
10711 be written in C. They can in Ada. It's pathological, but
10712 there's an ACATS test (c38102a) that checks it. Deal with this
10713 by checking if we're pointing to another pointer, that one
10714 points to another pointer, that one does too, and we have no htab.
10715 If so, get a hash table. We check three levels deep to avoid
10716 the cost of the hash table if we don't need one. */
10717 if (POINTER_TYPE_P (TREE_TYPE (type
))
10718 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
10719 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
10722 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
10730 /* ... fall through ... */
10733 WALK_SUBTREE (TREE_TYPE (type
));
10737 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
10739 /* Fall through. */
10741 case FUNCTION_TYPE
:
10742 WALK_SUBTREE (TREE_TYPE (type
));
10746 /* We never want to walk into default arguments. */
10747 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
10748 WALK_SUBTREE (TREE_VALUE (arg
));
10753 /* Don't follow this nodes's type if a pointer for fear that
10754 we'll have infinite recursion. If we have a PSET, then we
10757 || (!POINTER_TYPE_P (TREE_TYPE (type
))
10758 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
10759 WALK_SUBTREE (TREE_TYPE (type
));
10760 WALK_SUBTREE (TYPE_DOMAIN (type
));
10764 WALK_SUBTREE (TREE_TYPE (type
));
10765 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
10775 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10776 called with the DATA and the address of each sub-tree. If FUNC returns a
10777 non-NULL value, the traversal is stopped, and the value returned by FUNC
10778 is returned. If PSET is non-NULL it is used to record the nodes visited,
10779 and to avoid visiting a node more than once. */
10782 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
10783 struct pointer_set_t
*pset
, walk_tree_lh lh
)
10785 enum tree_code code
;
10789 #define WALK_SUBTREE_TAIL(NODE) \
10793 goto tail_recurse; \
10798 /* Skip empty subtrees. */
10802 /* Don't walk the same tree twice, if the user has requested
10803 that we avoid doing so. */
10804 if (pset
&& pointer_set_insert (pset
, *tp
))
10807 /* Call the function. */
10809 result
= (*func
) (tp
, &walk_subtrees
, data
);
10811 /* If we found something, return it. */
10815 code
= TREE_CODE (*tp
);
10817 /* Even if we didn't, FUNC may have decided that there was nothing
10818 interesting below this point in the tree. */
10819 if (!walk_subtrees
)
10821 /* But we still need to check our siblings. */
10822 if (code
== TREE_LIST
)
10823 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
10824 else if (code
== OMP_CLAUSE
)
10825 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10832 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
10833 if (result
|| !walk_subtrees
)
10840 case IDENTIFIER_NODE
:
10847 case PLACEHOLDER_EXPR
:
10851 /* None of these have subtrees other than those already walked
10856 WALK_SUBTREE (TREE_VALUE (*tp
));
10857 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
10862 int len
= TREE_VEC_LENGTH (*tp
);
10867 /* Walk all elements but the first. */
10869 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
10871 /* Now walk the first one as a tail call. */
10872 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
10876 WALK_SUBTREE (TREE_REALPART (*tp
));
10877 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
10881 unsigned HOST_WIDE_INT idx
;
10882 constructor_elt
*ce
;
10884 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
10886 WALK_SUBTREE (ce
->value
);
10891 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
10896 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
10898 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
10899 into declarations that are just mentioned, rather than
10900 declared; they don't really belong to this part of the tree.
10901 And, we can see cycles: the initializer for a declaration
10902 can refer to the declaration itself. */
10903 WALK_SUBTREE (DECL_INITIAL (decl
));
10904 WALK_SUBTREE (DECL_SIZE (decl
));
10905 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
10907 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
10910 case STATEMENT_LIST
:
10912 tree_stmt_iterator i
;
10913 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
10914 WALK_SUBTREE (*tsi_stmt_ptr (i
));
10919 switch (OMP_CLAUSE_CODE (*tp
))
10921 case OMP_CLAUSE_PRIVATE
:
10922 case OMP_CLAUSE_SHARED
:
10923 case OMP_CLAUSE_FIRSTPRIVATE
:
10924 case OMP_CLAUSE_COPYIN
:
10925 case OMP_CLAUSE_COPYPRIVATE
:
10926 case OMP_CLAUSE_FINAL
:
10927 case OMP_CLAUSE_IF
:
10928 case OMP_CLAUSE_NUM_THREADS
:
10929 case OMP_CLAUSE_SCHEDULE
:
10930 case OMP_CLAUSE_UNIFORM
:
10931 case OMP_CLAUSE_DEPEND
:
10932 case OMP_CLAUSE_NUM_TEAMS
:
10933 case OMP_CLAUSE_THREAD_LIMIT
:
10934 case OMP_CLAUSE_DEVICE
:
10935 case OMP_CLAUSE_DIST_SCHEDULE
:
10936 case OMP_CLAUSE_SAFELEN
:
10937 case OMP_CLAUSE_SIMDLEN
:
10938 case OMP_CLAUSE__LOOPTEMP_
:
10939 case OMP_CLAUSE__SIMDUID_
:
10940 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
10943 case OMP_CLAUSE_NOWAIT
:
10944 case OMP_CLAUSE_ORDERED
:
10945 case OMP_CLAUSE_DEFAULT
:
10946 case OMP_CLAUSE_UNTIED
:
10947 case OMP_CLAUSE_MERGEABLE
:
10948 case OMP_CLAUSE_PROC_BIND
:
10949 case OMP_CLAUSE_INBRANCH
:
10950 case OMP_CLAUSE_NOTINBRANCH
:
10951 case OMP_CLAUSE_FOR
:
10952 case OMP_CLAUSE_PARALLEL
:
10953 case OMP_CLAUSE_SECTIONS
:
10954 case OMP_CLAUSE_TASKGROUP
:
10955 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10957 case OMP_CLAUSE_LASTPRIVATE
:
10958 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
10959 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
10960 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10962 case OMP_CLAUSE_COLLAPSE
:
10965 for (i
= 0; i
< 3; i
++)
10966 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
10967 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10970 case OMP_CLAUSE_LINEAR
:
10971 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
10972 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
10973 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
10974 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10976 case OMP_CLAUSE_ALIGNED
:
10977 case OMP_CLAUSE_FROM
:
10978 case OMP_CLAUSE_TO
:
10979 case OMP_CLAUSE_MAP
:
10980 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
10981 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
10982 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10984 case OMP_CLAUSE_REDUCTION
:
10987 for (i
= 0; i
< 4; i
++)
10988 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
10989 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
10993 gcc_unreachable ();
11001 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11002 But, we only want to walk once. */
11003 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11004 for (i
= 0; i
< len
; ++i
)
11005 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11006 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11010 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11011 defining. We only want to walk into these fields of a type in this
11012 case and not in the general case of a mere reference to the type.
11014 The criterion is as follows: if the field can be an expression, it
11015 must be walked only here. This should be in keeping with the fields
11016 that are directly gimplified in gimplify_type_sizes in order for the
11017 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11018 variable-sized types.
11020 Note that DECLs get walked as part of processing the BIND_EXPR. */
11021 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11023 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11024 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11027 /* Call the function for the type. See if it returns anything or
11028 doesn't want us to continue. If we are to continue, walk both
11029 the normal fields and those for the declaration case. */
11030 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11031 if (result
|| !walk_subtrees
)
11034 /* But do not walk a pointed-to type since it may itself need to
11035 be walked in the declaration case if it isn't anonymous. */
11036 if (!POINTER_TYPE_P (*type_p
))
11038 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11043 /* If this is a record type, also walk the fields. */
11044 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11048 for (field
= TYPE_FIELDS (*type_p
); field
;
11049 field
= DECL_CHAIN (field
))
11051 /* We'd like to look at the type of the field, but we can
11052 easily get infinite recursion. So assume it's pointed
11053 to elsewhere in the tree. Also, ignore things that
11055 if (TREE_CODE (field
) != FIELD_DECL
)
11058 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11059 WALK_SUBTREE (DECL_SIZE (field
));
11060 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11061 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11062 WALK_SUBTREE (DECL_QUALIFIER (field
));
11066 /* Same for scalar types. */
11067 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11068 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11069 || TREE_CODE (*type_p
) == INTEGER_TYPE
11070 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11071 || TREE_CODE (*type_p
) == REAL_TYPE
)
11073 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11074 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11077 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11078 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11083 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11087 /* Walk over all the sub-trees of this operand. */
11088 len
= TREE_OPERAND_LENGTH (*tp
);
11090 /* Go through the subtrees. We need to do this in forward order so
11091 that the scope of a FOR_EXPR is handled properly. */
11094 for (i
= 0; i
< len
- 1; ++i
)
11095 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11096 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11099 /* If this is a type, walk the needed fields in the type. */
11100 else if (TYPE_P (*tp
))
11101 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11105 /* We didn't find what we were looking for. */
11108 #undef WALK_SUBTREE_TAIL
11110 #undef WALK_SUBTREE
11112 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11115 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11119 struct pointer_set_t
*pset
;
11121 pset
= pointer_set_create ();
11122 result
= walk_tree_1 (tp
, func
, data
, pset
, lh
);
11123 pointer_set_destroy (pset
);
11129 tree_block (tree t
)
11131 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11133 if (IS_EXPR_CODE_CLASS (c
))
11134 return LOCATION_BLOCK (t
->exp
.locus
);
11135 gcc_unreachable ();
11140 tree_set_block (tree t
, tree b
)
11142 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11144 if (IS_EXPR_CODE_CLASS (c
))
11147 t
->exp
.locus
= COMBINE_LOCATION_DATA (line_table
, t
->exp
.locus
, b
);
11149 t
->exp
.locus
= LOCATION_LOCUS (t
->exp
.locus
);
11152 gcc_unreachable ();
11155 /* Create a nameless artificial label and put it in the current
11156 function context. The label has a location of LOC. Returns the
11157 newly created label. */
11160 create_artificial_label (location_t loc
)
11162 tree lab
= build_decl (loc
,
11163 LABEL_DECL
, NULL_TREE
, void_type_node
);
11165 DECL_ARTIFICIAL (lab
) = 1;
11166 DECL_IGNORED_P (lab
) = 1;
11167 DECL_CONTEXT (lab
) = current_function_decl
;
11171 /* Given a tree, try to return a useful variable name that we can use
11172 to prefix a temporary that is being assigned the value of the tree.
11173 I.E. given <temp> = &A, return A. */
11178 tree stripped_decl
;
11181 STRIP_NOPS (stripped_decl
);
11182 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11183 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11184 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11186 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11189 return IDENTIFIER_POINTER (name
);
11193 switch (TREE_CODE (stripped_decl
))
11196 return get_name (TREE_OPERAND (stripped_decl
, 0));
11203 /* Return true if TYPE has a variable argument list. */
11206 stdarg_p (const_tree fntype
)
11208 function_args_iterator args_iter
;
11209 tree n
= NULL_TREE
, t
;
11214 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
11219 return n
!= NULL_TREE
&& n
!= void_type_node
;
11222 /* Return true if TYPE has a prototype. */
11225 prototype_p (tree fntype
)
11229 gcc_assert (fntype
!= NULL_TREE
);
11231 t
= TYPE_ARG_TYPES (fntype
);
11232 return (t
!= NULL_TREE
);
11235 /* If BLOCK is inlined from an __attribute__((__artificial__))
11236 routine, return pointer to location from where it has been
11239 block_nonartificial_location (tree block
)
11241 location_t
*ret
= NULL
;
11243 while (block
&& TREE_CODE (block
) == BLOCK
11244 && BLOCK_ABSTRACT_ORIGIN (block
))
11246 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11248 while (TREE_CODE (ao
) == BLOCK
11249 && BLOCK_ABSTRACT_ORIGIN (ao
)
11250 && BLOCK_ABSTRACT_ORIGIN (ao
) != ao
)
11251 ao
= BLOCK_ABSTRACT_ORIGIN (ao
);
11253 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11255 /* If AO is an artificial inline, point RET to the
11256 call site locus at which it has been inlined and continue
11257 the loop, in case AO's caller is also an artificial
11259 if (DECL_DECLARED_INLINE_P (ao
)
11260 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11261 ret
= &BLOCK_SOURCE_LOCATION (block
);
11265 else if (TREE_CODE (ao
) != BLOCK
)
11268 block
= BLOCK_SUPERCONTEXT (block
);
11274 /* If EXP is inlined from an __attribute__((__artificial__))
11275 function, return the location of the original call expression. */
11278 tree_nonartificial_location (tree exp
)
11280 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11285 return EXPR_LOCATION (exp
);
11289 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
11292 /* Return the hash code code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11295 cl_option_hash_hash (const void *x
)
11297 const_tree
const t
= (const_tree
) x
;
11301 hashval_t hash
= 0;
11303 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11305 p
= (const char *)TREE_OPTIMIZATION (t
);
11306 len
= sizeof (struct cl_optimization
);
11309 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11311 p
= (const char *)TREE_TARGET_OPTION (t
);
11312 len
= sizeof (struct cl_target_option
);
11316 gcc_unreachable ();
11318 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11320 for (i
= 0; i
< len
; i
++)
11322 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
11327 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11328 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11332 cl_option_hash_eq (const void *x
, const void *y
)
11334 const_tree
const xt
= (const_tree
) x
;
11335 const_tree
const yt
= (const_tree
) y
;
11340 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11343 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11345 xp
= (const char *)TREE_OPTIMIZATION (xt
);
11346 yp
= (const char *)TREE_OPTIMIZATION (yt
);
11347 len
= sizeof (struct cl_optimization
);
11350 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11352 xp
= (const char *)TREE_TARGET_OPTION (xt
);
11353 yp
= (const char *)TREE_TARGET_OPTION (yt
);
11354 len
= sizeof (struct cl_target_option
);
11358 gcc_unreachable ();
11360 return (memcmp (xp
, yp
, len
) == 0);
11363 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11366 build_optimization_node (struct gcc_options
*opts
)
11371 /* Use the cache of optimization nodes. */
11373 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11376 slot
= htab_find_slot (cl_option_hash_table
, cl_optimization_node
, INSERT
);
11380 /* Insert this one into the hash table. */
11381 t
= cl_optimization_node
;
11384 /* Make a new node for next time round. */
11385 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11391 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11394 build_target_option_node (struct gcc_options
*opts
)
11399 /* Use the cache of optimization nodes. */
11401 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11404 slot
= htab_find_slot (cl_option_hash_table
, cl_target_option_node
, INSERT
);
11408 /* Insert this one into the hash table. */
11409 t
= cl_target_option_node
;
11412 /* Make a new node for next time round. */
11413 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11419 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11420 Called through htab_traverse. */
11423 prepare_target_option_node_for_pch (void **slot
, void *)
11425 tree node
= (tree
) *slot
;
11426 if (TREE_CODE (node
) == TARGET_OPTION_NODE
)
11427 TREE_TARGET_GLOBALS (node
) = NULL
;
11431 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11432 so that they aren't saved during PCH writing. */
11435 prepare_target_option_nodes_for_pch (void)
11437 htab_traverse (cl_option_hash_table
, prepare_target_option_node_for_pch
,
11441 /* Determine the "ultimate origin" of a block. The block may be an inlined
11442 instance of an inlined instance of a block which is local to an inline
11443 function, so we have to trace all of the way back through the origin chain
11444 to find out what sort of node actually served as the original seed for the
11448 block_ultimate_origin (const_tree block
)
11450 tree immediate_origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11452 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11453 nodes in the function to point to themselves; ignore that if
11454 we're trying to output the abstract instance of this function. */
11455 if (BLOCK_ABSTRACT (block
) && immediate_origin
== block
)
11458 if (immediate_origin
== NULL_TREE
)
11463 tree lookahead
= immediate_origin
;
11467 ret_val
= lookahead
;
11468 lookahead
= (TREE_CODE (ret_val
) == BLOCK
11469 ? BLOCK_ABSTRACT_ORIGIN (ret_val
) : NULL
);
11471 while (lookahead
!= NULL
&& lookahead
!= ret_val
);
11473 /* The block's abstract origin chain may not be the *ultimate* origin of
11474 the block. It could lead to a DECL that has an abstract origin set.
11475 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11476 will give us if it has one). Note that DECL's abstract origins are
11477 supposed to be the most distant ancestor (or so decl_ultimate_origin
11478 claims), so we don't need to loop following the DECL origins. */
11479 if (DECL_P (ret_val
))
11480 return DECL_ORIGIN (ret_val
);
11486 /* Return true iff conversion in EXP generates no instruction. Mark
11487 it inline so that we fully inline into the stripping functions even
11488 though we have two uses of this function. */
11491 tree_nop_conversion (const_tree exp
)
11493 tree outer_type
, inner_type
;
11495 if (!CONVERT_EXPR_P (exp
)
11496 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
11498 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
11501 outer_type
= TREE_TYPE (exp
);
11502 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11507 /* Use precision rather then machine mode when we can, which gives
11508 the correct answer even for submode (bit-field) types. */
11509 if ((INTEGRAL_TYPE_P (outer_type
)
11510 || POINTER_TYPE_P (outer_type
)
11511 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
11512 && (INTEGRAL_TYPE_P (inner_type
)
11513 || POINTER_TYPE_P (inner_type
)
11514 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
11515 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
11517 /* Otherwise fall back on comparing machine modes (e.g. for
11518 aggregate types, floats). */
11519 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
11522 /* Return true iff conversion in EXP generates no instruction. Don't
11523 consider conversions changing the signedness. */
11526 tree_sign_nop_conversion (const_tree exp
)
11528 tree outer_type
, inner_type
;
11530 if (!tree_nop_conversion (exp
))
11533 outer_type
= TREE_TYPE (exp
);
11534 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11536 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
11537 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
11540 /* Strip conversions from EXP according to tree_nop_conversion and
11541 return the resulting expression. */
11544 tree_strip_nop_conversions (tree exp
)
11546 while (tree_nop_conversion (exp
))
11547 exp
= TREE_OPERAND (exp
, 0);
11551 /* Strip conversions from EXP according to tree_sign_nop_conversion
11552 and return the resulting expression. */
11555 tree_strip_sign_nop_conversions (tree exp
)
11557 while (tree_sign_nop_conversion (exp
))
11558 exp
= TREE_OPERAND (exp
, 0);
11562 /* Avoid any floating point extensions from EXP. */
11564 strip_float_extensions (tree exp
)
11566 tree sub
, expt
, subt
;
11568 /* For floating point constant look up the narrowest type that can hold
11569 it properly and handle it like (type)(narrowest_type)constant.
11570 This way we can optimize for instance a=a*2.0 where "a" is float
11571 but 2.0 is double constant. */
11572 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
11574 REAL_VALUE_TYPE orig
;
11577 orig
= TREE_REAL_CST (exp
);
11578 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
11579 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
11580 type
= float_type_node
;
11581 else if (TYPE_PRECISION (TREE_TYPE (exp
))
11582 > TYPE_PRECISION (double_type_node
)
11583 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
11584 type
= double_type_node
;
11586 return build_real (type
, real_value_truncate (TYPE_MODE (type
), orig
));
11589 if (!CONVERT_EXPR_P (exp
))
11592 sub
= TREE_OPERAND (exp
, 0);
11593 subt
= TREE_TYPE (sub
);
11594 expt
= TREE_TYPE (exp
);
11596 if (!FLOAT_TYPE_P (subt
))
11599 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
11602 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
11605 return strip_float_extensions (sub
);
11608 /* Strip out all handled components that produce invariant
11612 strip_invariant_refs (const_tree op
)
11614 while (handled_component_p (op
))
11616 switch (TREE_CODE (op
))
11619 case ARRAY_RANGE_REF
:
11620 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
11621 || TREE_OPERAND (op
, 2) != NULL_TREE
11622 || TREE_OPERAND (op
, 3) != NULL_TREE
)
11626 case COMPONENT_REF
:
11627 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
11633 op
= TREE_OPERAND (op
, 0);
11639 static GTY(()) tree gcc_eh_personality_decl
;
11641 /* Return the GCC personality function decl. */
11644 lhd_gcc_personality (void)
11646 if (!gcc_eh_personality_decl
)
11647 gcc_eh_personality_decl
= build_personality_function ("gcc");
11648 return gcc_eh_personality_decl
;
11651 /* For languages with One Definition Rule, work out if
11652 trees are actually the same even if the tree representation
11653 differs. This handles only decls appearing in TYPE_NAME
11654 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11655 RECORD_TYPE and IDENTIFIER_NODE. */
11658 same_for_odr (tree t1
, tree t2
)
11664 /* C and C++ FEs differ by using IDENTIFIER_NODE and TYPE_DECL. */
11665 if (TREE_CODE (t1
) == IDENTIFIER_NODE
11666 && TREE_CODE (t2
) == TYPE_DECL
11667 && DECL_FILE_SCOPE_P (t1
))
11669 t2
= DECL_NAME (t2
);
11670 gcc_assert (TREE_CODE (t2
) == IDENTIFIER_NODE
);
11672 if (TREE_CODE (t2
) == IDENTIFIER_NODE
11673 && TREE_CODE (t1
) == TYPE_DECL
11674 && DECL_FILE_SCOPE_P (t2
))
11676 t1
= DECL_NAME (t1
);
11677 gcc_assert (TREE_CODE (t1
) == IDENTIFIER_NODE
);
11679 if (TREE_CODE (t1
) != TREE_CODE (t2
))
11682 return types_same_for_odr (t1
, t2
);
11684 return decls_same_for_odr (t1
, t2
);
11688 /* For languages with One Definition Rule, work out if
11689 decls are actually the same even if the tree representation
11690 differs. This handles only decls appearing in TYPE_NAME
11691 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11692 RECORD_TYPE and IDENTIFIER_NODE. */
11695 decls_same_for_odr (tree decl1
, tree decl2
)
11697 if (decl1
&& TREE_CODE (decl1
) == TYPE_DECL
11698 && DECL_ORIGINAL_TYPE (decl1
))
11699 decl1
= DECL_ORIGINAL_TYPE (decl1
);
11700 if (decl2
&& TREE_CODE (decl2
) == TYPE_DECL
11701 && DECL_ORIGINAL_TYPE (decl2
))
11702 decl2
= DECL_ORIGINAL_TYPE (decl2
);
11703 if (decl1
== decl2
)
11705 if (!decl1
|| !decl2
)
11707 gcc_checking_assert (DECL_P (decl1
) && DECL_P (decl2
));
11708 if (TREE_CODE (decl1
) != TREE_CODE (decl2
))
11710 if (TREE_CODE (decl1
) == TRANSLATION_UNIT_DECL
)
11712 if (TREE_CODE (decl1
) != NAMESPACE_DECL
11713 && TREE_CODE (decl1
) != TYPE_DECL
)
11715 if (!DECL_NAME (decl1
))
11717 gcc_checking_assert (TREE_CODE (DECL_NAME (decl1
)) == IDENTIFIER_NODE
);
11718 gcc_checking_assert (!DECL_NAME (decl2
)
11719 || TREE_CODE (DECL_NAME (decl2
)) == IDENTIFIER_NODE
);
11720 if (DECL_NAME (decl1
) != DECL_NAME (decl2
))
11722 return same_for_odr (DECL_CONTEXT (decl1
),
11723 DECL_CONTEXT (decl2
));
11726 /* For languages with One Definition Rule, work out if
11727 types are same even if the tree representation differs.
11728 This is non-trivial for LTO where minnor differences in
11729 the type representation may have prevented type merging
11730 to merge two copies of otherwise equivalent type. */
11733 types_same_for_odr (tree type1
, tree type2
)
11735 gcc_checking_assert (TYPE_P (type1
) && TYPE_P (type2
));
11736 type1
= TYPE_MAIN_VARIANT (type1
);
11737 type2
= TYPE_MAIN_VARIANT (type2
);
11738 if (type1
== type2
)
11741 #ifndef ENABLE_CHECKING
11746 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
11747 on the corresponding TYPE_STUB_DECL. */
11748 if (type_in_anonymous_namespace_p (type1
)
11749 || type_in_anonymous_namespace_p (type2
))
11751 /* When assembler name of virtual table is available, it is
11752 easy to compare types for equivalence. */
11753 if (TYPE_BINFO (type1
) && TYPE_BINFO (type2
)
11754 && BINFO_VTABLE (TYPE_BINFO (type1
))
11755 && BINFO_VTABLE (TYPE_BINFO (type2
)))
11757 tree v1
= BINFO_VTABLE (TYPE_BINFO (type1
));
11758 tree v2
= BINFO_VTABLE (TYPE_BINFO (type2
));
11760 if (TREE_CODE (v1
) == POINTER_PLUS_EXPR
)
11762 if (TREE_CODE (v2
) != POINTER_PLUS_EXPR
11763 || !operand_equal_p (TREE_OPERAND (v1
, 1),
11764 TREE_OPERAND (v2
, 1), 0))
11766 v1
= TREE_OPERAND (TREE_OPERAND (v1
, 0), 0);
11767 v2
= TREE_OPERAND (TREE_OPERAND (v2
, 0), 0);
11769 v1
= DECL_ASSEMBLER_NAME (v1
);
11770 v2
= DECL_ASSEMBLER_NAME (v2
);
11774 /* FIXME: the code comparing type names consider all instantiations of the
11775 same template to have same name. This is because we have no access
11776 to template parameters. For types with no virtual method tables
11777 we thus can return false positives. At the moment we do not need
11778 to compare types in other scenarios than devirtualization. */
11780 /* If types are not structuraly same, do not bother to contnue.
11781 Match in the remainder of code would mean ODR violation. */
11782 if (!types_compatible_p (type1
, type2
))
11784 if (!TYPE_NAME (type1
))
11786 if (!decls_same_for_odr (TYPE_NAME (type1
), TYPE_NAME (type2
)))
11788 if (!same_for_odr (TYPE_CONTEXT (type1
), TYPE_CONTEXT (type2
)))
11790 /* When not in LTO the MAIN_VARIANT check should be the same. */
11791 gcc_assert (in_lto_p
);
11796 /* TARGET is a call target of GIMPLE call statement
11797 (obtained by gimple_call_fn). Return true if it is
11798 OBJ_TYPE_REF representing an virtual call of C++ method.
11799 (As opposed to OBJ_TYPE_REF representing objc calls
11800 through a cast where middle-end devirtualization machinery
11804 virtual_method_call_p (tree target
)
11806 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
11808 target
= TREE_TYPE (target
);
11809 gcc_checking_assert (TREE_CODE (target
) == POINTER_TYPE
);
11810 target
= TREE_TYPE (target
);
11811 if (TREE_CODE (target
) == FUNCTION_TYPE
)
11813 gcc_checking_assert (TREE_CODE (target
) == METHOD_TYPE
);
11817 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11820 obj_type_ref_class (tree ref
)
11822 gcc_checking_assert (TREE_CODE (ref
) == OBJ_TYPE_REF
);
11823 ref
= TREE_TYPE (ref
);
11824 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
11825 ref
= TREE_TYPE (ref
);
11826 /* We look for type THIS points to. ObjC also builds
11827 OBJ_TYPE_REF with non-method calls, Their first parameter
11828 ID however also corresponds to class type. */
11829 gcc_checking_assert (TREE_CODE (ref
) == METHOD_TYPE
11830 || TREE_CODE (ref
) == FUNCTION_TYPE
);
11831 ref
= TREE_VALUE (TYPE_ARG_TYPES (ref
));
11832 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
11833 return TREE_TYPE (ref
);
11836 /* Return true if T is in anonymous namespace. */
11839 type_in_anonymous_namespace_p (tree t
)
11841 return (TYPE_STUB_DECL (t
) && !TREE_PUBLIC (TYPE_STUB_DECL (t
)));
11844 /* Try to find a base info of BINFO that would have its field decl at offset
11845 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11846 found, return, otherwise return NULL_TREE. */
11849 get_binfo_at_offset (tree binfo
, HOST_WIDE_INT offset
, tree expected_type
)
11851 tree type
= BINFO_TYPE (binfo
);
11855 HOST_WIDE_INT pos
, size
;
11859 if (types_same_for_odr (type
, expected_type
))
11864 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
11866 if (TREE_CODE (fld
) != FIELD_DECL
)
11869 pos
= int_bit_position (fld
);
11870 size
= tree_to_uhwi (DECL_SIZE (fld
));
11871 if (pos
<= offset
&& (pos
+ size
) > offset
)
11874 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
11877 if (!DECL_ARTIFICIAL (fld
))
11879 binfo
= TYPE_BINFO (TREE_TYPE (fld
));
11883 /* Offset 0 indicates the primary base, whose vtable contents are
11884 represented in the binfo for the derived class. */
11885 else if (offset
!= 0)
11887 tree base_binfo
, binfo2
= binfo
;
11889 /* Find BINFO corresponding to FLD. This is bit harder
11890 by a fact that in virtual inheritance we may need to walk down
11891 the non-virtual inheritance chain. */
11894 tree containing_binfo
= NULL
, found_binfo
= NULL
;
11895 for (i
= 0; BINFO_BASE_ITERATE (binfo2
, i
, base_binfo
); i
++)
11896 if (types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
11898 found_binfo
= base_binfo
;
11902 if ((tree_to_shwi (BINFO_OFFSET (base_binfo
))
11903 - tree_to_shwi (BINFO_OFFSET (binfo
)))
11904 * BITS_PER_UNIT
< pos
11905 /* Rule out types with no virtual methods or we can get confused
11906 here by zero sized bases. */
11907 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo
)))
11908 && (!containing_binfo
11909 || (tree_to_shwi (BINFO_OFFSET (containing_binfo
))
11910 < tree_to_shwi (BINFO_OFFSET (base_binfo
)))))
11911 containing_binfo
= base_binfo
;
11914 binfo
= found_binfo
;
11917 if (!containing_binfo
)
11919 binfo2
= containing_binfo
;
11923 type
= TREE_TYPE (fld
);
11928 /* Returns true if X is a typedef decl. */
11931 is_typedef_decl (tree x
)
11933 return (x
&& TREE_CODE (x
) == TYPE_DECL
11934 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
11937 /* Returns true iff TYPE is a type variant created for a typedef. */
11940 typedef_variant_p (tree type
)
11942 return is_typedef_decl (TYPE_NAME (type
));
11945 /* Warn about a use of an identifier which was marked deprecated. */
11947 warn_deprecated_use (tree node
, tree attr
)
11951 if (node
== 0 || !warn_deprecated_decl
)
11957 attr
= DECL_ATTRIBUTES (node
);
11958 else if (TYPE_P (node
))
11960 tree decl
= TYPE_STUB_DECL (node
);
11962 attr
= lookup_attribute ("deprecated",
11963 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
11968 attr
= lookup_attribute ("deprecated", attr
);
11971 msg
= TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
)));
11977 expanded_location xloc
= expand_location (DECL_SOURCE_LOCATION (node
));
11979 warning (OPT_Wdeprecated_declarations
,
11980 "%qD is deprecated (declared at %r%s:%d%R): %s",
11981 node
, "locus", xloc
.file
, xloc
.line
, msg
);
11983 warning (OPT_Wdeprecated_declarations
,
11984 "%qD is deprecated (declared at %r%s:%d%R)",
11985 node
, "locus", xloc
.file
, xloc
.line
);
11987 else if (TYPE_P (node
))
11989 tree what
= NULL_TREE
;
11990 tree decl
= TYPE_STUB_DECL (node
);
11992 if (TYPE_NAME (node
))
11994 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
11995 what
= TYPE_NAME (node
);
11996 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
11997 && DECL_NAME (TYPE_NAME (node
)))
11998 what
= DECL_NAME (TYPE_NAME (node
));
12003 expanded_location xloc
12004 = expand_location (DECL_SOURCE_LOCATION (decl
));
12008 warning (OPT_Wdeprecated_declarations
,
12009 "%qE is deprecated (declared at %r%s:%d%R): %s",
12010 what
, "locus", xloc
.file
, xloc
.line
, msg
);
12012 warning (OPT_Wdeprecated_declarations
,
12013 "%qE is deprecated (declared at %r%s:%d%R)",
12014 what
, "locus", xloc
.file
, xloc
.line
);
12019 warning (OPT_Wdeprecated_declarations
,
12020 "type is deprecated (declared at %r%s:%d%R): %s",
12021 "locus", xloc
.file
, xloc
.line
, msg
);
12023 warning (OPT_Wdeprecated_declarations
,
12024 "type is deprecated (declared at %r%s:%d%R)",
12025 "locus", xloc
.file
, xloc
.line
);
12033 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated: %s",
12036 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated", what
);
12041 warning (OPT_Wdeprecated_declarations
, "type is deprecated: %s",
12044 warning (OPT_Wdeprecated_declarations
, "type is deprecated");
12050 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12051 somewhere in it. */
12054 contains_bitfld_component_ref_p (const_tree ref
)
12056 while (handled_component_p (ref
))
12058 if (TREE_CODE (ref
) == COMPONENT_REF
12059 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12061 ref
= TREE_OPERAND (ref
, 0);
12067 /* Try to determine whether a TRY_CATCH expression can fall through.
12068 This is a subroutine of block_may_fallthru. */
12071 try_catch_may_fallthru (const_tree stmt
)
12073 tree_stmt_iterator i
;
12075 /* If the TRY block can fall through, the whole TRY_CATCH can
12077 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12080 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12081 switch (TREE_CODE (tsi_stmt (i
)))
12084 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12085 catch expression and a body. The whole TRY_CATCH may fall
12086 through iff any of the catch bodies falls through. */
12087 for (; !tsi_end_p (i
); tsi_next (&i
))
12089 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12094 case EH_FILTER_EXPR
:
12095 /* The exception filter expression only matters if there is an
12096 exception. If the exception does not match EH_FILTER_TYPES,
12097 we will execute EH_FILTER_FAILURE, and we will fall through
12098 if that falls through. If the exception does match
12099 EH_FILTER_TYPES, the stack unwinder will continue up the
12100 stack, so we will not fall through. We don't know whether we
12101 will throw an exception which matches EH_FILTER_TYPES or not,
12102 so we just ignore EH_FILTER_TYPES and assume that we might
12103 throw an exception which doesn't match. */
12104 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12107 /* This case represents statements to be executed when an
12108 exception occurs. Those statements are implicitly followed
12109 by a RESX statement to resume execution after the exception.
12110 So in this case the TRY_CATCH never falls through. */
12115 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12116 need not be 100% accurate; simply be conservative and return true if we
12117 don't know. This is used only to avoid stupidly generating extra code.
12118 If we're wrong, we'll just delete the extra code later. */
12121 block_may_fallthru (const_tree block
)
12123 /* This CONST_CAST is okay because expr_last returns its argument
12124 unmodified and we assign it to a const_tree. */
12125 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12127 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12131 /* Easy cases. If the last statement of the block implies
12132 control transfer, then we can't fall through. */
12136 /* If SWITCH_LABELS is set, this is lowered, and represents a
12137 branch to a selected label and hence can not fall through.
12138 Otherwise SWITCH_BODY is set, and the switch can fall
12140 return SWITCH_LABELS (stmt
) == NULL_TREE
;
12143 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12145 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12148 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12150 case TRY_CATCH_EXPR
:
12151 return try_catch_may_fallthru (stmt
);
12153 case TRY_FINALLY_EXPR
:
12154 /* The finally clause is always executed after the try clause,
12155 so if it does not fall through, then the try-finally will not
12156 fall through. Otherwise, if the try clause does not fall
12157 through, then when the finally clause falls through it will
12158 resume execution wherever the try clause was going. So the
12159 whole try-finally will only fall through if both the try
12160 clause and the finally clause fall through. */
12161 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12162 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12165 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12166 stmt
= TREE_OPERAND (stmt
, 1);
12172 /* Functions that do not return do not fall through. */
12173 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12175 case CLEANUP_POINT_EXPR
:
12176 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12179 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12185 return lang_hooks
.block_may_fallthru (stmt
);
12189 /* True if we are using EH to handle cleanups. */
12190 static bool using_eh_for_cleanups_flag
= false;
12192 /* This routine is called from front ends to indicate eh should be used for
12195 using_eh_for_cleanups (void)
12197 using_eh_for_cleanups_flag
= true;
12200 /* Query whether EH is used for cleanups. */
12202 using_eh_for_cleanups_p (void)
12204 return using_eh_for_cleanups_flag
;
12207 /* Wrapper for tree_code_name to ensure that tree code is valid */
12209 get_tree_code_name (enum tree_code code
)
12211 const char *invalid
= "<invalid tree code>";
12213 if (code
>= MAX_TREE_CODES
)
12216 return tree_code_name
[code
];
12219 /* Drops the TREE_OVERFLOW flag from T. */
12222 drop_tree_overflow (tree t
)
12224 gcc_checking_assert (TREE_OVERFLOW (t
));
12226 /* For tree codes with a sharing machinery re-build the result. */
12227 if (TREE_CODE (t
) == INTEGER_CST
)
12228 return wide_int_to_tree (TREE_TYPE (t
), t
);
12230 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12231 and drop the flag. */
12233 TREE_OVERFLOW (t
) = 0;
12237 /* Given a memory reference expression T, return its base address.
12238 The base address of a memory reference expression is the main
12239 object being referenced. For instance, the base address for
12240 'array[i].fld[j]' is 'array'. You can think of this as stripping
12241 away the offset part from a memory address.
12243 This function calls handled_component_p to strip away all the inner
12244 parts of the memory reference until it reaches the base object. */
12247 get_base_address (tree t
)
12249 while (handled_component_p (t
))
12250 t
= TREE_OPERAND (t
, 0);
12252 if ((TREE_CODE (t
) == MEM_REF
12253 || TREE_CODE (t
) == TARGET_MEM_REF
)
12254 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
12255 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
12257 /* ??? Either the alias oracle or all callers need to properly deal
12258 with WITH_SIZE_EXPRs before we can look through those. */
12259 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
12265 #include "gt-tree.h"