1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
32 #include "coretypes.h"
37 #include "stor-layout.h"
44 #include "toplev.h" /* get_random_seed */
46 #include "filenames.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
55 #include "pointer-set.h"
56 #include "tree-ssa-alias.h"
57 #include "internal-fn.h"
58 #include "gimple-expr.h"
61 #include "gimple-iterator.h"
63 #include "gimple-ssa.h"
65 #include "tree-phinodes.h"
66 #include "stringpool.h"
67 #include "tree-ssanames.h"
71 #include "tree-pass.h"
72 #include "langhooks-def.h"
73 #include "diagnostic.h"
74 #include "tree-diagnostic.h"
75 #include "tree-pretty-print.h"
82 /* Tree code classes. */
/* Each of the following tables is generated by expanding all-tree.def
   under a different definition of DEFTREECODE, so index N of every
   table describes tree code N.
   NOTE(review): the extraction elided the closing "};" and the
   "#undef DEFTREECODE" line after each table — confirm against the
   original file.  */
84 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
85 #define END_OF_BASE_TREE_CODES tcc_exceptional,
/* Map: tree code -> its tree_code_class (the TYPE column).  */
87 const enum tree_code_class tree_code_type
[] = {
88 #include "all-tree.def"
92 #undef END_OF_BASE_TREE_CODES
94 /* Table indexed by tree code giving number of expression
95 operands beyond the fixed part of the node structure.
96 Not used for types or decls. */
98 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
99 #define END_OF_BASE_TREE_CODES 0,
/* Map: tree code -> operand count (the LENGTH column).  */
101 const unsigned char tree_code_length
[] = {
102 #include "all-tree.def"
106 #undef END_OF_BASE_TREE_CODES
108 /* Names of tree components.
109 Used for printing out the tree and error messages. */
110 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
111 #define END_OF_BASE_TREE_CODES "@dummy",
/* Map: tree code -> printable name (the NAME column); file-local,
   used only for diagnostics/dumps.  */
113 static const char *const tree_code_name
[] = {
114 #include "all-tree.def"
118 #undef END_OF_BASE_TREE_CODES
120 /* Each tree code class has an associated string representation.
121 These must correspond to the tree_code_class entries. */
/* NOTE(review): the initializer body of this table was elided by the
   extraction.  */
123 const char *const tree_code_class_strings
[] =
138 /* obstack.[ch] explicitly declined to prototype this. */
139 extern int _obstack_allocated_p (struct obstack
*h
, void *obj
);
141 /* Statistics-gathering stuff. */
/* Per-tree-code allocation counter; file-local (dumped by the
   statistics printers below).  */
143 static int tree_code_counts
[MAX_TREE_CODES
];
/* Per-kind node counts and cumulative byte sizes; indexed by
   enum tree_node_kind (see tree.h).  */
144 int tree_node_counts
[(int) all_kinds
];
145 int tree_node_sizes
[(int) all_kinds
];
147 /* Keep in sync with tree.h:enum tree_node_kind. */
/* NOTE(review): the initializer body of this name table was elided by
   the extraction.  */
148 static const char * const tree_node_kind_names
[] = {
167 /* Unique id for next decl created. */
/* GTY(()) roots so the counters survive garbage collection / PCH.  */
168 static GTY(()) int next_decl_uid
;
169 /* Unique id for next type created. */
170 static GTY(()) int next_type_uid
= 1;
171 /* Unique id for next debug decl created. Use negative numbers,
172 to catch erroneous uses. */
173 static GTY(()) int next_debug_decl_uid
;
175 /* Since we cannot rehash a type after it is in the table, we have to
176 keep the hash code. */
/* NOTE(review): the members of this struct (hash code + type) were
   elided by the extraction; confirm against the original file.  */
178 struct GTY(()) type_hash
{
183 /* Initial size of the hash table (rounded to next prime). */
184 #define TYPE_HASH_INITIAL_SIZE 1000
186 /* Now here is the hash table. When recording a type, it is added to
187 the slot whose index is the hash code. Note that the hash table is
188 used for several kinds of types (function types, array types and
189 array index range types, for now). While all these live in the
190 same table, they are completely independent, and the hash code is
191 computed differently for each of these. */
/* if_marked: entries are weak — dropped at GC unless still marked.  */
193 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash
)))
194 htab_t type_hash_table
;
197 /* Hash table and temporary node for larger integer const values. */
198 static GTY (()) tree int_cst_node
;
199 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
200 htab_t int_cst_hash_table
;
202 /* Hash table for optimization flags and target option flags. Use the same
203 hash table for both sets of options. Nodes for building the current
204 optimization and target option nodes. The assumption is most of the time
205 the options created will already be in the hash table, so we avoid
206 allocating and freeing up a node repeatedly. */
207 static GTY (()) tree cl_optimization_node
;
208 static GTY (()) tree cl_target_option_node
;
209 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
210 htab_t cl_option_hash_table
;
212 /* General tree->tree mapping structure for use in hash tables. */
/* DECL_DEBUG_EXPR and DECL_VALUE_EXPR side tables: map a decl to its
   debug/value expression without widening every decl node.  */
215 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map
)))
216 htab_t debug_expr_for_decl
;
218 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map
)))
219 htab_t value_expr_for_decl
;
/* DECL_DEBUG_ARGS side table for functions.  */
221 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map
)))
222 htab_t debug_args_for_decl
;
/* Forward declarations for the file-local helpers defined below.  */
224 static void set_type_quals (tree
, int, tree
);
225 static int type_hash_eq (const void *, const void *);
226 static hashval_t
type_hash_hash (const void *);
227 static hashval_t
int_cst_hash_hash (const void *);
228 static int int_cst_hash_eq (const void *, const void *);
229 static hashval_t
cl_option_hash_hash (const void *);
230 static int cl_option_hash_eq (const void *, const void *);
231 static void print_type_hash_statistics (void);
232 static void print_debug_expr_statistics (void);
233 static void print_value_expr_statistics (void);
234 static int type_hash_marked_p (const void *);
235 static unsigned int type_hash_list (const_tree
, hashval_t
);
236 static unsigned int attribute_hash_list (const_tree
, hashval_t
);
/* Well-known trees (error_mark_node, sizetype, ...) indexed by
   tree_index, and the standard integer types indexed by
   integer_type_kind.  */
238 tree global_trees
[TI_MAX
];
239 tree integer_types
[itk_none
];
/* tree_contains_struct[CODE][TS] is nonzero when nodes of code CODE
   embed tree structure TS; filled in by
   initialize_tree_contains_struct below.  */
241 unsigned char tree_contains_struct
[MAX_TREE_CODES
][64];
243 /* Number of operands for each OpenMP clause. */
/* Indexed by enum omp_clause_code; must stay in sync with tree.h.  */
244 unsigned const char omp_clause_num_ops
[] =
246 0, /* OMP_CLAUSE_ERROR */
247 1, /* OMP_CLAUSE_PRIVATE */
248 1, /* OMP_CLAUSE_SHARED */
249 1, /* OMP_CLAUSE_FIRSTPRIVATE */
250 2, /* OMP_CLAUSE_LASTPRIVATE */
251 4, /* OMP_CLAUSE_REDUCTION */
252 1, /* OMP_CLAUSE_COPYIN */
253 1, /* OMP_CLAUSE_COPYPRIVATE */
254 3, /* OMP_CLAUSE_LINEAR */
255 2, /* OMP_CLAUSE_ALIGNED */
256 1, /* OMP_CLAUSE_DEPEND */
257 1, /* OMP_CLAUSE_UNIFORM */
258 2, /* OMP_CLAUSE_FROM */
259 2, /* OMP_CLAUSE_TO */
260 2, /* OMP_CLAUSE_MAP */
261 1, /* OMP_CLAUSE__LOOPTEMP_ */
262 1, /* OMP_CLAUSE_IF */
263 1, /* OMP_CLAUSE_NUM_THREADS */
264 1, /* OMP_CLAUSE_SCHEDULE */
265 0, /* OMP_CLAUSE_NOWAIT */
266 0, /* OMP_CLAUSE_ORDERED */
267 0, /* OMP_CLAUSE_DEFAULT */
268 3, /* OMP_CLAUSE_COLLAPSE */
269 0, /* OMP_CLAUSE_UNTIED */
270 1, /* OMP_CLAUSE_FINAL */
271 0, /* OMP_CLAUSE_MERGEABLE */
272 1, /* OMP_CLAUSE_DEVICE */
273 1, /* OMP_CLAUSE_DIST_SCHEDULE */
274 0, /* OMP_CLAUSE_INBRANCH */
275 0, /* OMP_CLAUSE_NOTINBRANCH */
276 1, /* OMP_CLAUSE_NUM_TEAMS */
277 1, /* OMP_CLAUSE_THREAD_LIMIT */
278 0, /* OMP_CLAUSE_PROC_BIND */
279 1, /* OMP_CLAUSE_SAFELEN */
280 1, /* OMP_CLAUSE_SIMDLEN */
281 0, /* OMP_CLAUSE_FOR */
282 0, /* OMP_CLAUSE_PARALLEL */
283 0, /* OMP_CLAUSE_SECTIONS */
284 0, /* OMP_CLAUSE_TASKGROUP */
285 1, /* OMP_CLAUSE__SIMDUID_ */
/* Printable clause names, parallel to the table above.
   NOTE(review): the initializer body and both tables' closing "};"
   were elided by the extraction.  */
288 const char * const omp_clause_code_name
[] =
333 /* Return the tree node structure used by tree code CODE. */
/* Maps a tree code to the tree_node_structure_enum value naming the
   C structure that physically backs nodes of that code; driven first
   by the code's class, then by the individual code for constants and
   exceptional codes.
   NOTE(review): several case labels and the switch braces were elided
   by the extraction (e.g. the decl cases before TS_FIELD_DECL and the
   tcc_type cases); confirm against the original file.  */
335 static inline enum tree_node_structure_enum
336 tree_node_structure_for_code (enum tree_code code
)
338 switch (TREE_CODE_CLASS (code
))
340 case tcc_declaration
:
345 return TS_FIELD_DECL
;
351 return TS_LABEL_DECL
;
353 return TS_RESULT_DECL
;
354 case DEBUG_EXPR_DECL
:
357 return TS_CONST_DECL
;
361 return TS_FUNCTION_DECL
;
362 case TRANSLATION_UNIT_DECL
:
363 return TS_TRANSLATION_UNIT_DECL
;
365 return TS_DECL_NON_COMMON
;
369 return TS_TYPE_NON_COMMON
;
378 default: /* tcc_constant and tcc_exceptional */
383 /* tcc_constant cases. */
384 case VOID_CST
: return TS_TYPED
;
385 case INTEGER_CST
: return TS_INT_CST
;
386 case REAL_CST
: return TS_REAL_CST
;
387 case FIXED_CST
: return TS_FIXED_CST
;
388 case COMPLEX_CST
: return TS_COMPLEX
;
389 case VECTOR_CST
: return TS_VECTOR
;
390 case STRING_CST
: return TS_STRING
;
391 /* tcc_exceptional cases. */
392 case ERROR_MARK
: return TS_COMMON
;
393 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
394 case TREE_LIST
: return TS_LIST
;
395 case TREE_VEC
: return TS_VEC
;
396 case SSA_NAME
: return TS_SSA_NAME
;
397 case PLACEHOLDER_EXPR
: return TS_COMMON
;
398 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
399 case BLOCK
: return TS_BLOCK
;
400 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
401 case TREE_BINFO
: return TS_BINFO
;
402 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
403 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
404 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
412 /* Initialize tree_contains_struct to describe the hierarchy of tree
416 initialize_tree_contains_struct (void)
420 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
423 enum tree_node_structure_enum ts_code
;
425 code
= (enum tree_code
) i
;
426 ts_code
= tree_node_structure_for_code (code
);
428 /* Mark the TS structure itself. */
429 tree_contains_struct
[code
][ts_code
] = 1;
431 /* Mark all the structures that TS is derived from. */
449 case TS_STATEMENT_LIST
:
450 MARK_TS_TYPED (code
);
454 case TS_DECL_MINIMAL
:
460 case TS_OPTIMIZATION
:
461 case TS_TARGET_OPTION
:
462 MARK_TS_COMMON (code
);
465 case TS_TYPE_WITH_LANG_SPECIFIC
:
466 MARK_TS_TYPE_COMMON (code
);
469 case TS_TYPE_NON_COMMON
:
470 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
474 MARK_TS_DECL_MINIMAL (code
);
479 MARK_TS_DECL_COMMON (code
);
482 case TS_DECL_NON_COMMON
:
483 MARK_TS_DECL_WITH_VIS (code
);
486 case TS_DECL_WITH_VIS
:
490 MARK_TS_DECL_WRTL (code
);
494 MARK_TS_DECL_COMMON (code
);
498 MARK_TS_DECL_WITH_VIS (code
);
502 case TS_FUNCTION_DECL
:
503 MARK_TS_DECL_NON_COMMON (code
);
506 case TS_TRANSLATION_UNIT_DECL
:
507 MARK_TS_DECL_COMMON (code
);
515 /* Basic consistency checks for attributes used in fold. */
516 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
517 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
518 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
519 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
520 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
521 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
522 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
523 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
524 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
525 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
526 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
527 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
528 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
529 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
530 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
531 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
532 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
533 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
534 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
535 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
536 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
537 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
538 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
539 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
540 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
541 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
542 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
543 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
544 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
545 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
546 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
547 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
548 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
549 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
550 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
551 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
552 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
553 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
554 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
555 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
/* Interior of the tree-module initializer (its signature lies outside
   this view): creates the GC-managed hash tables declared above and
   the shared optimization/target-option nodes, then fills in
   tree_contains_struct and lets the language hook finish up.  */
564 /* Initialize the hash table of types. */
565 type_hash_table
= htab_create_ggc (TYPE_HASH_INITIAL_SIZE
, type_hash_hash
,
568 /* Initialize hash table used to manage UPC blocking factors. */
569 upc_block_factor_lookup_init ();
571 debug_expr_for_decl
= htab_create_ggc (512, tree_decl_map_hash
,
572 tree_decl_map_eq
, 0);
574 value_expr_for_decl
= htab_create_ggc (512, tree_decl_map_hash
,
575 tree_decl_map_eq
, 0);
577 int_cst_hash_table
= htab_create_ggc (1024, int_cst_hash_hash
,
578 int_cst_hash_eq
, NULL
);
/* Scratch node reused when looking up large integer constants.  */
580 int_cst_node
= make_int_cst (1, 1);
582 cl_option_hash_table
= htab_create_ggc (64, cl_option_hash_hash
,
583 cl_option_hash_eq
, NULL
);
585 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
586 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
588 /* Initialize the tree_contains_struct array. */
589 initialize_tree_contains_struct ();
590 lang_hooks
.init_ts ();
594 /* The name of the object as the assembler will see it (but before any
595 translations made by ASM_OUTPUT_LABELREF). Often this is the same
596 as DECL_NAME. It is an IDENTIFIER_NODE. */
598 decl_assembler_name (tree decl
)
600 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
601 lang_hooks
.set_decl_assembler_name (decl
);
602 return DECL_WITH_VIS_CHECK (decl
)->decl_with_vis
.assembler_name
;
605 /* When the target supports COMDAT groups, this indicates which group the
606 DECL is associated with. This can be either an IDENTIFIER_NODE or a
607 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
609 decl_comdat_group (const_tree node
)
611 struct symtab_node
*snode
= symtab_get_node (node
);
614 return snode
->get_comdat_group ();
617 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
619 decl_comdat_group_id (const_tree node
)
621 struct symtab_node
*snode
= symtab_get_node (node
);
624 return snode
->get_comdat_group_id ();
627 /* When the target supports named section, return its name as IDENTIFIER_NODE
628 or NULL if it is in no section. */
/* NOTE(review): the return-type line was elided; the comment above
   says IDENTIFIER_NODE but snode->get_section () suggests a string in
   this GCC generation — confirm the declared type against tree.h.
   The null-snode guard lines also appear to have been elided.  */
630 decl_section_name (const_tree node
)
632 struct symtab_node
*snode
= symtab_get_node (node
);
635 return snode
->get_section ();
638 /* Set the section name of NODE to VALUE (a string; presumably NULL
639 clears it — confirm against callers). */
/* NOTE(review): the extraction elided the branch that leads into the
   first symtab_get_node call (the dangling "else if" below implies a
   preceding "if"); the visible logic: look up or create the
   appropriate symtab node (varpool for variables, cgraph otherwise)
   and forward to set_section.  */
641 set_decl_section_name (tree node
, const char *value
)
643 struct symtab_node
*snode
;
647 snode
= symtab_get_node (node
);
651 else if (TREE_CODE (node
) == VAR_DECL
)
652 snode
= varpool_node_for_decl (node
);
654 snode
= cgraph_get_create_node (node
);
655 snode
->set_section (value
);
658 /* Return TLS model of a variable NODE. */
660 decl_tls_model (const_tree node
)
662 struct varpool_node
*snode
= varpool_get_node (node
);
664 return TLS_MODEL_NONE
;
665 return snode
->tls_model
;
668 /* Set TLS model of variable NODE to MODEL. */
670 set_decl_tls_model (tree node
, enum tls_model model
)
672 struct varpool_node
*vnode
;
674 if (model
== TLS_MODEL_NONE
)
676 vnode
= varpool_get_node (node
);
681 vnode
= varpool_node_for_decl (node
);
682 vnode
->tls_model
= model
;
685 /* Compute the number of bytes occupied by a tree with code CODE.
686 This function cannot be used for nodes that have variable sizes,
687 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
/* NOTE(review): the return-type line, switch braces and several decl
   case labels (the ones that select each sizeof below) were elided by
   the extraction.  */
689 tree_code_size (enum tree_code code
)
691 switch (TREE_CODE_CLASS (code
))
693 case tcc_declaration
: /* A decl node */
698 return sizeof (struct tree_field_decl
);
700 return sizeof (struct tree_parm_decl
);
702 return sizeof (struct tree_var_decl
);
704 return sizeof (struct tree_label_decl
);
706 return sizeof (struct tree_result_decl
);
708 return sizeof (struct tree_const_decl
);
710 return sizeof (struct tree_type_decl
);
712 return sizeof (struct tree_function_decl
);
713 case DEBUG_EXPR_DECL
:
714 return sizeof (struct tree_decl_with_rtl
);
716 return sizeof (struct tree_decl_non_common
);
720 case tcc_type
: /* a type node */
721 return sizeof (struct tree_type_non_common
);
/* All expression-like classes share tree_exp with a trailing operand
   array sized by the code's fixed operand count.  */
723 case tcc_reference
: /* a reference */
724 case tcc_expression
: /* an expression */
725 case tcc_statement
: /* an expression with side effects */
726 case tcc_comparison
: /* a comparison expression */
727 case tcc_unary
: /* a unary arithmetic expression */
728 case tcc_binary
: /* a binary arithmetic expression */
729 return (sizeof (struct tree_exp
)
730 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
732 case tcc_constant
: /* a constant */
735 case VOID_CST
: return sizeof (struct tree_typed
);
/* Variable-sized constants must go through tree_size, not here.  */
736 case INTEGER_CST
: gcc_unreachable ();
737 case REAL_CST
: return sizeof (struct tree_real_cst
);
738 case FIXED_CST
: return sizeof (struct tree_fixed_cst
);
739 case COMPLEX_CST
: return sizeof (struct tree_complex
);
740 case VECTOR_CST
: return sizeof (struct tree_vector
);
741 case STRING_CST
: gcc_unreachable ();
/* Unknown constant codes are delegated to the language.  */
743 return lang_hooks
.tree_size (code
);
746 case tcc_exceptional
: /* something random, like an identifier. */
749 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
750 case TREE_LIST
: return sizeof (struct tree_list
);
753 case PLACEHOLDER_EXPR
: return sizeof (struct tree_common
);
756 case OMP_CLAUSE
: gcc_unreachable ();
758 case SSA_NAME
: return sizeof (struct tree_ssa_name
);
760 case STATEMENT_LIST
: return sizeof (struct tree_statement_list
);
761 case BLOCK
: return sizeof (struct tree_block
);
762 case CONSTRUCTOR
: return sizeof (struct tree_constructor
);
763 case OPTIMIZATION_NODE
: return sizeof (struct tree_optimization_option
);
764 case TARGET_OPTION_NODE
: return sizeof (struct tree_target_option
);
767 return lang_hooks
.tree_size (code
);
775 /* Compute the number of bytes occupied by NODE. This routine only
776 looks at TREE_CODE, except for those nodes that have variable sizes. */
/* Each return below corresponds to one variable-sized code: the fixed
   struct plus a trailing array sized from the node itself.
   NOTE(review): the return-type line, switch head and the case labels
   selecting each return were elided by the extraction.  */
778 tree_size (const_tree node
)
780 const enum tree_code code
= TREE_CODE (node
);
784 return (sizeof (struct tree_int_cst
)
785 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
788 return (offsetof (struct tree_binfo
, base_binfos
)
790 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
793 return (sizeof (struct tree_vec
)
794 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
797 return (sizeof (struct tree_vector
)
798 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node
)) - 1) * sizeof (tree
));
/* +1 for the string's NUL terminator.  */
801 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
804 return (sizeof (struct tree_omp_clause
)
805 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
/* Variable-length expressions (e.g. CALL_EXPR) carry their own
   operand count; everything else has a code-determined size.  */
809 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
810 return (sizeof (struct tree_exp
)
811 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
813 return tree_code_size (code
);
817 /* Record interesting allocation statistics for a tree node with CODE
/* Classifies the node into a tree_node_kind bucket and bumps the
   count/size statistics tables; a no-op unless GATHER_STATISTICS.
   NOTE(review): the switch scaffolding and most kind assignments were
   elided by the extraction.  */
821 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED
,
822 size_t length ATTRIBUTE_UNUSED
)
824 enum tree_code_class type
= TREE_CODE_CLASS (code
);
827 if (!GATHER_STATISTICS
)
832 case tcc_declaration
: /* A decl node */
836 case tcc_type
: /* a type node */
840 case tcc_statement
: /* an expression with side effects */
844 case tcc_reference
: /* a reference */
848 case tcc_expression
: /* an expression */
849 case tcc_comparison
: /* a comparison expression */
850 case tcc_unary
: /* a unary arithmetic expression */
851 case tcc_binary
: /* a binary arithmetic expression */
855 case tcc_constant
: /* a constant */
859 case tcc_exceptional
: /* something random, like an identifier. */
862 case IDENTIFIER_NODE
:
875 kind
= ssa_name_kind
;
887 kind
= omp_clause_kind
;
/* Accumulate into the statistics tables declared near the top of the
   file.  */
904 tree_code_counts
[(int) code
]++;
905 tree_node_counts
[(int) kind
]++;
906 tree_node_sizes
[(int) kind
] += length
;
909 /* Allocate and return a new UID from the DECL_UID namespace. */
912 allocate_decl_uid (void)
914 return next_decl_uid
++;
917 /* Return a newly allocated node of code CODE. For decl and type
918 nodes, some other fields are initialized. The rest of the node is
919 initialized to zero. This function cannot be used for TREE_VEC,
920 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
/* NOTE(review): the extraction elided the return type, braces, switch
   head and several class cases of this constructor; the visible logic:
   allocate a cleared node of tree_code_size (code) bytes, set its
   code, then apply per-class field initialization.  */
923 Achoo! I got a code in the node. */
926 make_node_stat (enum tree_code code MEM_STAT_DECL
)
929 enum tree_code_class type
= TREE_CODE_CLASS (code
)
;
930 size_t length
= tree_code_size (code
);
932 record_node_allocation_statistics (code
, length
);
934 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
935 TREE_SET_CODE (t
, code
);
940 TREE_SIDE_EFFECTS (t
) = 1;
943 case tcc_declaration
:
944 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
946 if (code
== FUNCTION_DECL
)
948 DECL_ALIGN (t
) = FUNCTION_BOUNDARY
;
949 DECL_MODE (t
) = FUNCTION_MODE
;
954 DECL_SOURCE_LOCATION (t
) = input_location
;
/* Debug decls draw from the negative UID space to catch misuse.  */
955 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
956 DECL_UID (t
) = --next_debug_decl_uid
;
959 DECL_UID (t
) = allocate_decl_uid ();
960 SET_DECL_PT_UID (t
, -1);
962 if (TREE_CODE (t
) == LABEL_DECL
)
963 LABEL_DECL_UID (t
) = -1;
/* Type-node initialization: fresh UID, default alignment, and the
   node is its own main variant and canonical type.  */
968 TYPE_UID (t
) = next_type_uid
++;
969 TYPE_ALIGN (t
) = BITS_PER_UNIT
;
970 TYPE_USER_ALIGN (t
) = 0;
971 TYPE_MAIN_VARIANT (t
) = t
;
972 TYPE_CANONICAL (t
) = t
;
974 /* Default to no attributes for type, but let target change that. */
975 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
976 targetm
.set_default_type_attributes (t
);
978 /* We have not yet computed the alias set for this type. */
979 TYPE_ALIAS_SET (t
) = -1;
983 TREE_CONSTANT (t
) = 1;
992 case PREDECREMENT_EXPR
:
993 case PREINCREMENT_EXPR
:
994 case POSTDECREMENT_EXPR
:
995 case POSTINCREMENT_EXPR
:
996 /* All of these have side-effects, no matter what their
998 TREE_SIDE_EFFECTS (t
) = 1;
1007 /* Other classes need no special treatment. */
1014 /* Return a new node with the same contents as NODE except that its
1015 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
/* NOTE(review): the return type, braces and several connective lines
   of this function were elided by the extraction; the visible logic:
   bitwise-copy the node, then reset per-copy state (flags, UIDs,
   symtab links, type caches).  */
1018 copy_node_stat (tree node MEM_STAT_DECL
)
1021 enum tree_code code
= TREE_CODE (node
);
1024 gcc_assert (code
!= STATEMENT_LIST
);
1026 length
= tree_size (node
);
1027 record_node_allocation_statistics (code
, length
);
1028 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1029 memcpy (t
, node
, length
);
1031 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
1033 TREE_ASM_WRITTEN (t
) = 0;
1034 TREE_VISITED (t
) = 0;
/* Decl copies get a fresh UID (negative space for debug decls).  */
1036 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
1038 if (code
== DEBUG_EXPR_DECL
)
1039 DECL_UID (t
) = --next_debug_decl_uid
;
1042 DECL_UID (t
) = allocate_decl_uid ();
1043 if (DECL_PT_UID_SET_P (node
))
1044 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
1046 if ((TREE_CODE (node
) == PARM_DECL
|| TREE_CODE (node
) == VAR_DECL
)
1047 && DECL_HAS_VALUE_EXPR_P (node
))
/* Value expressions live in a side table keyed by the decl, so the
   copy must register its own entry.  */
1049 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
1050 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1052 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1053 if (TREE_CODE (node
) == VAR_DECL
)
1055 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
1056 t
->decl_with_vis
.symtab_node
= NULL
;
1058 if (TREE_CODE (node
) == VAR_DECL
&& DECL_HAS_INIT_PRIORITY_P (node
))
1060 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
1061 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
1063 if (TREE_CODE (node
) == FUNCTION_DECL
)
1065 DECL_STRUCT_FUNCTION (t
) = NULL
;
1066 t
->decl_with_vis
.symtab_node
= NULL
;
1069 else if (TREE_CODE_CLASS (code
) == tcc_type
)
1071 TYPE_UID (t
) = next_type_uid
++;
1072 /* The following is so that the debug code for
1073 the copy is different from the original type.
1074 The two statements usually duplicate each other
1075 (because they clear fields of the same union),
1076 but the optimizer should catch that. */
1077 TYPE_SYMTAB_POINTER (t
) = 0;
1078 TYPE_SYMTAB_ADDRESS (t
) = 0;
1080 /* Do not copy the values cache. */
1081 if (TYPE_CACHED_VALUES_P (t
))
1083 TYPE_CACHED_VALUES_P (t
) = 0;
1084 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1087 if (TYPE_HAS_UPC_BLOCK_FACTOR (node
))
1088 SET_TYPE_UPC_BLOCK_FACTOR (t
, TYPE_UPC_BLOCK_FACTOR (node
));
1094 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1095 For example, this can copy a list made of TREE_LIST nodes. */
1098 copy_list (tree list
)
1106 head
= prev
= copy_node (list
);
1107 next
= TREE_CHAIN (list
);
1110 TREE_CHAIN (prev
) = copy_node (next
);
1111 prev
= TREE_CHAIN (prev
);
1112 next
= TREE_CHAIN (next
);
1118 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1119 INTEGER_CST with value CST and type TYPE. */
/* NOTE(review): the return-type line and the middle conjunct of the
   condition (the "upper bit set" test described in the comment below)
   were elided by the extraction.  */
1122 get_int_cst_ext_nunits (tree type
, const wide_int
&cst
)
1124 gcc_checking_assert (cst
.get_precision () == TYPE_PRECISION (type
));
1125 /* We need an extra zero HWI if CST is an unsigned integer with its
1126 upper bit set, and if CST occupies a whole number of HWIs. */
1127 if (TYPE_UNSIGNED (type
)
1129 && (cst
.get_precision () % HOST_BITS_PER_WIDE_INT
) == 0)
1130 return cst
.get_precision () / HOST_BITS_PER_WIDE_INT
+ 1;
1131 return cst
.get_len ();
1134 /* Return a new INTEGER_CST with value CST and type TYPE. */
/* Allocates the node sized for LEN significant elements plus any
   extension element, then fills the element array from CST.
   NOTE(review): the return type, braces and the branch heads around
   the extension logic were elided by the extraction.  */
1137 build_new_int_cst (tree type
, const wide_int
&cst
)
1139 unsigned int len
= cst
.get_len ();
1140 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1141 tree nt
= make_int_cst (len
, ext_len
);
/* Extension element(s): a zero guard word, preceded by sign-fill for
   the elements between LEN and EXT_LEN.  */
1146 TREE_INT_CST_ELT (nt
, ext_len
) = 0;
1147 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1148 TREE_INT_CST_ELT (nt
, i
) = -1;
1150 else if (TYPE_UNSIGNED (type
)
1151 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
/* Zero-extend the partial top element of an unsigned value.  */
1154 TREE_INT_CST_ELT (nt
, len
)
1155 = zext_hwi (cst
.elt (len
),
1156 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1159 for (unsigned int i
= 0; i
< len
; i
++)
1160 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1161 TREE_TYPE (nt
) = type
;
1165 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1168 build_int_cst (tree type
, HOST_WIDE_INT low
)
1170 /* Support legacy code. */
1172 type
= integer_type_node
;
1174 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1178 build_int_cstu (tree type
, unsigned HOST_WIDE_INT cst
)
1180 return wide_int_to_tree (type
, wi::uhwi (cst
, TYPE_PRECISION (type
)));
1183 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1186 build_int_cst_type (tree type
, HOST_WIDE_INT low
)
1189 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1192 /* Constructs tree in type TYPE from with value given by CST. Signedness
1193 of CST is assumed to be the same as the signedness of TYPE. */
1196 double_int_to_tree (tree type
, double_int cst
)
1198 return wide_int_to_tree (type
, widest_int::from (cst
, TYPE_SIGN (type
)));
1201 /* We force the wide_int CST to the range of the type TYPE by sign or
1202 zero extending it. OVERFLOWABLE indicates if we are interested in
1203 overflow of the value, when >0 we are only interested in signed
1204 overflow, for <0 we are interested in any overflow. OVERFLOWED
1205 indicates whether overflow has already occurred. CONST_OVERFLOWED
1206 indicates whether constant overflow has already occurred. We force
1207 T's value to be within range of T's type (by setting to 0 or 1 all
1208 the bits outside the type's range). We set TREE_OVERFLOWED if,
1209 OVERFLOWED is nonzero,
1210 or OVERFLOWABLE is >0 and signed overflow occurs
1211 or OVERFLOWABLE is <0 and any overflow occurs
1212 We return a new tree node for the extended wide_int. The node
1213 is shared if no overflow flags are set. */
/* NOTE(review): the return type, braces, and the earlier disjuncts of
   the inner overflow condition (before "|| (overflowable > 0 ...)")
   were elided by the extraction.  */
1217 force_fit_type (tree type
, const wide_int_ref
&cst
,
1218 int overflowable
, bool overflowed
)
1220 signop sign
= TYPE_SIGN (type
);
1222 /* If we need to set overflow flags, return a new unshared node. */
1223 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1227 || (overflowable
> 0 && sign
== SIGNED
))
/* Truncate/extend CST to the type's precision and mark the fresh
   node as overflowed.  */
1229 wide_int tmp
= wide_int::from (cst
, TYPE_PRECISION (type
), sign
);
1230 tree t
= build_new_int_cst (type
, tmp
);
1231 TREE_OVERFLOW (t
) = 1;
1236 /* Else build a shared node. */
1237 return wide_int_to_tree (type
, cst
);
1240 /* These are the hash table functions for the hash table of INTEGER_CST
1241 nodes of a sizetype. */
1243 /* Return the hash code code X, an INTEGER_CST. */
1246 int_cst_hash_hash (const void *x
)
1248 const_tree
const t
= (const_tree
) x
;
1249 hashval_t code
= htab_hash_pointer (TREE_TYPE (t
));
1252 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1253 code
^= TREE_INT_CST_ELT (t
, i
);
1258 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1259 is the same as that given by *Y, which is the same. */
1262 int_cst_hash_eq (const void *x
, const void *y
)
1264 const_tree
const xt
= (const_tree
) x
;
1265 const_tree
const yt
= (const_tree
) y
;
1267 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1268 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1269 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1272 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1273 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1279 /* Create an INT_CST node of TYPE and value CST.
1280 The returned node is always shared. For small integers we use a
1281 per-type vector cache, for larger ones we use a single hash table.
1282 The value is extended from its precision according to the sign of
1283 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1284 the upper bits and ensures that hashing and value equality based
1285 upon the underlying HOST_WIDE_INTs works without masking. */
1288 wide_int_to_tree (tree type
, const wide_int_ref
&pcst
)
1295 unsigned int prec
= TYPE_PRECISION (type
);
1296 signop sgn
= TYPE_SIGN (type
);
1298 /* Verify that everything is canonical. */
1299 int l
= pcst
.get_len ();
1302 if (pcst
.elt (l
- 1) == 0)
1303 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1304 if (pcst
.elt (l
- 1) == (HOST_WIDE_INT
) -1)
1305 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1308 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1309 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1313 /* We just need to store a single HOST_WIDE_INT. */
1315 if (TYPE_UNSIGNED (type
))
1316 hwi
= cst
.to_uhwi ();
1318 hwi
= cst
.to_shwi ();
1320 switch (TREE_CODE (type
))
1323 gcc_assert (hwi
== 0);
1327 case REFERENCE_TYPE
:
1328 /* Cache NULL pointer. */
1337 /* Cache false or true. */
1345 if (TYPE_SIGN (type
) == UNSIGNED
)
1348 limit
= INTEGER_SHARE_LIMIT
;
1349 if (IN_RANGE (hwi
, 0, INTEGER_SHARE_LIMIT
- 1))
1354 /* Cache [-1, N). */
1355 limit
= INTEGER_SHARE_LIMIT
+ 1;
1356 if (IN_RANGE (hwi
, -1, INTEGER_SHARE_LIMIT
- 1))
1370 /* Look for it in the type's vector of small shared ints. */
1371 if (!TYPE_CACHED_VALUES_P (type
))
1373 TYPE_CACHED_VALUES_P (type
) = 1;
1374 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1377 t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
);
1379 /* Make sure no one is clobbering the shared constant. */
1380 gcc_checking_assert (TREE_TYPE (t
) == type
1381 && TREE_INT_CST_NUNITS (t
) == 1
1382 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1383 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1384 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1387 /* Create a new shared int. */
1388 t
= build_new_int_cst (type
, cst
);
1389 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1394 /* Use the cache of larger shared ints, using int_cst_node as
1398 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1399 TREE_TYPE (int_cst_node
) = type
;
1401 slot
= htab_find_slot (int_cst_hash_table
, int_cst_node
, INSERT
);
1405 /* Insert this one into the hash table. */
1408 /* Make a new node for next time round. */
1409 int_cst_node
= make_int_cst (1, 1);
1415 /* The value either hashes properly or we drop it on the floor
1416 for the gc to take care of. There will not be enough of them
1420 tree nt
= build_new_int_cst (type
, cst
);
1421 slot
= htab_find_slot (int_cst_hash_table
, nt
, INSERT
);
1425 /* Insert this one into the hash table. */
1435 cache_integer_cst (tree t
)
1437 tree type
= TREE_TYPE (t
);
1440 int prec
= TYPE_PRECISION (type
);
1442 gcc_assert (!TREE_OVERFLOW (t
));
1444 switch (TREE_CODE (type
))
1447 gcc_assert (integer_zerop (t
));
1451 case REFERENCE_TYPE
:
1452 /* Cache NULL pointer. */
1453 if (integer_zerop (t
))
1461 /* Cache false or true. */
1463 if (wi::ltu_p (t
, 2))
1464 ix
= TREE_INT_CST_ELT (t
, 0);
1469 if (TYPE_UNSIGNED (type
))
1472 limit
= INTEGER_SHARE_LIMIT
;
1474 /* This is a little hokie, but if the prec is smaller than
1475 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1476 obvious test will not get the correct answer. */
1477 if (prec
< HOST_BITS_PER_WIDE_INT
)
1479 if (tree_to_uhwi (t
) < (unsigned HOST_WIDE_INT
) INTEGER_SHARE_LIMIT
)
1480 ix
= tree_to_uhwi (t
);
1482 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1483 ix
= tree_to_uhwi (t
);
1488 limit
= INTEGER_SHARE_LIMIT
+ 1;
1490 if (integer_minus_onep (t
))
1492 else if (!wi::neg_p (t
))
1494 if (prec
< HOST_BITS_PER_WIDE_INT
)
1496 if (tree_to_shwi (t
) < INTEGER_SHARE_LIMIT
)
1497 ix
= tree_to_shwi (t
) + 1;
1499 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1500 ix
= tree_to_shwi (t
) + 1;
1514 /* Look for it in the type's vector of small shared ints. */
1515 if (!TYPE_CACHED_VALUES_P (type
))
1517 TYPE_CACHED_VALUES_P (type
) = 1;
1518 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1521 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) == NULL_TREE
);
1522 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1526 /* Use the cache of larger shared ints. */
1529 slot
= htab_find_slot (int_cst_hash_table
, t
, INSERT
);
1530 /* If there is already an entry for the number verify it's the
1533 gcc_assert (wi::eq_p (tree (*slot
), t
));
1535 /* Otherwise insert this one into the hash table. */
1541 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1542 and the rest are zeros. */
1545 build_low_bits_mask (tree type
, unsigned bits
)
1547 gcc_assert (bits
<= TYPE_PRECISION (type
));
1549 return wide_int_to_tree (type
, wi::mask (bits
, false,
1550 TYPE_PRECISION (type
)));
1553 /* Checks that X is integer constant that can be expressed in (unsigned)
1554 HOST_WIDE_INT without loss of precision. */
1557 cst_and_fits_in_hwi (const_tree x
)
1559 if (TREE_CODE (x
) != INTEGER_CST
)
1562 if (TYPE_PRECISION (TREE_TYPE (x
)) > HOST_BITS_PER_WIDE_INT
)
1565 return TREE_INT_CST_NUNITS (x
) == 1;
1568 /* Build a newly constructed TREE_VEC node of length LEN. */
1571 make_vector_stat (unsigned len MEM_STAT_DECL
)
1574 unsigned length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vector
);
1576 record_node_allocation_statistics (VECTOR_CST
, length
);
1578 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1580 TREE_SET_CODE (t
, VECTOR_CST
);
1581 TREE_CONSTANT (t
) = 1;
1586 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1587 are in a list pointed to by VALS. */
1590 build_vector_stat (tree type
, tree
*vals MEM_STAT_DECL
)
1594 tree v
= make_vector (TYPE_VECTOR_SUBPARTS (type
));
1595 TREE_TYPE (v
) = type
;
1597 /* Iterate through elements and check for overflow. */
1598 for (cnt
= 0; cnt
< TYPE_VECTOR_SUBPARTS (type
); ++cnt
)
1600 tree value
= vals
[cnt
];
1602 VECTOR_CST_ELT (v
, cnt
) = value
;
1604 /* Don't crash if we get an address constant. */
1605 if (!CONSTANT_CLASS_P (value
))
1608 over
|= TREE_OVERFLOW (value
);
1611 TREE_OVERFLOW (v
) = over
;
1615 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1616 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1619 build_vector_from_ctor (tree type
, vec
<constructor_elt
, va_gc
> *v
)
1621 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
1622 unsigned HOST_WIDE_INT idx
;
1625 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
1627 for (; idx
< TYPE_VECTOR_SUBPARTS (type
); ++idx
)
1628 vec
[idx
] = build_zero_cst (TREE_TYPE (type
));
1630 return build_vector (type
, vec
);
1633 /* Build a vector of type VECTYPE where all the elements are SCs. */
1635 build_vector_from_val (tree vectype
, tree sc
)
1637 int i
, nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
1639 if (sc
== error_mark_node
)
1642 /* Verify that the vector type is suitable for SC. Note that there
1643 is some inconsistency in the type-system with respect to restrict
1644 qualifications of pointers. Vector types always have a main-variant
1645 element type and the qualification is applied to the vector-type.
1646 So TREE_TYPE (vector-type) does not return a properly qualified
1647 vector element-type. */
1648 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
1649 TREE_TYPE (vectype
)));
1651 if (CONSTANT_CLASS_P (sc
))
1653 tree
*v
= XALLOCAVEC (tree
, nunits
);
1654 for (i
= 0; i
< nunits
; ++i
)
1656 return build_vector (vectype
, v
);
1660 vec
<constructor_elt
, va_gc
> *v
;
1661 vec_alloc (v
, nunits
);
1662 for (i
= 0; i
< nunits
; ++i
)
1663 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
1664 return build_constructor (vectype
, v
);
1668 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1669 are in the vec pointed to by VALS. */
1671 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals
)
1673 tree c
= make_node (CONSTRUCTOR
);
1675 constructor_elt
*elt
;
1676 bool constant_p
= true;
1677 bool side_effects_p
= false;
1679 TREE_TYPE (c
) = type
;
1680 CONSTRUCTOR_ELTS (c
) = vals
;
1682 FOR_EACH_VEC_SAFE_ELT (vals
, i
, elt
)
1684 /* Mostly ctors will have elts that don't have side-effects, so
1685 the usual case is to scan all the elements. Hence a single
1686 loop for both const and side effects, rather than one loop
1687 each (with early outs). */
1688 if (!TREE_CONSTANT (elt
->value
))
1690 if (TREE_SIDE_EFFECTS (elt
->value
))
1691 side_effects_p
= true;
1694 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
1695 TREE_CONSTANT (c
) = constant_p
;
1700 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1703 build_constructor_single (tree type
, tree index
, tree value
)
1705 vec
<constructor_elt
, va_gc
> *v
;
1706 constructor_elt elt
= {index
, value
};
1709 v
->quick_push (elt
);
1711 return build_constructor (type
, v
);
1715 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1716 are in a list pointed to by VALS. */
1718 build_constructor_from_list (tree type
, tree vals
)
1721 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1725 vec_alloc (v
, list_length (vals
));
1726 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
1727 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
1730 return build_constructor (type
, v
);
1733 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1734 of elements, provided as index/value pairs. */
1737 build_constructor_va (tree type
, int nelts
, ...)
1739 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1742 va_start (p
, nelts
);
1743 vec_alloc (v
, nelts
);
1746 tree index
= va_arg (p
, tree
);
1747 tree value
= va_arg (p
, tree
);
1748 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
1751 return build_constructor (type
, v
);
1754 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1757 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
1760 FIXED_VALUE_TYPE
*fp
;
1762 v
= make_node (FIXED_CST
);
1763 fp
= ggc_alloc
<fixed_value
> ();
1764 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
1766 TREE_TYPE (v
) = type
;
1767 TREE_FIXED_CST_PTR (v
) = fp
;
1771 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1774 build_real (tree type
, REAL_VALUE_TYPE d
)
1777 REAL_VALUE_TYPE
*dp
;
1780 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1781 Consider doing it via real_convert now. */
1783 v
= make_node (REAL_CST
);
1784 dp
= ggc_alloc
<real_value
> ();
1785 memcpy (dp
, &d
, sizeof (REAL_VALUE_TYPE
));
1787 TREE_TYPE (v
) = type
;
1788 TREE_REAL_CST_PTR (v
) = dp
;
1789 TREE_OVERFLOW (v
) = overflow
;
1793 /* Return a new REAL_CST node whose type is TYPE
1794 and whose value is the integer value of the INTEGER_CST node I. */
1797 real_value_from_int_cst (const_tree type
, const_tree i
)
1801 /* Clear all bits of the real value type so that we can later do
1802 bitwise comparisons to see if two values are the same. */
1803 memset (&d
, 0, sizeof d
);
1805 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, i
,
1806 TYPE_SIGN (TREE_TYPE (i
)));
1810 /* Given a tree representing an integer constant I, return a tree
1811 representing the same value as a floating-point constant of type TYPE. */
1814 build_real_from_int_cst (tree type
, const_tree i
)
1817 int overflow
= TREE_OVERFLOW (i
);
1819 v
= build_real (type
, real_value_from_int_cst (type
, i
));
1821 TREE_OVERFLOW (v
) |= overflow
;
1825 /* Return a newly constructed STRING_CST node whose value is
1826 the LEN characters at STR.
1827 Note that for a C string literal, LEN should include the trailing NUL.
1828 The TREE_TYPE is not initialized. */
1831 build_string (int len
, const char *str
)
1836 /* Do not waste bytes provided by padding of struct tree_string. */
1837 length
= len
+ offsetof (struct tree_string
, str
) + 1;
1839 record_node_allocation_statistics (STRING_CST
, length
);
1841 s
= (tree
) ggc_internal_alloc (length
);
1843 memset (s
, 0, sizeof (struct tree_typed
));
1844 TREE_SET_CODE (s
, STRING_CST
);
1845 TREE_CONSTANT (s
) = 1;
1846 TREE_STRING_LENGTH (s
) = len
;
1847 memcpy (s
->string
.str
, str
, len
);
1848 s
->string
.str
[len
] = '\0';
1853 /* Return a newly constructed COMPLEX_CST node whose value is
1854 specified by the real and imaginary parts REAL and IMAG.
1855 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1856 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1859 build_complex (tree type
, tree real
, tree imag
)
1861 tree t
= make_node (COMPLEX_CST
);
1863 TREE_REALPART (t
) = real
;
1864 TREE_IMAGPART (t
) = imag
;
1865 TREE_TYPE (t
) = type
? type
: build_complex_type (TREE_TYPE (real
));
1866 TREE_OVERFLOW (t
) = TREE_OVERFLOW (real
) | TREE_OVERFLOW (imag
);
1870 /* Return a constant of arithmetic type TYPE which is the
1871 multiplicative identity of the set TYPE. */
1874 build_one_cst (tree type
)
1876 switch (TREE_CODE (type
))
1878 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1879 case POINTER_TYPE
: case REFERENCE_TYPE
:
1881 return build_int_cst (type
, 1);
1884 return build_real (type
, dconst1
);
1886 case FIXED_POINT_TYPE
:
1887 /* We can only generate 1 for accum types. */
1888 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
1889 return build_fixed (type
, FCONST1 (TYPE_MODE (type
)));
1893 tree scalar
= build_one_cst (TREE_TYPE (type
));
1895 return build_vector_from_val (type
, scalar
);
1899 return build_complex (type
,
1900 build_one_cst (TREE_TYPE (type
)),
1901 build_zero_cst (TREE_TYPE (type
)));
1908 /* Return an integer of type TYPE containing all 1's in as much precision as
1909 it contains, or a complex or vector whose subparts are such integers. */
1912 build_all_ones_cst (tree type
)
1914 if (TREE_CODE (type
) == COMPLEX_TYPE
)
1916 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
1917 return build_complex (type
, scalar
, scalar
);
1920 return build_minus_one_cst (type
);
1923 /* Return a constant of arithmetic type TYPE which is the
1924 opposite of the multiplicative identity of the set TYPE. */
1927 build_minus_one_cst (tree type
)
1929 switch (TREE_CODE (type
))
1931 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1932 case POINTER_TYPE
: case REFERENCE_TYPE
:
1934 return build_int_cst (type
, -1);
1937 return build_real (type
, dconstm1
);
1939 case FIXED_POINT_TYPE
:
1940 /* We can only generate 1 for accum types. */
1941 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
1942 return build_fixed (type
, fixed_from_double_int (double_int_minus_one
,
1947 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
1949 return build_vector_from_val (type
, scalar
);
1953 return build_complex (type
,
1954 build_minus_one_cst (TREE_TYPE (type
)),
1955 build_zero_cst (TREE_TYPE (type
)));
1962 /* Build 0 constant of type TYPE. This is used by constructor folding
1963 and thus the constant should be represented in memory by
1967 build_zero_cst (tree type
)
1969 switch (TREE_CODE (type
))
1971 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1972 case POINTER_TYPE
: case REFERENCE_TYPE
:
1973 case OFFSET_TYPE
: case NULLPTR_TYPE
:
1974 return build_int_cst (type
, 0);
1977 return build_real (type
, dconst0
);
1979 case FIXED_POINT_TYPE
:
1980 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
1984 tree scalar
= build_zero_cst (TREE_TYPE (type
));
1986 return build_vector_from_val (type
, scalar
);
1991 tree zero
= build_zero_cst (TREE_TYPE (type
));
1993 return build_complex (type
, zero
, zero
);
1997 if (!AGGREGATE_TYPE_P (type
))
1998 return fold_convert (type
, integer_zero_node
);
1999 return build_constructor (type
, NULL
);
2004 /* Build a BINFO with LEN language slots. */
2007 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL
)
2010 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2011 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2013 record_node_allocation_statistics (TREE_BINFO
, length
);
2015 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2017 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2019 TREE_SET_CODE (t
, TREE_BINFO
);
2021 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2026 /* Create a CASE_LABEL_EXPR tree node and return it. */
2029 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2031 tree t
= make_node (CASE_LABEL_EXPR
);
2033 TREE_TYPE (t
) = void_type_node
;
2034 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2036 CASE_LOW (t
) = low_value
;
2037 CASE_HIGH (t
) = high_value
;
2038 CASE_LABEL (t
) = label_decl
;
2039 CASE_CHAIN (t
) = NULL_TREE
;
2044 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2045 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2046 The latter determines the length of the HOST_WIDE_INT vector. */
2049 make_int_cst_stat (int len
, int ext_len MEM_STAT_DECL
)
2052 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2053 + sizeof (struct tree_int_cst
));
2056 record_node_allocation_statistics (INTEGER_CST
, length
);
2058 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2060 TREE_SET_CODE (t
, INTEGER_CST
);
2061 TREE_INT_CST_NUNITS (t
) = len
;
2062 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2063 /* to_offset can only be applied to trees that are offset_int-sized
2064 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2065 must be exactly the precision of offset_int and so LEN is correct. */
2066 if (ext_len
<= OFFSET_INT_ELTS
)
2067 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2069 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2071 TREE_CONSTANT (t
) = 1;
2076 /* Build a newly constructed TREE_VEC node of length LEN. */
2079 make_tree_vec_stat (int len MEM_STAT_DECL
)
2082 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2084 record_node_allocation_statistics (TREE_VEC
, length
);
2086 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2088 TREE_SET_CODE (t
, TREE_VEC
);
2089 TREE_VEC_LENGTH (t
) = len
;
2094 /* Grow a TREE_VEC node to new length LEN. */
2097 grow_tree_vec_stat (tree v
, int len MEM_STAT_DECL
)
2099 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2101 int oldlen
= TREE_VEC_LENGTH (v
);
2102 gcc_assert (len
> oldlen
);
2104 int oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2105 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2107 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2109 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2111 TREE_VEC_LENGTH (v
) = len
;
2116 /* Return 1 if EXPR is the integer constant zero or a complex constant
2120 integer_zerop (const_tree expr
)
2124 switch (TREE_CODE (expr
))
2127 return wi::eq_p (expr
, 0);
2129 return (integer_zerop (TREE_REALPART (expr
))
2130 && integer_zerop (TREE_IMAGPART (expr
)));
2134 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2135 if (!integer_zerop (VECTOR_CST_ELT (expr
, i
)))
2144 /* Return 1 if EXPR is the integer constant one or the corresponding
2145 complex constant. */
2148 integer_onep (const_tree expr
)
2152 switch (TREE_CODE (expr
))
2155 return wi::eq_p (wi::to_widest (expr
), 1);
2157 return (integer_onep (TREE_REALPART (expr
))
2158 && integer_zerop (TREE_IMAGPART (expr
)));
2162 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2163 if (!integer_onep (VECTOR_CST_ELT (expr
, i
)))
2172 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2173 it contains, or a complex or vector whose subparts are such integers. */
2176 integer_all_onesp (const_tree expr
)
2180 if (TREE_CODE (expr
) == COMPLEX_CST
2181 && integer_all_onesp (TREE_REALPART (expr
))
2182 && integer_all_onesp (TREE_IMAGPART (expr
)))
2185 else if (TREE_CODE (expr
) == VECTOR_CST
)
2188 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2189 if (!integer_all_onesp (VECTOR_CST_ELT (expr
, i
)))
2194 else if (TREE_CODE (expr
) != INTEGER_CST
)
2197 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
) == expr
;
2200 /* Return 1 if EXPR is the integer constant minus one. */
2203 integer_minus_onep (const_tree expr
)
2207 if (TREE_CODE (expr
) == COMPLEX_CST
)
2208 return (integer_all_onesp (TREE_REALPART (expr
))
2209 && integer_zerop (TREE_IMAGPART (expr
)));
2211 return integer_all_onesp (expr
);
2214 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2218 integer_pow2p (const_tree expr
)
2222 if (TREE_CODE (expr
) == COMPLEX_CST
2223 && integer_pow2p (TREE_REALPART (expr
))
2224 && integer_zerop (TREE_IMAGPART (expr
)))
2227 if (TREE_CODE (expr
) != INTEGER_CST
)
2230 return wi::popcount (expr
) == 1;
2233 /* Return 1 if EXPR is an integer constant other than zero or a
2234 complex constant other than zero. */
2237 integer_nonzerop (const_tree expr
)
2241 return ((TREE_CODE (expr
) == INTEGER_CST
2242 && !wi::eq_p (expr
, 0))
2243 || (TREE_CODE (expr
) == COMPLEX_CST
2244 && (integer_nonzerop (TREE_REALPART (expr
))
2245 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2248 /* Return 1 if EXPR is the fixed-point constant zero. */
2251 fixed_zerop (const_tree expr
)
2253 return (TREE_CODE (expr
) == FIXED_CST
2254 && TREE_FIXED_CST (expr
).data
.is_zero ());
2257 /* Return the power of two represented by a tree node known to be a
2261 tree_log2 (const_tree expr
)
2265 if (TREE_CODE (expr
) == COMPLEX_CST
)
2266 return tree_log2 (TREE_REALPART (expr
));
2268 return wi::exact_log2 (expr
);
2271 /* Similar, but return the largest integer Y such that 2 ** Y is less
2272 than or equal to EXPR. */
2275 tree_floor_log2 (const_tree expr
)
2279 if (TREE_CODE (expr
) == COMPLEX_CST
)
2280 return tree_log2 (TREE_REALPART (expr
));
2282 return wi::floor_log2 (expr
);
2285 /* Return number of known trailing zero bits in EXPR, or, if the value of
2286 EXPR is known to be zero, the precision of it's type. */
2289 tree_ctz (const_tree expr
)
2291 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2292 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2295 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2296 switch (TREE_CODE (expr
))
2299 ret1
= wi::ctz (expr
);
2300 return MIN (ret1
, prec
);
2302 ret1
= wi::ctz (get_nonzero_bits (expr
));
2303 return MIN (ret1
, prec
);
2310 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2313 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2314 return MIN (ret1
, ret2
);
2315 case POINTER_PLUS_EXPR
:
2316 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2317 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2318 /* Second operand is sizetype, which could be in theory
2319 wider than pointer's precision. Make sure we never
2320 return more than prec. */
2321 ret2
= MIN (ret2
, prec
);
2322 return MIN (ret1
, ret2
);
2324 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2325 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2326 return MAX (ret1
, ret2
);
2328 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2329 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2330 return MIN (ret1
+ ret2
, prec
);
2332 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2333 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2334 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2336 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2337 return MIN (ret1
+ ret2
, prec
);
2341 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2342 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2344 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2345 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2350 case TRUNC_DIV_EXPR
:
2352 case FLOOR_DIV_EXPR
:
2353 case ROUND_DIV_EXPR
:
2354 case EXACT_DIV_EXPR
:
2355 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
2356 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
2358 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
2361 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2369 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2370 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
2372 return MIN (ret1
, prec
);
2374 return tree_ctz (TREE_OPERAND (expr
, 0));
2376 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
2379 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
2380 return MIN (ret1
, ret2
);
2382 return tree_ctz (TREE_OPERAND (expr
, 1));
2384 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
2385 if (ret1
> BITS_PER_UNIT
)
2387 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
2388 return MIN (ret1
, prec
);
2396 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2397 decimal float constants, so don't return 1 for them. */
2400 real_zerop (const_tree expr
)
2404 switch (TREE_CODE (expr
))
2407 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconst0
)
2408 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2410 return real_zerop (TREE_REALPART (expr
))
2411 && real_zerop (TREE_IMAGPART (expr
));
2415 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2416 if (!real_zerop (VECTOR_CST_ELT (expr
, i
)))
2425 /* Return 1 if EXPR is the real constant one in real or complex form.
2426 Trailing zeroes matter for decimal float constants, so don't return
2430 real_onep (const_tree expr
)
2434 switch (TREE_CODE (expr
))
2437 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconst1
)
2438 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2440 return real_onep (TREE_REALPART (expr
))
2441 && real_zerop (TREE_IMAGPART (expr
));
2445 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2446 if (!real_onep (VECTOR_CST_ELT (expr
, i
)))
2455 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2456 matter for decimal float constants, so don't return 1 for them. */
2459 real_minus_onep (const_tree expr
)
2463 switch (TREE_CODE (expr
))
2466 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconstm1
)
2467 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2469 return real_minus_onep (TREE_REALPART (expr
))
2470 && real_zerop (TREE_IMAGPART (expr
));
2474 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2475 if (!real_minus_onep (VECTOR_CST_ELT (expr
, i
)))
2484 /* Nonzero if EXP is a constant or a cast of a constant. */
2487 really_constant_p (const_tree exp
)
2489 /* This is not quite the same as STRIP_NOPS. It does more. */
2490 while (CONVERT_EXPR_P (exp
)
2491 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
2492 exp
= TREE_OPERAND (exp
, 0);
2493 return TREE_CONSTANT (exp
);
2496 /* Return first list element whose TREE_VALUE is ELEM.
2497 Return 0 if ELEM is not in LIST. */
2500 value_member (tree elem
, tree list
)
2504 if (elem
== TREE_VALUE (list
))
2506 list
= TREE_CHAIN (list
);
2511 /* Return first list element whose TREE_PURPOSE is ELEM.
2512 Return 0 if ELEM is not in LIST. */
2515 purpose_member (const_tree elem
, tree list
)
2519 if (elem
== TREE_PURPOSE (list
))
2521 list
= TREE_CHAIN (list
);
2526 /* Return true if ELEM is in V. */
2529 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
2533 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
2539 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2543 chain_index (int idx
, tree chain
)
2545 for (; chain
&& idx
> 0; --idx
)
2546 chain
= TREE_CHAIN (chain
);
2550 /* Return nonzero if ELEM is part of the chain CHAIN. */
2553 chain_member (const_tree elem
, const_tree chain
)
2559 chain
= DECL_CHAIN (chain
);
2565 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2566 We expect a null pointer to mark the end of the chain.
2567 This is the Lisp primitive `length'. */
2570 list_length (const_tree t
)
2573 #ifdef ENABLE_TREE_CHECKING
2581 #ifdef ENABLE_TREE_CHECKING
2584 gcc_assert (p
!= q
);
2592 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2593 UNION_TYPE TYPE, or NULL_TREE if none. */
2596 first_field (const_tree type
)
2598 tree t
= TYPE_FIELDS (type
);
2599 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
2604 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2605 by modifying the last node in chain 1 to point to chain 2.
2606 This is the Lisp primitive `nconc'. */
2609 chainon (tree op1
, tree op2
)
2618 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
2620 TREE_CHAIN (t1
) = op2
;
2622 #ifdef ENABLE_TREE_CHECKING
2625 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
2626 gcc_assert (t2
!= t1
);
2633 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2636 tree_last (tree chain
)
2640 while ((next
= TREE_CHAIN (chain
)))
2645 /* Reverse the order of elements in the chain T,
2646 and return the new head of the chain (old last element). */
2651 tree prev
= 0, decl
, next
;
2652 for (decl
= t
; decl
; decl
= next
)
2654 /* We shouldn't be using this function to reverse BLOCK chains; we
2655 have blocks_nreverse for that. */
2656 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
2657 next
= TREE_CHAIN (decl
);
2658 TREE_CHAIN (decl
) = prev
;
2664 /* Return a newly created TREE_LIST node whose
2665 purpose and value fields are PARM and VALUE. */
2668 build_tree_list_stat (tree parm
, tree value MEM_STAT_DECL
)
2670 tree t
= make_node_stat (TREE_LIST PASS_MEM_STAT
);
2671 TREE_PURPOSE (t
) = parm
;
2672 TREE_VALUE (t
) = value
;
2676 /* Build a chain of TREE_LIST nodes from a vector. */
2679 build_tree_list_vec_stat (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
2681 tree ret
= NULL_TREE
;
2685 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
2687 *pp
= build_tree_list_stat (NULL
, t PASS_MEM_STAT
);
2688 pp
= &TREE_CHAIN (*pp
);
2693 /* Return a newly created TREE_LIST node whose
2694 purpose and value fields are PURPOSE and VALUE
2695 and whose TREE_CHAIN is CHAIN. */
2698 tree_cons_stat (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
2702 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
2703 memset (node
, 0, sizeof (struct tree_common
));
2705 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
2707 TREE_SET_CODE (node
, TREE_LIST
);
2708 TREE_CHAIN (node
) = chain
;
2709 TREE_PURPOSE (node
) = purpose
;
2710 TREE_VALUE (node
) = value
;
2714 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2718 ctor_to_vec (tree ctor
)
2720 vec
<tree
, va_gc
> *vec
;
2721 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
2725 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
2726 vec
->quick_push (val
);
2731 /* Return the size nominally occupied by an object of type TYPE
2732 when it resides in memory. The value is measured in units of bytes,
2733 and its data type is that normally used for type sizes
2734 (which is the first type created by make_signed_type or
2735 make_unsigned_type). */
2738 size_in_bytes (const_tree type
)
2742 if (type
== error_mark_node
)
2743 return integer_zero_node
;
2745 type
= TYPE_MAIN_VARIANT (type
);
2746 t
= TYPE_SIZE_UNIT (type
);
2750 lang_hooks
.types
.incomplete_type_error (NULL_TREE
, type
);
2751 return size_zero_node
;
2757 /* Returns a tree for the size of EXP in bytes. */
2760 tree_expr_size (const_tree exp
)
2763 && DECL_SIZE_UNIT (exp
) != 0)
2764 return DECL_SIZE_UNIT (exp
);
2766 return size_in_bytes (TREE_TYPE (exp
));
2769 /* Return the size of TYPE (in bytes) as a wide integer
2770 or return -1 if the size can vary or is larger than an integer. */
2773 int_size_in_bytes (const_tree type
)
2777 if (type
== error_mark_node
)
2780 type
= TYPE_MAIN_VARIANT (type
);
2781 t
= TYPE_SIZE_UNIT (type
);
2783 if (t
&& tree_fits_uhwi_p (t
))
2784 return TREE_INT_CST_LOW (t
);
2789 /* Return the maximum size of TYPE (in bytes) as a wide integer
2790 or return -1 if the size can vary or is larger than an integer. */
2793 max_int_size_in_bytes (const_tree type
)
2795 HOST_WIDE_INT size
= -1;
2798 /* If this is an array type, check for a possible MAX_SIZE attached. */
2800 if (TREE_CODE (type
) == ARRAY_TYPE
)
2802 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
2804 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2805 size
= tree_to_uhwi (size_tree
);
2808 /* If we still haven't been able to get a size, see if the language
2809 can compute a maximum size. */
2813 size_tree
= lang_hooks
.types
.max_size (type
);
2815 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2816 size
= tree_to_uhwi (size_tree
);
2822 /* Return the bit position of FIELD, in bits from the start of the record.
2823 This is a tree of type bitsizetype. */
2826 bit_position (const_tree field
)
2828 return bit_from_pos (DECL_FIELD_OFFSET (field
),
2829 DECL_FIELD_BIT_OFFSET (field
));
2832 /* Likewise, but return as an integer. It must be representable in
2833 that way (since it could be a signed value, we don't have the
2834 option of returning -1 like int_size_in_byte can. */
2837 int_bit_position (const_tree field
)
2839 return tree_to_shwi (bit_position (field
));
2842 /* Return the byte position of FIELD, in bytes from the start of the record.
2843 This is a tree of type sizetype. */
2846 byte_position (const_tree field
)
2848 return byte_from_pos (DECL_FIELD_OFFSET (field
),
2849 DECL_FIELD_BIT_OFFSET (field
));
2852 /* Likewise, but return as an integer. It must be representable in
2853 that way (since it could be a signed value, we don't have the
2854 option of returning -1 like int_size_in_byte can. */
2857 int_byte_position (const_tree field
)
2859 return tree_to_shwi (byte_position (field
));
2862 /* Return the strictest alignment, in bits, that T is known to have. */
2865 expr_align (const_tree t
)
2867 unsigned int align0
, align1
;
2869 switch (TREE_CODE (t
))
2871 CASE_CONVERT
: case NON_LVALUE_EXPR
:
2872 /* If we have conversions, we know that the alignment of the
2873 object must meet each of the alignments of the types. */
2874 align0
= expr_align (TREE_OPERAND (t
, 0));
2875 align1
= TYPE_ALIGN (TREE_TYPE (t
));
2876 return MAX (align0
, align1
);
2878 case SAVE_EXPR
: case COMPOUND_EXPR
: case MODIFY_EXPR
:
2879 case INIT_EXPR
: case TARGET_EXPR
: case WITH_CLEANUP_EXPR
:
2880 case CLEANUP_POINT_EXPR
:
2881 /* These don't change the alignment of an object. */
2882 return expr_align (TREE_OPERAND (t
, 0));
2885 /* The best we can do is say that the alignment is the least aligned
2887 align0
= expr_align (TREE_OPERAND (t
, 1));
2888 align1
= expr_align (TREE_OPERAND (t
, 2));
2889 return MIN (align0
, align1
);
2891 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2892 meaningfully, it's always 1. */
2893 case LABEL_DECL
: case CONST_DECL
:
2894 case VAR_DECL
: case PARM_DECL
: case RESULT_DECL
:
2896 gcc_assert (DECL_ALIGN (t
) != 0);
2897 return DECL_ALIGN (t
);
2903 /* Otherwise take the alignment from that of the type. */
2904 return TYPE_ALIGN (TREE_TYPE (t
));
2907 /* Return, as a tree node, the number of elements for TYPE (which is an
2908 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2911 array_type_nelts (const_tree type
)
2913 tree index_type
, min
, max
;
2915 /* If they did it with unspecified bounds, then we should have already
2916 given an error about it before we got here. */
2917 if (! TYPE_DOMAIN (type
))
2918 return error_mark_node
;
2920 index_type
= TYPE_DOMAIN (type
);
2921 min
= TYPE_MIN_VALUE (index_type
);
2922 max
= TYPE_MAX_VALUE (index_type
);
2924 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2926 return error_mark_node
;
2928 return (integer_zerop (min
)
2930 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
2933 /* If arg is static -- a reference to an object in static storage -- then
2934 return the object. This is not the same as the C meaning of `static'.
2935 If arg isn't static, return NULL. */
2940 switch (TREE_CODE (arg
))
2943 /* Nested functions are static, even though taking their address will
2944 involve a trampoline as we unnest the nested function and create
2945 the trampoline on the tree level. */
2949 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
2950 && ! DECL_THREAD_LOCAL_P (arg
)
2951 && ! DECL_DLLIMPORT_P (arg
)
2955 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
2959 return TREE_STATIC (arg
) ? arg
: NULL
;
2966 /* If the thing being referenced is not a field, then it is
2967 something language specific. */
2968 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
2970 /* If we are referencing a bitfield, we can't evaluate an
2971 ADDR_EXPR at compile time and so it isn't a constant. */
2972 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
2975 return staticp (TREE_OPERAND (arg
, 0));
2981 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
2984 case ARRAY_RANGE_REF
:
2985 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
2986 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
2987 return staticp (TREE_OPERAND (arg
, 0));
2991 case COMPOUND_LITERAL_EXPR
:
2992 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
3002 /* Return whether OP is a DECL whose address is function-invariant. */
3005 decl_address_invariant_p (const_tree op
)
3007 /* The conditions below are slightly less strict than the one in
3010 switch (TREE_CODE (op
))
3019 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3020 || DECL_THREAD_LOCAL_P (op
)
3021 || DECL_CONTEXT (op
) == current_function_decl
3022 || decl_function_context (op
) == current_function_decl
)
3027 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3028 || decl_function_context (op
) == current_function_decl
)
3039 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3042 decl_address_ip_invariant_p (const_tree op
)
3044 /* The conditions below are slightly less strict than the one in
3047 switch (TREE_CODE (op
))
3055 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3056 && !DECL_DLLIMPORT_P (op
))
3057 || DECL_THREAD_LOCAL_P (op
))
3062 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3074 /* Return true if T is function-invariant (internal function, does
3075 not handle arithmetic; that's handled in skip_simple_arithmetic and
3076 tree_invariant_p). */
3078 static bool tree_invariant_p (tree t
);
3081 tree_invariant_p_1 (tree t
)
3085 if (TREE_CONSTANT (t
)
3086 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3089 switch (TREE_CODE (t
))
3095 op
= TREE_OPERAND (t
, 0);
3096 while (handled_component_p (op
))
3098 switch (TREE_CODE (op
))
3101 case ARRAY_RANGE_REF
:
3102 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3103 || TREE_OPERAND (op
, 2) != NULL_TREE
3104 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3109 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3115 op
= TREE_OPERAND (op
, 0);
3118 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3127 /* Return true if T is function-invariant. */
3130 tree_invariant_p (tree t
)
3132 tree inner
= skip_simple_arithmetic (t
);
3133 return tree_invariant_p_1 (inner
);
3136 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3137 Do this to any expression which may be used in more than one place,
3138 but must be evaluated only once.
3140 Normally, expand_expr would reevaluate the expression each time.
3141 Calling save_expr produces something that is evaluated and recorded
3142 the first time expand_expr is called on it. Subsequent calls to
3143 expand_expr just reuse the recorded value.
3145 The call to expand_expr that generates code that actually computes
3146 the value is the first call *at compile time*. Subsequent calls
3147 *at compile time* generate code to use the saved value.
3148 This produces correct result provided that *at run time* control
3149 always flows through the insns made by the first expand_expr
3150 before reaching the other places where the save_expr was evaluated.
3151 You, the caller of save_expr, must make sure this is so.
3153 Constants, and certain read-only nodes, are returned with no
3154 SAVE_EXPR because that is safe. Expressions containing placeholders
3155 are not touched; see tree.def for an explanation of what these
3159 save_expr (tree expr
)
3161 tree t
= fold (expr
);
3164 /* If the tree evaluates to a constant, then we don't want to hide that
3165 fact (i.e. this allows further folding, and direct checks for constants).
3166 However, a read-only object that has side effects cannot be bypassed.
3167 Since it is no problem to reevaluate literals, we just return the
3169 inner
= skip_simple_arithmetic (t
);
3170 if (TREE_CODE (inner
) == ERROR_MARK
)
3173 if (tree_invariant_p_1 (inner
))
3176 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3177 it means that the size or offset of some field of an object depends on
3178 the value within another field.
3180 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3181 and some variable since it would then need to be both evaluated once and
3182 evaluated more than once. Front-ends must assure this case cannot
3183 happen by surrounding any such subexpressions in their own SAVE_EXPR
3184 and forcing evaluation at the proper time. */
3185 if (contains_placeholder_p (inner
))
3188 t
= build1 (SAVE_EXPR
, TREE_TYPE (expr
), t
);
3189 SET_EXPR_LOCATION (t
, EXPR_LOCATION (expr
));
3191 /* This expression might be placed ahead of a jump to ensure that the
3192 value was computed on both sides of the jump. So make sure it isn't
3193 eliminated as dead. */
3194 TREE_SIDE_EFFECTS (t
) = 1;
3198 /* Look inside EXPR into any simple arithmetic operations. Return the
3199 outermost non-arithmetic or non-invariant node. */
3202 skip_simple_arithmetic (tree expr
)
3204 /* We don't care about whether this can be used as an lvalue in this
3206 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3207 expr
= TREE_OPERAND (expr
, 0);
3209 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3210 a constant, it will be more efficient to not make another SAVE_EXPR since
3211 it will allow better simplification and GCSE will be able to merge the
3212 computations if they actually occur. */
3215 if (UNARY_CLASS_P (expr
))
3216 expr
= TREE_OPERAND (expr
, 0);
3217 else if (BINARY_CLASS_P (expr
))
3219 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3220 expr
= TREE_OPERAND (expr
, 0);
3221 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3222 expr
= TREE_OPERAND (expr
, 1);
3233 /* Look inside EXPR into simple arithmetic operations involving constants.
3234 Return the outermost non-arithmetic or non-constant node. */
3237 skip_simple_constant_arithmetic (tree expr
)
3239 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3240 expr
= TREE_OPERAND (expr
, 0);
3244 if (UNARY_CLASS_P (expr
))
3245 expr
= TREE_OPERAND (expr
, 0);
3246 else if (BINARY_CLASS_P (expr
))
3248 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3249 expr
= TREE_OPERAND (expr
, 0);
3250 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3251 expr
= TREE_OPERAND (expr
, 1);
3262 /* Return which tree structure is used by T. */
3264 enum tree_node_structure_enum
3265 tree_node_structure (const_tree t
)
3267 const enum tree_code code
= TREE_CODE (t
);
3268 return tree_node_structure_for_code (code
);
3271 /* Set various status flags when building a CALL_EXPR object T. */
3274 process_call_operands (tree t
)
3276 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3277 bool read_only
= false;
3278 int i
= call_expr_flags (t
);
3280 /* Calls have side-effects, except those to const or pure functions. */
3281 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3282 side_effects
= true;
3283 /* Propagate TREE_READONLY of arguments for const functions. */
3287 if (!side_effects
|| read_only
)
3288 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3290 tree op
= TREE_OPERAND (t
, i
);
3291 if (op
&& TREE_SIDE_EFFECTS (op
))
3292 side_effects
= true;
3293 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3297 TREE_SIDE_EFFECTS (t
) = side_effects
;
3298 TREE_READONLY (t
) = read_only
;
3301 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3302 size or offset that depends on a field within a record. */
3305 contains_placeholder_p (const_tree exp
)
3307 enum tree_code code
;
3312 code
= TREE_CODE (exp
);
3313 if (code
== PLACEHOLDER_EXPR
)
3316 switch (TREE_CODE_CLASS (code
))
3319 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3320 position computations since they will be converted into a
3321 WITH_RECORD_EXPR involving the reference, which will assume
3322 here will be valid. */
3323 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3325 case tcc_exceptional
:
3326 if (code
== TREE_LIST
)
3327 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
3328 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
3333 case tcc_comparison
:
3334 case tcc_expression
:
3338 /* Ignoring the first operand isn't quite right, but works best. */
3339 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
3342 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3343 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
3344 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
3347 /* The save_expr function never wraps anything containing
3348 a PLACEHOLDER_EXPR. */
3355 switch (TREE_CODE_LENGTH (code
))
3358 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3360 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3361 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
3372 const_call_expr_arg_iterator iter
;
3373 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
3374 if (CONTAINS_PLACEHOLDER_P (arg
))
3388 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3389 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3393 type_contains_placeholder_1 (const_tree type
)
3395 /* If the size contains a placeholder or the parent type (component type in
3396 the case of arrays) type involves a placeholder, this type does. */
3397 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
3398 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
3399 || (!POINTER_TYPE_P (type
)
3401 && type_contains_placeholder_p (TREE_TYPE (type
))))
3404 /* Now do type-specific checks. Note that the last part of the check above
3405 greatly limits what we have to do below. */
3406 switch (TREE_CODE (type
))
3414 case REFERENCE_TYPE
:
3423 case FIXED_POINT_TYPE
:
3424 /* Here we just check the bounds. */
3425 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
3426 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
3429 /* We have already checked the component type above, so just check the
3431 return type_contains_placeholder_p (TYPE_DOMAIN (type
));
3435 case QUAL_UNION_TYPE
:
3439 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3440 if (TREE_CODE (field
) == FIELD_DECL
3441 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
3442 || (TREE_CODE (type
) == QUAL_UNION_TYPE
3443 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
3444 || type_contains_placeholder_p (TREE_TYPE (field
))))
3455 /* Wrapper around above function used to cache its result. */
3458 type_contains_placeholder_p (tree type
)
3462 /* If the contains_placeholder_bits field has been initialized,
3463 then we know the answer. */
3464 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
3465 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
3467 /* Indicate that we've seen this type node, and the answer is false.
3468 This is what we want to return if we run into recursion via fields. */
3469 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
3471 /* Compute the real value. */
3472 result
= type_contains_placeholder_1 (type
);
3474 /* Store the real value. */
3475 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
3480 /* Push tree EXP onto vector QUEUE if it is not already present. */
3483 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
3488 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
3489 if (simple_cst_equal (iter
, exp
) == 1)
3493 queue
->safe_push (exp
);
3496 /* Given a tree EXP, find all occurrences of references to fields
3497 in a PLACEHOLDER_EXPR and place them in vector REFS without
3498 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3499 we assume here that EXP contains only arithmetic expressions
3500 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3504 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
3506 enum tree_code code
= TREE_CODE (exp
);
3510 /* We handle TREE_LIST and COMPONENT_REF separately. */
3511 if (code
== TREE_LIST
)
3513 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
3514 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
3516 else if (code
== COMPONENT_REF
)
3518 for (inner
= TREE_OPERAND (exp
, 0);
3519 REFERENCE_CLASS_P (inner
);
3520 inner
= TREE_OPERAND (inner
, 0))
3523 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3524 push_without_duplicates (exp
, refs
);
3526 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
3529 switch (TREE_CODE_CLASS (code
))
3534 case tcc_declaration
:
3535 /* Variables allocated to static storage can stay. */
3536 if (!TREE_STATIC (exp
))
3537 push_without_duplicates (exp
, refs
);
3540 case tcc_expression
:
3541 /* This is the pattern built in ada/make_aligning_type. */
3542 if (code
== ADDR_EXPR
3543 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
3545 push_without_duplicates (exp
, refs
);
3549 /* Fall through... */
3551 case tcc_exceptional
:
3554 case tcc_comparison
:
3556 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
3557 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3561 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3562 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3570 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3571 return a tree with all occurrences of references to F in a
3572 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3573 CONST_DECLs. Note that we assume here that EXP contains only
3574 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3575 occurring only in their argument list. */
3578 substitute_in_expr (tree exp
, tree f
, tree r
)
3580 enum tree_code code
= TREE_CODE (exp
);
3581 tree op0
, op1
, op2
, op3
;
3584 /* We handle TREE_LIST and COMPONENT_REF separately. */
3585 if (code
== TREE_LIST
)
3587 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
3588 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
3589 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3592 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3594 else if (code
== COMPONENT_REF
)
3598 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3599 and it is the right field, replace it with R. */
3600 for (inner
= TREE_OPERAND (exp
, 0);
3601 REFERENCE_CLASS_P (inner
);
3602 inner
= TREE_OPERAND (inner
, 0))
3606 op1
= TREE_OPERAND (exp
, 1);
3608 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
3611 /* If this expression hasn't been completed let, leave it alone. */
3612 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
3615 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3616 if (op0
== TREE_OPERAND (exp
, 0))
3620 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
3623 switch (TREE_CODE_CLASS (code
))
3628 case tcc_declaration
:
3634 case tcc_expression
:
3638 /* Fall through... */
3640 case tcc_exceptional
:
3643 case tcc_comparison
:
3645 switch (TREE_CODE_LENGTH (code
))
3651 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3652 if (op0
== TREE_OPERAND (exp
, 0))
3655 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3659 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3660 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3662 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3665 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3669 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3670 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3671 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3673 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3674 && op2
== TREE_OPERAND (exp
, 2))
3677 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3681 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3682 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3683 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3684 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
3686 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3687 && op2
== TREE_OPERAND (exp
, 2)
3688 && op3
== TREE_OPERAND (exp
, 3))
3692 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3704 new_tree
= NULL_TREE
;
3706 /* If we are trying to replace F with a constant, inline back
3707 functions which do nothing else than computing a value from
3708 the arguments they are passed. This makes it possible to
3709 fold partially or entirely the replacement expression. */
3710 if (CONSTANT_CLASS_P (r
) && code
== CALL_EXPR
)
3712 tree t
= maybe_inline_call_in_expr (exp
);
3714 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
3717 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3719 tree op
= TREE_OPERAND (exp
, i
);
3720 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
3724 new_tree
= copy_node (exp
);
3725 TREE_OPERAND (new_tree
, i
) = new_op
;
3731 new_tree
= fold (new_tree
);
3732 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3733 process_call_operands (new_tree
);
3744 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3746 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3747 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3752 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3753 for it within OBJ, a tree that is an object or a chain of references. */
3756 substitute_placeholder_in_expr (tree exp
, tree obj
)
3758 enum tree_code code
= TREE_CODE (exp
);
3759 tree op0
, op1
, op2
, op3
;
3762 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3763 in the chain of OBJ. */
3764 if (code
== PLACEHOLDER_EXPR
)
3766 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
3769 for (elt
= obj
; elt
!= 0;
3770 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3771 || TREE_CODE (elt
) == COND_EXPR
)
3772 ? TREE_OPERAND (elt
, 1)
3773 : (REFERENCE_CLASS_P (elt
)
3774 || UNARY_CLASS_P (elt
)
3775 || BINARY_CLASS_P (elt
)
3776 || VL_EXP_CLASS_P (elt
)
3777 || EXPRESSION_CLASS_P (elt
))
3778 ? TREE_OPERAND (elt
, 0) : 0))
3779 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
3782 for (elt
= obj
; elt
!= 0;
3783 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3784 || TREE_CODE (elt
) == COND_EXPR
)
3785 ? TREE_OPERAND (elt
, 1)
3786 : (REFERENCE_CLASS_P (elt
)
3787 || UNARY_CLASS_P (elt
)
3788 || BINARY_CLASS_P (elt
)
3789 || VL_EXP_CLASS_P (elt
)
3790 || EXPRESSION_CLASS_P (elt
))
3791 ? TREE_OPERAND (elt
, 0) : 0))
3792 if (POINTER_TYPE_P (TREE_TYPE (elt
))
3793 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
3795 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
3797 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3798 survives until RTL generation, there will be an error. */
3802 /* TREE_LIST is special because we need to look at TREE_VALUE
3803 and TREE_CHAIN, not TREE_OPERANDS. */
3804 else if (code
== TREE_LIST
)
3806 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
3807 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
3808 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3811 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3814 switch (TREE_CODE_CLASS (code
))
3817 case tcc_declaration
:
3820 case tcc_exceptional
:
3823 case tcc_comparison
:
3824 case tcc_expression
:
3827 switch (TREE_CODE_LENGTH (code
))
3833 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3834 if (op0
== TREE_OPERAND (exp
, 0))
3837 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3841 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3842 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3844 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3847 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3851 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3852 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3853 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
3855 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3856 && op2
== TREE_OPERAND (exp
, 2))
3859 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3863 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3864 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3865 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
3866 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
3868 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3869 && op2
== TREE_OPERAND (exp
, 2)
3870 && op3
== TREE_OPERAND (exp
, 3))
3874 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3886 new_tree
= NULL_TREE
;
3888 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3890 tree op
= TREE_OPERAND (exp
, i
);
3891 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
3895 new_tree
= copy_node (exp
);
3896 TREE_OPERAND (new_tree
, i
) = new_op
;
3902 new_tree
= fold (new_tree
);
3903 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3904 process_call_operands (new_tree
);
3915 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3917 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3918 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3924 /* Subroutine of stabilize_reference; this is called for subtrees of
3925 references. Any expression with side-effects must be put in a SAVE_EXPR
3926 to ensure that it is only evaluated once.
3928 We don't put SAVE_EXPR nodes around everything, because assigning very
3929 simple expressions to temporaries causes us to miss good opportunities
3930 for optimizations. Among other things, the opportunity to fold in the
3931 addition of a constant into an addressing mode often gets lost, e.g.
3932 "y[i+1] += x;". In general, we take the approach that we should not make
3933 an assignment unless we are forced into it - i.e., that any non-side effect
3934 operator should be allowed, and that cse should take care of coalescing
3935 multiple utterances of the same expression should that prove fruitful. */
3938 stabilize_reference_1 (tree e
)
3941 enum tree_code code
= TREE_CODE (e
);
3943 /* We cannot ignore const expressions because it might be a reference
3944 to a const array but whose index contains side-effects. But we can
3945 ignore things that are actual constant or that already have been
3946 handled by this function. */
3948 if (tree_invariant_p (e
))
3951 switch (TREE_CODE_CLASS (code
))
3953 case tcc_exceptional
:
3955 case tcc_declaration
:
3956 case tcc_comparison
:
3958 case tcc_expression
:
3961 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3962 so that it will only be evaluated once. */
3963 /* The reference (r) and comparison (<) classes could be handled as
3964 below, but it is generally faster to only evaluate them once. */
3965 if (TREE_SIDE_EFFECTS (e
))
3966 return save_expr (e
);
3970 /* Constants need no processing. In fact, we should never reach
3975 /* Division is slow and tends to be compiled with jumps,
3976 especially the division by powers of 2 that is often
3977 found inside of an array reference. So do it just once. */
3978 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
3979 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
3980 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
3981 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
3982 return save_expr (e
);
3983 /* Recursively stabilize each operand. */
3984 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
3985 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
3989 /* Recursively stabilize each operand. */
3990 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
3997 TREE_TYPE (result
) = TREE_TYPE (e
);
3998 TREE_READONLY (result
) = TREE_READONLY (e
);
3999 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
4000 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
4001 TREE_SHARED (result
) = TREE_SHARED (e
);
4002 TREE_STRICT (result
) = TREE_STRICT (e
);
4003 TREE_RELAXED (result
) = TREE_RELAXED (e
);
4008 /* Stabilize a reference so that we can use it any number of times
4009 without causing its operands to be evaluated more than once.
4010 Returns the stabilized reference. This works by means of save_expr,
4011 so see the caveats in the comments about save_expr.
4013 Also allows conversion expressions whose operands are references.
4014 Any other kind of expression is returned unchanged. */
4017 stabilize_reference (tree ref
)
4020 enum tree_code code
= TREE_CODE (ref
);
4027 /* No action is needed in this case. */
4032 case FIX_TRUNC_EXPR
:
4033 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4037 result
= build_nt (INDIRECT_REF
,
4038 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4042 result
= build_nt (COMPONENT_REF
,
4043 stabilize_reference (TREE_OPERAND (ref
, 0)),
4044 TREE_OPERAND (ref
, 1), NULL_TREE
);
4048 result
= build_nt (BIT_FIELD_REF
,
4049 stabilize_reference (TREE_OPERAND (ref
, 0)),
4050 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4054 result
= build_nt (ARRAY_REF
,
4055 stabilize_reference (TREE_OPERAND (ref
, 0)),
4056 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4057 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4060 case ARRAY_RANGE_REF
:
4061 result
= build_nt (ARRAY_RANGE_REF
,
4062 stabilize_reference (TREE_OPERAND (ref
, 0)),
4063 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4064 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4068 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4069 it wouldn't be ignored. This matters when dealing with
4071 return stabilize_reference_1 (ref
);
4073 /* If arg isn't a kind of lvalue we recognize, make no change.
4074 Caller should recognize the error for an invalid lvalue. */
4079 return error_mark_node
;
4082 TREE_TYPE (result
) = TREE_TYPE (ref
);
4083 TREE_READONLY (result
) = TREE_READONLY (ref
);
4084 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4085 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4086 TREE_SHARED (result
) = TREE_SHARED (ref
);
4087 TREE_STRICT (result
) = TREE_STRICT (ref
);
4088 TREE_RELAXED (result
) = TREE_RELAXED (ref
);
4094 /* Low-level constructors for expressions. */
4096 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4097 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4100 recompute_tree_invariant_for_addr_expr (tree t
)
4103 bool tc
= true, se
= false;
4105 /* We started out assuming this address is both invariant and constant, but
4106 does not have side effects. Now go down any handled components and see if
4107 any of them involve offsets that are either non-constant or non-invariant.
4108 Also check for side-effects.
4110 ??? Note that this code makes no attempt to deal with the case where
4111 taking the address of something causes a copy due to misalignment. */
4113 #define UPDATE_FLAGS(NODE) \
4114 do { tree _node = (NODE); \
4115 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4116 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4118 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4119 node
= TREE_OPERAND (node
, 0))
4121 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4122 array reference (probably made temporarily by the G++ front end),
4123 so ignore all the operands. */
4124 if ((TREE_CODE (node
) == ARRAY_REF
4125 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4126 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4128 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4129 if (TREE_OPERAND (node
, 2))
4130 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4131 if (TREE_OPERAND (node
, 3))
4132 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4134 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4135 FIELD_DECL, apparently. The G++ front end can put something else
4136 there, at least temporarily. */
4137 else if (TREE_CODE (node
) == COMPONENT_REF
4138 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4140 if (TREE_OPERAND (node
, 2))
4141 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4145 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4147 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4148 the address, since &(*a)->b is a form of addition. If it's a constant, the
4149 address is constant too. If it's a decl, its address is constant if the
4150 decl is static. Everything else is not constant and, furthermore,
4151 taking the address of a volatile variable is not volatile. */
4152 if (TREE_CODE (node
) == INDIRECT_REF
4153 || TREE_CODE (node
) == MEM_REF
)
4154 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4155 else if (CONSTANT_CLASS_P (node
))
4157 else if (DECL_P (node
))
4158 tc
&= (staticp (node
) != NULL_TREE
);
4162 se
|= TREE_SIDE_EFFECTS (node
);
4166 TREE_CONSTANT (t
) = tc
;
4167 TREE_SIDE_EFFECTS (t
) = se
;
4171 /* Build an expression of code CODE, data type TYPE, and operands as
4172 specified. Expressions and reference nodes can be created this way.
4173 Constants, decls, types and misc nodes cannot be.
4175 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4176 enough for all extant tree codes. */
4179 build0_stat (enum tree_code code
, tree tt MEM_STAT_DECL
)
4183 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4185 t
= make_node_stat (code PASS_MEM_STAT
);
4192 build1_stat (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4194 int length
= sizeof (struct tree_exp
);
4197 record_node_allocation_statistics (code
, length
);
4199 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4201 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4203 memset (t
, 0, sizeof (struct tree_common
));
4205 TREE_SET_CODE (t
, code
);
4207 TREE_TYPE (t
) = type
;
4208 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4209 TREE_OPERAND (t
, 0) = node
;
4210 if (node
&& !TYPE_P (node
))
4212 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4213 TREE_READONLY (t
) = TREE_READONLY (node
);
4216 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4217 TREE_SIDE_EFFECTS (t
) = 1;
4221 /* All of these have side-effects, no matter what their
4223 TREE_SIDE_EFFECTS (t
) = 1;
4224 TREE_READONLY (t
) = 0;
4228 /* Whether a dereference is readonly has nothing to do with whether
4229 its operand is readonly. */
4230 TREE_READONLY (t
) = 0;
4231 TREE_SHARED (t
) = upc_shared_type_p (type
);
4236 recompute_tree_invariant_for_addr_expr (t
);
4240 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4241 && node
&& !TYPE_P (node
)
4242 && TREE_CONSTANT (node
))
4243 TREE_CONSTANT (t
) = 1;
4244 if (TREE_CODE_CLASS (code
) == tcc_reference
4245 && node
&& TREE_THIS_VOLATILE (node
))
4246 TREE_THIS_VOLATILE (t
) = 1;
4247 /* Drop the UPC "shared" type qualifier for
4248 expressions involving UPC shared objects. */
4249 if (TREE_CODE_CLASS (code
) == tcc_unary
4250 && node
&& !TYPE_P (node
)
4251 && upc_shared_type_p (type
))
4252 TREE_TYPE (t
) = build_upc_unshared_type (type
);
/* Set TREE_OPERAND (t, N) from arg##N and fold the argument's
   constant/read-only/side-effects properties into the local flags
   `constant', `read_only' and `side_effects' (only for real `tree'
   expressions, not bare types).  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4275 build2_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4277 bool constant
, read_only
, side_effects
;
4280 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4282 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4283 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4284 /* When sizetype precision doesn't match that of pointers
4285 we need to be able to build explicit extensions or truncations
4286 of the offset argument. */
4287 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4288 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4289 && TREE_CODE (arg1
) == INTEGER_CST
);
4291 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4292 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4293 && ptrofftype_p (TREE_TYPE (arg1
)));
4295 t
= make_node_stat (code PASS_MEM_STAT
);
4297 /* Remove UPC shared type qualifiers from the result type. */
4298 if (upc_shared_type_p (tt
))
4299 tt
= build_upc_unshared_type (tt
);
4302 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4303 result based on those same flags for the arguments. But if the
4304 arguments aren't really even `tree' expressions, we shouldn't be trying
4307 /* Expressions without side effects may be constant if their
4308 arguments are as well. */
4309 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4310 || TREE_CODE_CLASS (code
) == tcc_binary
);
4312 side_effects
= TREE_SIDE_EFFECTS (t
);
4317 TREE_READONLY (t
) = read_only
;
4318 TREE_CONSTANT (t
) = constant
;
4319 TREE_SIDE_EFFECTS (t
) = side_effects
;
4320 TREE_THIS_VOLATILE (t
)
4321 = (TREE_CODE_CLASS (code
) == tcc_reference
4322 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4329 build3_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4330 tree arg2 MEM_STAT_DECL
)
4332 bool constant
, read_only
, side_effects
;
4335 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
4336 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4338 t
= make_node_stat (code PASS_MEM_STAT
);
4343 /* As a special exception, if COND_EXPR has NULL branches, we
4344 assume that it is a gimple statement and always consider
4345 it to have side effects. */
4346 if (code
== COND_EXPR
4347 && tt
== void_type_node
4348 && arg1
== NULL_TREE
4349 && arg2
== NULL_TREE
)
4350 side_effects
= true;
4352 side_effects
= TREE_SIDE_EFFECTS (t
);
4358 if (code
== COND_EXPR
)
4359 TREE_READONLY (t
) = read_only
;
4361 TREE_SIDE_EFFECTS (t
) = side_effects
;
4362 TREE_THIS_VOLATILE (t
)
4363 = (TREE_CODE_CLASS (code
) == tcc_reference
4364 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4366 = (TREE_CODE_CLASS (code
) == tcc_reference
4367 && arg0
&& TREE_SHARED (arg0
));
4368 if (TREE_SHARED (t
))
4370 TREE_STRICT (t
) = TREE_STRICT (arg0
);
4371 TREE_RELAXED (t
) = TREE_RELAXED (arg0
);
4378 build4_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4379 tree arg2
, tree arg3 MEM_STAT_DECL
)
4381 bool constant
, read_only
, side_effects
;
4384 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
4386 t
= make_node_stat (code PASS_MEM_STAT
);
4389 side_effects
= TREE_SIDE_EFFECTS (t
);
4396 TREE_SIDE_EFFECTS (t
) = side_effects
;
4397 TREE_THIS_VOLATILE (t
)
4398 = (TREE_CODE_CLASS (code
) == tcc_reference
4399 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4405 build5_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4406 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
4408 bool constant
, read_only
, side_effects
;
4411 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
4413 t
= make_node_stat (code PASS_MEM_STAT
);
4416 side_effects
= TREE_SIDE_EFFECTS (t
);
4424 TREE_SIDE_EFFECTS (t
) = side_effects
;
4425 TREE_THIS_VOLATILE (t
)
4426 = (TREE_CODE_CLASS (code
) == tcc_reference
4427 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4432 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4433 on the pointer PTR. */
4436 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
4438 HOST_WIDE_INT offset
= 0;
4439 tree ptype
= TREE_TYPE (ptr
);
4441 /* For convenience allow addresses that collapse to a simple base
4443 if (TREE_CODE (ptr
) == ADDR_EXPR
4444 && (handled_component_p (TREE_OPERAND (ptr
, 0))
4445 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
4447 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
4449 ptr
= build_fold_addr_expr (ptr
);
4450 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
4452 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
4453 ptr
, build_int_cst (ptype
, offset
));
4454 SET_EXPR_LOCATION (tem
, loc
);
4458 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4461 mem_ref_offset (const_tree t
)
4463 return offset_int::from (TREE_OPERAND (t
, 1), SIGNED
);
4466 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4467 offsetted by OFFSET units. */
4470 build_invariant_address (tree type
, tree base
, HOST_WIDE_INT offset
)
4472 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
4473 build_fold_addr_expr (base
),
4474 build_int_cst (ptr_type_node
, offset
));
4475 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
4476 recompute_tree_invariant_for_addr_expr (addr
);
4480 /* Similar except don't specify the TREE_TYPE
4481 and leave the TREE_SIDE_EFFECTS as 0.
4482 It is permissible for arguments to be null,
4483 or even garbage if their values do not matter. */
4486 build_nt (enum tree_code code
, ...)
4493 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4497 t
= make_node (code
);
4498 length
= TREE_CODE_LENGTH (code
);
4500 for (i
= 0; i
< length
; i
++)
4501 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
4507 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4511 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
4516 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
4517 CALL_EXPR_FN (ret
) = fn
;
4518 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
4519 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
4520 CALL_EXPR_ARG (ret
, ix
) = t
;
4524 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4525 We do NOT enter this node in any sort of symbol table.
4527 LOC is the location of the decl.
4529 layout_decl is used to set up the decl's storage layout.
4530 Other slots are initialized to 0 or null pointers. */
4533 build_decl_stat (location_t loc
, enum tree_code code
, tree name
,
4534 tree type MEM_STAT_DECL
)
4538 t
= make_node_stat (code PASS_MEM_STAT
);
4539 DECL_SOURCE_LOCATION (t
) = loc
;
4541 /* if (type == error_mark_node)
4542 type = integer_type_node; */
4543 /* That is not done, deliberately, so that having error_mark_node
4544 as the type can suppress useless errors in the use of this variable. */
4546 DECL_NAME (t
) = name
;
4547 TREE_TYPE (t
) = type
;
4549 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
4555 /* Builds and returns function declaration with NAME and TYPE. */
4558 build_fn_decl (const char *name
, tree type
)
4560 tree id
= get_identifier (name
);
4561 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
4563 DECL_EXTERNAL (decl
) = 1;
4564 TREE_PUBLIC (decl
) = 1;
4565 DECL_ARTIFICIAL (decl
) = 1;
4566 TREE_NOTHROW (decl
) = 1;
4571 vec
<tree
, va_gc
> *all_translation_units
;
4573 /* Builds a new translation-unit decl with name NAME, queues it in the
4574 global list of translation-unit decls and returns it. */
4577 build_translation_unit_decl (tree name
)
4579 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
4581 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
4582 vec_safe_push (all_translation_units
, tu
);
4587 /* BLOCK nodes are used to represent the structure of binding contours
4588 and declarations, once those contours have been exited and their contents
4589 compiled. This information is used for outputting debugging info. */
4592 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
4594 tree block
= make_node (BLOCK
);
4596 BLOCK_VARS (block
) = vars
;
4597 BLOCK_SUBBLOCKS (block
) = subblocks
;
4598 BLOCK_SUPERCONTEXT (block
) = supercontext
;
4599 BLOCK_CHAIN (block
) = chain
;
4604 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4606 LOC is the location to use in tree T. */
4609 protected_set_expr_location (tree t
, location_t loc
)
4611 if (t
&& CAN_HAVE_LOCATION_P (t
))
4612 SET_EXPR_LOCATION (t
, loc
);
4615 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4619 build_decl_attribute_variant (tree ddecl
, tree attribute
)
4621 DECL_ATTRIBUTES (ddecl
) = attribute
;
4625 /* Borrowed from hashtab.c iterative_hash implementation. */
4626 #define mix(a,b,c) \
4628 a -= b; a -= c; a ^= (c>>13); \
4629 b -= c; b -= a; b ^= (a<< 8); \
4630 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4631 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4632 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4633 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4634 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4635 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4636 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4640 /* Produce good hash value combining VAL and VAL2. */
4642 iterative_hash_hashval_t (hashval_t val
, hashval_t val2
)
4644 /* the golden ratio; an arbitrary value. */
4645 hashval_t a
= 0x9e3779b9;
4651 /* Produce good hash value combining VAL and VAL2. */
4653 iterative_hash_host_wide_int (HOST_WIDE_INT val
, hashval_t val2
)
4655 if (sizeof (HOST_WIDE_INT
) == sizeof (hashval_t
))
4656 return iterative_hash_hashval_t (val
, val2
);
4659 hashval_t a
= (hashval_t
) val
;
4660 /* Avoid warnings about shifting of more than the width of the type on
4661 hosts that won't execute this path. */
4663 hashval_t b
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 8 + zero
));
4665 if (sizeof (HOST_WIDE_INT
) > 2 * sizeof (hashval_t
))
4667 hashval_t a
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 16 + zero
));
4668 hashval_t b
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 24 + zero
));
4675 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4676 is ATTRIBUTE and its qualifiers are QUALS.
4678 Record such modified types already made so we don't make duplicates. */
4681 build_type_attribute_qual_variant (tree ttype
, tree attribute
, int quals
)
4683 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype
), attribute
))
4685 hashval_t hashcode
= 0;
4689 enum tree_code code
= TREE_CODE (ttype
);
4691 /* Building a distinct copy of a tagged type is inappropriate; it
4692 causes breakage in code that expects there to be a one-to-one
4693 relationship between a struct and its fields.
4694 build_duplicate_type is another solution (as used in
4695 handle_transparent_union_attribute), but that doesn't play well
4696 with the stronger C++ type identity model. */
4697 if (TREE_CODE (ttype
) == RECORD_TYPE
4698 || TREE_CODE (ttype
) == UNION_TYPE
4699 || TREE_CODE (ttype
) == QUAL_UNION_TYPE
4700 || TREE_CODE (ttype
) == ENUMERAL_TYPE
)
4702 warning (OPT_Wattributes
,
4703 "ignoring attributes applied to %qT after definition",
4704 TYPE_MAIN_VARIANT (ttype
));
4705 return build_qualified_type (ttype
, quals
);
4708 ttype
= build_qualified_type (ttype
, TYPE_UNQUALIFIED
);
4709 ntype
= build_distinct_type_copy (ttype
);
4711 TYPE_ATTRIBUTES (ntype
) = attribute
;
4713 hashcode
= iterative_hash_object (code
, hashcode
);
4714 if (TREE_TYPE (ntype
))
4715 hashcode
= iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype
)),
4717 hashcode
= attribute_hash_list (attribute
, hashcode
);
4719 switch (TREE_CODE (ntype
))
4722 hashcode
= type_hash_list (TYPE_ARG_TYPES (ntype
), hashcode
);
4725 if (TYPE_DOMAIN (ntype
))
4726 hashcode
= iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype
)),
4730 t
= TYPE_MAX_VALUE (ntype
);
4731 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
4732 hashcode
= iterative_hash_object (TREE_INT_CST_ELT (t
, i
), hashcode
);
4735 case FIXED_POINT_TYPE
:
4737 unsigned int precision
= TYPE_PRECISION (ntype
);
4738 hashcode
= iterative_hash_object (precision
, hashcode
);
4745 ntype
= type_hash_canon (hashcode
, ntype
);
4747 /* If the target-dependent attributes make NTYPE different from
4748 its canonical type, we will need to use structural equality
4749 checks for this type. */
4750 if (TYPE_STRUCTURAL_EQUALITY_P (ttype
)
4751 || !comp_type_attributes (ntype
, ttype
))
4752 SET_TYPE_STRUCTURAL_EQUALITY (ntype
);
4753 else if (TYPE_CANONICAL (ntype
) == ntype
)
4754 TYPE_CANONICAL (ntype
) = TYPE_CANONICAL (ttype
);
4756 ttype
= build_qualified_type (ntype
, quals
);
4758 else if (TYPE_QUALS (ttype
) != quals
)
4759 ttype
= build_qualified_type (ttype
, quals
);
4764 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4768 omp_declare_simd_clauses_equal (tree clauses1
, tree clauses2
)
4771 for (cl1
= clauses1
, cl2
= clauses2
;
4773 cl1
= OMP_CLAUSE_CHAIN (cl1
), cl2
= OMP_CLAUSE_CHAIN (cl2
))
4775 if (OMP_CLAUSE_CODE (cl1
) != OMP_CLAUSE_CODE (cl2
))
4777 if (OMP_CLAUSE_CODE (cl1
) != OMP_CLAUSE_SIMDLEN
)
4779 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1
),
4780 OMP_CLAUSE_DECL (cl2
)) != 1)
4783 switch (OMP_CLAUSE_CODE (cl1
))
4785 case OMP_CLAUSE_ALIGNED
:
4786 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1
),
4787 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2
)) != 1)
4790 case OMP_CLAUSE_LINEAR
:
4791 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1
),
4792 OMP_CLAUSE_LINEAR_STEP (cl2
)) != 1)
4795 case OMP_CLAUSE_SIMDLEN
:
4796 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1
),
4797 OMP_CLAUSE_SIMDLEN_EXPR (cl2
)) != 1)
4806 /* Compare two constructor-element-type constants. Return 1 if the lists
4807 are known to be equal; otherwise return 0. */
4810 simple_cst_list_equal (const_tree l1
, const_tree l2
)
4812 while (l1
!= NULL_TREE
&& l2
!= NULL_TREE
)
4814 if (simple_cst_equal (TREE_VALUE (l1
), TREE_VALUE (l2
)) != 1)
4817 l1
= TREE_CHAIN (l1
);
4818 l2
= TREE_CHAIN (l2
);
4824 /* Compare two attributes for their value identity. Return true if the
4825 attribute values are known to be equal; otherwise return false.
4829 attribute_value_equal (const_tree attr1
, const_tree attr2
)
4831 if (TREE_VALUE (attr1
) == TREE_VALUE (attr2
))
4834 if (TREE_VALUE (attr1
) != NULL_TREE
4835 && TREE_CODE (TREE_VALUE (attr1
)) == TREE_LIST
4836 && TREE_VALUE (attr2
) != NULL
4837 && TREE_CODE (TREE_VALUE (attr2
)) == TREE_LIST
)
4838 return (simple_cst_list_equal (TREE_VALUE (attr1
),
4839 TREE_VALUE (attr2
)) == 1);
4841 if ((flag_openmp
|| flag_openmp_simd
)
4842 && TREE_VALUE (attr1
) && TREE_VALUE (attr2
)
4843 && TREE_CODE (TREE_VALUE (attr1
)) == OMP_CLAUSE
4844 && TREE_CODE (TREE_VALUE (attr2
)) == OMP_CLAUSE
)
4845 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1
),
4846 TREE_VALUE (attr2
));
4848 return (simple_cst_equal (TREE_VALUE (attr1
), TREE_VALUE (attr2
)) == 1);
4851 /* Return 0 if the attributes for two types are incompatible, 1 if they
4852 are compatible, and 2 if they are nearly compatible (which causes a
4853 warning to be generated). */
4855 comp_type_attributes (const_tree type1
, const_tree type2
)
4857 const_tree a1
= TYPE_ATTRIBUTES (type1
);
4858 const_tree a2
= TYPE_ATTRIBUTES (type2
);
4863 for (a
= a1
; a
!= NULL_TREE
; a
= TREE_CHAIN (a
))
4865 const struct attribute_spec
*as
;
4868 as
= lookup_attribute_spec (get_attribute_name (a
));
4869 if (!as
|| as
->affects_type_identity
== false)
4872 attr
= lookup_attribute (as
->name
, CONST_CAST_TREE (a2
));
4873 if (!attr
|| !attribute_value_equal (a
, attr
))
4878 for (a
= a2
; a
!= NULL_TREE
; a
= TREE_CHAIN (a
))
4880 const struct attribute_spec
*as
;
4882 as
= lookup_attribute_spec (get_attribute_name (a
));
4883 if (!as
|| as
->affects_type_identity
== false)
4886 if (!lookup_attribute (as
->name
, CONST_CAST_TREE (a1
)))
4888 /* We don't need to compare trees again, as we did this
4889 already in first loop. */
4891 /* All types - affecting identity - are equal, so
4892 there is no need to call target hook for comparison. */
4896 /* As some type combinations - like default calling-convention - might
4897 be compatible, we have to call the target hook to get the final result. */
4898 return targetm
.comp_type_attributes (type1
, type2
);
4901 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4904 Record such modified types already made so we don't make duplicates. */
4907 build_type_attribute_variant (tree ttype
, tree attribute
)
4909 return build_type_attribute_qual_variant (ttype
, attribute
,
4910 TYPE_QUALS (ttype
));
4914 /* Reset the expression *EXPR_P, a size or position.
4916 ??? We could reset all non-constant sizes or positions. But it's cheap
4917 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4919 We need to reset self-referential sizes or positions because they cannot
4920 be gimplified and thus can contain a CALL_EXPR after the gimplification
4921 is finished, which will run afoul of LTO streaming. And they need to be
4922 reset to something essentially dummy but not constant, so as to preserve
4923 the properties of the object they are attached to. */
4926 free_lang_data_in_one_sizepos (tree
*expr_p
)
4928 tree expr
= *expr_p
;
4929 if (CONTAINS_PLACEHOLDER_P (expr
))
4930 *expr_p
= build0 (PLACEHOLDER_EXPR
, TREE_TYPE (expr
));
4934 /* Reset all the fields in a binfo node BINFO. We only keep
4935 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4938 free_lang_data_in_binfo (tree binfo
)
4943 gcc_assert (TREE_CODE (binfo
) == TREE_BINFO
);
4945 BINFO_VIRTUALS (binfo
) = NULL_TREE
;
4946 BINFO_BASE_ACCESSES (binfo
) = NULL
;
4947 BINFO_INHERITANCE_CHAIN (binfo
) = NULL_TREE
;
4948 BINFO_SUBVTT_INDEX (binfo
) = NULL_TREE
;
4950 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo
), i
, t
)
4951 free_lang_data_in_binfo (t
);
4955 /* Reset all language specific information still present in TYPE. */
4958 free_lang_data_in_type (tree type
)
4960 gcc_assert (TYPE_P (type
));
4962 /* Give the FE a chance to remove its own data first. */
4963 lang_hooks
.free_lang_data (type
);
4965 TREE_LANG_FLAG_0 (type
) = 0;
4966 TREE_LANG_FLAG_1 (type
) = 0;
4967 TREE_LANG_FLAG_2 (type
) = 0;
4968 TREE_LANG_FLAG_3 (type
) = 0;
4969 TREE_LANG_FLAG_4 (type
) = 0;
4970 TREE_LANG_FLAG_5 (type
) = 0;
4971 TREE_LANG_FLAG_6 (type
) = 0;
4973 if (TREE_CODE (type
) == FUNCTION_TYPE
)
4975 /* Remove the const and volatile qualifiers from arguments. The
4976 C++ front end removes them, but the C front end does not,
4977 leading to false ODR violation errors when merging two
4978 instances of the same function signature compiled by
4979 different front ends. */
4982 for (p
= TYPE_ARG_TYPES (type
); p
; p
= TREE_CHAIN (p
))
4984 tree arg_type
= TREE_VALUE (p
);
4986 if (TYPE_READONLY (arg_type
) || TYPE_VOLATILE (arg_type
))
4988 int quals
= TYPE_QUALS (arg_type
)
4990 & ~TYPE_QUAL_VOLATILE
;
4991 TREE_VALUE (p
) = build_qualified_type (arg_type
, quals
);
4992 free_lang_data_in_type (TREE_VALUE (p
));
4997 /* Remove members that are not actually FIELD_DECLs from the field
4998 list of an aggregate. These occur in C++. */
4999 if (RECORD_OR_UNION_TYPE_P (type
))
5003 /* Note that TYPE_FIELDS can be shared across distinct
5004 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5005 to be removed, we cannot set its TREE_CHAIN to NULL.
5006 Otherwise, we would not be able to find all the other fields
5007 in the other instances of this TREE_TYPE.
5009 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5011 member
= TYPE_FIELDS (type
);
5014 if (TREE_CODE (member
) == FIELD_DECL
5015 || TREE_CODE (member
) == TYPE_DECL
)
5018 TREE_CHAIN (prev
) = member
;
5020 TYPE_FIELDS (type
) = member
;
5024 member
= TREE_CHAIN (member
);
5028 TREE_CHAIN (prev
) = NULL_TREE
;
5030 TYPE_FIELDS (type
) = NULL_TREE
;
5032 TYPE_METHODS (type
) = NULL_TREE
;
5033 if (TYPE_BINFO (type
))
5034 free_lang_data_in_binfo (TYPE_BINFO (type
));
5038 /* For non-aggregate types, clear out the language slot (which
5039 overloads TYPE_BINFO). */
5040 TYPE_LANG_SLOT_1 (type
) = NULL_TREE
;
5042 if (INTEGRAL_TYPE_P (type
)
5043 || SCALAR_FLOAT_TYPE_P (type
)
5044 || FIXED_POINT_TYPE_P (type
))
5046 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type
));
5047 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type
));
5051 free_lang_data_in_one_sizepos (&TYPE_SIZE (type
));
5052 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type
));
5054 if (TYPE_CONTEXT (type
)
5055 && TREE_CODE (TYPE_CONTEXT (type
)) == BLOCK
)
5057 tree ctx
= TYPE_CONTEXT (type
);
5060 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5062 while (ctx
&& TREE_CODE (ctx
) == BLOCK
);
5063 TYPE_CONTEXT (type
) = ctx
;
5068 /* Return true if DECL may need an assembler name to be set. */
5071 need_assembler_name_p (tree decl
)
5073 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5074 if (TREE_CODE (decl
) != FUNCTION_DECL
5075 && TREE_CODE (decl
) != VAR_DECL
)
5078 /* If DECL already has its assembler name set, it does not need a
5080 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
5081 || DECL_ASSEMBLER_NAME_SET_P (decl
))
5084 /* Abstract decls do not need an assembler name. */
5085 if (DECL_ABSTRACT (decl
))
5088 /* For VAR_DECLs, only static, public and external symbols need an
5090 if (TREE_CODE (decl
) == VAR_DECL
5091 && !TREE_STATIC (decl
)
5092 && !TREE_PUBLIC (decl
)
5093 && !DECL_EXTERNAL (decl
))
5096 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5098 /* Do not set assembler name on builtins. Allow RTL expansion to
5099 decide whether to expand inline or via a regular call. */
5100 if (DECL_BUILT_IN (decl
)
5101 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
5104 /* Functions represented in the callgraph need an assembler name. */
5105 if (cgraph_get_node (decl
) != NULL
)
5108 /* Unused and not public functions don't need an assembler name. */
5109 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
5117 /* Reset all language specific information still present in symbol
5121 free_lang_data_in_decl (tree decl
)
5123 gcc_assert (DECL_P (decl
));
5125 /* Give the FE a chance to remove its own data first. */
5126 lang_hooks
.free_lang_data (decl
);
5128 TREE_LANG_FLAG_0 (decl
) = 0;
5129 TREE_LANG_FLAG_1 (decl
) = 0;
5130 TREE_LANG_FLAG_2 (decl
) = 0;
5131 TREE_LANG_FLAG_3 (decl
) = 0;
5132 TREE_LANG_FLAG_4 (decl
) = 0;
5133 TREE_LANG_FLAG_5 (decl
) = 0;
5134 TREE_LANG_FLAG_6 (decl
) = 0;
5136 free_lang_data_in_one_sizepos (&DECL_SIZE (decl
));
5137 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl
));
5138 if (TREE_CODE (decl
) == FIELD_DECL
)
5140 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl
));
5141 if (TREE_CODE (DECL_CONTEXT (decl
)) == QUAL_UNION_TYPE
)
5142 DECL_QUALIFIER (decl
) = NULL_TREE
;
5145 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5147 struct cgraph_node
*node
;
5148 if (!(node
= cgraph_get_node (decl
))
5149 || (!node
->definition
&& !node
->clones
))
5152 cgraph_release_function_body (node
);
5155 release_function_body (decl
);
5156 DECL_ARGUMENTS (decl
) = NULL
;
5157 DECL_RESULT (decl
) = NULL
;
5158 DECL_INITIAL (decl
) = error_mark_node
;
5161 if (gimple_has_body_p (decl
))
5165 /* If DECL has a gimple body, then the context for its
5166 arguments must be DECL. Otherwise, it doesn't really
5167 matter, as we will not be emitting any code for DECL. In
5168 general, there may be other instances of DECL created by
5169 the front end and since PARM_DECLs are generally shared,
5170 their DECL_CONTEXT changes as the replicas of DECL are
5171 created. The only time where DECL_CONTEXT is important
5172 is for the FUNCTION_DECLs that have a gimple body (since
5173 the PARM_DECL will be used in the function's body). */
5174 for (t
= DECL_ARGUMENTS (decl
); t
; t
= TREE_CHAIN (t
))
5175 DECL_CONTEXT (t
) = decl
;
5178 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5179 At this point, it is not needed anymore. */
5180 DECL_SAVED_TREE (decl
) = NULL_TREE
;
5182 /* Clear the abstract origin if it refers to a method. Otherwise
5183 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5184 origin will not be output correctly. */
5185 if (DECL_ABSTRACT_ORIGIN (decl
)
5186 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))
5187 && RECORD_OR_UNION_TYPE_P
5188 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))))
5189 DECL_ABSTRACT_ORIGIN (decl
) = NULL_TREE
;
5191 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5192 DECL_VINDEX referring to itself into a vtable slot number as it
5193 should. Happens with functions that are copied and then forgotten
5194 about. Just clear it, it won't matter anymore. */
5195 if (DECL_VINDEX (decl
) && !tree_fits_shwi_p (DECL_VINDEX (decl
)))
5196 DECL_VINDEX (decl
) = NULL_TREE
;
5198 else if (TREE_CODE (decl
) == VAR_DECL
)
5200 if ((DECL_EXTERNAL (decl
)
5201 && (!TREE_STATIC (decl
) || !TREE_READONLY (decl
)))
5202 || (decl_function_context (decl
) && !TREE_STATIC (decl
)))
5203 DECL_INITIAL (decl
) = NULL_TREE
;
5205 else if (TREE_CODE (decl
) == TYPE_DECL
5206 || TREE_CODE (decl
) == FIELD_DECL
)
5207 DECL_INITIAL (decl
) = NULL_TREE
;
5208 else if (TREE_CODE (decl
) == TRANSLATION_UNIT_DECL
5209 && DECL_INITIAL (decl
)
5210 && TREE_CODE (DECL_INITIAL (decl
)) == BLOCK
)
5212 /* Strip builtins from the translation-unit BLOCK. We still have targets
5213 without builtin_decl_explicit support and also builtins are shared
5214 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5215 tree
*nextp
= &BLOCK_VARS (DECL_INITIAL (decl
));
5219 if (TREE_CODE (var
) == FUNCTION_DECL
5220 && DECL_BUILT_IN (var
))
5221 *nextp
= TREE_CHAIN (var
);
5223 nextp
= &TREE_CHAIN (var
);
5229 /* Data used when collecting DECLs and TYPEs for language data removal. */
5231 struct free_lang_data_d
5233 /* Worklist to avoid excessive recursion. */
5236 /* Set of traversed objects. Used to avoid duplicate visits. */
5237 struct pointer_set_t
*pset
;
5239 /* Array of symbols to process with free_lang_data_in_decl. */
5242 /* Array of types to process with free_lang_data_in_type. */
5247 /* Save all language fields needed to generate proper debug information
5248 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5251 save_debug_info_for_decl (tree t
)
5253 /*struct saved_debug_info_d *sdi;*/
5255 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& DECL_P (t
));
5257 /* FIXME. Partial implementation for saving debug info removed. */
5261 /* Save all language fields needed to generate proper debug information
5262 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5265 save_debug_info_for_type (tree t
)
5267 /*struct saved_debug_info_d *sdi;*/
5269 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& TYPE_P (t
));
5271 /* FIXME. Partial implementation for saving debug info removed. */
5275 /* Add type or decl T to one of the list of tree nodes that need their
5276 language data removed. The lists are held inside FLD. */
5279 add_tree_to_fld_list (tree t
, struct free_lang_data_d
*fld
)
5283 fld
->decls
.safe_push (t
);
5284 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5285 save_debug_info_for_decl (t
);
5287 else if (TYPE_P (t
))
5289 fld
->types
.safe_push (t
);
5290 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5291 save_debug_info_for_type (t
);
5297 /* Push tree node T into FLD->WORKLIST. */
5300 fld_worklist_push (tree t
, struct free_lang_data_d
*fld
)
5302 if (t
&& !is_lang_specific (t
) && !pointer_set_contains (fld
->pset
, t
))
5303 fld
->worklist
.safe_push ((t
));
5307 /* Operand callback helper for free_lang_data_in_node. *TP is the
5308 subtree operand being considered. */
5311 find_decls_types_r (tree
*tp
, int *ws
, void *data
)
5314 struct free_lang_data_d
*fld
= (struct free_lang_data_d
*) data
;
5316 if (TREE_CODE (t
) == TREE_LIST
)
5319 /* Language specific nodes will be removed, so there is no need
5320 to gather anything under them. */
5321 if (is_lang_specific (t
))
5329 /* Note that walk_tree does not traverse every possible field in
5330 decls, so we have to do our own traversals here. */
5331 add_tree_to_fld_list (t
, fld
);
5333 fld_worklist_push (DECL_NAME (t
), fld
);
5334 fld_worklist_push (DECL_CONTEXT (t
), fld
);
5335 fld_worklist_push (DECL_SIZE (t
), fld
);
5336 fld_worklist_push (DECL_SIZE_UNIT (t
), fld
);
5338 /* We are going to remove everything under DECL_INITIAL for
5339 TYPE_DECLs. No point walking them. */
5340 if (TREE_CODE (t
) != TYPE_DECL
)
5341 fld_worklist_push (DECL_INITIAL (t
), fld
);
5343 fld_worklist_push (DECL_ATTRIBUTES (t
), fld
);
5344 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t
), fld
);
5346 if (TREE_CODE (t
) == FUNCTION_DECL
)
5348 fld_worklist_push (DECL_ARGUMENTS (t
), fld
);
5349 fld_worklist_push (DECL_RESULT (t
), fld
);
5351 else if (TREE_CODE (t
) == TYPE_DECL
)
5353 fld_worklist_push (DECL_ARGUMENT_FLD (t
), fld
);
5354 fld_worklist_push (DECL_ORIGINAL_TYPE (t
), fld
);
5356 else if (TREE_CODE (t
) == FIELD_DECL
)
5358 fld_worklist_push (DECL_FIELD_OFFSET (t
), fld
);
5359 fld_worklist_push (DECL_BIT_FIELD_TYPE (t
), fld
);
5360 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t
), fld
);
5361 fld_worklist_push (DECL_FCONTEXT (t
), fld
);
5364 if ((TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
5365 && DECL_HAS_VALUE_EXPR_P (t
))
5366 fld_worklist_push (DECL_VALUE_EXPR (t
), fld
);
5368 if (TREE_CODE (t
) != FIELD_DECL
5369 && TREE_CODE (t
) != TYPE_DECL
)
5370 fld_worklist_push (TREE_CHAIN (t
), fld
);
5373 else if (TYPE_P (t
))
5375 /* Note that walk_tree does not traverse every possible field in
5376 types, so we have to do our own traversals here. */
5377 add_tree_to_fld_list (t
, fld
);
5379 if (!RECORD_OR_UNION_TYPE_P (t
))
5380 fld_worklist_push (TYPE_CACHED_VALUES (t
), fld
);
5381 fld_worklist_push (TYPE_SIZE (t
), fld
);
5382 fld_worklist_push (TYPE_SIZE_UNIT (t
), fld
);
5383 fld_worklist_push (TYPE_ATTRIBUTES (t
), fld
);
5384 fld_worklist_push (TYPE_POINTER_TO (t
), fld
);
5385 fld_worklist_push (TYPE_REFERENCE_TO (t
), fld
);
5386 fld_worklist_push (TYPE_NAME (t
), fld
);
5387 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5388 them and thus do not and want not to reach unused pointer types
5390 if (!POINTER_TYPE_P (t
))
5391 fld_worklist_push (TYPE_MINVAL (t
), fld
);
5392 if (!RECORD_OR_UNION_TYPE_P (t
))
5393 fld_worklist_push (TYPE_MAXVAL (t
), fld
);
5394 fld_worklist_push (TYPE_MAIN_VARIANT (t
), fld
);
5395 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5396 do not and want not to reach unused variants this way. */
5397 if (TYPE_CONTEXT (t
))
5399 tree ctx
= TYPE_CONTEXT (t
);
5400 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5401 So push that instead. */
5402 while (ctx
&& TREE_CODE (ctx
) == BLOCK
)
5403 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5404 fld_worklist_push (ctx
, fld
);
5406 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5407 and want not to reach unused types this way. */
5409 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
))
5413 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t
)), i
, tem
)
5414 fld_worklist_push (TREE_TYPE (tem
), fld
);
5415 tem
= BINFO_VIRTUALS (TYPE_BINFO (t
));
5417 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5418 && TREE_CODE (tem
) == TREE_LIST
)
5421 fld_worklist_push (TREE_VALUE (tem
), fld
);
5422 tem
= TREE_CHAIN (tem
);
5426 if (RECORD_OR_UNION_TYPE_P (t
))
5429 /* Push all TYPE_FIELDS - there can be interleaving interesting
5430 and non-interesting things. */
5431 tem
= TYPE_FIELDS (t
);
5434 if (TREE_CODE (tem
) == FIELD_DECL
5435 || TREE_CODE (tem
) == TYPE_DECL
)
5436 fld_worklist_push (tem
, fld
);
5437 tem
= TREE_CHAIN (tem
);
5441 fld_worklist_push (TYPE_STUB_DECL (t
), fld
);
5444 else if (TREE_CODE (t
) == BLOCK
)
5447 for (tem
= BLOCK_VARS (t
); tem
; tem
= TREE_CHAIN (tem
))
5448 fld_worklist_push (tem
, fld
);
5449 for (tem
= BLOCK_SUBBLOCKS (t
); tem
; tem
= BLOCK_CHAIN (tem
))
5450 fld_worklist_push (tem
, fld
);
5451 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t
), fld
);
5454 if (TREE_CODE (t
) != IDENTIFIER_NODE
5455 && CODE_CONTAINS_STRUCT (TREE_CODE (t
), TS_TYPED
))
5456 fld_worklist_push (TREE_TYPE (t
), fld
);
5462 /* Find decls and types in T. */
5465 find_decls_types (tree t
, struct free_lang_data_d
*fld
)
5469 if (!pointer_set_contains (fld
->pset
, t
))
5470 walk_tree (&t
, find_decls_types_r
, fld
, fld
->pset
);
5471 if (fld
->worklist
.is_empty ())
5473 t
= fld
->worklist
.pop ();
5477 /* Translate all the types in LIST with the corresponding runtime
5481 get_eh_types_for_runtime (tree list
)
5485 if (list
== NULL_TREE
)
5488 head
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5490 list
= TREE_CHAIN (list
);
5493 tree n
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5494 TREE_CHAIN (prev
) = n
;
5495 prev
= TREE_CHAIN (prev
);
5496 list
= TREE_CHAIN (list
);
5503 /* Find decls and types referenced in EH region R and store them in
5504 FLD->DECLS and FLD->TYPES. */
5507 find_decls_types_in_eh_region (eh_region r
, struct free_lang_data_d
*fld
)
5518 /* The types referenced in each catch must first be changed to the
5519 EH types used at runtime. This removes references to FE types
5521 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
5523 c
->type_list
= get_eh_types_for_runtime (c
->type_list
);
5524 walk_tree (&c
->type_list
, find_decls_types_r
, fld
, fld
->pset
);
5529 case ERT_ALLOWED_EXCEPTIONS
:
5530 r
->u
.allowed
.type_list
5531 = get_eh_types_for_runtime (r
->u
.allowed
.type_list
);
5532 walk_tree (&r
->u
.allowed
.type_list
, find_decls_types_r
, fld
, fld
->pset
);
5535 case ERT_MUST_NOT_THROW
:
5536 walk_tree (&r
->u
.must_not_throw
.failure_decl
,
5537 find_decls_types_r
, fld
, fld
->pset
);
5543 /* Find decls and types referenced in cgraph node N and store them in
5544 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5545 look for *every* kind of DECL and TYPE node reachable from N,
5546 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5547 NAMESPACE_DECLs, etc). */
5550 find_decls_types_in_node (struct cgraph_node
*n
, struct free_lang_data_d
*fld
)
5553 struct function
*fn
;
5557 find_decls_types (n
->decl
, fld
);
5559 if (!gimple_has_body_p (n
->decl
))
5562 gcc_assert (current_function_decl
== NULL_TREE
&& cfun
== NULL
);
5564 fn
= DECL_STRUCT_FUNCTION (n
->decl
);
5566 /* Traverse locals. */
5567 FOR_EACH_LOCAL_DECL (fn
, ix
, t
)
5568 find_decls_types (t
, fld
);
5570 /* Traverse EH regions in FN. */
5573 FOR_ALL_EH_REGION_FN (r
, fn
)
5574 find_decls_types_in_eh_region (r
, fld
);
5577 /* Traverse every statement in FN. */
5578 FOR_EACH_BB_FN (bb
, fn
)
5580 gimple_stmt_iterator si
;
5583 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
5585 gimple phi
= gsi_stmt (si
);
5587 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5589 tree
*arg_p
= gimple_phi_arg_def_ptr (phi
, i
);
5590 find_decls_types (*arg_p
, fld
);
5594 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
5596 gimple stmt
= gsi_stmt (si
);
5598 if (is_gimple_call (stmt
))
5599 find_decls_types (gimple_call_fntype (stmt
), fld
);
5601 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
5603 tree arg
= gimple_op (stmt
, i
);
5604 find_decls_types (arg
, fld
);
5611 /* Find decls and types referenced in varpool node N and store them in
5612 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5613 look for *every* kind of DECL and TYPE node reachable from N,
5614 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5615 NAMESPACE_DECLs, etc). */
5618 find_decls_types_in_var (varpool_node
*v
, struct free_lang_data_d
*fld
)
5620 find_decls_types (v
->decl
, fld
);
5623 /* If T needs an assembler name, have one created for it. */
5626 assign_assembler_name_if_neeeded (tree t
)
5628 if (need_assembler_name_p (t
))
5630 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5631 diagnostics that use input_location to show locus
5632 information. The problem here is that, at this point,
5633 input_location is generally anchored to the end of the file
5634 (since the parser is long gone), so we don't have a good
5635 position to pin it to.
5637 To alleviate this problem, this uses the location of T's
5638 declaration. Examples of this are
5639 testsuite/g++.dg/template/cond2.C and
5640 testsuite/g++.dg/template/pr35240.C. */
5641 location_t saved_location
= input_location
;
5642 input_location
= DECL_SOURCE_LOCATION (t
);
5644 decl_assembler_name (t
);
5646 input_location
= saved_location
;
5651 /* Free language specific information for every operand and expression
5652 in every node of the call graph. This process operates in three stages:
5654 1- Every callgraph node and varpool node is traversed looking for
5655 decls and types embedded in them. This is a more exhaustive
5656 search than that done by find_referenced_vars, because it will
5657 also collect individual fields, decls embedded in types, etc.
5659 2- All the decls found are sent to free_lang_data_in_decl.
5661 3- All the types found are sent to free_lang_data_in_type.
5663 The ordering between decls and types is important because
5664 free_lang_data_in_decl sets assembler names, which includes
5665 mangling. So types cannot be freed up until assembler names have
5669 free_lang_data_in_cgraph (void)
5671 struct cgraph_node
*n
;
5673 struct free_lang_data_d fld
;
5678 /* Initialize sets and arrays to store referenced decls and types. */
5679 fld
.pset
= pointer_set_create ();
5680 fld
.worklist
.create (0);
5681 fld
.decls
.create (100);
5682 fld
.types
.create (100);
5684 /* Find decls and types in the body of every function in the callgraph. */
5685 FOR_EACH_FUNCTION (n
)
5686 find_decls_types_in_node (n
, &fld
);
5688 FOR_EACH_VEC_SAFE_ELT (alias_pairs
, i
, p
)
5689 find_decls_types (p
->decl
, &fld
);
5691 /* Find decls and types in every varpool symbol. */
5692 FOR_EACH_VARIABLE (v
)
5693 find_decls_types_in_var (v
, &fld
);
5695 /* Set the assembler name on every decl found. We need to do this
5696 now because free_lang_data_in_decl will invalidate data needed
5697 for mangling. This breaks mangling on interdependent decls. */
5698 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5699 assign_assembler_name_if_neeeded (t
);
5701 /* Traverse every decl found freeing its language data. */
5702 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5703 free_lang_data_in_decl (t
);
5705 /* Traverse every type found freeing its language data. */
5706 FOR_EACH_VEC_ELT (fld
.types
, i
, t
)
5707 free_lang_data_in_type (t
);
5709 pointer_set_destroy (fld
.pset
);
5710 fld
.worklist
.release ();
5711 fld
.decls
.release ();
5712 fld
.types
.release ();
5716 /* Free resources that are used by FE but are not needed once they are done. */
5719 free_lang_data (void)
5723 /* If we are the LTO frontend we have freed lang-specific data already. */
5725 || !flag_generate_lto
)
5728 /* Allocate and assign alias sets to the standard integer types
5729 while the slots are still in the way the frontends generated them. */
5730 for (i
= 0; i
< itk_none
; ++i
)
5731 if (integer_types
[i
])
5732 TYPE_ALIAS_SET (integer_types
[i
]) = get_alias_set (integer_types
[i
]);
5734 /* Traverse the IL resetting language specific information for
5735 operands, expressions, etc. */
5736 free_lang_data_in_cgraph ();
5738 /* Create gimple variants for common types. */
5739 ptrdiff_type_node
= integer_type_node
;
5740 fileptr_type_node
= ptr_type_node
;
5742 /* Reset some langhooks. Do not reset types_compatible_p, it may
5743 still be used indirectly via the get_alias_set langhook. */
5744 lang_hooks
.dwarf_name
= lhd_dwarf_name
;
5745 lang_hooks
.decl_printable_name
= gimple_decl_printable_name
;
5746 /* We do not want the default decl_assembler_name implementation,
5747 rather if we have fixed everything we want a wrapper around it
5748 asserting that all non-local symbols already got their assembler
5749 name and only produce assembler names for local symbols. Or rather
5750 make sure we never call decl_assembler_name on local symbols and
5751 devise a separate, middle-end private scheme for it. */
5753 /* Reset diagnostic machinery. */
5754 tree_diagnostics_defaults (global_dc
);
5762 const pass_data pass_data_ipa_free_lang_data
=
5764 SIMPLE_IPA_PASS
, /* type */
5765 "*free_lang_data", /* name */
5766 OPTGROUP_NONE
, /* optinfo_flags */
5767 TV_IPA_FREE_LANG_DATA
, /* tv_id */
5768 0, /* properties_required */
5769 0, /* properties_provided */
5770 0, /* properties_destroyed */
5771 0, /* todo_flags_start */
5772 0, /* todo_flags_finish */
5775 class pass_ipa_free_lang_data
: public simple_ipa_opt_pass
5778 pass_ipa_free_lang_data (gcc::context
*ctxt
)
5779 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data
, ctxt
)
5782 /* opt_pass methods: */
5783 virtual unsigned int execute (function
*) { return free_lang_data (); }
5785 }; // class pass_ipa_free_lang_data
5789 simple_ipa_opt_pass
*
5790 make_pass_ipa_free_lang_data (gcc::context
*ctxt
)
5792 return new pass_ipa_free_lang_data (ctxt
);
5795 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5796 ATTR_NAME. Also used internally by remove_attribute(). */
5798 private_is_attribute_p (const char *attr_name
, size_t attr_len
, const_tree ident
)
5800 size_t ident_len
= IDENTIFIER_LENGTH (ident
);
5802 if (ident_len
== attr_len
)
5804 if (strcmp (attr_name
, IDENTIFIER_POINTER (ident
)) == 0)
5807 else if (ident_len
== attr_len
+ 4)
5809 /* There is the possibility that ATTR is 'text' and IDENT is
5811 const char *p
= IDENTIFIER_POINTER (ident
);
5812 if (p
[0] == '_' && p
[1] == '_'
5813 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5814 && strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5821 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5822 of ATTR_NAME, and LIST is not NULL_TREE. */
5824 private_lookup_attribute (const char *attr_name
, size_t attr_len
, tree list
)
5828 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5830 if (ident_len
== attr_len
)
5832 if (!strcmp (attr_name
,
5833 IDENTIFIER_POINTER (get_attribute_name (list
))))
5836 /* TODO: If we made sure that attributes were stored in the
5837 canonical form without '__...__' (ie, as in 'text' as opposed
5838 to '__text__') then we could avoid the following case. */
5839 else if (ident_len
== attr_len
+ 4)
5841 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5842 if (p
[0] == '_' && p
[1] == '_'
5843 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5844 && strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5847 list
= TREE_CHAIN (list
);
5853 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5854 return a pointer to the attribute's list first element if the attribute
5855 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5859 private_lookup_attribute_by_prefix (const char *attr_name
, size_t attr_len
,
5864 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5866 if (attr_len
> ident_len
)
5868 list
= TREE_CHAIN (list
);
5872 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5874 if (strncmp (attr_name
, p
, attr_len
) == 0)
5877 /* TODO: If we made sure that attributes were stored in the
5878 canonical form without '__...__' (ie, as in 'text' as opposed
5879 to '__text__') then we could avoid the following case. */
5880 if (p
[0] == '_' && p
[1] == '_' &&
5881 strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5884 list
= TREE_CHAIN (list
);
5891 /* A variant of lookup_attribute() that can be used with an identifier
5892 as the first argument, and where the identifier can be either
5893 'text' or '__text__'.
5895 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5896 return a pointer to the attribute's list element if the attribute
5897 is part of the list, or NULL_TREE if not found. If the attribute
5898 appears more than once, this only returns the first occurrence; the
5899 TREE_CHAIN of the return value should be passed back in if further
5900 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5901 can be in the form 'text' or '__text__'. */
5903 lookup_ident_attribute (tree attr_identifier
, tree list
)
5905 gcc_checking_assert (TREE_CODE (attr_identifier
) == IDENTIFIER_NODE
);
5909 gcc_checking_assert (TREE_CODE (get_attribute_name (list
))
5910 == IDENTIFIER_NODE
);
5912 /* Identifiers can be compared directly for equality. */
5913 if (attr_identifier
== get_attribute_name (list
))
5916 /* If they are not equal, they may still be one in the form
5917 'text' while the other one is in the form '__text__'. TODO:
5918 If we were storing attributes in normalized 'text' form, then
5919 this could all go away and we could take full advantage of
5920 the fact that we're comparing identifiers. :-) */
5922 size_t attr_len
= IDENTIFIER_LENGTH (attr_identifier
);
5923 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5925 if (ident_len
== attr_len
+ 4)
5927 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5928 const char *q
= IDENTIFIER_POINTER (attr_identifier
);
5929 if (p
[0] == '_' && p
[1] == '_'
5930 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5931 && strncmp (q
, p
+ 2, attr_len
) == 0)
5934 else if (ident_len
+ 4 == attr_len
)
5936 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5937 const char *q
= IDENTIFIER_POINTER (attr_identifier
);
5938 if (q
[0] == '_' && q
[1] == '_'
5939 && q
[attr_len
- 2] == '_' && q
[attr_len
- 1] == '_'
5940 && strncmp (q
+ 2, p
, ident_len
) == 0)
5944 list
= TREE_CHAIN (list
);
5950 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5954 remove_attribute (const char *attr_name
, tree list
)
5957 size_t attr_len
= strlen (attr_name
);
5959 gcc_checking_assert (attr_name
[0] != '_');
5961 for (p
= &list
; *p
; )
5964 /* TODO: If we were storing attributes in normalized form, here
5965 we could use a simple strcmp(). */
5966 if (private_is_attribute_p (attr_name
, attr_len
, get_attribute_name (l
)))
5967 *p
= TREE_CHAIN (l
);
5969 p
= &TREE_CHAIN (l
);
5975 /* Return an attribute list that is the union of a1 and a2. */
5978 merge_attributes (tree a1
, tree a2
)
5982 /* Either one unset? Take the set one. */
5984 if ((attributes
= a1
) == 0)
5987 /* One that completely contains the other? Take it. */
5989 else if (a2
!= 0 && ! attribute_list_contained (a1
, a2
))
5991 if (attribute_list_contained (a2
, a1
))
5995 /* Pick the longest list, and hang on the other list. */
5997 if (list_length (a1
) < list_length (a2
))
5998 attributes
= a2
, a2
= a1
;
6000 for (; a2
!= 0; a2
= TREE_CHAIN (a2
))
6003 for (a
= lookup_ident_attribute (get_attribute_name (a2
),
6005 a
!= NULL_TREE
&& !attribute_value_equal (a
, a2
);
6006 a
= lookup_ident_attribute (get_attribute_name (a2
),
6011 a1
= copy_node (a2
);
6012 TREE_CHAIN (a1
) = attributes
;
6021 /* Given types T1 and T2, merge their attributes and return
6025 merge_type_attributes (tree t1
, tree t2
)
6027 return merge_attributes (TYPE_ATTRIBUTES (t1
),
6028 TYPE_ATTRIBUTES (t2
));
6031 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6035 merge_decl_attributes (tree olddecl
, tree newdecl
)
6037 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
6038 DECL_ATTRIBUTES (newdecl
));
6041 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6043 /* Specialization of merge_decl_attributes for various Windows targets.
6045 This handles the following situation:
6047 __declspec (dllimport) int foo;
6050 The second instance of `foo' nullifies the dllimport. */
6053 merge_dllimport_decl_attributes (tree old
, tree new_tree
)
6056 int delete_dllimport_p
= 1;
6058 /* What we need to do here is remove from `old' dllimport if it doesn't
6059 appear in `new'. dllimport behaves like extern: if a declaration is
6060 marked dllimport and a definition appears later, then the object
6061 is not dllimport'd. We also remove a `new' dllimport if the old list
6062 contains dllexport: dllexport always overrides dllimport, regardless
6063 of the order of declaration. */
6064 if (!VAR_OR_FUNCTION_DECL_P (new_tree
))
6065 delete_dllimport_p
= 0;
6066 else if (DECL_DLLIMPORT_P (new_tree
)
6067 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old
)))
6069 DECL_DLLIMPORT_P (new_tree
) = 0;
6070 warning (OPT_Wattributes
, "%q+D already declared with dllexport attribute: "
6071 "dllimport ignored", new_tree
);
6073 else if (DECL_DLLIMPORT_P (old
) && !DECL_DLLIMPORT_P (new_tree
))
6075 /* Warn about overriding a symbol that has already been used, e.g.:
6076 extern int __attribute__ ((dllimport)) foo;
6077 int* bar () {return &foo;}
6080 if (TREE_USED (old
))
6082 warning (0, "%q+D redeclared without dllimport attribute "
6083 "after being referenced with dll linkage", new_tree
);
6084 /* If we have used a variable's address with dllimport linkage,
6085 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6086 decl may already have had TREE_CONSTANT computed.
6087 We still remove the attribute so that assembler code refers
6088 to '&foo rather than '_imp__foo'. */
6089 if (TREE_CODE (old
) == VAR_DECL
&& TREE_ADDRESSABLE (old
))
6090 DECL_DLLIMPORT_P (new_tree
) = 1;
6093 /* Let an inline definition silently override the external reference,
6094 but otherwise warn about attribute inconsistency. */
6095 else if (TREE_CODE (new_tree
) == VAR_DECL
6096 || !DECL_DECLARED_INLINE_P (new_tree
))
6097 warning (OPT_Wattributes
, "%q+D redeclared without dllimport attribute: "
6098 "previous dllimport ignored", new_tree
);
6101 delete_dllimport_p
= 0;
6103 a
= merge_attributes (DECL_ATTRIBUTES (old
), DECL_ATTRIBUTES (new_tree
));
6105 if (delete_dllimport_p
)
6106 a
= remove_attribute ("dllimport", a
);
6111 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6112 struct attribute_spec.handler. */
6115 handle_dll_attribute (tree
* pnode
, tree name
, tree args
, int flags
,
6121 /* These attributes may apply to structure and union types being created,
6122 but otherwise should pass to the declaration involved. */
6125 if (flags
& ((int) ATTR_FLAG_DECL_NEXT
| (int) ATTR_FLAG_FUNCTION_NEXT
6126 | (int) ATTR_FLAG_ARRAY_NEXT
))
6128 *no_add_attrs
= true;
6129 return tree_cons (name
, args
, NULL_TREE
);
6131 if (TREE_CODE (node
) == RECORD_TYPE
6132 || TREE_CODE (node
) == UNION_TYPE
)
6134 node
= TYPE_NAME (node
);
6140 warning (OPT_Wattributes
, "%qE attribute ignored",
6142 *no_add_attrs
= true;
6147 if (TREE_CODE (node
) != FUNCTION_DECL
6148 && TREE_CODE (node
) != VAR_DECL
6149 && TREE_CODE (node
) != TYPE_DECL
)
6151 *no_add_attrs
= true;
6152 warning (OPT_Wattributes
, "%qE attribute ignored",
6157 if (TREE_CODE (node
) == TYPE_DECL
6158 && TREE_CODE (TREE_TYPE (node
)) != RECORD_TYPE
6159 && TREE_CODE (TREE_TYPE (node
)) != UNION_TYPE
)
6161 *no_add_attrs
= true;
6162 warning (OPT_Wattributes
, "%qE attribute ignored",
6167 is_dllimport
= is_attribute_p ("dllimport", name
);
6169 /* Report error on dllimport ambiguities seen now before they cause
6173 /* Honor any target-specific overrides. */
6174 if (!targetm
.valid_dllimport_attribute_p (node
))
6175 *no_add_attrs
= true;
6177 else if (TREE_CODE (node
) == FUNCTION_DECL
6178 && DECL_DECLARED_INLINE_P (node
))
6180 warning (OPT_Wattributes
, "inline function %q+D declared as "
6181 " dllimport: attribute ignored", node
);
6182 *no_add_attrs
= true;
6184 /* Like MS, treat definition of dllimported variables and
6185 non-inlined functions on declaration as syntax errors. */
6186 else if (TREE_CODE (node
) == FUNCTION_DECL
&& DECL_INITIAL (node
))
6188 error ("function %q+D definition is marked dllimport", node
);
6189 *no_add_attrs
= true;
6192 else if (TREE_CODE (node
) == VAR_DECL
)
6194 if (DECL_INITIAL (node
))
6196 error ("variable %q+D definition is marked dllimport",
6198 *no_add_attrs
= true;
6201 /* `extern' needn't be specified with dllimport.
6202 Specify `extern' now and hope for the best. Sigh. */
6203 DECL_EXTERNAL (node
) = 1;
6204 /* Also, implicitly give dllimport'd variables declared within
6205 a function global scope, unless declared static. */
6206 if (current_function_decl
!= NULL_TREE
&& !TREE_STATIC (node
))
6207 TREE_PUBLIC (node
) = 1;
6210 if (*no_add_attrs
== false)
6211 DECL_DLLIMPORT_P (node
) = 1;
6213 else if (TREE_CODE (node
) == FUNCTION_DECL
6214 && DECL_DECLARED_INLINE_P (node
)
6215 && flag_keep_inline_dllexport
)
6216 /* An exported function, even if inline, must be emitted. */
6217 DECL_EXTERNAL (node
) = 0;
6219 /* Report error if symbol is not accessible at global scope. */
6220 if (!TREE_PUBLIC (node
)
6221 && (TREE_CODE (node
) == VAR_DECL
6222 || TREE_CODE (node
) == FUNCTION_DECL
))
6224 error ("external linkage required for symbol %q+D because of "
6225 "%qE attribute", node
, name
);
6226 *no_add_attrs
= true;
6229 /* A dllexport'd entity must have default visibility so that other
6230 program units (shared libraries or the main executable) can see
6231 it. A dllimport'd entity must have default visibility so that
6232 the linker knows that undefined references within this program
6233 unit can be resolved by the dynamic linker. */
6236 if (DECL_VISIBILITY_SPECIFIED (node
)
6237 && DECL_VISIBILITY (node
) != VISIBILITY_DEFAULT
)
6238 error ("%qE implies default visibility, but %qD has already "
6239 "been declared with a different visibility",
6241 DECL_VISIBILITY (node
) = VISIBILITY_DEFAULT
;
6242 DECL_VISIBILITY_SPECIFIED (node
) = 1;
6248 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6250 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6251 of the various TYPE_QUAL values. Also, set the UPC layout qualifier,
6252 which is either null or a reference to an integral constant. */
6255 set_type_quals (tree type
, int type_quals
, tree layout_qualifier
)
6257 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
6258 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
6259 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
6260 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
6261 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
6262 TYPE_UPC_SHARED (type
) = (type_quals
& TYPE_QUAL_UPC_SHARED
) != 0;
6263 TYPE_UPC_STRICT (type
) = (type_quals
& TYPE_QUAL_UPC_STRICT
) != 0;
6264 TYPE_UPC_RELAXED (type
) = (type_quals
& TYPE_QUAL_UPC_RELAXED
) != 0;
6265 if (TYPE_UPC_SHARED (type
))
6266 SET_TYPE_UPC_BLOCK_FACTOR (type
, layout_qualifier
);
6269 /* Returns true iff CAND is equivalent to BASE with
6270 TYPE_QUALS and LAYOUT_QUALIFIER. */
6273 check_qualified_type (const_tree cand
, const_tree base
,
6274 int type_quals
, tree layout_qualifier
)
6276 return (TYPE_QUALS (cand
) == type_quals
6277 && TYPE_UPC_BLOCK_FACTOR (cand
) == layout_qualifier
6278 && TYPE_NAME (cand
) == TYPE_NAME (base
)
6279 /* Apparently this is needed for Objective-C. */
6280 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6281 /* Check alignment. */
6282 && TYPE_ALIGN (cand
) == TYPE_ALIGN (base
)
6283 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6284 TYPE_ATTRIBUTES (base
)));
6287 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6290 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
6292 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
6293 && TYPE_UPC_BLOCK_FACTOR (cand
) == TYPE_UPC_BLOCK_FACTOR (base
)
6294 && TYPE_NAME (cand
) == TYPE_NAME (base
)
6295 /* Apparently this is needed for Objective-C. */
6296 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6297 /* Check alignment. */
6298 && TYPE_ALIGN (cand
) == align
6299 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6300 TYPE_ATTRIBUTES (base
)));
6303 /* This function checks to see if TYPE matches the size one of the built-in
6304 atomic types, and returns that core atomic type. */
6307 find_atomic_core_type (tree type
)
6309 tree base_atomic_type
;
6311 /* Only handle complete types. */
6312 if (TYPE_SIZE (type
) == NULL_TREE
)
6315 HOST_WIDE_INT type_size
= tree_to_uhwi (TYPE_SIZE (type
));
6319 base_atomic_type
= atomicQI_type_node
;
6323 base_atomic_type
= atomicHI_type_node
;
6327 base_atomic_type
= atomicSI_type_node
;
6331 base_atomic_type
= atomicDI_type_node
;
6335 base_atomic_type
= atomicTI_type_node
;
6339 base_atomic_type
= NULL_TREE
;
6342 return base_atomic_type
;
6345 /* Return a version of the TYPE, qualified as indicated by the
6346 TYPE_QUALS, if one exists. If no qualified version exists yet,
6347 return NULL_TREE. */
6350 get_qualified_type_1 (tree type
, int type_quals
, tree layout_qualifier
)
6354 if (TYPE_QUALS (type
) == type_quals
)
6357 /* Search the chain of variants to see if there is already one there just
6358 like the one we need to have. If so, use that existing one. We must
6359 preserve the TYPE_NAME, since there is code that depends on this. */
6360 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6361 if (check_qualified_type (t
, type
, type_quals
, layout_qualifier
))
6367 /* Like get_qualified_type_1, but creates the type if it does not
6368 exist. This function never returns NULL_TREE. */
6371 build_qualified_type_1 (tree type
, int type_quals
, tree layout_qualifier
)
6375 /* See if we already have the appropriate qualified variant. */
6376 t
= get_qualified_type_1 (type
, type_quals
, layout_qualifier
);
6378 /* If not, build it. */
6381 t
= build_variant_type_copy (type
);
6382 set_type_quals (t
, type_quals
, layout_qualifier
);
6384 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
6386 /* See if this object can map to a basic atomic type. */
6387 tree atomic_type
= find_atomic_core_type (type
);
6390 /* Ensure the alignment of this type is compatible with
6391 the required alignment of the atomic type. */
6392 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
6393 TYPE_ALIGN (t
) = TYPE_ALIGN (atomic_type
);
6397 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6398 /* Propagate structural equality. */
6399 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6400 else if (TYPE_CANONICAL (type
) != type
)
6401 /* Build the underlying canonical type, since it is different
6404 tree c
= build_qualified_type (TYPE_CANONICAL (type
), type_quals
);
6405 TYPE_CANONICAL (t
) = TYPE_CANONICAL (c
);
6408 /* T is its own canonical type. */
6409 TYPE_CANONICAL (t
) = t
;
6416 /* Create a variant of type T with alignment ALIGN. */
6419 build_aligned_type (tree type
, unsigned int align
)
6423 if (TYPE_PACKED (type
)
6424 || TYPE_ALIGN (type
) == align
)
6427 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6428 if (check_aligned_type (t
, type
, align
))
6431 t
= build_variant_type_copy (type
);
6432 TYPE_ALIGN (t
) = align
;
6437 /* Create a new distinct copy of TYPE. The new type is made its own
6438 MAIN_VARIANT. If TYPE requires structural equality checks, the
6439 resulting type requires structural equality checks; otherwise, its
6440 TYPE_CANONICAL points to itself. */
6443 build_distinct_type_copy (tree type
)
6445 tree t
= copy_node (type
);
6447 TYPE_POINTER_TO (t
) = 0;
6448 TYPE_REFERENCE_TO (t
) = 0;
6450 /* Set the canonical type either to a new equivalence class, or
6451 propagate the need for structural equality checks. */
6452 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6453 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6455 TYPE_CANONICAL (t
) = t
;
6457 /* Make it its own variant. */
6458 TYPE_MAIN_VARIANT (t
) = t
;
6459 TYPE_NEXT_VARIANT (t
) = 0;
6461 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6462 whose TREE_TYPE is not t. This can also happen in the Ada
6463 frontend when using subtypes. */
6468 /* Create a new variant of TYPE, equivalent but distinct. This is so
6469 the caller can modify it. TYPE_CANONICAL for the return type will
6470 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6471 are considered equal by the language itself (or that both types
6472 require structural equality checks). */
6475 build_variant_type_copy (tree type
)
6477 tree t
, m
= TYPE_MAIN_VARIANT (type
);
6479 t
= build_distinct_type_copy (type
);
6481 /* Since we're building a variant, assume that it is a non-semantic
6482 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6483 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
6485 /* Add the new type to the chain of variants of TYPE. */
6486 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
6487 TYPE_NEXT_VARIANT (m
) = t
;
6488 TYPE_MAIN_VARIANT (t
) = m
;
6493 /* Return true if the from tree in both tree maps are equal. */
6496 tree_map_base_eq (const void *va
, const void *vb
)
6498 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
6499 *const b
= (const struct tree_map_base
*) vb
;
6500 return (a
->from
== b
->from
);
6503 /* Hash a from tree in a tree_base_map. */
6506 tree_map_base_hash (const void *item
)
6508 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
6511 /* Return true if this tree map structure is marked for garbage collection
6512 purposes. We simply return true if the from tree is marked, so that this
6513 structure goes away when the from tree goes away. */
6516 tree_map_base_marked_p (const void *p
)
6518 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
6521 /* Hash a from tree in a tree_map. */
6524 tree_map_hash (const void *item
)
6526 return (((const struct tree_map
*) item
)->hash
);
6529 /* Hash a from tree in a tree_decl_map. */
6532 tree_decl_map_hash (const void *item
)
6534 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
6537 /* Return the initialization priority for DECL. */
6540 decl_init_priority_lookup (tree decl
)
6542 symtab_node
*snode
= symtab_get_node (decl
);
6545 return DEFAULT_INIT_PRIORITY
;
6547 snode
->get_init_priority ();
6550 /* Return the finalization priority for DECL. */
6553 decl_fini_priority_lookup (tree decl
)
6555 cgraph_node
*node
= cgraph_get_node (decl
);
6558 return DEFAULT_INIT_PRIORITY
;
6560 node
->get_fini_priority ();
6563 /* Set the initialization priority for DECL to PRIORITY. */
6566 decl_init_priority_insert (tree decl
, priority_type priority
)
6568 struct symtab_node
*snode
;
6570 if (priority
== DEFAULT_INIT_PRIORITY
)
6572 snode
= symtab_get_node (decl
);
6576 else if (TREE_CODE (decl
) == VAR_DECL
)
6577 snode
= varpool_node_for_decl (decl
);
6579 snode
= cgraph_get_create_node (decl
);
6580 snode
->set_init_priority (priority
);
6583 /* Set the finalization priority for DECL to PRIORITY. */
6586 decl_fini_priority_insert (tree decl
, priority_type priority
)
6588 struct cgraph_node
*node
;
6590 if (priority
== DEFAULT_INIT_PRIORITY
)
6592 node
= cgraph_get_node (decl
);
6597 node
= cgraph_get_create_node (decl
);
6598 node
->set_fini_priority (priority
);
6601 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6604 print_debug_expr_statistics (void)
6606 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6607 (long) htab_size (debug_expr_for_decl
),
6608 (long) htab_elements (debug_expr_for_decl
),
6609 htab_collisions (debug_expr_for_decl
));
6612 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6615 print_value_expr_statistics (void)
6617 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6618 (long) htab_size (value_expr_for_decl
),
6619 (long) htab_elements (value_expr_for_decl
),
6620 htab_collisions (value_expr_for_decl
));
6623 /* Lookup a debug expression for FROM, and return it if we find one. */
6626 decl_debug_expr_lookup (tree from
)
6628 struct tree_decl_map
*h
, in
;
6629 in
.base
.from
= from
;
6631 h
= (struct tree_decl_map
*)
6632 htab_find_with_hash (debug_expr_for_decl
, &in
, DECL_UID (from
));
6638 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6641 decl_debug_expr_insert (tree from
, tree to
)
6643 struct tree_decl_map
*h
;
6646 h
= ggc_alloc
<tree_decl_map
> ();
6647 h
->base
.from
= from
;
6649 loc
= htab_find_slot_with_hash (debug_expr_for_decl
, h
, DECL_UID (from
),
6651 *(struct tree_decl_map
**) loc
= h
;
6654 /* Lookup a value expression for FROM, and return it if we find one. */
6657 decl_value_expr_lookup (tree from
)
6659 struct tree_decl_map
*h
, in
;
6660 in
.base
.from
= from
;
6662 h
= (struct tree_decl_map
*)
6663 htab_find_with_hash (value_expr_for_decl
, &in
, DECL_UID (from
));
6669 /* Insert a mapping FROM->TO in the value expression hashtable. */
6672 decl_value_expr_insert (tree from
, tree to
)
6674 struct tree_decl_map
*h
;
6677 h
= ggc_alloc
<tree_decl_map
> ();
6678 h
->base
.from
= from
;
6680 loc
= htab_find_slot_with_hash (value_expr_for_decl
, h
, DECL_UID (from
),
6682 *(struct tree_decl_map
**) loc
= h
;
6685 /* Lookup a vector of debug arguments for FROM, and return it if we
6689 decl_debug_args_lookup (tree from
)
6691 struct tree_vec_map
*h
, in
;
6693 if (!DECL_HAS_DEBUG_ARGS_P (from
))
6695 gcc_checking_assert (debug_args_for_decl
!= NULL
);
6696 in
.base
.from
= from
;
6697 h
= (struct tree_vec_map
*)
6698 htab_find_with_hash (debug_args_for_decl
, &in
, DECL_UID (from
));
6704 /* Insert a mapping FROM->empty vector of debug arguments in the value
6705 expression hashtable. */
6708 decl_debug_args_insert (tree from
)
6710 struct tree_vec_map
*h
;
6713 if (DECL_HAS_DEBUG_ARGS_P (from
))
6714 return decl_debug_args_lookup (from
);
6715 if (debug_args_for_decl
== NULL
)
6716 debug_args_for_decl
= htab_create_ggc (64, tree_vec_map_hash
,
6717 tree_vec_map_eq
, 0);
6718 h
= ggc_alloc
<tree_vec_map
> ();
6719 h
->base
.from
= from
;
6721 loc
= htab_find_slot_with_hash (debug_args_for_decl
, h
, DECL_UID (from
),
6723 *(struct tree_vec_map
**) loc
= h
;
6724 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
6728 /* Hashing of types so that we don't make duplicates.
6729 The entry point is `type_hash_canon'. */
6731 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6732 with types in the TREE_VALUE slots), by adding the hash codes
6733 of the individual types. */
6736 type_hash_list (const_tree list
, hashval_t hashcode
)
6740 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
6741 if (TREE_VALUE (tail
) != error_mark_node
)
6742 hashcode
= iterative_hash_object (TYPE_HASH (TREE_VALUE (tail
)),
6748 /* These are the Hashtable callback functions. */
6750 /* Returns true iff the types are equivalent. */
6753 type_hash_eq (const void *va
, const void *vb
)
6755 const struct type_hash
*const a
= (const struct type_hash
*) va
,
6756 *const b
= (const struct type_hash
*) vb
;
6758 /* First test the things that are the same for all types. */
6759 if (a
->hash
!= b
->hash
6760 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
6761 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
6762 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
6763 TYPE_ATTRIBUTES (b
->type
))
6764 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
6765 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
6768 /* Be careful about comparing arrays before and after the element type
6769 has been completed; don't compare TYPE_ALIGN unless both types are
6771 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
6772 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
6773 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
6776 switch (TREE_CODE (a
->type
))
6781 case REFERENCE_TYPE
:
6786 return TYPE_VECTOR_SUBPARTS (a
->type
) == TYPE_VECTOR_SUBPARTS (b
->type
);
6789 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
6790 && !(TYPE_VALUES (a
->type
)
6791 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
6792 && TYPE_VALUES (b
->type
)
6793 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
6794 && type_list_equal (TYPE_VALUES (a
->type
),
6795 TYPE_VALUES (b
->type
))))
6798 /* ... fall through ... */
6803 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
6805 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
6806 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
6807 TYPE_MAX_VALUE (b
->type
)))
6808 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6809 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6810 TYPE_MIN_VALUE (b
->type
))));
6812 case FIXED_POINT_TYPE
:
6813 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6816 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6819 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6820 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6821 || (TYPE_ARG_TYPES (a
->type
)
6822 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6823 && TYPE_ARG_TYPES (b
->type
)
6824 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6825 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6826 TYPE_ARG_TYPES (b
->type
)))))
6830 return TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
);
6834 case QUAL_UNION_TYPE
:
6835 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6836 || (TYPE_FIELDS (a
->type
)
6837 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6838 && TYPE_FIELDS (b
->type
)
6839 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6840 && type_list_equal (TYPE_FIELDS (a
->type
),
6841 TYPE_FIELDS (b
->type
))));
6844 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6845 || (TYPE_ARG_TYPES (a
->type
)
6846 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6847 && TYPE_ARG_TYPES (b
->type
)
6848 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6849 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6850 TYPE_ARG_TYPES (b
->type
))))
6858 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6859 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6864 /* Return the cached hash value. */
6867 type_hash_hash (const void *item
)
6869 return ((const struct type_hash
*) item
)->hash
;
6872 /* Look in the type hash table for a type isomorphic to TYPE.
6873 If one is found, return it. Otherwise return 0. */
6876 type_hash_lookup (hashval_t hashcode
, tree type
)
6878 struct type_hash
*h
, in
;
6880 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6881 must call that routine before comparing TYPE_ALIGNs. */
6887 h
= (struct type_hash
*) htab_find_with_hash (type_hash_table
, &in
,
6894 /* Add an entry to the type-hash-table
6895 for a type TYPE whose hash code is HASHCODE. */
6898 type_hash_add (hashval_t hashcode
, tree type
)
6900 struct type_hash
*h
;
6903 h
= ggc_alloc
<type_hash
> ();
6906 loc
= htab_find_slot_with_hash (type_hash_table
, h
, hashcode
, INSERT
);
6910 /* Given TYPE, and HASHCODE its hash code, return the canonical
6911 object for an identical type if one already exists.
6912 Otherwise, return TYPE, and record it as the canonical object.
6914 To use this function, first create a type of the sort you want.
6915 Then compute its hash code from the fields of the type that
6916 make it different from other similar types.
6917 Then call this function and use the value. */
6920 type_hash_canon (unsigned int hashcode
, tree type
)
6924 /* The hash table only contains main variants, so ensure that's what we're
6926 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6928 /* See if the type is in the hash table already. If so, return it.
6929 Otherwise, add the type. */
6930 t1
= type_hash_lookup (hashcode
, type
);
6933 if (GATHER_STATISTICS
)
6935 tree_code_counts
[(int) TREE_CODE (type
)]--;
6936 tree_node_counts
[(int) t_kind
]--;
6937 tree_node_sizes
[(int) t_kind
] -= sizeof (struct tree_type_non_common
);
6943 type_hash_add (hashcode
, type
);
6948 /* See if the data pointed to by the type hash table is marked. We consider
6949 it marked if the type is marked or if a debug type number or symbol
6950 table entry has been made for the type. */
6953 type_hash_marked_p (const void *p
)
6955 const_tree
const type
= ((const struct type_hash
*) p
)->type
;
6957 return ggc_marked_p (type
);
6961 print_type_hash_statistics (void)
6963 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6964 (long) htab_size (type_hash_table
),
6965 (long) htab_elements (type_hash_table
),
6966 htab_collisions (type_hash_table
));
6969 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6970 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6971 by adding the hash codes of the individual attributes. */
6974 attribute_hash_list (const_tree list
, hashval_t hashcode
)
6978 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
6979 /* ??? Do we want to add in TREE_VALUE too? */
6980 hashcode
= iterative_hash_object
6981 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail
)), hashcode
);
6985 /* Given two lists of attributes, return true if list l2 is
6986 equivalent to l1. */
6989 attribute_list_equal (const_tree l1
, const_tree l2
)
6994 return attribute_list_contained (l1
, l2
)
6995 && attribute_list_contained (l2
, l1
);
6998 /* Given two lists of attributes, return true if list L2 is
6999 completely contained within L1. */
7000 /* ??? This would be faster if attribute names were stored in a canonicalized
7001 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7002 must be used to show these elements are equivalent (which they are). */
7003 /* ??? It's not clear that attributes with arguments will always be handled
7007 attribute_list_contained (const_tree l1
, const_tree l2
)
7011 /* First check the obvious, maybe the lists are identical. */
7015 /* Maybe the lists are similar. */
7016 for (t1
= l1
, t2
= l2
;
7018 && get_attribute_name (t1
) == get_attribute_name (t2
)
7019 && TREE_VALUE (t1
) == TREE_VALUE (t2
);
7020 t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
7023 /* Maybe the lists are equal. */
7024 if (t1
== 0 && t2
== 0)
7027 for (; t2
!= 0; t2
= TREE_CHAIN (t2
))
7030 /* This CONST_CAST is okay because lookup_attribute does not
7031 modify its argument and the return value is assigned to a
7033 for (attr
= lookup_ident_attribute (get_attribute_name (t2
),
7034 CONST_CAST_TREE (l1
));
7035 attr
!= NULL_TREE
&& !attribute_value_equal (t2
, attr
);
7036 attr
= lookup_ident_attribute (get_attribute_name (t2
),
7040 if (attr
== NULL_TREE
)
7047 /* Given two lists of types
7048 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7049 return 1 if the lists contain the same types in the same order.
7050 Also, the TREE_PURPOSEs must match. */
7053 type_list_equal (const_tree l1
, const_tree l2
)
7057 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
7058 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
7059 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
7060 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
7061 && (TREE_TYPE (TREE_PURPOSE (t1
))
7062 == TREE_TYPE (TREE_PURPOSE (t2
))))))
7068 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7069 given by TYPE. If the argument list accepts variable arguments,
7070 then this function counts only the ordinary arguments. */
7073 type_num_arguments (const_tree type
)
7078 for (t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
7079 /* If the function does not take a variable number of arguments,
7080 the last element in the list will have type `void'. */
7081 if (VOID_TYPE_P (TREE_VALUE (t
)))
7089 /* Nonzero if integer constants T1 and T2
7090 represent the same constant value. */
7093 tree_int_cst_equal (const_tree t1
, const_tree t2
)
7098 if (t1
== 0 || t2
== 0)
7101 if (TREE_CODE (t1
) == INTEGER_CST
7102 && TREE_CODE (t2
) == INTEGER_CST
7103 && wi::to_widest (t1
) == wi::to_widest (t2
))
7109 /* Return true if T is an INTEGER_CST whose numerical value (extended
7110 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7113 tree_fits_shwi_p (const_tree t
)
7115 return (t
!= NULL_TREE
7116 && TREE_CODE (t
) == INTEGER_CST
7117 && wi::fits_shwi_p (wi::to_widest (t
)));
7120 /* Return true if T is an INTEGER_CST whose numerical value (extended
7121 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7124 tree_fits_uhwi_p (const_tree t
)
7126 return (t
!= NULL_TREE
7127 && TREE_CODE (t
) == INTEGER_CST
7128 && wi::fits_uhwi_p (wi::to_widest (t
)));
7131 /* T is an INTEGER_CST whose numerical value (extended according to
7132 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7136 tree_to_shwi (const_tree t
)
7138 gcc_assert (tree_fits_shwi_p (t
));
7139 return TREE_INT_CST_LOW (t
);
7142 /* T is an INTEGER_CST whose numerical value (extended according to
7143 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7146 unsigned HOST_WIDE_INT
7147 tree_to_uhwi (const_tree t
)
7149 gcc_assert (tree_fits_uhwi_p (t
));
7150 return TREE_INT_CST_LOW (t
);
7153 /* Return the most significant (sign) bit of T. */
7156 tree_int_cst_sign_bit (const_tree t
)
7158 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
7160 return wi::extract_uhwi (t
, bitno
, 1);
7163 /* Return an indication of the sign of the integer constant T.
7164 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7165 Note that -1 will never be returned if T's type is unsigned. */
7168 tree_int_cst_sgn (const_tree t
)
7170 if (wi::eq_p (t
, 0))
7172 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
7174 else if (wi::neg_p (t
))
7180 /* Return the minimum number of bits needed to represent VALUE in a
7181 signed or unsigned type, UNSIGNEDP says which. */
7184 tree_int_cst_min_precision (tree value
, signop sgn
)
7186 /* If the value is negative, compute its negative minus 1. The latter
7187 adjustment is because the absolute value of the largest negative value
7188 is one larger than the largest positive value. This is equivalent to
7189 a bit-wise negation, so use that operation instead. */
7191 if (tree_int_cst_sgn (value
) < 0)
7192 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
7194 /* Return the number of bits needed, taking into account the fact
7195 that we need one more bit for a signed than unsigned type.
7196 If value is 0 or -1, the minimum precision is 1 no matter
7197 whether unsignedp is true or false. */
7199 if (integer_zerop (value
))
7202 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
7205 /* Return truthvalue of whether T1 is the same tree structure as T2.
7206 Return 1 if they are the same.
7207 Return 0 if they are understandably different.
7208 Return -1 if either contains tree structure not understood by
7212 simple_cst_equal (const_tree t1
, const_tree t2
)
7214 enum tree_code code1
, code2
;
7220 if (t1
== 0 || t2
== 0)
7223 code1
= TREE_CODE (t1
);
7224 code2
= TREE_CODE (t2
);
7226 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
7228 if (CONVERT_EXPR_CODE_P (code2
)
7229 || code2
== NON_LVALUE_EXPR
)
7230 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7232 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
7235 else if (CONVERT_EXPR_CODE_P (code2
)
7236 || code2
== NON_LVALUE_EXPR
)
7237 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
7245 return wi::to_widest (t1
) == wi::to_widest (t2
);
7248 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1
), TREE_REAL_CST (t2
));
7251 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
7254 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
7255 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
7256 TREE_STRING_LENGTH (t1
)));
7260 unsigned HOST_WIDE_INT idx
;
7261 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
7262 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
7264 if (vec_safe_length (v1
) != vec_safe_length (v2
))
7267 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
7268 /* ??? Should we handle also fields here? */
7269 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
7275 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7278 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
7281 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
7284 const_tree arg1
, arg2
;
7285 const_call_expr_arg_iterator iter1
, iter2
;
7286 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
7287 arg2
= first_const_call_expr_arg (t2
, &iter2
);
7289 arg1
= next_const_call_expr_arg (&iter1
),
7290 arg2
= next_const_call_expr_arg (&iter2
))
7292 cmp
= simple_cst_equal (arg1
, arg2
);
7296 return arg1
== arg2
;
7300 /* Special case: if either target is an unallocated VAR_DECL,
7301 it means that it's going to be unified with whatever the
7302 TARGET_EXPR is really supposed to initialize, so treat it
7303 as being equivalent to anything. */
7304 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
7305 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
7306 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
7307 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
7308 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
7309 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
7312 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7317 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
7319 case WITH_CLEANUP_EXPR
:
7320 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7324 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
7327 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
7328 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7342 /* This general rule works for most tree codes. All exceptions should be
7343 handled above. If this is a language-specific tree code, we can't
7344 trust what might be in the operand, so say we don't know
7346 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
7349 switch (TREE_CODE_CLASS (code1
))
7353 case tcc_comparison
:
7354 case tcc_expression
:
7358 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
7360 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
7372 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7373 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7374 than U, respectively. */
7377 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
7379 if (tree_int_cst_sgn (t
) < 0)
7381 else if (!tree_fits_uhwi_p (t
))
7383 else if (TREE_INT_CST_LOW (t
) == u
)
7385 else if (TREE_INT_CST_LOW (t
) < u
)
7391 /* Return true if SIZE represents a constant size that is in bounds of
7392 what the middle-end and the backend accepts (covering not more than
7393 half of the address-space). */
7396 valid_constant_size_p (const_tree size
)
7398 if (! tree_fits_uhwi_p (size
)
7399 || TREE_OVERFLOW (size
)
7400 || tree_int_cst_sign_bit (size
) != 0)
7405 /* Return the precision of the type, or for a complex or vector type the
7406 precision of the type of its elements. */
7409 element_precision (const_tree type
)
7411 enum tree_code code
= TREE_CODE (type
);
7412 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
7413 type
= TREE_TYPE (type
);
7415 return TYPE_PRECISION (type
);
7418 /* Return true if CODE represents an associative tree code. Otherwise
7421 associative_tree_code (enum tree_code code
)
7440 /* Return true if CODE represents a commutative tree code. Otherwise
7443 commutative_tree_code (enum tree_code code
)
7449 case MULT_HIGHPART_EXPR
:
7457 case UNORDERED_EXPR
:
7461 case TRUTH_AND_EXPR
:
7462 case TRUTH_XOR_EXPR
:
7464 case WIDEN_MULT_EXPR
:
7465 case VEC_WIDEN_MULT_HI_EXPR
:
7466 case VEC_WIDEN_MULT_LO_EXPR
:
7467 case VEC_WIDEN_MULT_EVEN_EXPR
:
7468 case VEC_WIDEN_MULT_ODD_EXPR
:
7477 /* Return true if CODE represents a ternary tree code for which the
7478 first two operands are commutative. Otherwise return false. */
7480 commutative_ternary_tree_code (enum tree_code code
)
7484 case WIDEN_MULT_PLUS_EXPR
:
7485 case WIDEN_MULT_MINUS_EXPR
:
7494 /* Generate a hash value for an expression. This can be used iteratively
7495 by passing a previous result as the VAL argument.
7497 This function is intended to produce the same hash for expressions which
7498 would compare equal using operand_equal_p. */
7501 iterative_hash_expr (const_tree t
, hashval_t val
)
7504 enum tree_code code
;
7505 enum tree_code_class tclass
;
7508 return iterative_hash_hashval_t (0, val
);
7510 code
= TREE_CODE (t
);
7514 /* Alas, constants aren't shared, so we can't rely on pointer
7517 return iterative_hash_hashval_t (0, val
);
7519 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
7520 val
= iterative_hash_host_wide_int (TREE_INT_CST_ELT (t
, i
), val
);
7524 unsigned int val2
= real_hash (TREE_REAL_CST_PTR (t
));
7526 return iterative_hash_hashval_t (val2
, val
);
7530 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
7532 return iterative_hash_hashval_t (val2
, val
);
7535 return iterative_hash (TREE_STRING_POINTER (t
),
7536 TREE_STRING_LENGTH (t
), val
);
7538 val
= iterative_hash_expr (TREE_REALPART (t
), val
);
7539 return iterative_hash_expr (TREE_IMAGPART (t
), val
);
7543 for (i
= 0; i
< VECTOR_CST_NELTS (t
); ++i
)
7544 val
= iterative_hash_expr (VECTOR_CST_ELT (t
, i
), val
);
7548 /* We can just compare by pointer. */
7549 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t
), val
);
7550 case PLACEHOLDER_EXPR
:
7551 /* The node itself doesn't matter. */
7554 /* A list of expressions, for a CALL_EXPR or as the elements of a
7556 for (; t
; t
= TREE_CHAIN (t
))
7557 val
= iterative_hash_expr (TREE_VALUE (t
), val
);
7561 unsigned HOST_WIDE_INT idx
;
7563 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
7565 val
= iterative_hash_expr (field
, val
);
7566 val
= iterative_hash_expr (value
, val
);
7571 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7572 Otherwise nodes that compare equal according to operand_equal_p might
7573 get different hash codes. However, don't do this for machine specific
7574 or front end builtins, since the function code is overloaded in those
7576 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
7577 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
7579 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
7580 code
= TREE_CODE (t
);
7584 tclass
= TREE_CODE_CLASS (code
);
7586 if (tclass
== tcc_declaration
)
7588 /* DECL's have a unique ID */
7589 val
= iterative_hash_host_wide_int (DECL_UID (t
), val
);
7593 gcc_assert (IS_EXPR_CODE_CLASS (tclass
));
7595 val
= iterative_hash_object (code
, val
);
7597 /* Don't hash the type, that can lead to having nodes which
7598 compare equal according to operand_equal_p, but which
7599 have different hash codes. */
7600 if (CONVERT_EXPR_CODE_P (code
)
7601 || code
== NON_LVALUE_EXPR
)
7603 /* Make sure to include signness in the hash computation. */
7604 val
+= TYPE_UNSIGNED (TREE_TYPE (t
));
7605 val
= iterative_hash_expr (TREE_OPERAND (t
, 0), val
);
7608 else if (commutative_tree_code (code
))
7610 /* It's a commutative expression. We want to hash it the same
7611 however it appears. We do this by first hashing both operands
7612 and then rehashing based on the order of their independent
7614 hashval_t one
= iterative_hash_expr (TREE_OPERAND (t
, 0), 0);
7615 hashval_t two
= iterative_hash_expr (TREE_OPERAND (t
, 1), 0);
7619 t
= one
, one
= two
, two
= t
;
7621 val
= iterative_hash_hashval_t (one
, val
);
7622 val
= iterative_hash_hashval_t (two
, val
);
7625 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
7626 val
= iterative_hash_expr (TREE_OPERAND (t
, i
), val
);
7632 /* Constructors for pointer, array and function types.
7633 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7634 constructed by language-dependent code, not here.) */
7636 /* Construct, lay out and return the type of pointers to TO_TYPE with
7637 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7638 reference all of memory. If such a type has already been
7639 constructed, reuse it. */
7642 build_pointer_type_for_mode (tree to_type
, enum machine_mode mode
,
7647 if (to_type
== error_mark_node
)
7648 return error_mark_node
;
7650 /* If the pointed-to type has the may_alias attribute set, force
7651 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7652 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7653 can_alias_all
= true;
7655 /* In some cases, languages will have things that aren't a POINTER_TYPE
7656 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7657 In that case, return that type without regard to the rest of our
7660 ??? This is a kludge, but consistent with the way this function has
7661 always operated and there doesn't seem to be a good way to avoid this
7663 if (TYPE_POINTER_TO (to_type
) != 0
7664 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
7665 return TYPE_POINTER_TO (to_type
);
7667 /* First, if we already have a type for pointers to TO_TYPE and it's
7668 the proper mode, use it. */
7669 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
7670 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7673 t
= make_node (POINTER_TYPE
);
7675 TREE_TYPE (t
) = to_type
;
7676 SET_TYPE_MODE (t
, mode
);
7677 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7678 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
7679 TYPE_POINTER_TO (to_type
) = t
;
7681 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
))
7682 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7683 else if (TYPE_CANONICAL (to_type
) != to_type
)
7685 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
7686 mode
, can_alias_all
);
7688 /* Lay out the type. This function has many callers that are concerned
7689 with expression-construction, and this simplifies them all. */
7695 /* By default build pointers in ptr_mode. */
7698 build_pointer_type (tree to_type
)
7700 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7701 : TYPE_ADDR_SPACE (to_type
);
7702 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7703 if (upc_shared_type_p (to_type
))
7706 pointer_mode
= TYPE_MODE (upc_pts_rep_type_node
);
7707 upc_pts_type
= build_pointer_type_for_mode (to_type
, pointer_mode
,
7709 TYPE_USER_ALIGN (upc_pts_type
) = TYPE_USER_ALIGN (upc_pts_rep_type_node
);
7710 TYPE_ALIGN (upc_pts_type
) = TYPE_ALIGN (upc_pts_rep_type_node
);
7711 return upc_pts_type
;
7713 return build_pointer_type_for_mode (to_type
, pointer_mode
, false);
7716 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7719 build_reference_type_for_mode (tree to_type
, enum machine_mode mode
,
7724 if (to_type
== error_mark_node
)
7725 return error_mark_node
;
7727 /* If the pointed-to type has the may_alias attribute set, force
7728 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7729 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7730 can_alias_all
= true;
7732 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7733 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7734 In that case, return that type without regard to the rest of our
7737 ??? This is a kludge, but consistent with the way this function has
7738 always operated and there doesn't seem to be a good way to avoid this
7740 if (TYPE_REFERENCE_TO (to_type
) != 0
7741 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
7742 return TYPE_REFERENCE_TO (to_type
);
7744 /* First, if we already have a type for pointers to TO_TYPE and it's
7745 the proper mode, use it. */
7746 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
7747 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7750 t
= make_node (REFERENCE_TYPE
);
7752 TREE_TYPE (t
) = to_type
;
7753 SET_TYPE_MODE (t
, mode
);
7754 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7755 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
7756 TYPE_REFERENCE_TO (to_type
) = t
;
7758 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
))
7759 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7760 else if (TYPE_CANONICAL (to_type
) != to_type
)
7762 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
7763 mode
, can_alias_all
);
7771 /* Build the node for the type of references-to-TO_TYPE by default
7775 build_reference_type (tree to_type
)
7777 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7778 : TYPE_ADDR_SPACE (to_type
);
7779 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7780 return build_reference_type_for_mode (to_type
, pointer_mode
, false);
7783 #define MAX_INT_CACHED_PREC \
7784 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7785 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
7787 /* Builds a signed or unsigned integer type of precision PRECISION.
7788 Used for C bitfields whose precision does not match that of
7789 built-in target types. */
7791 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
7797 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
7799 if (precision
<= MAX_INT_CACHED_PREC
)
7801 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
7806 itype
= make_node (INTEGER_TYPE
);
7807 TYPE_PRECISION (itype
) = precision
;
7810 fixup_unsigned_type (itype
);
7812 fixup_signed_type (itype
);
7815 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype
)))
7816 ret
= type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype
)), itype
);
7817 if (precision
<= MAX_INT_CACHED_PREC
)
7818 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
7823 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7824 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7825 is true, reuse such a type that has already been constructed. */
7828 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7830 tree itype
= make_node (INTEGER_TYPE
);
7831 hashval_t hashcode
= 0;
7833 TREE_TYPE (itype
) = type
;
7835 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7836 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7838 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7839 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7840 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7841 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7842 TYPE_ALIGN (itype
) = TYPE_ALIGN (type
);
7843 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7848 if ((TYPE_MIN_VALUE (itype
)
7849 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7850 || (TYPE_MAX_VALUE (itype
)
7851 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7853 /* Since we cannot reliably merge this type, we need to compare it using
7854 structural equality checks. */
7855 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7859 hashcode
= iterative_hash_expr (TYPE_MIN_VALUE (itype
), hashcode
);
7860 hashcode
= iterative_hash_expr (TYPE_MAX_VALUE (itype
), hashcode
);
7861 hashcode
= iterative_hash_hashval_t (TYPE_HASH (type
), hashcode
);
7862 itype
= type_hash_canon (hashcode
, itype
);
7867 /* Wrapper around build_range_type_1 with SHARED set to true. */
7870 build_range_type (tree type
, tree lowval
, tree highval
)
7872 return build_range_type_1 (type
, lowval
, highval
, true);
7875 /* Wrapper around build_range_type_1 with SHARED set to false. */
7878 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7880 return build_range_type_1 (type
, lowval
, highval
, false);
7883 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7884 MAXVAL should be the maximum value in the domain
7885 (one less than the length of the array).
7887 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7888 We don't enforce this limit, that is up to caller (e.g. language front end).
7889 The limit exists because the result is a signed type and we don't handle
7890 sizes that use more than one HOST_WIDE_INT. */
7893 build_index_type (tree maxval
)
7895 return build_range_type (sizetype
, size_zero_node
, maxval
);
7898 /* Return true if the debug information for TYPE, a subtype, should be emitted
7899 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7900 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7901 debug info and doesn't reflect the source code. */
7904 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7906 tree base_type
= TREE_TYPE (type
), low
, high
;
7908 /* Subrange types have a base type which is an integral type. */
7909 if (!INTEGRAL_TYPE_P (base_type
))
7912 /* Get the real bounds of the subtype. */
7913 if (lang_hooks
.types
.get_subrange_bounds
)
7914 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7917 low
= TYPE_MIN_VALUE (type
);
7918 high
= TYPE_MAX_VALUE (type
);
7921 /* If the type and its base type have the same representation and the same
7922 name, then the type is not a subrange but a copy of the base type. */
7923 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7924 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7925 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7926 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7927 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7928 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7938 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7939 and number of elements specified by the range of values of INDEX_TYPE.
7940 If SHARED is true, reuse such a type that has already been constructed. */
7943 build_array_type_1 (tree elt_type
, tree index_type
, bool shared
)
7947 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7949 error ("arrays of functions are not meaningful");
7950 elt_type
= integer_type_node
;
7953 t
= make_node (ARRAY_TYPE
);
7954 TREE_TYPE (t
) = elt_type
;
7955 TYPE_DOMAIN (t
) = index_type
;
7956 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7959 /* If the element type is incomplete at this point we get marked for
7960 structural equality. Do not record these types in the canonical
7962 if (TYPE_STRUCTURAL_EQUALITY_P (t
))
7967 hashval_t hashcode
= iterative_hash_object (TYPE_HASH (elt_type
), 0);
7969 hashcode
= iterative_hash_object (TYPE_HASH (index_type
), hashcode
);
7970 t
= type_hash_canon (hashcode
, t
);
7973 if (TYPE_CANONICAL (t
) == t
)
7975 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7976 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
)))
7977 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7978 else if (TYPE_CANONICAL (elt_type
) != elt_type
7979 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7981 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7983 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7990 /* Wrapper around build_array_type_1 with SHARED set to true. */
7993 build_array_type (tree elt_type
, tree index_type
)
7995 return build_array_type_1 (elt_type
, index_type
, true);
7998 /* Wrapper around build_array_type_1 with SHARED set to false. */
8001 build_nonshared_array_type (tree elt_type
, tree index_type
)
8003 return build_array_type_1 (elt_type
, index_type
, false);
8006 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8010 build_array_type_nelts (tree elt_type
, unsigned HOST_WIDE_INT nelts
)
8012 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
8015 /* Recursively examines the array elements of TYPE, until a non-array
8016 element type is found. */
8019 strip_array_types (tree type
)
8021 while (TREE_CODE (type
) == ARRAY_TYPE
)
8022 type
= TREE_TYPE (type
);
8027 /* Computes the canonical argument types from the argument type list
8030 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8031 on entry to this function, or if any of the ARGTYPES are
8034 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8035 true on entry to this function, or if any of the ARGTYPES are
8038 Returns a canonical argument list, which may be ARGTYPES when the
8039 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8040 true) or would not differ from ARGTYPES. */
8043 maybe_canonicalize_argtypes (tree argtypes
,
8044 bool *any_structural_p
,
8045 bool *any_noncanonical_p
)
8048 bool any_noncanonical_argtypes_p
= false;
8050 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
8052 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
8053 /* Fail gracefully by stating that the type is structural. */
8054 *any_structural_p
= true;
8055 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
8056 *any_structural_p
= true;
8057 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
8058 || TREE_PURPOSE (arg
))
8059 /* If the argument has a default argument, we consider it
8060 non-canonical even though the type itself is canonical.
8061 That way, different variants of function and method types
8062 with default arguments will all point to the variant with
8063 no defaults as their canonical type. */
8064 any_noncanonical_argtypes_p
= true;
8067 if (*any_structural_p
)
8070 if (any_noncanonical_argtypes_p
)
8072 /* Build the canonical list of argument types. */
8073 tree canon_argtypes
= NULL_TREE
;
8074 bool is_void
= false;
8076 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
8078 if (arg
== void_list_node
)
8081 canon_argtypes
= tree_cons (NULL_TREE
,
8082 TYPE_CANONICAL (TREE_VALUE (arg
)),
8086 canon_argtypes
= nreverse (canon_argtypes
);
8088 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
8090 /* There is a non-canonical type. */
8091 *any_noncanonical_p
= true;
8092 return canon_argtypes
;
8095 /* The canonical argument types are the same as ARGTYPES. */
8099 /* Construct, lay out and return
8100 the type of functions returning type VALUE_TYPE
8101 given arguments of types ARG_TYPES.
8102 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8103 are data type nodes for the arguments of the function.
8104 If such a type has already been constructed, reuse it. */
8107 build_function_type (tree value_type
, tree arg_types
)
8110 hashval_t hashcode
= 0;
8111 bool any_structural_p
, any_noncanonical_p
;
8112 tree canon_argtypes
;
8114 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
8116 error ("function return type cannot be function");
8117 value_type
= integer_type_node
;
8120 /* Make a node of the sort we want. */
8121 t
= make_node (FUNCTION_TYPE
);
8122 TREE_TYPE (t
) = value_type
;
8123 TYPE_ARG_TYPES (t
) = arg_types
;
8125 /* If we already have such a type, use the old one. */
8126 hashcode
= iterative_hash_object (TYPE_HASH (value_type
), hashcode
);
8127 hashcode
= type_hash_list (arg_types
, hashcode
);
8128 t
= type_hash_canon (hashcode
, t
);
8130 /* Set up the canonical type. */
8131 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
8132 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
8133 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
8135 &any_noncanonical_p
);
8136 if (any_structural_p
)
8137 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8138 else if (any_noncanonical_p
)
8139 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
8142 if (!COMPLETE_TYPE_P (t
))
8147 /* Build a function type. The RETURN_TYPE is the type returned by the
8148 function. If VAARGS is set, no void_type_node is appended to the
8149 the list. ARGP must be always be terminated be a NULL_TREE. */
8152 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
8156 t
= va_arg (argp
, tree
);
8157 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
8158 args
= tree_cons (NULL_TREE
, t
, args
);
8163 if (args
!= NULL_TREE
)
8164 args
= nreverse (args
);
8165 gcc_assert (last
!= void_list_node
);
8167 else if (args
== NULL_TREE
)
8168 args
= void_list_node
;
8172 args
= nreverse (args
);
8173 TREE_CHAIN (last
) = void_list_node
;
8175 args
= build_function_type (return_type
, args
);
8180 /* Build a function type. The RETURN_TYPE is the type returned by the
8181 function. If additional arguments are provided, they are
8182 additional argument types. The list of argument types must always
8183 be terminated by NULL_TREE. */
8186 build_function_type_list (tree return_type
, ...)
8191 va_start (p
, return_type
);
8192 args
= build_function_type_list_1 (false, return_type
, p
);
8197 /* Build a variable argument function type. The RETURN_TYPE is the
8198 type returned by the function. If additional arguments are provided,
8199 they are additional argument types. The list of argument types must
8200 always be terminated by NULL_TREE. */
8203 build_varargs_function_type_list (tree return_type
, ...)
8208 va_start (p
, return_type
);
8209 args
= build_function_type_list_1 (true, return_type
, p
);
8215 /* Build a function type. RETURN_TYPE is the type returned by the
8216 function; VAARGS indicates whether the function takes varargs. The
8217 function takes N named arguments, the types of which are provided in
8221 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
8225 tree t
= vaargs
? NULL_TREE
: void_list_node
;
8227 for (i
= n
- 1; i
>= 0; i
--)
8228 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
8230 return build_function_type (return_type
, t
);
8233 /* Build a function type. RETURN_TYPE is the type returned by the
8234 function. The function takes N named arguments, the types of which
8235 are provided in ARG_TYPES. */
8238 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8240 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
8243 /* Build a variable argument function type. RETURN_TYPE is the type
8244 returned by the function. The function takes N named arguments, the
8245 types of which are provided in ARG_TYPES. */
8248 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8250 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
8253 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8254 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8255 for the method. An implicit additional parameter (of type
8256 pointer-to-BASETYPE) is added to the ARGTYPES. */
8259 build_method_type_directly (tree basetype
,
8266 bool any_structural_p
, any_noncanonical_p
;
8267 tree canon_argtypes
;
8269 /* Make a node of the sort we want. */
8270 t
= make_node (METHOD_TYPE
);
8272 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8273 TREE_TYPE (t
) = rettype
;
8274 ptype
= build_pointer_type (basetype
);
8276 /* The actual arglist for this function includes a "hidden" argument
8277 which is "this". Put it into the list of argument types. */
8278 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
8279 TYPE_ARG_TYPES (t
) = argtypes
;
8281 /* If we already have such a type, use the old one. */
8282 hashcode
= iterative_hash_object (TYPE_HASH (basetype
), hashcode
);
8283 hashcode
= iterative_hash_object (TYPE_HASH (rettype
), hashcode
);
8284 hashcode
= type_hash_list (argtypes
, hashcode
);
8285 t
= type_hash_canon (hashcode
, t
);
8287 /* Set up the canonical type. */
8289 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8290 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
8292 = (TYPE_CANONICAL (basetype
) != basetype
8293 || TYPE_CANONICAL (rettype
) != rettype
);
8294 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
8296 &any_noncanonical_p
);
8297 if (any_structural_p
)
8298 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8299 else if (any_noncanonical_p
)
8301 = build_method_type_directly (TYPE_CANONICAL (basetype
),
8302 TYPE_CANONICAL (rettype
),
8304 if (!COMPLETE_TYPE_P (t
))
8310 /* Construct, lay out and return the type of methods belonging to class
8311 BASETYPE and whose arguments and values are described by TYPE.
8312 If that type exists already, reuse it.
8313 TYPE must be a FUNCTION_TYPE node. */
8316 build_method_type (tree basetype
, tree type
)
8318 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
8320 return build_method_type_directly (basetype
,
8322 TYPE_ARG_TYPES (type
));
8325 /* Construct, lay out and return the type of offsets to a value
8326 of type TYPE, within an object of type BASETYPE.
8327 If a suitable offset type exists already, reuse it. */
8330 build_offset_type (tree basetype
, tree type
)
8333 hashval_t hashcode
= 0;
8335 /* Make a node of the sort we want. */
8336 t
= make_node (OFFSET_TYPE
);
8338 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8339 TREE_TYPE (t
) = type
;
8341 /* If we already have such a type, use the old one. */
8342 hashcode
= iterative_hash_object (TYPE_HASH (basetype
), hashcode
);
8343 hashcode
= iterative_hash_object (TYPE_HASH (type
), hashcode
);
8344 t
= type_hash_canon (hashcode
, t
);
8346 if (!COMPLETE_TYPE_P (t
))
8349 if (TYPE_CANONICAL (t
) == t
)
8351 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8352 || TYPE_STRUCTURAL_EQUALITY_P (type
))
8353 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8354 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
8355 || TYPE_CANONICAL (type
) != type
)
8357 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
8358 TYPE_CANONICAL (type
));
8364 /* Create a complex type whose components are COMPONENT_TYPE. */
8367 build_complex_type (tree component_type
)
8372 gcc_assert (INTEGRAL_TYPE_P (component_type
)
8373 || SCALAR_FLOAT_TYPE_P (component_type
)
8374 || FIXED_POINT_TYPE_P (component_type
));
8376 /* Make a node of the sort we want. */
8377 t
= make_node (COMPLEX_TYPE
);
8379 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (component_type
);
8381 /* If we already have such a type, use the old one. */
8382 hashcode
= iterative_hash_object (TYPE_HASH (component_type
), 0);
8383 t
= type_hash_canon (hashcode
, t
);
8385 if (!COMPLETE_TYPE_P (t
))
8388 if (TYPE_CANONICAL (t
) == t
)
8390 if (TYPE_STRUCTURAL_EQUALITY_P (component_type
))
8391 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8392 else if (TYPE_CANONICAL (component_type
) != component_type
)
8394 = build_complex_type (TYPE_CANONICAL (component_type
));
8397 /* We need to create a name, since complex is a fundamental type. */
8398 if (! TYPE_NAME (t
))
8401 if (component_type
== char_type_node
)
8402 name
= "complex char";
8403 else if (component_type
== signed_char_type_node
)
8404 name
= "complex signed char";
8405 else if (component_type
== unsigned_char_type_node
)
8406 name
= "complex unsigned char";
8407 else if (component_type
== short_integer_type_node
)
8408 name
= "complex short int";
8409 else if (component_type
== short_unsigned_type_node
)
8410 name
= "complex short unsigned int";
8411 else if (component_type
== integer_type_node
)
8412 name
= "complex int";
8413 else if (component_type
== unsigned_type_node
)
8414 name
= "complex unsigned int";
8415 else if (component_type
== long_integer_type_node
)
8416 name
= "complex long int";
8417 else if (component_type
== long_unsigned_type_node
)
8418 name
= "complex long unsigned int";
8419 else if (component_type
== long_long_integer_type_node
)
8420 name
= "complex long long int";
8421 else if (component_type
== long_long_unsigned_type_node
)
8422 name
= "complex long long unsigned int";
8427 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
8428 get_identifier (name
), t
);
8431 return build_qualified_type (t
, TYPE_QUALS (component_type
));
8434 /* If TYPE is a real or complex floating-point type and the target
8435 does not directly support arithmetic on TYPE then return the wider
8436 type to be used for arithmetic on TYPE. Otherwise, return
8440 excess_precision_type (tree type
)
8442 if (flag_excess_precision
!= EXCESS_PRECISION_FAST
)
8444 int flt_eval_method
= TARGET_FLT_EVAL_METHOD
;
8445 switch (TREE_CODE (type
))
8448 switch (flt_eval_method
)
8451 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
))
8452 return double_type_node
;
8455 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
)
8456 || TYPE_MODE (type
) == TYPE_MODE (double_type_node
))
8457 return long_double_type_node
;
8464 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
8466 switch (flt_eval_method
)
8469 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
))
8470 return complex_double_type_node
;
8473 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
)
8474 || (TYPE_MODE (TREE_TYPE (type
))
8475 == TYPE_MODE (double_type_node
)))
8476 return complex_long_double_type_node
;
8489 /* Return OP, stripped of any conversions to wider types as much as is safe.
8490 Converting the value back to OP's type makes a value equivalent to OP.
8492 If FOR_TYPE is nonzero, we return a value which, if converted to
8493 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8495 OP must have integer, real or enumeral type. Pointers are not allowed!
8497 There are some cases where the obvious value we could return
8498 would regenerate to OP if converted to OP's type,
8499 but would not extend like OP to wider types.
8500 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8501 For example, if OP is (unsigned short)(signed char)-1,
8502 we avoid returning (signed char)-1 if FOR_TYPE is int,
8503 even though extending that to an unsigned short would regenerate OP,
8504 since the result of extending (signed char)-1 to (int)
8505 is different from (int) OP. */
8508 get_unwidened (tree op
, tree for_type
)
8510 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8511 tree type
= TREE_TYPE (op
);
8513 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
8515 = (for_type
!= 0 && for_type
!= type
8516 && final_prec
> TYPE_PRECISION (type
)
8517 && TYPE_UNSIGNED (type
));
8520 while (CONVERT_EXPR_P (op
))
8524 /* TYPE_PRECISION on vector types has different meaning
8525 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8526 so avoid them here. */
8527 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
8530 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
8531 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
8533 /* Truncations are many-one so cannot be removed.
8534 Unless we are later going to truncate down even farther. */
8536 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
8539 /* See what's inside this conversion. If we decide to strip it,
8541 op
= TREE_OPERAND (op
, 0);
8543 /* If we have not stripped any zero-extensions (uns is 0),
8544 we can strip any kind of extension.
8545 If we have previously stripped a zero-extension,
8546 only zero-extensions can safely be stripped.
8547 Any extension can be stripped if the bits it would produce
8548 are all going to be discarded later by truncating to FOR_TYPE. */
8552 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
8554 /* TYPE_UNSIGNED says whether this is a zero-extension.
8555 Let's avoid computing it if it does not affect WIN
8556 and if UNS will not be needed again. */
8558 || CONVERT_EXPR_P (op
))
8559 && TYPE_UNSIGNED (TREE_TYPE (op
)))
8567 /* If we finally reach a constant see if it fits in for_type and
8568 in that case convert it. */
8570 && TREE_CODE (win
) == INTEGER_CST
8571 && TREE_TYPE (win
) != for_type
8572 && int_fits_type_p (win
, for_type
))
8573 win
= fold_convert (for_type
, win
);
8578 /* Return OP or a simpler expression for a narrower value
8579 which can be sign-extended or zero-extended to give back OP.
8580 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8581 or 0 if the value should be sign-extended. */
8584 get_narrower (tree op
, int *unsignedp_ptr
)
8589 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
8591 while (TREE_CODE (op
) == NOP_EXPR
)
8594 = (TYPE_PRECISION (TREE_TYPE (op
))
8595 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
8597 /* Truncations are many-one so cannot be removed. */
8601 /* See what's inside this conversion. If we decide to strip it,
8606 op
= TREE_OPERAND (op
, 0);
8607 /* An extension: the outermost one can be stripped,
8608 but remember whether it is zero or sign extension. */
8610 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8611 /* Otherwise, if a sign extension has been stripped,
8612 only sign extensions can now be stripped;
8613 if a zero extension has been stripped, only zero-extensions. */
8614 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
8618 else /* bitschange == 0 */
8620 /* A change in nominal type can always be stripped, but we must
8621 preserve the unsignedness. */
8623 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8625 op
= TREE_OPERAND (op
, 0);
8626 /* Keep trying to narrow, but don't assign op to win if it
8627 would turn an integral type into something else. */
8628 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8635 if (TREE_CODE (op
) == COMPONENT_REF
8636 /* Since type_for_size always gives an integer type. */
8637 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8638 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8639 /* Ensure field is laid out already. */
8640 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8641 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8643 unsigned HOST_WIDE_INT innerprec
8644 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8645 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8646 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8647 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8649 /* We can get this structure field in a narrower type that fits it,
8650 but the resulting extension to its nominal type (a fullword type)
8651 must satisfy the same conditions as for other extensions.
8653 Do this only for fields that are aligned (not bit-fields),
8654 because when bit-field insns will be used there is no
8655 advantage in doing this. */
8657 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8658 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8659 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8663 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8664 win
= fold_convert (type
, op
);
8668 *unsignedp_ptr
= uns
;
8672 /* Returns true if integer constant C has a value that is permissible
8673 for type TYPE (an INTEGER_TYPE). */
8676 int_fits_type_p (const_tree c
, const_tree type
)
8678 tree type_low_bound
, type_high_bound
;
8679 bool ok_for_low_bound
, ok_for_high_bound
;
8680 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8683 type_low_bound
= TYPE_MIN_VALUE (type
);
8684 type_high_bound
= TYPE_MAX_VALUE (type
);
8686 /* If at least one bound of the type is a constant integer, we can check
8687 ourselves and maybe make a decision. If no such decision is possible, but
8688 this type is a subtype, try checking against that. Otherwise, use
8689 fits_to_tree_p, which checks against the precision.
8691 Compute the status for each possibly constant bound, and return if we see
8692 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8693 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8694 for "constant known to fit". */
8696 /* Check if c >= type_low_bound. */
8697 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8699 if (tree_int_cst_lt (c
, type_low_bound
))
8701 ok_for_low_bound
= true;
8704 ok_for_low_bound
= false;
8706 /* Check if c <= type_high_bound. */
8707 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
8709 if (tree_int_cst_lt (type_high_bound
, c
))
8711 ok_for_high_bound
= true;
8714 ok_for_high_bound
= false;
8716 /* If the constant fits both bounds, the result is known. */
8717 if (ok_for_low_bound
&& ok_for_high_bound
)
8720 /* Perform some generic filtering which may allow making a decision
8721 even if the bounds are not constant. First, negative integers
8722 never fit in unsigned types, */
8723 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (c
))
8726 /* Second, narrower types always fit in wider ones. */
8727 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8730 /* Third, unsigned integers with top bit set never fit signed types. */
8731 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8733 int prec
= GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c
))) - 1;
8734 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8736 /* When a tree_cst is converted to a wide-int, the precision
8737 is taken from the type. However, if the precision of the
8738 mode underneath the type is smaller than that, it is
8739 possible that the value will not fit. The test below
8740 fails if any bit is set between the sign bit of the
8741 underlying mode and the top bit of the type. */
8742 if (wi::ne_p (wi::zext (c
, prec
- 1), c
))
8745 else if (wi::neg_p (c
))
8749 /* If we haven't been able to decide at this point, there nothing more we
8750 can check ourselves here. Look at the base type if we have one and it
8751 has the same precision. */
8752 if (TREE_CODE (type
) == INTEGER_TYPE
8753 && TREE_TYPE (type
) != 0
8754 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8756 type
= TREE_TYPE (type
);
8760 /* Or to fits_to_tree_p, if nothing else. */
8761 return wi::fits_to_tree_p (c
, type
);
8764 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8765 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8766 represented (assuming two's-complement arithmetic) within the bit
8767 precision of the type are returned instead. */
8770 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8772 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8773 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8774 wi::to_mpz (TYPE_MIN_VALUE (type
), min
, TYPE_SIGN (type
));
8777 if (TYPE_UNSIGNED (type
))
8778 mpz_set_ui (min
, 0);
8781 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8782 wi::to_mpz (mn
, min
, SIGNED
);
8786 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8787 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8788 wi::to_mpz (TYPE_MAX_VALUE (type
), max
, TYPE_SIGN (type
));
8791 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8792 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
8796 /* Return true if VAR is an automatic variable defined in function FN. */
8799 auto_var_in_fn_p (const_tree var
, const_tree fn
)
8801 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
8802 && ((((TREE_CODE (var
) == VAR_DECL
&& ! DECL_EXTERNAL (var
))
8803 || TREE_CODE (var
) == PARM_DECL
)
8804 && ! TREE_STATIC (var
))
8805 || TREE_CODE (var
) == LABEL_DECL
8806 || TREE_CODE (var
) == RESULT_DECL
));
8809 /* Subprogram of following function. Called by walk_tree.
8811 Return *TP if it is an automatic variable or parameter of the
8812 function passed in as DATA. */
8815 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8817 tree fn
= (tree
) data
;
8822 else if (DECL_P (*tp
)
8823 && auto_var_in_fn_p (*tp
, fn
))
8829 /* Returns true if T is, contains, or refers to a type with variable
8830 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8831 arguments, but not the return type. If FN is nonzero, only return
8832 true if a modifier of the type or position of FN is a variable or
8833 parameter inside FN.
8835 This concept is more general than that of C99 'variably modified types':
8836 in C99, a struct type is never variably modified because a VLA may not
8837 appear as a structure member. However, in GNU C code like:
8839 struct S { int i[f()]; };
8841 is valid, and other languages may define similar constructs. */
8844 variably_modified_type_p (tree type
, tree fn
)
8848 /* Test if T is either variable (if FN is zero) or an expression containing
8849 a variable in FN. If TYPE isn't gimplified, return true also if
8850 gimplify_one_sizepos would gimplify the expression into a local
8852 #define RETURN_TRUE_IF_VAR(T) \
8853 do { tree _t = (T); \
8854 if (_t != NULL_TREE \
8855 && _t != error_mark_node \
8856 && TREE_CODE (_t) != INTEGER_CST \
8857 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8859 || (!TYPE_SIZES_GIMPLIFIED (type) \
8860 && !is_gimple_sizepos (_t)) \
8861 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8862 return true; } while (0)
8864 if (type
== error_mark_node
)
8867 /* If TYPE itself has variable size, it is variably modified. */
8868 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8869 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8871 switch (TREE_CODE (type
))
8874 case REFERENCE_TYPE
:
8876 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8882 /* If TYPE is a function type, it is variably modified if the
8883 return type is variably modified. */
8884 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8890 case FIXED_POINT_TYPE
:
8893 /* Scalar types are variably modified if their end points
8895 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8896 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8901 case QUAL_UNION_TYPE
:
8902 /* We can't see if any of the fields are variably-modified by the
8903 definition we normally use, since that would produce infinite
8904 recursion via pointers. */
8905 /* This is variably modified if some field's type is. */
8906 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8907 if (TREE_CODE (t
) == FIELD_DECL
)
8909 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8910 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8911 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8913 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8914 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8919 /* Do not call ourselves to avoid infinite recursion. This is
8920 variably modified if the element type is. */
8921 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8922 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8929 /* The current language may have other cases to check, but in general,
8930 all other types are not variably modified. */
8931 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8933 #undef RETURN_TRUE_IF_VAR
8936 /* Given a DECL or TYPE, return the scope in which it was declared, or
8937 NULL_TREE if there is no containing scope. */
8940 get_containing_scope (const_tree t
)
8942 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
8945 /* Return the innermost context enclosing DECL that is
8946 a FUNCTION_DECL, or zero if none. */
8949 decl_function_context (const_tree decl
)
8953 if (TREE_CODE (decl
) == ERROR_MARK
)
8956 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8957 where we look up the function at runtime. Such functions always take
8958 a first argument of type 'pointer to real context'.
8960 C++ should really be fixed to use DECL_CONTEXT for the real context,
8961 and use something else for the "virtual context". */
8962 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VINDEX (decl
))
8965 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8967 context
= DECL_CONTEXT (decl
);
8969 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8971 if (TREE_CODE (context
) == BLOCK
)
8972 context
= BLOCK_SUPERCONTEXT (context
);
8974 context
= get_containing_scope (context
);
8980 /* Return the innermost context enclosing DECL that is
8981 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8982 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8985 decl_type_context (const_tree decl
)
8987 tree context
= DECL_CONTEXT (decl
);
8990 switch (TREE_CODE (context
))
8992 case NAMESPACE_DECL
:
8993 case TRANSLATION_UNIT_DECL
:
8998 case QUAL_UNION_TYPE
:
9003 context
= DECL_CONTEXT (context
);
9007 context
= BLOCK_SUPERCONTEXT (context
);
9017 /* CALL is a CALL_EXPR. Return the declaration for the function
9018 called, or NULL_TREE if the called function cannot be
9022 get_callee_fndecl (const_tree call
)
9026 if (call
== error_mark_node
)
9027 return error_mark_node
;
9029 /* It's invalid to call this function with anything but a
9031 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
9033 /* The first operand to the CALL is the address of the function
9035 addr
= CALL_EXPR_FN (call
);
9037 /* If there is no function, return early. */
9038 if (addr
== NULL_TREE
)
9043 /* If this is a readonly function pointer, extract its initial value. */
9044 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
9045 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
9046 && DECL_INITIAL (addr
))
9047 addr
= DECL_INITIAL (addr
);
9049 /* If the address is just `&f' for some function `f', then we know
9050 that `f' is being called. */
9051 if (TREE_CODE (addr
) == ADDR_EXPR
9052 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
9053 return TREE_OPERAND (addr
, 0);
9055 /* We couldn't figure out what was being called. */
9059 /* Print debugging information about tree nodes generated during the compile,
9060 and any language-specific information. */
9063 dump_tree_statistics (void)
9065 if (GATHER_STATISTICS
)
9068 int total_nodes
, total_bytes
;
9069 fprintf (stderr
, "Kind Nodes Bytes\n");
9070 fprintf (stderr
, "---------------------------------------\n");
9071 total_nodes
= total_bytes
= 0;
9072 for (i
= 0; i
< (int) all_kinds
; i
++)
9074 fprintf (stderr
, "%-20s %7d %10d\n", tree_node_kind_names
[i
],
9075 tree_node_counts
[i
], tree_node_sizes
[i
]);
9076 total_nodes
+= tree_node_counts
[i
];
9077 total_bytes
+= tree_node_sizes
[i
];
9079 fprintf (stderr
, "---------------------------------------\n");
9080 fprintf (stderr
, "%-20s %7d %10d\n", "Total", total_nodes
, total_bytes
);
9081 fprintf (stderr
, "---------------------------------------\n");
9082 fprintf (stderr
, "Code Nodes\n");
9083 fprintf (stderr
, "----------------------------\n");
9084 for (i
= 0; i
< (int) MAX_TREE_CODES
; i
++)
9085 fprintf (stderr
, "%-20s %7d\n", get_tree_code_name ((enum tree_code
) i
),
9086 tree_code_counts
[i
]);
9087 fprintf (stderr
, "----------------------------\n");
9088 ssanames_print_statistics ();
9089 phinodes_print_statistics ();
9092 fprintf (stderr
, "(No per-node statistics)\n");
9094 print_type_hash_statistics ();
9095 print_debug_expr_statistics ();
9096 print_value_expr_statistics ();
9097 lang_hooks
.print_statistics ();
9100 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
/* Fold the high-order BITS bits of VALUE into the running CRC-32
   CHKSUM (MSB-first, polynomial 0x04c11db7) and return the new
   checksum.  (The old comment said "of a byte", a copy-paste error:
   this is the generic bit-at-a-time worker.)  */

static unsigned
crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
{
  unsigned ix;

  for (ix = bits; ix--; value <<= 1)
    {
      unsigned feedback;

      /* Feed back the polynomial whenever the top bits of VALUE and
	 CHKSUM differ.  */
      feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
      chksum <<= 1;
      chksum ^= feedback;
    }

  return chksum;
}
/* Generate a crc32 of a 32-bit unsigned VALUE, folded into CHKSUM.  */

unsigned
crc32_unsigned (unsigned chksum, unsigned value)
{
  return crc32_unsigned_bits (chksum, value, 32);
}
/* Generate a crc32 of a single BYTE, folded into CHKSUM.  The byte is
   shifted to the top of the word before being fed through the bitwise
   worker.  NOTE(review): if plain `char' is signed, a negative byte
   sign-extends before the shift; harmless here since only the top 8
   bits participate, but worth confirming on new hosts.  */

unsigned
crc32_byte (unsigned chksum, char byte)
{
  return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
}
/* Generate a crc32 of STRING, folded into CHKSUM.  The terminating
   NUL byte is deliberately included in the checksum (do/while).  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  do
    {
      chksum = crc32_byte (chksum, *string);
    }
  while (*string++);

  return chksum;
}
/* P is a string that will be used in a symbol.  Mask out any characters
   that are not valid in that context.  */

void
clean_symbol_name (char *p)
{
  for (; *p; p++)
    if (! (ISALNUM (*p)
#ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
	   || *p == '$'
#endif
#ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
	   || *p == '.'
#endif
	   ))
      *p = '_';
}
9167 /* Generate a name for a special-purpose function.
9168 The generated name may need to be unique across the whole link.
9169 Changes to this function may also require corresponding changes to
9170 xstrdup_mask_random.
9171 TYPE is some string to identify the purpose of this function to the
9172 linker or collect2; it must start with an uppercase letter,
9174 I - for constructors
9176 N - for C++ anonymous namespaces
9177 F - for DWARF unwind frame information. */
9180 get_file_function_name (const char *type
)
9186 /* If we already have a name we know to be unique, just use that. */
9187 if (first_global_object_name
)
9188 p
= q
= ASTRDUP (first_global_object_name
);
9189 /* If the target is handling the constructors/destructors, they
9190 will be local to this file and the name is only necessary for
9192 We also assign sub_I and sub_D sufixes to constructors called from
9193 the global static constructors. These are always local. */
9194 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
9195 || (strncmp (type
, "sub_", 4) == 0
9196 && (type
[4] == 'I' || type
[4] == 'D')))
9198 const char *file
= main_input_filename
;
9200 file
= LOCATION_FILE (input_location
);
9201 /* Just use the file's basename, because the full pathname
9202 might be quite long. */
9203 p
= q
= ASTRDUP (lbasename (file
));
9207 /* Otherwise, the name must be unique across the entire link.
9208 We don't have anything that we know to be unique to this translation
9209 unit, so use what we do have and throw in some randomness. */
9211 const char *name
= weak_global_object_name
;
9212 const char *file
= main_input_filename
;
9217 file
= LOCATION_FILE (input_location
);
9219 len
= strlen (file
);
9220 q
= (char *) alloca (9 + 17 + len
+ 1);
9221 memcpy (q
, file
, len
+ 1);
9223 snprintf (q
+ len
, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
9224 crc32_string (0, name
), get_random_seed (false));
9229 clean_symbol_name (q
);
9230 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
9233 /* Set up the name of the file-level functions we may need.
9234 Use a global object (which is already required to be unique over
9235 the program) rather than the file name (which imposes extra
9237 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
9239 return get_identifier (buf
);
9242 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9244 /* Complain that the tree code of NODE does not match the expected 0
9245 terminated list of trailing codes. The trailing code list can be
9246 empty, for a more vague error message. FILE, LINE, and FUNCTION
9247 are of the caller. */
9250 tree_check_failed (const_tree node
, const char *file
,
9251 int line
, const char *function
, ...)
9255 unsigned length
= 0;
9256 enum tree_code code
;
9258 va_start (args
, function
);
9259 while ((code
= (enum tree_code
) va_arg (args
, int)))
9260 length
+= 4 + strlen (get_tree_code_name (code
));
9265 va_start (args
, function
);
9266 length
+= strlen ("expected ");
9267 buffer
= tmp
= (char *) alloca (length
);
9269 while ((code
= (enum tree_code
) va_arg (args
, int)))
9271 const char *prefix
= length
? " or " : "expected ";
9273 strcpy (tmp
+ length
, prefix
);
9274 length
+= strlen (prefix
);
9275 strcpy (tmp
+ length
, get_tree_code_name (code
));
9276 length
+= strlen (get_tree_code_name (code
));
9281 buffer
= "unexpected node";
9283 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9284 buffer
, get_tree_code_name (TREE_CODE (node
)),
9285 function
, trim_filename (file
), line
);
9288 /* Complain that the tree code of NODE does match the expected 0
9289 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9293 tree_not_check_failed (const_tree node
, const char *file
,
9294 int line
, const char *function
, ...)
9298 unsigned length
= 0;
9299 enum tree_code code
;
9301 va_start (args
, function
);
9302 while ((code
= (enum tree_code
) va_arg (args
, int)))
9303 length
+= 4 + strlen (get_tree_code_name (code
));
9305 va_start (args
, function
);
9306 buffer
= (char *) alloca (length
);
9308 while ((code
= (enum tree_code
) va_arg (args
, int)))
9312 strcpy (buffer
+ length
, " or ");
9315 strcpy (buffer
+ length
, get_tree_code_name (code
));
9316 length
+= strlen (get_tree_code_name (code
));
9320 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9321 buffer
, get_tree_code_name (TREE_CODE (node
)),
9322 function
, trim_filename (file
), line
);
9325 /* Similar to tree_check_failed, except that we check for a class of tree
9326 code, given in CL. */
9329 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9330 const char *file
, int line
, const char *function
)
9333 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9334 TREE_CODE_CLASS_STRING (cl
),
9335 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9336 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9339 /* Similar to tree_check_failed, except that instead of specifying a
9340 dozen codes, use the knowledge that they're all sequential. */
9343 tree_range_check_failed (const_tree node
, const char *file
, int line
,
9344 const char *function
, enum tree_code c1
,
9348 unsigned length
= 0;
9351 for (c
= c1
; c
<= c2
; ++c
)
9352 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
9354 length
+= strlen ("expected ");
9355 buffer
= (char *) alloca (length
);
9358 for (c
= c1
; c
<= c2
; ++c
)
9360 const char *prefix
= length
? " or " : "expected ";
9362 strcpy (buffer
+ length
, prefix
);
9363 length
+= strlen (prefix
);
9364 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
9365 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
9368 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9369 buffer
, get_tree_code_name (TREE_CODE (node
)),
9370 function
, trim_filename (file
), line
);
9374 /* Similar to tree_check_failed, except that we check that a tree does
9375 not have the specified code, given in CL. */
9378 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9379 const char *file
, int line
, const char *function
)
9382 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9383 TREE_CODE_CLASS_STRING (cl
),
9384 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9385 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9389 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9392 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
9393 const char *function
, enum omp_clause_code code
)
9395 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9396 omp_clause_code_name
[code
], get_tree_code_name (TREE_CODE (node
)),
9397 function
, trim_filename (file
), line
);
9401 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9404 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
9405 const char *function
, enum omp_clause_code c1
,
9406 enum omp_clause_code c2
)
9409 unsigned length
= 0;
9412 for (c
= c1
; c
<= c2
; ++c
)
9413 length
+= 4 + strlen (omp_clause_code_name
[c
]);
9415 length
+= strlen ("expected ");
9416 buffer
= (char *) alloca (length
);
9419 for (c
= c1
; c
<= c2
; ++c
)
9421 const char *prefix
= length
? " or " : "expected ";
9423 strcpy (buffer
+ length
, prefix
);
9424 length
+= strlen (prefix
);
9425 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
9426 length
+= strlen (omp_clause_code_name
[c
]);
9429 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9430 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
9431 function
, trim_filename (file
), line
);
9435 #undef DEFTREESTRUCT
9436 #define DEFTREESTRUCT(VAL, NAME) NAME,
9438 static const char *ts_enum_names
[] = {
9439 #include "treestruct.def"
9441 #undef DEFTREESTRUCT
9443 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9445 /* Similar to tree_class_check_failed, except that we check for
9446 whether CODE contains the tree structure identified by EN. */
9449 tree_contains_struct_check_failed (const_tree node
,
9450 const enum tree_node_structure_enum en
,
9451 const char *file
, int line
,
9452 const char *function
)
9455 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9457 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) element vector.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9485 /* Similar to above, except that the check is for the bounds of the operand
9486 vector of an expression node EXP. */
9489 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
9490 int line
, const char *function
)
9492 enum tree_code code
= TREE_CODE (exp
);
9494 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9495 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
9496 function
, trim_filename (file
), line
);
9499 /* Similar to above, except that the check is for the number of
9500 operands of an OMP_CLAUSE node. */
9503 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
9504 int line
, const char *function
)
9507 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9508 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
9509 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
9510 trim_filename (file
), line
);
9512 #endif /* ENABLE_TREE_CHECKING */
9514 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9515 and mapped to the machine mode MODE. Initialize its fields and build
9516 the information necessary for debugging output. */
9519 make_vector_type (tree innertype
, int nunits
, enum machine_mode mode
)
9522 hashval_t hashcode
= 0;
9524 t
= make_node (VECTOR_TYPE
);
9525 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (innertype
);
9526 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
9527 SET_TYPE_MODE (t
, mode
);
9529 if (TYPE_STRUCTURAL_EQUALITY_P (innertype
))
9530 SET_TYPE_STRUCTURAL_EQUALITY (t
);
9531 else if (TYPE_CANONICAL (innertype
) != innertype
9532 || mode
!= VOIDmode
)
9534 = make_vector_type (TYPE_CANONICAL (innertype
), nunits
, VOIDmode
);
9538 hashcode
= iterative_hash_host_wide_int (VECTOR_TYPE
, hashcode
);
9539 hashcode
= iterative_hash_host_wide_int (nunits
, hashcode
);
9540 hashcode
= iterative_hash_host_wide_int (mode
, hashcode
);
9541 hashcode
= iterative_hash_object (TYPE_HASH (TREE_TYPE (t
)), hashcode
);
9542 t
= type_hash_canon (hashcode
, t
);
9544 /* We have built a main variant, based on the main variant of the
9545 inner type. Use it to build the variant we return. */
9546 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
9547 && TREE_TYPE (t
) != innertype
)
9548 return build_type_attribute_qual_variant (t
,
9549 TYPE_ATTRIBUTES (innertype
),
9550 TYPE_QUALS (innertype
));
9556 make_or_reuse_type (unsigned size
, int unsignedp
)
9558 if (size
== INT_TYPE_SIZE
)
9559 return unsignedp
? unsigned_type_node
: integer_type_node
;
9560 if (size
== CHAR_TYPE_SIZE
)
9561 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
9562 if (size
== SHORT_TYPE_SIZE
)
9563 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
9564 if (size
== LONG_TYPE_SIZE
)
9565 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
9566 if (size
== LONG_LONG_TYPE_SIZE
)
9567 return (unsignedp
? long_long_unsigned_type_node
9568 : long_long_integer_type_node
);
9569 if (size
== 128 && int128_integer_type_node
)
9570 return (unsignedp
? int128_unsigned_type_node
9571 : int128_integer_type_node
);
9574 return make_unsigned_type (size
);
9576 return make_signed_type (size
);
9579 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9582 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
9586 if (size
== SHORT_FRACT_TYPE_SIZE
)
9587 return unsignedp
? sat_unsigned_short_fract_type_node
9588 : sat_short_fract_type_node
;
9589 if (size
== FRACT_TYPE_SIZE
)
9590 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9591 if (size
== LONG_FRACT_TYPE_SIZE
)
9592 return unsignedp
? sat_unsigned_long_fract_type_node
9593 : sat_long_fract_type_node
;
9594 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9595 return unsignedp
? sat_unsigned_long_long_fract_type_node
9596 : sat_long_long_fract_type_node
;
9600 if (size
== SHORT_FRACT_TYPE_SIZE
)
9601 return unsignedp
? unsigned_short_fract_type_node
9602 : short_fract_type_node
;
9603 if (size
== FRACT_TYPE_SIZE
)
9604 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9605 if (size
== LONG_FRACT_TYPE_SIZE
)
9606 return unsignedp
? unsigned_long_fract_type_node
9607 : long_fract_type_node
;
9608 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9609 return unsignedp
? unsigned_long_long_fract_type_node
9610 : long_long_fract_type_node
;
9613 return make_fract_type (size
, unsignedp
, satp
);
9616 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9619 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9623 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9624 return unsignedp
? sat_unsigned_short_accum_type_node
9625 : sat_short_accum_type_node
;
9626 if (size
== ACCUM_TYPE_SIZE
)
9627 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9628 if (size
== LONG_ACCUM_TYPE_SIZE
)
9629 return unsignedp
? sat_unsigned_long_accum_type_node
9630 : sat_long_accum_type_node
;
9631 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9632 return unsignedp
? sat_unsigned_long_long_accum_type_node
9633 : sat_long_long_accum_type_node
;
9637 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9638 return unsignedp
? unsigned_short_accum_type_node
9639 : short_accum_type_node
;
9640 if (size
== ACCUM_TYPE_SIZE
)
9641 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9642 if (size
== LONG_ACCUM_TYPE_SIZE
)
9643 return unsignedp
? unsigned_long_accum_type_node
9644 : long_accum_type_node
;
9645 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9646 return unsignedp
? unsigned_long_long_accum_type_node
9647 : long_long_accum_type_node
;
9650 return make_accum_type (size
, unsignedp
, satp
);
9654 /* Create an atomic variant node for TYPE. This routine is called
9655 during initialization of data types to create the 5 basic atomic
9656 types. The generic build_variant_type function requires these to
9657 already be set up in order to function properly, so cannot be
9658 called from there. If ALIGN is non-zero, then ensure alignment is
9659 overridden to this value. */
9662 build_atomic_base (tree type
, unsigned int align
)
9666 /* Make sure its not already registered. */
9667 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9670 t
= build_variant_type_copy (type
);
9671 set_type_quals (t
, TYPE_QUAL_ATOMIC
, NULL_TREE
);
9674 TYPE_ALIGN (t
) = align
;
9679 /* Create nodes for all integer types (and error_mark_node) using the sizes
9680 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9681 SHORT_DOUBLE specifies whether double should be of the same precision
9685 build_common_tree_nodes (bool signed_char
, bool short_double
)
9687 error_mark_node
= make_node (ERROR_MARK
);
9688 TREE_TYPE (error_mark_node
) = error_mark_node
;
9690 initialize_sizetypes ();
9692 /* Define both `signed char' and `unsigned char'. */
9693 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9694 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9695 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9696 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9698 /* Define `char', which is like either `signed char' or `unsigned char'
9699 but not the same as either. */
9702 ? make_signed_type (CHAR_TYPE_SIZE
)
9703 : make_unsigned_type (CHAR_TYPE_SIZE
));
9704 TYPE_STRING_FLAG (char_type_node
) = 1;
9706 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9707 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9708 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9709 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9710 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9711 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9712 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9713 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9714 #if HOST_BITS_PER_WIDE_INT >= 64
9715 /* TODO: This isn't correct, but as logic depends at the moment on
9716 host's instead of target's wide-integer.
9717 If there is a target not supporting TImode, but has an 128-bit
9718 integer-scalar register, this target check needs to be adjusted. */
9719 if (targetm
.scalar_mode_supported_p (TImode
))
9721 int128_integer_type_node
= make_signed_type (128);
9722 int128_unsigned_type_node
= make_unsigned_type (128);
9726 /* Define a boolean type. This type only represents boolean values but
9727 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9728 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9729 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9730 TYPE_PRECISION (boolean_type_node
) = 1;
9731 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9733 /* Define what type to use for size_t. */
9734 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9735 size_type_node
= unsigned_type_node
;
9736 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9737 size_type_node
= long_unsigned_type_node
;
9738 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9739 size_type_node
= long_long_unsigned_type_node
;
9740 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9741 size_type_node
= short_unsigned_type_node
;
9745 /* Fill in the rest of the sized types. Reuse existing type nodes
9747 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9748 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9749 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9750 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9751 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9753 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9754 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9755 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9756 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9757 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9759 /* Don't call build_qualified type for atomics. That routine does
9760 special processing for atomics, and until they are initialized
9761 it's better not to make that call.
9763 Check to see if there is a target override for atomic types. */
9765 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9766 targetm
.atomic_align_for_mode (QImode
));
9767 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9768 targetm
.atomic_align_for_mode (HImode
));
9769 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9770 targetm
.atomic_align_for_mode (SImode
));
9771 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9772 targetm
.atomic_align_for_mode (DImode
));
9773 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9774 targetm
.atomic_align_for_mode (TImode
));
9776 access_public_node
= get_identifier ("public");
9777 access_protected_node
= get_identifier ("protected");
9778 access_private_node
= get_identifier ("private");
9780 /* Define these next since types below may used them. */
9781 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9782 integer_one_node
= build_int_cst (integer_type_node
, 1);
9783 integer_three_node
= build_int_cst (integer_type_node
, 3);
9784 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9786 size_zero_node
= size_int (0);
9787 size_one_node
= size_int (1);
9788 bitsize_zero_node
= bitsize_int (0);
9789 bitsize_one_node
= bitsize_int (1);
9790 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9792 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9793 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9795 void_type_node
= make_node (VOID_TYPE
);
9796 layout_type (void_type_node
);
9798 /* We are not going to have real types in C with less than byte alignment,
9799 so we might as well not have any types that claim to have it. */
9800 TYPE_ALIGN (void_type_node
) = BITS_PER_UNIT
;
9801 TYPE_USER_ALIGN (void_type_node
) = 0;
9803 void_node
= make_node (VOID_CST
);
9804 TREE_TYPE (void_node
) = void_type_node
;
9806 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9807 layout_type (TREE_TYPE (null_pointer_node
));
9809 ptr_type_node
= build_pointer_type (void_type_node
);
9811 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9812 fileptr_type_node
= ptr_type_node
;
9814 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9816 float_type_node
= make_node (REAL_TYPE
);
9817 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9818 layout_type (float_type_node
);
9820 double_type_node
= make_node (REAL_TYPE
);
9822 TYPE_PRECISION (double_type_node
) = FLOAT_TYPE_SIZE
;
9824 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9825 layout_type (double_type_node
);
9827 long_double_type_node
= make_node (REAL_TYPE
);
9828 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9829 layout_type (long_double_type_node
);
9831 float_ptr_type_node
= build_pointer_type (float_type_node
);
9832 double_ptr_type_node
= build_pointer_type (double_type_node
);
9833 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9834 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9836 /* Fixed size integer types. */
9837 uint16_type_node
= build_nonstandard_integer_type (16, true);
9838 uint32_type_node
= build_nonstandard_integer_type (32, true);
9839 uint64_type_node
= build_nonstandard_integer_type (64, true);
9841 /* Decimal float types. */
9842 dfloat32_type_node
= make_node (REAL_TYPE
);
9843 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9844 layout_type (dfloat32_type_node
);
9845 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9846 dfloat32_ptr_type_node
= build_pointer_type (dfloat32_type_node
);
9848 dfloat64_type_node
= make_node (REAL_TYPE
);
9849 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9850 layout_type (dfloat64_type_node
);
9851 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9852 dfloat64_ptr_type_node
= build_pointer_type (dfloat64_type_node
);
9854 dfloat128_type_node
= make_node (REAL_TYPE
);
9855 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9856 layout_type (dfloat128_type_node
);
9857 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9858 dfloat128_ptr_type_node
= build_pointer_type (dfloat128_type_node
);
9860 complex_integer_type_node
= build_complex_type (integer_type_node
);
9861 complex_float_type_node
= build_complex_type (float_type_node
);
9862 complex_double_type_node
= build_complex_type (double_type_node
);
9863 complex_long_double_type_node
= build_complex_type (long_double_type_node
);
9865 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9866 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9867 sat_ ## KIND ## _type_node = \
9868 make_sat_signed_ ## KIND ## _type (SIZE); \
9869 sat_unsigned_ ## KIND ## _type_node = \
9870 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9871 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9872 unsigned_ ## KIND ## _type_node = \
9873 make_unsigned_ ## KIND ## _type (SIZE);
9875 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9876 sat_ ## WIDTH ## KIND ## _type_node = \
9877 make_sat_signed_ ## KIND ## _type (SIZE); \
9878 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9879 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9880 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9881 unsigned_ ## WIDTH ## KIND ## _type_node = \
9882 make_unsigned_ ## KIND ## _type (SIZE);
9884 /* Make fixed-point type nodes based on four different widths. */
9885 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9886 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9887 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9888 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9889 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9891 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9892 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9893 NAME ## _type_node = \
9894 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9895 u ## NAME ## _type_node = \
9896 make_or_reuse_unsigned_ ## KIND ## _type \
9897 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9898 sat_ ## NAME ## _type_node = \
9899 make_or_reuse_sat_signed_ ## KIND ## _type \
9900 (GET_MODE_BITSIZE (MODE ## mode)); \
9901 sat_u ## NAME ## _type_node = \
9902 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9903 (GET_MODE_BITSIZE (U ## MODE ## mode));
9905 /* Fixed-point type and mode nodes. */
9906 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9907 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9908 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9909 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9910 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9911 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9912 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9913 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9914 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9915 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9916 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9919 tree t
= targetm
.build_builtin_va_list ();
9921 /* Many back-ends define record types without setting TYPE_NAME.
9922 If we copied the record type here, we'd keep the original
9923 record type without a name. This breaks name mangling. So,
9924 don't copy record types and let c_common_nodes_and_builtins()
9925 declare the type to be __builtin_va_list. */
9926 if (TREE_CODE (t
) != RECORD_TYPE
)
9927 t
= build_variant_type_copy (t
);
9929 va_list_type_node
= t
;
9933 /* Modify DECL for given flags.
9934 TM_PURE attribute is set only on types, so the function will modify
9935 DECL's type when ECF_TM_PURE is used. */
9938 set_call_expr_flags (tree decl
, int flags
)
9940 if (flags
& ECF_NOTHROW
)
9941 TREE_NOTHROW (decl
) = 1;
9942 if (flags
& ECF_CONST
)
9943 TREE_READONLY (decl
) = 1;
9944 if (flags
& ECF_PURE
)
9945 DECL_PURE_P (decl
) = 1;
9946 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9947 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9948 if (flags
& ECF_NOVOPS
)
9949 DECL_IS_NOVOPS (decl
) = 1;
9950 if (flags
& ECF_NORETURN
)
9951 TREE_THIS_VOLATILE (decl
) = 1;
9952 if (flags
& ECF_MALLOC
)
9953 DECL_IS_MALLOC (decl
) = 1;
9954 if (flags
& ECF_RETURNS_TWICE
)
9955 DECL_IS_RETURNS_TWICE (decl
) = 1;
9956 if (flags
& ECF_LEAF
)
9957 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9958 NULL
, DECL_ATTRIBUTES (decl
));
9959 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9960 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9961 /* Looping const or pure is implied by noreturn.
9962 There is currently no way to declare looping const or looping pure alone. */
9963 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9964 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9968 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9971 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9972 const char *library_name
, int ecf_flags
)
9976 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9977 library_name
, NULL_TREE
);
9978 set_call_expr_flags (decl
, ecf_flags
);
9980 set_builtin_decl (code
, decl
, true);
9983 /* Call this function after instantiating all builtins that the language
9984 front end cares about. This will build the rest of the builtins that
9985 are relied upon by the tree optimizers and the middle-end. */
9988 build_common_builtin_nodes (void)
9993 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9995 ftype
= build_function_type (void_type_node
, void_list_node
);
9996 local_define_builtin ("__builtin_unreachable", ftype
, BUILT_IN_UNREACHABLE
,
9997 "__builtin_unreachable",
9998 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9999 | ECF_CONST
| ECF_LEAF
);
10002 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
10003 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
10005 ftype
= build_function_type_list (ptr_type_node
,
10006 ptr_type_node
, const_ptr_type_node
,
10007 size_type_node
, NULL_TREE
);
10009 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
10010 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
10011 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
10012 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
10013 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
10014 "memmove", ECF_NOTHROW
| ECF_LEAF
);
10017 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
10019 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
10020 const_ptr_type_node
, size_type_node
,
10022 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
10023 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10026 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
10028 ftype
= build_function_type_list (ptr_type_node
,
10029 ptr_type_node
, integer_type_node
,
10030 size_type_node
, NULL_TREE
);
10031 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
10032 "memset", ECF_NOTHROW
| ECF_LEAF
);
10035 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
10037 ftype
= build_function_type_list (ptr_type_node
,
10038 size_type_node
, NULL_TREE
);
10039 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
10040 "alloca", ECF_MALLOC
| ECF_NOTHROW
| ECF_LEAF
);
10043 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
10044 size_type_node
, NULL_TREE
);
10045 local_define_builtin ("__builtin_alloca_with_align", ftype
,
10046 BUILT_IN_ALLOCA_WITH_ALIGN
, "alloca",
10047 ECF_MALLOC
| ECF_NOTHROW
| ECF_LEAF
);
10049 /* If we're checking the stack, `alloca' can throw. */
10050 if (flag_stack_check
)
10052 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA
)) = 0;
10053 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
)) = 0;
10056 ftype
= build_function_type_list (void_type_node
,
10057 ptr_type_node
, ptr_type_node
,
10058 ptr_type_node
, NULL_TREE
);
10059 local_define_builtin ("__builtin_init_trampoline", ftype
,
10060 BUILT_IN_INIT_TRAMPOLINE
,
10061 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
10062 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
10063 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
10064 "__builtin_init_heap_trampoline",
10065 ECF_NOTHROW
| ECF_LEAF
);
10067 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
10068 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
10069 BUILT_IN_ADJUST_TRAMPOLINE
,
10070 "__builtin_adjust_trampoline",
10071 ECF_CONST
| ECF_NOTHROW
);
10073 ftype
= build_function_type_list (void_type_node
,
10074 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10075 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
10076 BUILT_IN_NONLOCAL_GOTO
,
10077 "__builtin_nonlocal_goto",
10078 ECF_NORETURN
| ECF_NOTHROW
);
10080 ftype
= build_function_type_list (void_type_node
,
10081 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10082 local_define_builtin ("__builtin_setjmp_setup", ftype
,
10083 BUILT_IN_SETJMP_SETUP
,
10084 "__builtin_setjmp_setup", ECF_NOTHROW
);
10086 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10087 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
10088 BUILT_IN_SETJMP_RECEIVER
,
10089 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
10091 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
10092 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
10093 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
10095 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10096 local_define_builtin ("__builtin_stack_restore", ftype
,
10097 BUILT_IN_STACK_RESTORE
,
10098 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
10100 /* If there's a possibility that we might use the ARM EABI, build the
10101 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10102 if (targetm
.arm_eabi_unwinder
)
10104 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
10105 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
10106 BUILT_IN_CXA_END_CLEANUP
,
10107 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
10110 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10111 local_define_builtin ("__builtin_unwind_resume", ftype
,
10112 BUILT_IN_UNWIND_RESUME
,
10113 ((targetm_common
.except_unwind_info (&global_options
)
10115 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10118 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
10120 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
10122 local_define_builtin ("__builtin_return_address", ftype
,
10123 BUILT_IN_RETURN_ADDRESS
,
10124 "__builtin_return_address",
10128 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
10129 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10131 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
10132 ptr_type_node
, NULL_TREE
);
10133 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
10134 local_define_builtin ("__cyg_profile_func_enter", ftype
,
10135 BUILT_IN_PROFILE_FUNC_ENTER
,
10136 "__cyg_profile_func_enter", 0);
10137 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10138 local_define_builtin ("__cyg_profile_func_exit", ftype
,
10139 BUILT_IN_PROFILE_FUNC_EXIT
,
10140 "__cyg_profile_func_exit", 0);
10143 /* The exception object and filter values from the runtime. The argument
10144 must be zero before exception lowering, i.e. from the front end. After
10145 exception lowering, it will be the region number for the exception
10146 landing pad. These functions are PURE instead of CONST to prevent
10147 them from being hoisted past the exception edge that will initialize
10148 its value in the landing pad. */
10149 ftype
= build_function_type_list (ptr_type_node
,
10150 integer_type_node
, NULL_TREE
);
10151 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
10152 /* Only use TM_PURE if we we have TM language support. */
10153 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
10154 ecf_flags
|= ECF_TM_PURE
;
10155 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
10156 "__builtin_eh_pointer", ecf_flags
);
10158 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
10159 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
10160 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
10161 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10163 ftype
= build_function_type_list (void_type_node
,
10164 integer_type_node
, integer_type_node
,
10166 local_define_builtin ("__builtin_eh_copy_values", ftype
,
10167 BUILT_IN_EH_COPY_VALUES
,
10168 "__builtin_eh_copy_values", ECF_NOTHROW
);
10170 /* Complex multiplication and division. These are handled as builtins
10171 rather than optabs because emit_library_call_value doesn't support
10172 complex. Further, we can do slightly better with folding these
10173 beasties if the real and complex parts of the arguments are separate. */
10177 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
10179 char mode_name_buf
[4], *q
;
10181 enum built_in_function mcode
, dcode
;
10182 tree type
, inner_type
;
10183 const char *prefix
= "__";
10185 if (targetm
.libfunc_gnu_prefix
)
10188 type
= lang_hooks
.types
.type_for_mode ((enum machine_mode
) mode
, 0);
10191 inner_type
= TREE_TYPE (type
);
10193 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
10194 inner_type
, inner_type
, NULL_TREE
);
10196 mcode
= ((enum built_in_function
)
10197 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10198 dcode
= ((enum built_in_function
)
10199 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10201 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
10205 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
10207 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
10208 built_in_names
[mcode
],
10209 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10211 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
10213 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
10214 built_in_names
[dcode
],
10215 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10220 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10223 If we requested a pointer to a vector, build up the pointers that
10224 we stripped off while looking for the inner type. Similarly for
10225 return values from functions.
10227 The argument TYPE is the top of the chain, and BOTTOM is the
10228 new type which we will point to. */
10231 reconstruct_complex_type (tree type
, tree bottom
)
10235 if (TREE_CODE (type
) == POINTER_TYPE
)
10237 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10238 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
10239 TYPE_REF_CAN_ALIAS_ALL (type
));
10241 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
10243 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10244 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
10245 TYPE_REF_CAN_ALIAS_ALL (type
));
10247 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10249 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10250 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
10252 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
10254 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10255 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
));
10257 else if (TREE_CODE (type
) == METHOD_TYPE
)
10259 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10260 /* The build_method_type_directly() routine prepends 'this' to argument list,
10261 so we must compensate by getting rid of it. */
10263 = build_method_type_directly
10264 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
10266 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
10268 else if (TREE_CODE (type
) == OFFSET_TYPE
)
10270 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10271 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
10276 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
10277 TYPE_QUALS (type
));
10280 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10283 build_vector_type_for_mode (tree innertype
, enum machine_mode mode
)
10287 switch (GET_MODE_CLASS (mode
))
10289 case MODE_VECTOR_INT
:
10290 case MODE_VECTOR_FLOAT
:
10291 case MODE_VECTOR_FRACT
:
10292 case MODE_VECTOR_UFRACT
:
10293 case MODE_VECTOR_ACCUM
:
10294 case MODE_VECTOR_UACCUM
:
10295 nunits
= GET_MODE_NUNITS (mode
);
10299 /* Check that there are no leftover bits. */
10300 gcc_assert (GET_MODE_BITSIZE (mode
)
10301 % TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
10303 nunits
= GET_MODE_BITSIZE (mode
)
10304 / TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
10308 gcc_unreachable ();
10311 return make_vector_type (innertype
, nunits
, mode
);
10314 /* Similarly, but takes the inner type and number of units, which must be
10318 build_vector_type (tree innertype
, int nunits
)
10320 return make_vector_type (innertype
, nunits
, VOIDmode
);
10323 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10326 build_opaque_vector_type (tree innertype
, int nunits
)
10328 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10330 /* We always build the non-opaque variant before the opaque one,
10331 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10332 cand
= TYPE_NEXT_VARIANT (t
);
10334 && TYPE_VECTOR_OPAQUE (cand
)
10335 && check_qualified_type (cand
, t
, TYPE_QUALS (t
), NULL_TREE
))
10337 /* Othewise build a variant type and make sure to queue it after
10338 the non-opaque type. */
10339 cand
= build_distinct_type_copy (t
);
10340 TYPE_VECTOR_OPAQUE (cand
) = true;
10341 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10342 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10343 TYPE_NEXT_VARIANT (t
) = cand
;
10344 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
10349 /* Given an initializer INIT, return TRUE if INIT is zero or some
10350 aggregate of zeros. Otherwise return FALSE. */
10352 initializer_zerop (const_tree init
)
10358 switch (TREE_CODE (init
))
10361 return integer_zerop (init
);
10364 /* ??? Note that this is not correct for C4X float formats. There,
10365 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10366 negative exponent. */
10367 return real_zerop (init
)
10368 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
));
10371 return fixed_zerop (init
);
10374 return integer_zerop (init
)
10375 || (real_zerop (init
)
10376 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10377 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
))));
10382 for (i
= 0; i
< VECTOR_CST_NELTS (init
); ++i
)
10383 if (!initializer_zerop (VECTOR_CST_ELT (init
, i
)))
10390 unsigned HOST_WIDE_INT idx
;
10392 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10393 if (!initializer_zerop (elt
))
10402 /* We need to loop through all elements to handle cases like
10403 "\0" and "\0foobar". */
10404 for (i
= 0; i
< TREE_STRING_LENGTH (init
); ++i
)
10405 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10416 /* Check if vector VEC consists of all the equal elements and
10417 that the number of elements corresponds to the type of VEC.
10418 The function returns first element of the vector
10419 or NULL_TREE if the vector is not uniform. */
10421 uniform_vector_p (const_tree vec
)
10426 if (vec
== NULL_TREE
)
10429 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10431 if (TREE_CODE (vec
) == VECTOR_CST
)
10433 first
= VECTOR_CST_ELT (vec
, 0);
10434 for (i
= 1; i
< VECTOR_CST_NELTS (vec
); ++i
)
10435 if (!operand_equal_p (first
, VECTOR_CST_ELT (vec
, i
), 0))
10441 else if (TREE_CODE (vec
) == CONSTRUCTOR
)
10443 first
= error_mark_node
;
10445 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10452 if (!operand_equal_p (first
, t
, 0))
10455 if (i
!= TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)))
10464 /* Build an empty statement at location LOC. */
10467 build_empty_stmt (location_t loc
)
10469 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
10470 SET_EXPR_LOCATION (t
, loc
);
10475 /* Build an OpenMP clause with code CODE. LOC is the location of the
10479 build_omp_clause (location_t loc
, enum omp_clause_code code
)
10484 length
= omp_clause_num_ops
[code
];
10485 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
10487 record_node_allocation_statistics (OMP_CLAUSE
, size
);
10489 t
= (tree
) ggc_internal_alloc (size
);
10490 memset (t
, 0, size
);
10491 TREE_SET_CODE (t
, OMP_CLAUSE
);
10492 OMP_CLAUSE_SET_CODE (t
, code
);
10493 OMP_CLAUSE_LOCATION (t
) = loc
;
10498 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10499 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10500 Except for the CODE and operand count field, other storage for the
10501 object is initialized to zeros. */
10504 build_vl_exp_stat (enum tree_code code
, int len MEM_STAT_DECL
)
10507 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
10509 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
10510 gcc_assert (len
>= 1);
10512 record_node_allocation_statistics (code
, length
);
10514 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
10516 TREE_SET_CODE (t
, code
);
10518 /* Can't use TREE_OPERAND to store the length because if checking is
10519 enabled, it will try to check the length before we store it. :-P */
10520 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
10525 /* Helper function for build_call_* functions; build a CALL_EXPR with
10526 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10527 the argument slots. */
10530 build_call_1 (tree return_type
, tree fn
, int nargs
)
10534 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10535 TREE_TYPE (t
) = return_type
;
10536 CALL_EXPR_FN (t
) = fn
;
10537 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10542 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10543 FN and a null static chain slot. NARGS is the number of call arguments
10544 which are specified as "..." arguments. */
10547 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10551 va_start (args
, nargs
);
10552 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10557 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10558 FN and a null static chain slot. NARGS is the number of call arguments
10559 which are specified as a va_list ARGS. */
10562 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10567 t
= build_call_1 (return_type
, fn
, nargs
);
10568 for (i
= 0; i
< nargs
; i
++)
10569 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10570 process_call_operands (t
);
10574 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10575 FN and a null static chain slot. NARGS is the number of call arguments
10576 which are specified as a tree array ARGS. */
10579 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10580 int nargs
, const tree
*args
)
10585 t
= build_call_1 (return_type
, fn
, nargs
);
10586 for (i
= 0; i
< nargs
; i
++)
10587 CALL_EXPR_ARG (t
, i
) = args
[i
];
10588 process_call_operands (t
);
10589 SET_EXPR_LOCATION (t
, loc
);
10593 /* Like build_call_array, but takes a vec. */
10596 build_call_vec (tree return_type
, tree fn
, vec
<tree
, va_gc
> *args
)
10601 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10602 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10603 CALL_EXPR_ARG (ret
, ix
) = t
;
10604 process_call_operands (ret
);
10608 /* Conveniently construct a function call expression. FNDECL names the
10609 function to be called and N arguments are passed in the array
10613 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10615 tree fntype
= TREE_TYPE (fndecl
);
10616 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10618 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10621 /* Conveniently construct a function call expression. FNDECL names the
10622 function to be called and the arguments are passed in the vector
10626 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10628 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10629 vec_safe_address (vec
));
10633 /* Conveniently construct a function call expression. FNDECL names the
10634 function to be called, N is the number of arguments, and the "..."
10635 parameters are the argument expressions. */
10638 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10641 tree
*argarray
= XALLOCAVEC (tree
, n
);
10645 for (i
= 0; i
< n
; i
++)
10646 argarray
[i
] = va_arg (ap
, tree
);
10648 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10651 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10652 varargs macros aren't supported by all bootstrap compilers. */
10655 build_call_expr (tree fndecl
, int n
, ...)
10658 tree
*argarray
= XALLOCAVEC (tree
, n
);
10662 for (i
= 0; i
< n
; i
++)
10663 argarray
[i
] = va_arg (ap
, tree
);
10665 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10668 /* Build internal call expression. This is just like CALL_EXPR, except
10669 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10670 internal function. */
10673 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
10674 tree type
, int n
, ...)
10679 tree fn
= build_call_1 (type
, NULL_TREE
, n
);
10681 for (i
= 0; i
< n
; i
++)
10682 CALL_EXPR_ARG (fn
, i
) = va_arg (ap
, tree
);
10684 SET_EXPR_LOCATION (fn
, loc
);
10685 CALL_EXPR_IFN (fn
) = ifn
;
10689 /* Create a new constant string literal and return a char* pointer to it.
10690 The STRING_CST value is the LEN characters at STR. */
10692 build_string_literal (int len
, const char *str
)
10694 tree t
, elem
, index
, type
;
10696 t
= build_string (len
, str
);
10697 elem
= build_type_variant (char_type_node
, 1, 0);
10698 index
= build_index_type (size_int (len
- 1));
10699 type
= build_array_type (elem
, index
);
10700 TREE_TYPE (t
) = type
;
10701 TREE_CONSTANT (t
) = 1;
10702 TREE_READONLY (t
) = 1;
10703 TREE_STATIC (t
) = 1;
10705 type
= build_pointer_type (elem
);
10706 t
= build1 (ADDR_EXPR
, type
,
10707 build4 (ARRAY_REF
, elem
,
10708 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
10714 /* Return true if T (assumed to be a DECL) must be assigned a memory
10718 needs_to_live_in_memory (const_tree t
)
10720 return (TREE_ADDRESSABLE (t
)
10721 || is_global_var (t
)
10722 || (TREE_CODE (t
) == RESULT_DECL
10723 && !DECL_BY_REFERENCE (t
)
10724 && aggregate_value_p (t
, current_function_decl
)));
10727 /* Return value of a constant X and sign-extend it. */
10730 int_cst_value (const_tree x
)
10732 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10733 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
10735 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10736 gcc_assert (cst_and_fits_in_hwi (x
));
10738 if (bits
< HOST_BITS_PER_WIDE_INT
)
10740 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10742 val
|= (~(unsigned HOST_WIDE_INT
) 0) << (bits
- 1) << 1;
10744 val
&= ~((~(unsigned HOST_WIDE_INT
) 0) << (bits
- 1) << 1);
10750 /* If TYPE is an integral or pointer type, return an integer type with
10751 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10752 if TYPE is already an integer type of signedness UNSIGNEDP. */
10755 signed_or_unsigned_type_for (int unsignedp
, tree type
)
10757 if (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
) == unsignedp
)
10760 if (TREE_CODE (type
) == VECTOR_TYPE
)
10762 tree inner
= TREE_TYPE (type
);
10763 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10766 if (inner
== inner2
)
10768 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
10771 if (!INTEGRAL_TYPE_P (type
)
10772 && !POINTER_TYPE_P (type
)
10773 && TREE_CODE (type
) != OFFSET_TYPE
)
10776 return build_nonstandard_integer_type (TYPE_PRECISION (type
), unsignedp
);
10779 /* If TYPE is an integral or pointer type, return an integer type with
10780 the same precision which is unsigned, or itself if TYPE is already an
10781 unsigned integer type. */
10784 unsigned_type_for (tree type
)
10786 return signed_or_unsigned_type_for (1, type
);
10789 /* If TYPE is an integral or pointer type, return an integer type with
10790 the same precision which is signed, or itself if TYPE is already a
10791 signed integer type. */
10794 signed_type_for (tree type
)
10796 return signed_or_unsigned_type_for (0, type
);
10799 /* If TYPE is a vector type, return a signed integer vector type with the
10800 same width and number of subparts. Otherwise return boolean_type_node. */
10803 truth_type_for (tree type
)
10805 if (TREE_CODE (type
) == VECTOR_TYPE
)
10807 tree elem
= lang_hooks
.types
.type_for_size
10808 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))), 0);
10809 return build_opaque_vector_type (elem
, TYPE_VECTOR_SUBPARTS (type
));
10812 return boolean_type_node
;
10815 /* Returns the largest value obtainable by casting something in INNER type to
10819 upper_bound_in_type (tree outer
, tree inner
)
10821 unsigned int det
= 0;
10822 unsigned oprec
= TYPE_PRECISION (outer
);
10823 unsigned iprec
= TYPE_PRECISION (inner
);
10826 /* Compute a unique number for every combination. */
10827 det
|= (oprec
> iprec
) ? 4 : 0;
10828 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
10829 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
10831 /* Determine the exponent to use. */
10836 /* oprec <= iprec, outer: signed, inner: don't care. */
10841 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10845 /* oprec > iprec, outer: signed, inner: signed. */
10849 /* oprec > iprec, outer: signed, inner: unsigned. */
10853 /* oprec > iprec, outer: unsigned, inner: signed. */
10857 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10861 gcc_unreachable ();
10864 return wide_int_to_tree (outer
,
10865 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
10868 /* Returns the smallest value obtainable by casting something in INNER type to
10872 lower_bound_in_type (tree outer
, tree inner
)
10874 unsigned oprec
= TYPE_PRECISION (outer
);
10875 unsigned iprec
= TYPE_PRECISION (inner
);
10877 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10879 if (TYPE_UNSIGNED (outer
)
10880 /* If we are widening something of an unsigned type, OUTER type
10881 contains all values of INNER type. In particular, both INNER
10882 and OUTER types have zero in common. */
10883 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
10884 return build_int_cst (outer
, 0);
10887 /* If we are widening a signed type to another signed type, we
10888 want to obtain -2^^(iprec-1). If we are keeping the
10889 precision or narrowing to a signed type, we want to obtain
10891 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
10892 return wide_int_to_tree (outer
,
10893 wi::mask (prec
- 1, true,
10894 TYPE_PRECISION (outer
)));
10898 /* Return nonzero if two operands that are suitable for PHI nodes are
10899 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10900 SSA_NAME or invariant. Note that this is strictly an optimization.
10901 That is, callers of this function can directly call operand_equal_p
10902 and get the same result, only slower. */
10905 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
10909 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
10911 return operand_equal_p (arg0
, arg1
, 0);
10914 /* Returns number of zeros at the end of binary representation of X. */
10917 num_ending_zeros (const_tree x
)
10919 return build_int_cst (TREE_TYPE (x
), wi::ctz (x
));
10923 #define WALK_SUBTREE(NODE) \
10926 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10932 /* This is a subroutine of walk_tree that walks field of TYPE that are to
10933 be walked whenever a type is seen in the tree. Rest of operands and return
10934 value are as for walk_tree. */
10937 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
10938 struct pointer_set_t
*pset
, walk_tree_lh lh
)
10940 tree result
= NULL_TREE
;
10942 switch (TREE_CODE (type
))
10945 case REFERENCE_TYPE
:
10947 /* We have to worry about mutually recursive pointers. These can't
10948 be written in C. They can in Ada. It's pathological, but
10949 there's an ACATS test (c38102a) that checks it. Deal with this
10950 by checking if we're pointing to another pointer, that one
10951 points to another pointer, that one does too, and we have no htab.
10952 If so, get a hash table. We check three levels deep to avoid
10953 the cost of the hash table if we don't need one. */
10954 if (POINTER_TYPE_P (TREE_TYPE (type
))
10955 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
10956 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
10959 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
10967 /* ... fall through ... */
10970 WALK_SUBTREE (TREE_TYPE (type
));
10974 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
10976 /* Fall through. */
10978 case FUNCTION_TYPE
:
10979 WALK_SUBTREE (TREE_TYPE (type
));
10983 /* We never want to walk into default arguments. */
10984 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
10985 WALK_SUBTREE (TREE_VALUE (arg
));
10990 /* Don't follow this nodes's type if a pointer for fear that
10991 we'll have infinite recursion. If we have a PSET, then we
10994 || (!POINTER_TYPE_P (TREE_TYPE (type
))
10995 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
10996 WALK_SUBTREE (TREE_TYPE (type
));
10997 WALK_SUBTREE (TYPE_DOMAIN (type
));
11001 WALK_SUBTREE (TREE_TYPE (type
));
11002 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
11012 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11013 called with the DATA and the address of each sub-tree. If FUNC returns a
11014 non-NULL value, the traversal is stopped, and the value returned by FUNC
11015 is returned. If PSET is non-NULL it is used to record the nodes visited,
11016 and to avoid visiting a node more than once. */
11019 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11020 struct pointer_set_t
*pset
, walk_tree_lh lh
)
11022 enum tree_code code
;
11026 #define WALK_SUBTREE_TAIL(NODE) \
11030 goto tail_recurse; \
11035 /* Skip empty subtrees. */
11039 /* Don't walk the same tree twice, if the user has requested
11040 that we avoid doing so. */
11041 if (pset
&& pointer_set_insert (pset
, *tp
))
11044 /* Call the function. */
11046 result
= (*func
) (tp
, &walk_subtrees
, data
);
11048 /* If we found something, return it. */
11052 code
= TREE_CODE (*tp
);
11054 /* Even if we didn't, FUNC may have decided that there was nothing
11055 interesting below this point in the tree. */
11056 if (!walk_subtrees
)
11058 /* But we still need to check our siblings. */
11059 if (code
== TREE_LIST
)
11060 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11061 else if (code
== OMP_CLAUSE
)
11062 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11069 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11070 if (result
|| !walk_subtrees
)
11077 case IDENTIFIER_NODE
:
11084 case PLACEHOLDER_EXPR
:
11088 /* None of these have subtrees other than those already walked
11093 WALK_SUBTREE (TREE_VALUE (*tp
));
11094 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11099 int len
= TREE_VEC_LENGTH (*tp
);
11104 /* Walk all elements but the first. */
11106 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11108 /* Now walk the first one as a tail call. */
11109 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11113 WALK_SUBTREE (TREE_REALPART (*tp
));
11114 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11118 unsigned HOST_WIDE_INT idx
;
11119 constructor_elt
*ce
;
11121 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11123 WALK_SUBTREE (ce
->value
);
11128 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11133 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11135 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11136 into declarations that are just mentioned, rather than
11137 declared; they don't really belong to this part of the tree.
11138 And, we can see cycles: the initializer for a declaration
11139 can refer to the declaration itself. */
11140 WALK_SUBTREE (DECL_INITIAL (decl
));
11141 WALK_SUBTREE (DECL_SIZE (decl
));
11142 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11144 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11147 case STATEMENT_LIST
:
11149 tree_stmt_iterator i
;
11150 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11151 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11156 switch (OMP_CLAUSE_CODE (*tp
))
11158 case OMP_CLAUSE_PRIVATE
:
11159 case OMP_CLAUSE_SHARED
:
11160 case OMP_CLAUSE_FIRSTPRIVATE
:
11161 case OMP_CLAUSE_COPYIN
:
11162 case OMP_CLAUSE_COPYPRIVATE
:
11163 case OMP_CLAUSE_FINAL
:
11164 case OMP_CLAUSE_IF
:
11165 case OMP_CLAUSE_NUM_THREADS
:
11166 case OMP_CLAUSE_SCHEDULE
:
11167 case OMP_CLAUSE_UNIFORM
:
11168 case OMP_CLAUSE_DEPEND
:
11169 case OMP_CLAUSE_NUM_TEAMS
:
11170 case OMP_CLAUSE_THREAD_LIMIT
:
11171 case OMP_CLAUSE_DEVICE
:
11172 case OMP_CLAUSE_DIST_SCHEDULE
:
11173 case OMP_CLAUSE_SAFELEN
:
11174 case OMP_CLAUSE_SIMDLEN
:
11175 case OMP_CLAUSE__LOOPTEMP_
:
11176 case OMP_CLAUSE__SIMDUID_
:
11177 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
11180 case OMP_CLAUSE_NOWAIT
:
11181 case OMP_CLAUSE_ORDERED
:
11182 case OMP_CLAUSE_DEFAULT
:
11183 case OMP_CLAUSE_UNTIED
:
11184 case OMP_CLAUSE_MERGEABLE
:
11185 case OMP_CLAUSE_PROC_BIND
:
11186 case OMP_CLAUSE_INBRANCH
:
11187 case OMP_CLAUSE_NOTINBRANCH
:
11188 case OMP_CLAUSE_FOR
:
11189 case OMP_CLAUSE_PARALLEL
:
11190 case OMP_CLAUSE_SECTIONS
:
11191 case OMP_CLAUSE_TASKGROUP
:
11192 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11194 case OMP_CLAUSE_LASTPRIVATE
:
11195 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11196 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
11197 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11199 case OMP_CLAUSE_COLLAPSE
:
11202 for (i
= 0; i
< 3; i
++)
11203 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11204 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11207 case OMP_CLAUSE_LINEAR
:
11208 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11209 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
11210 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
11211 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11213 case OMP_CLAUSE_ALIGNED
:
11214 case OMP_CLAUSE_FROM
:
11215 case OMP_CLAUSE_TO
:
11216 case OMP_CLAUSE_MAP
:
11217 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11218 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11219 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11221 case OMP_CLAUSE_REDUCTION
:
11224 for (i
= 0; i
< 4; i
++)
11225 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11226 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11230 gcc_unreachable ();
11238 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11239 But, we only want to walk once. */
11240 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11241 for (i
= 0; i
< len
; ++i
)
11242 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11243 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11247 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11248 defining. We only want to walk into these fields of a type in this
11249 case and not in the general case of a mere reference to the type.
11251 The criterion is as follows: if the field can be an expression, it
11252 must be walked only here. This should be in keeping with the fields
11253 that are directly gimplified in gimplify_type_sizes in order for the
11254 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11255 variable-sized types.
11257 Note that DECLs get walked as part of processing the BIND_EXPR. */
11258 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11260 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11261 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11264 /* Call the function for the type. See if it returns anything or
11265 doesn't want us to continue. If we are to continue, walk both
11266 the normal fields and those for the declaration case. */
11267 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11268 if (result
|| !walk_subtrees
)
11271 /* But do not walk a pointed-to type since it may itself need to
11272 be walked in the declaration case if it isn't anonymous. */
11273 if (!POINTER_TYPE_P (*type_p
))
11275 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11280 /* If this is a record type, also walk the fields. */
11281 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11285 for (field
= TYPE_FIELDS (*type_p
); field
;
11286 field
= DECL_CHAIN (field
))
11288 /* We'd like to look at the type of the field, but we can
11289 easily get infinite recursion. So assume it's pointed
11290 to elsewhere in the tree. Also, ignore things that
11292 if (TREE_CODE (field
) != FIELD_DECL
)
11295 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11296 WALK_SUBTREE (DECL_SIZE (field
));
11297 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11298 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11299 WALK_SUBTREE (DECL_QUALIFIER (field
));
11303 /* Same for scalar types. */
11304 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11305 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11306 || TREE_CODE (*type_p
) == INTEGER_TYPE
11307 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11308 || TREE_CODE (*type_p
) == REAL_TYPE
)
11310 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11311 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11314 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11315 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11320 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11324 /* Walk over all the sub-trees of this operand. */
11325 len
= TREE_OPERAND_LENGTH (*tp
);
11327 /* Go through the subtrees. We need to do this in forward order so
11328 that the scope of a FOR_EXPR is handled properly. */
11331 for (i
= 0; i
< len
- 1; ++i
)
11332 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11333 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11336 /* If this is a type, walk the needed fields in the type. */
11337 else if (TYPE_P (*tp
))
11338 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11342 /* We didn't find what we were looking for. */
11345 #undef WALK_SUBTREE_TAIL
11347 #undef WALK_SUBTREE
11349 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11352 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11356 struct pointer_set_t
*pset
;
11358 pset
= pointer_set_create ();
11359 result
= walk_tree_1 (tp
, func
, data
, pset
, lh
);
11360 pointer_set_destroy (pset
);
11366 tree_block (tree t
)
11368 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11370 if (IS_EXPR_CODE_CLASS (c
))
11371 return LOCATION_BLOCK (t
->exp
.locus
);
11372 gcc_unreachable ();
11377 tree_set_block (tree t
, tree b
)
11379 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11381 if (IS_EXPR_CODE_CLASS (c
))
11384 t
->exp
.locus
= COMBINE_LOCATION_DATA (line_table
, t
->exp
.locus
, b
);
11386 t
->exp
.locus
= LOCATION_LOCUS (t
->exp
.locus
);
11389 gcc_unreachable ();
11392 /* Create a nameless artificial label and put it in the current
11393 function context. The label has a location of LOC. Returns the
11394 newly created label. */
11397 create_artificial_label (location_t loc
)
11399 tree lab
= build_decl (loc
,
11400 LABEL_DECL
, NULL_TREE
, void_type_node
);
11402 DECL_ARTIFICIAL (lab
) = 1;
11403 DECL_IGNORED_P (lab
) = 1;
11404 DECL_CONTEXT (lab
) = current_function_decl
;
11408 /* Given a tree, try to return a useful variable name that we can use
11409 to prefix a temporary that is being assigned the value of the tree.
11410 I.E. given <temp> = &A, return A. */
11415 tree stripped_decl
;
11418 STRIP_NOPS (stripped_decl
);
11419 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11420 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11421 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11423 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11426 return IDENTIFIER_POINTER (name
);
11430 switch (TREE_CODE (stripped_decl
))
11433 return get_name (TREE_OPERAND (stripped_decl
, 0));
11440 /* Return true if TYPE has a variable argument list. */
11443 stdarg_p (const_tree fntype
)
11445 function_args_iterator args_iter
;
11446 tree n
= NULL_TREE
, t
;
11451 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
11456 return n
!= NULL_TREE
&& n
!= void_type_node
;
11459 /* Return true if TYPE has a prototype. */
11462 prototype_p (tree fntype
)
11466 gcc_assert (fntype
!= NULL_TREE
);
11468 t
= TYPE_ARG_TYPES (fntype
);
11469 return (t
!= NULL_TREE
);
11472 /* If BLOCK is inlined from an __attribute__((__artificial__))
11473 routine, return pointer to location from where it has been
11476 block_nonartificial_location (tree block
)
11478 location_t
*ret
= NULL
;
11480 while (block
&& TREE_CODE (block
) == BLOCK
11481 && BLOCK_ABSTRACT_ORIGIN (block
))
11483 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11485 while (TREE_CODE (ao
) == BLOCK
11486 && BLOCK_ABSTRACT_ORIGIN (ao
)
11487 && BLOCK_ABSTRACT_ORIGIN (ao
) != ao
)
11488 ao
= BLOCK_ABSTRACT_ORIGIN (ao
);
11490 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11492 /* If AO is an artificial inline, point RET to the
11493 call site locus at which it has been inlined and continue
11494 the loop, in case AO's caller is also an artificial
11496 if (DECL_DECLARED_INLINE_P (ao
)
11497 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11498 ret
= &BLOCK_SOURCE_LOCATION (block
);
11502 else if (TREE_CODE (ao
) != BLOCK
)
11505 block
= BLOCK_SUPERCONTEXT (block
);
11511 /* If EXP is inlined from an __attribute__((__artificial__))
11512 function, return the location of the original call expression. */
11515 tree_nonartificial_location (tree exp
)
11517 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11522 return EXPR_LOCATION (exp
);
11526 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
11529 /* Return the hash code code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11532 cl_option_hash_hash (const void *x
)
11534 const_tree
const t
= (const_tree
) x
;
11538 hashval_t hash
= 0;
11540 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11542 p
= (const char *)TREE_OPTIMIZATION (t
);
11543 len
= sizeof (struct cl_optimization
);
11546 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11548 p
= (const char *)TREE_TARGET_OPTION (t
);
11549 len
= sizeof (struct cl_target_option
);
11553 gcc_unreachable ();
11555 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11557 for (i
= 0; i
< len
; i
++)
11559 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
11564 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11565 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11569 cl_option_hash_eq (const void *x
, const void *y
)
11571 const_tree
const xt
= (const_tree
) x
;
11572 const_tree
const yt
= (const_tree
) y
;
11577 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11580 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11582 xp
= (const char *)TREE_OPTIMIZATION (xt
);
11583 yp
= (const char *)TREE_OPTIMIZATION (yt
);
11584 len
= sizeof (struct cl_optimization
);
11587 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11589 xp
= (const char *)TREE_TARGET_OPTION (xt
);
11590 yp
= (const char *)TREE_TARGET_OPTION (yt
);
11591 len
= sizeof (struct cl_target_option
);
11595 gcc_unreachable ();
11597 return (memcmp (xp
, yp
, len
) == 0);
11600 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11603 build_optimization_node (struct gcc_options
*opts
)
11608 /* Use the cache of optimization nodes. */
11610 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11613 slot
= htab_find_slot (cl_option_hash_table
, cl_optimization_node
, INSERT
);
11617 /* Insert this one into the hash table. */
11618 t
= cl_optimization_node
;
11621 /* Make a new node for next time round. */
11622 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11628 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11631 build_target_option_node (struct gcc_options
*opts
)
11636 /* Use the cache of optimization nodes. */
11638 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11641 slot
= htab_find_slot (cl_option_hash_table
, cl_target_option_node
, INSERT
);
11645 /* Insert this one into the hash table. */
11646 t
= cl_target_option_node
;
11649 /* Make a new node for next time round. */
11650 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11656 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11657 Called through htab_traverse. */
11660 prepare_target_option_node_for_pch (void **slot
, void *)
11662 tree node
= (tree
) *slot
;
11663 if (TREE_CODE (node
) == TARGET_OPTION_NODE
)
11664 TREE_TARGET_GLOBALS (node
) = NULL
;
11668 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11669 so that they aren't saved during PCH writing. */
11672 prepare_target_option_nodes_for_pch (void)
11674 htab_traverse (cl_option_hash_table
, prepare_target_option_node_for_pch
,
11678 /* Determine the "ultimate origin" of a block. The block may be an inlined
11679 instance of an inlined instance of a block which is local to an inline
11680 function, so we have to trace all of the way back through the origin chain
11681 to find out what sort of node actually served as the original seed for the
11685 block_ultimate_origin (const_tree block
)
11687 tree immediate_origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11689 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11690 nodes in the function to point to themselves; ignore that if
11691 we're trying to output the abstract instance of this function. */
11692 if (BLOCK_ABSTRACT (block
) && immediate_origin
== block
)
11695 if (immediate_origin
== NULL_TREE
)
11700 tree lookahead
= immediate_origin
;
11704 ret_val
= lookahead
;
11705 lookahead
= (TREE_CODE (ret_val
) == BLOCK
11706 ? BLOCK_ABSTRACT_ORIGIN (ret_val
) : NULL
);
11708 while (lookahead
!= NULL
&& lookahead
!= ret_val
);
11710 /* The block's abstract origin chain may not be the *ultimate* origin of
11711 the block. It could lead to a DECL that has an abstract origin set.
11712 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11713 will give us if it has one). Note that DECL's abstract origins are
11714 supposed to be the most distant ancestor (or so decl_ultimate_origin
11715 claims), so we don't need to loop following the DECL origins. */
11716 if (DECL_P (ret_val
))
11717 return DECL_ORIGIN (ret_val
);
11723 /* Return true iff conversion in EXP generates no instruction. Mark
11724 it inline so that we fully inline into the stripping functions even
11725 though we have two uses of this function. */
11728 tree_nop_conversion (const_tree exp
)
11730 tree outer_type
, inner_type
;
11731 int outer_is_pts_p
, inner_is_pts_p
;
11733 if (!CONVERT_EXPR_P (exp
)
11734 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
11736 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
11739 outer_type
= TREE_TYPE (exp
);
11740 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11745 outer_is_pts_p
= (POINTER_TYPE_P (outer_type
)
11746 && upc_shared_type_p (TREE_TYPE (outer_type
)));
11747 inner_is_pts_p
= (POINTER_TYPE_P (inner_type
)
11748 && upc_shared_type_p (TREE_TYPE (inner_type
)));
11750 /* UPC pointer-to-shared types have special
11751 equivalence rules that must be checked. */
11752 if (outer_is_pts_p
&& inner_is_pts_p
11753 && lang_hooks
.types_compatible_p
)
11754 return lang_hooks
.types_compatible_p (outer_type
, inner_type
);
11756 /* UPC pointer-to-shared types are not interchangeable
11757 with integral types. */
11758 if (outer_is_pts_p
|| inner_is_pts_p
)
11761 /* Use precision rather then machine mode when we can, which gives
11762 the correct answer even for submode (bit-field) types. */
11763 if ((INTEGRAL_TYPE_P (outer_type
)
11764 || POINTER_TYPE_P (outer_type
)
11765 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
11766 && (INTEGRAL_TYPE_P (inner_type
)
11767 || POINTER_TYPE_P (inner_type
)
11768 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
11769 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
11771 /* Otherwise fall back on comparing machine modes (e.g. for
11772 aggregate types, floats). */
11773 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
11776 /* Return true iff conversion in EXP generates no instruction. Don't
11777 consider conversions changing the signedness. */
11780 tree_sign_nop_conversion (const_tree exp
)
11782 tree outer_type
, inner_type
;
11784 if (!tree_nop_conversion (exp
))
11787 outer_type
= TREE_TYPE (exp
);
11788 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11790 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
11791 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
11794 /* Strip conversions from EXP according to tree_nop_conversion and
11795 return the resulting expression. */
11798 tree_strip_nop_conversions (tree exp
)
11800 while (tree_nop_conversion (exp
))
11801 exp
= TREE_OPERAND (exp
, 0);
11805 /* Strip conversions from EXP according to tree_sign_nop_conversion
11806 and return the resulting expression. */
11809 tree_strip_sign_nop_conversions (tree exp
)
11811 while (tree_sign_nop_conversion (exp
))
11812 exp
= TREE_OPERAND (exp
, 0);
11816 /* Avoid any floating point extensions from EXP. */
11818 strip_float_extensions (tree exp
)
11820 tree sub
, expt
, subt
;
11822 /* For floating point constant look up the narrowest type that can hold
11823 it properly and handle it like (type)(narrowest_type)constant.
11824 This way we can optimize for instance a=a*2.0 where "a" is float
11825 but 2.0 is double constant. */
11826 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
11828 REAL_VALUE_TYPE orig
;
11831 orig
= TREE_REAL_CST (exp
);
11832 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
11833 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
11834 type
= float_type_node
;
11835 else if (TYPE_PRECISION (TREE_TYPE (exp
))
11836 > TYPE_PRECISION (double_type_node
)
11837 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
11838 type
= double_type_node
;
11840 return build_real (type
, real_value_truncate (TYPE_MODE (type
), orig
));
11843 if (!CONVERT_EXPR_P (exp
))
11846 sub
= TREE_OPERAND (exp
, 0);
11847 subt
= TREE_TYPE (sub
);
11848 expt
= TREE_TYPE (exp
);
11850 if (!FLOAT_TYPE_P (subt
))
11853 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
11856 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
11859 return strip_float_extensions (sub
);
11862 /* Strip out all handled components that produce invariant
11866 strip_invariant_refs (const_tree op
)
11868 while (handled_component_p (op
))
11870 switch (TREE_CODE (op
))
11873 case ARRAY_RANGE_REF
:
11874 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
11875 || TREE_OPERAND (op
, 2) != NULL_TREE
11876 || TREE_OPERAND (op
, 3) != NULL_TREE
)
11880 case COMPONENT_REF
:
11881 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
11887 op
= TREE_OPERAND (op
, 0);
11893 static GTY(()) tree gcc_eh_personality_decl
;
11895 /* Return the GCC personality function decl. */
11898 lhd_gcc_personality (void)
11900 if (!gcc_eh_personality_decl
)
11901 gcc_eh_personality_decl
= build_personality_function ("gcc");
11902 return gcc_eh_personality_decl
;
11905 /* TARGET is a call target of GIMPLE call statement
11906 (obtained by gimple_call_fn). Return true if it is
11907 OBJ_TYPE_REF representing an virtual call of C++ method.
11908 (As opposed to OBJ_TYPE_REF representing objc calls
11909 through a cast where middle-end devirtualization machinery
11913 virtual_method_call_p (tree target
)
11915 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
11917 target
= TREE_TYPE (target
);
11918 gcc_checking_assert (TREE_CODE (target
) == POINTER_TYPE
);
11919 target
= TREE_TYPE (target
);
11920 if (TREE_CODE (target
) == FUNCTION_TYPE
)
11922 gcc_checking_assert (TREE_CODE (target
) == METHOD_TYPE
);
11926 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11929 obj_type_ref_class (tree ref
)
11931 gcc_checking_assert (TREE_CODE (ref
) == OBJ_TYPE_REF
);
11932 ref
= TREE_TYPE (ref
);
11933 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
11934 ref
= TREE_TYPE (ref
);
11935 /* We look for type THIS points to. ObjC also builds
11936 OBJ_TYPE_REF with non-method calls, Their first parameter
11937 ID however also corresponds to class type. */
11938 gcc_checking_assert (TREE_CODE (ref
) == METHOD_TYPE
11939 || TREE_CODE (ref
) == FUNCTION_TYPE
);
11940 ref
= TREE_VALUE (TYPE_ARG_TYPES (ref
));
11941 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
11942 return TREE_TYPE (ref
);
11945 /* Return true if T is in anonymous namespace. */
11948 type_in_anonymous_namespace_p (const_tree t
)
11950 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11951 bulitin types; those have CONTEXT NULL. */
11952 if (!TYPE_CONTEXT (t
))
11954 return (TYPE_STUB_DECL (t
) && !TREE_PUBLIC (TYPE_STUB_DECL (t
)));
11957 /* Try to find a base info of BINFO that would have its field decl at offset
11958 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11959 found, return, otherwise return NULL_TREE. */
11962 get_binfo_at_offset (tree binfo
, HOST_WIDE_INT offset
, tree expected_type
)
11964 tree type
= BINFO_TYPE (binfo
);
11968 HOST_WIDE_INT pos
, size
;
11972 if (types_same_for_odr (type
, expected_type
))
11977 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
11979 if (TREE_CODE (fld
) != FIELD_DECL
)
11982 pos
= int_bit_position (fld
);
11983 size
= tree_to_uhwi (DECL_SIZE (fld
));
11984 if (pos
<= offset
&& (pos
+ size
) > offset
)
11987 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
11990 if (!DECL_ARTIFICIAL (fld
))
11992 binfo
= TYPE_BINFO (TREE_TYPE (fld
));
11996 /* Offset 0 indicates the primary base, whose vtable contents are
11997 represented in the binfo for the derived class. */
11998 else if (offset
!= 0)
12000 tree base_binfo
, binfo2
= binfo
;
12002 /* Find BINFO corresponding to FLD. This is bit harder
12003 by a fact that in virtual inheritance we may need to walk down
12004 the non-virtual inheritance chain. */
12007 tree containing_binfo
= NULL
, found_binfo
= NULL
;
12008 for (i
= 0; BINFO_BASE_ITERATE (binfo2
, i
, base_binfo
); i
++)
12009 if (types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
12011 found_binfo
= base_binfo
;
12015 if ((tree_to_shwi (BINFO_OFFSET (base_binfo
))
12016 - tree_to_shwi (BINFO_OFFSET (binfo
)))
12017 * BITS_PER_UNIT
< pos
12018 /* Rule out types with no virtual methods or we can get confused
12019 here by zero sized bases. */
12020 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo
)))
12021 && (!containing_binfo
12022 || (tree_to_shwi (BINFO_OFFSET (containing_binfo
))
12023 < tree_to_shwi (BINFO_OFFSET (base_binfo
)))))
12024 containing_binfo
= base_binfo
;
12027 binfo
= found_binfo
;
12030 if (!containing_binfo
)
12032 binfo2
= containing_binfo
;
12036 type
= TREE_TYPE (fld
);
12041 /* Returns true if X is a typedef decl. */
12044 is_typedef_decl (tree x
)
12046 return (x
&& TREE_CODE (x
) == TYPE_DECL
12047 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
12050 /* Returns true iff TYPE is a type variant created for a typedef. */
12053 typedef_variant_p (tree type
)
12055 return is_typedef_decl (TYPE_NAME (type
));
12058 /* Warn about a use of an identifier which was marked deprecated. */
12060 warn_deprecated_use (tree node
, tree attr
)
12064 if (node
== 0 || !warn_deprecated_decl
)
12070 attr
= DECL_ATTRIBUTES (node
);
12071 else if (TYPE_P (node
))
12073 tree decl
= TYPE_STUB_DECL (node
);
12075 attr
= lookup_attribute ("deprecated",
12076 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12081 attr
= lookup_attribute ("deprecated", attr
);
12084 msg
= TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
)));
12090 expanded_location xloc
= expand_location (DECL_SOURCE_LOCATION (node
));
12092 warning (OPT_Wdeprecated_declarations
,
12093 "%qD is deprecated (declared at %r%s:%d%R): %s",
12094 node
, "locus", xloc
.file
, xloc
.line
, msg
);
12096 warning (OPT_Wdeprecated_declarations
,
12097 "%qD is deprecated (declared at %r%s:%d%R)",
12098 node
, "locus", xloc
.file
, xloc
.line
);
12100 else if (TYPE_P (node
))
12102 tree what
= NULL_TREE
;
12103 tree decl
= TYPE_STUB_DECL (node
);
12105 if (TYPE_NAME (node
))
12107 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12108 what
= TYPE_NAME (node
);
12109 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12110 && DECL_NAME (TYPE_NAME (node
)))
12111 what
= DECL_NAME (TYPE_NAME (node
));
12116 expanded_location xloc
12117 = expand_location (DECL_SOURCE_LOCATION (decl
));
12121 warning (OPT_Wdeprecated_declarations
,
12122 "%qE is deprecated (declared at %r%s:%d%R): %s",
12123 what
, "locus", xloc
.file
, xloc
.line
, msg
);
12125 warning (OPT_Wdeprecated_declarations
,
12126 "%qE is deprecated (declared at %r%s:%d%R)",
12127 what
, "locus", xloc
.file
, xloc
.line
);
12132 warning (OPT_Wdeprecated_declarations
,
12133 "type is deprecated (declared at %r%s:%d%R): %s",
12134 "locus", xloc
.file
, xloc
.line
, msg
);
12136 warning (OPT_Wdeprecated_declarations
,
12137 "type is deprecated (declared at %r%s:%d%R)",
12138 "locus", xloc
.file
, xloc
.line
);
12146 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated: %s",
12149 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated", what
);
12154 warning (OPT_Wdeprecated_declarations
, "type is deprecated: %s",
12157 warning (OPT_Wdeprecated_declarations
, "type is deprecated");
12163 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12164 somewhere in it. */
12167 contains_bitfld_component_ref_p (const_tree ref
)
12169 while (handled_component_p (ref
))
12171 if (TREE_CODE (ref
) == COMPONENT_REF
12172 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12174 ref
= TREE_OPERAND (ref
, 0);
12180 /* Try to determine whether a TRY_CATCH expression can fall through.
12181 This is a subroutine of block_may_fallthru. */
12184 try_catch_may_fallthru (const_tree stmt
)
12186 tree_stmt_iterator i
;
12188 /* If the TRY block can fall through, the whole TRY_CATCH can
12190 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12193 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12194 switch (TREE_CODE (tsi_stmt (i
)))
12197 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12198 catch expression and a body. The whole TRY_CATCH may fall
12199 through iff any of the catch bodies falls through. */
12200 for (; !tsi_end_p (i
); tsi_next (&i
))
12202 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12207 case EH_FILTER_EXPR
:
12208 /* The exception filter expression only matters if there is an
12209 exception. If the exception does not match EH_FILTER_TYPES,
12210 we will execute EH_FILTER_FAILURE, and we will fall through
12211 if that falls through. If the exception does match
12212 EH_FILTER_TYPES, the stack unwinder will continue up the
12213 stack, so we will not fall through. We don't know whether we
12214 will throw an exception which matches EH_FILTER_TYPES or not,
12215 so we just ignore EH_FILTER_TYPES and assume that we might
12216 throw an exception which doesn't match. */
12217 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12220 /* This case represents statements to be executed when an
12221 exception occurs. Those statements are implicitly followed
12222 by a RESX statement to resume execution after the exception.
12223 So in this case the TRY_CATCH never falls through. */
12228 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12229 need not be 100% accurate; simply be conservative and return true if we
12230 don't know. This is used only to avoid stupidly generating extra code.
12231 If we're wrong, we'll just delete the extra code later. */
12234 block_may_fallthru (const_tree block
)
12236 /* This CONST_CAST is okay because expr_last returns its argument
12237 unmodified and we assign it to a const_tree. */
12238 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12240 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12244 /* Easy cases. If the last statement of the block implies
12245 control transfer, then we can't fall through. */
12249 /* If SWITCH_LABELS is set, this is lowered, and represents a
12250 branch to a selected label and hence can not fall through.
12251 Otherwise SWITCH_BODY is set, and the switch can fall
12253 return SWITCH_LABELS (stmt
) == NULL_TREE
;
12256 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12258 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12261 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12263 case TRY_CATCH_EXPR
:
12264 return try_catch_may_fallthru (stmt
);
12266 case TRY_FINALLY_EXPR
:
12267 /* The finally clause is always executed after the try clause,
12268 so if it does not fall through, then the try-finally will not
12269 fall through. Otherwise, if the try clause does not fall
12270 through, then when the finally clause falls through it will
12271 resume execution wherever the try clause was going. So the
12272 whole try-finally will only fall through if both the try
12273 clause and the finally clause fall through. */
12274 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12275 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12278 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12279 stmt
= TREE_OPERAND (stmt
, 1);
12285 /* Functions that do not return do not fall through. */
12286 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12288 case CLEANUP_POINT_EXPR
:
12289 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12292 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12298 return lang_hooks
.block_may_fallthru (stmt
);
12302 /* Garbage collection support for tree_type_common. */
12304 extern void gt_ggc_mx (tree
&);
12305 extern void gt_ggc_mx_die_struct (void *);
12307 void gt_ggc_mx (tree_type_common
*tt
)
12309 tree t
= (tree
) tt
;
12310 tree block_factor
= TYPE_UPC_BLOCK_FACTOR (t
);
12312 gt_ggc_mx (tt
->common
.typed
.type
);
12313 gt_ggc_mx (tt
->common
.chain
);
12314 gt_ggc_mx (tt
->size
);
12315 gt_ggc_mx (tt
->size_unit
);
12316 gt_ggc_mx (tt
->attributes
);
12317 gt_ggc_mx (tt
->pointer_to
);
12318 gt_ggc_mx (tt
->reference_to
);
12319 switch (debug_hooks
->tree_type_symtab_field
)
12321 case TYPE_SYMTAB_IS_ADDRESS
:
12323 case TYPE_SYMTAB_IS_POINTER
:
12324 gt_ggc_m_S (tt
->symtab
.pointer
);
12326 case TYPE_SYMTAB_IS_DIE
:
12327 gt_ggc_mx_die_struct (tt
->symtab
.die
);
12332 gt_ggc_mx (tt
->name
);
12333 gt_ggc_mx (tt
->next_variant
);
12334 gt_ggc_mx (tt
->main_variant
);
12335 gt_ggc_mx (tt
->context
);
12336 gt_ggc_mx (tt
->canonical
);
12338 if (TYPE_HAS_UPC_BLOCK_FACTOR_X (t
))
12339 gt_ggc_mx (block_factor
);
12342 /* PCH support for tree_type_common. */
12344 extern void gt_pch_nx (tree
&);
12345 extern void gt_ggc_nx_die_struct (void *);
12347 void gt_pch_nx (tree_type_common
*tt
)
12349 tree t
= (tree
) tt
;
12350 tree block_factor
= TYPE_UPC_BLOCK_FACTOR (t
);
12352 gt_pch_nx (tt
->common
.typed
.type
);
12353 gt_pch_nx (tt
->common
.chain
);
12354 gt_pch_nx (tt
->size
);
12355 gt_pch_nx (tt
->size_unit
);
12356 gt_pch_nx (tt
->attributes
);
12357 gt_pch_nx (tt
->pointer_to
);
12358 gt_pch_nx (tt
->reference_to
);
12359 switch (debug_hooks
->tree_type_symtab_field
)
12361 case TYPE_SYMTAB_IS_ADDRESS
:
12363 case TYPE_SYMTAB_IS_POINTER
:
12364 gt_pch_n_S (tt
->symtab
.pointer
);
12366 case TYPE_SYMTAB_IS_DIE
:
12367 gt_pch_nx_die_struct (tt
->symtab
.die
);
12372 gt_pch_nx (tt
->name
);
12373 gt_pch_nx (tt
->next_variant
);
12374 gt_pch_nx (tt
->main_variant
);
12375 gt_pch_nx (tt
->context
);
12376 gt_pch_nx (tt
->canonical
);
12378 if (TYPE_HAS_UPC_BLOCK_FACTOR_X (t
))
12379 gt_pch_nx (block_factor
);
12382 void gt_pch_nx (tree_type_common
*tt
, gt_pointer_operator op
, void *cookie
)
12384 tree t
= (tree
) tt
;
12385 tree block_factor
= TYPE_UPC_BLOCK_FACTOR (t
);
12387 op (&(tt
->common
.typed
.type
), cookie
);
12388 op (&(tt
->common
.chain
), cookie
);
12389 op (&(tt
->size
), cookie
);
12390 op (&(tt
->size_unit
), cookie
);
12391 op (&(tt
->attributes
), cookie
);
12392 op (&(tt
->pointer_to
), cookie
);
12393 op (&(tt
->reference_to
), cookie
);
12394 switch (debug_hooks
->tree_type_symtab_field
)
12396 case TYPE_SYMTAB_IS_ADDRESS
:
12398 case TYPE_SYMTAB_IS_POINTER
:
12399 op (&(tt
->symtab
.pointer
), cookie
);
12401 case TYPE_SYMTAB_IS_DIE
:
12402 op (&(tt
->symtab
.die
), cookie
);
12407 op (&(tt
->name
), cookie
);
12408 op (&(tt
->next_variant
), cookie
);
12409 op (&(tt
->main_variant
), cookie
);
12410 op (&(tt
->context
), cookie
);
12411 op (&(tt
->canonical
), cookie
);
12413 if (TYPE_HAS_UPC_BLOCK_FACTOR_X (t
))
12414 op (&(block_factor
), cookie
);
/* True if we are using EH to handle cleanups.  Set by the front end via
   using_eh_for_cleanups (), queried via using_eh_for_cleanups_p ().  */
static bool using_eh_for_cleanups_flag = false;
12420 /* This routine is called from front ends to indicate eh should be used for
12423 using_eh_for_cleanups (void)
12425 using_eh_for_cleanups_flag
= true;
12428 /* Query whether EH is used for cleanups. */
12430 using_eh_for_cleanups_p (void)
12432 return using_eh_for_cleanups_flag
;
12435 /* Wrapper for tree_code_name to ensure that tree code is valid */
12437 get_tree_code_name (enum tree_code code
)
12439 const char *invalid
= "<invalid tree code>";
12441 if (code
>= MAX_TREE_CODES
)
12444 return tree_code_name
[code
];
12447 /* Drops the TREE_OVERFLOW flag from T. */
12450 drop_tree_overflow (tree t
)
12452 gcc_checking_assert (TREE_OVERFLOW (t
));
12454 /* For tree codes with a sharing machinery re-build the result. */
12455 if (TREE_CODE (t
) == INTEGER_CST
)
12456 return wide_int_to_tree (TREE_TYPE (t
), t
);
12458 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12459 and drop the flag. */
12461 TREE_OVERFLOW (t
) = 0;
12465 /* Given a memory reference expression T, return its base address.
12466 The base address of a memory reference expression is the main
12467 object being referenced. For instance, the base address for
12468 'array[i].fld[j]' is 'array'. You can think of this as stripping
12469 away the offset part from a memory address.
12471 This function calls handled_component_p to strip away all the inner
12472 parts of the memory reference until it reaches the base object. */
12475 get_base_address (tree t
)
12477 while (handled_component_p (t
))
12478 t
= TREE_OPERAND (t
, 0);
12480 if ((TREE_CODE (t
) == MEM_REF
12481 || TREE_CODE (t
) == TARGET_MEM_REF
)
12482 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
12483 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
12485 /* ??? Either the alias oracle or all callers need to properly deal
12486 with WITH_SIZE_EXPRs before we can look through those. */
12487 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
12493 #include "gt-tree.h"