1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
32 #include "coretypes.h"
36 #include "stor-layout.h"
43 #include "toplev.h" /* get_random_seed */
45 #include "filenames.h"
48 #include "common/common-target.h"
49 #include "langhooks.h"
50 #include "tree-inline.h"
51 #include "tree-iterator.h"
52 #include "basic-block.h"
54 #include "pointer-set.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
60 #include "gimple-iterator.h"
62 #include "gimple-ssa.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
81 /* Tree code classes. */
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
86 const enum tree_code_class tree_code_type
[] = {
87 #include "all-tree.def"
91 #undef END_OF_BASE_TREE_CODES
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
100 const unsigned char tree_code_length
[] = {
101 #include "all-tree.def"
105 #undef END_OF_BASE_TREE_CODES
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
112 static const char *const tree_code_name
[] = {
113 #include "all-tree.def"
117 #undef END_OF_BASE_TREE_CODES
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
122 const char *const tree_code_class_strings
[] =
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack
*h
, void *obj
);
140 /* Statistics-gathering stuff. */
142 static int tree_code_counts
[MAX_TREE_CODES
];
143 int tree_node_counts
[(int) all_kinds
];
144 int tree_node_sizes
[(int) all_kinds
];
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names
[] = {
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid
;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid
= 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid
;
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
177 struct GTY(()) type_hash
{
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash
)))
193 htab_t type_hash_table
;
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node
;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
198 htab_t int_cst_hash_table
;
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 the options created will already be in the hash table, so we avoid
allocating and freeing up a node repeatedly. */
205 static GTY (()) tree cl_optimization_node
;
206 static GTY (()) tree cl_target_option_node
;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node
)))
208 htab_t cl_option_hash_table
;
210 /* General tree->tree mapping structure for use in hash tables. */
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map
)))
214 htab_t debug_expr_for_decl
;
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map
)))
217 htab_t value_expr_for_decl
;
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map
)))
220 htab_t debug_args_for_decl
;
222 static void set_type_quals (tree
, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t
type_hash_hash (const void *);
225 static hashval_t
int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t
cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static unsigned int type_hash_list (const_tree
, hashval_t
);
234 static unsigned int attribute_hash_list (const_tree
, hashval_t
);
236 tree global_trees
[TI_MAX
];
237 tree integer_types
[itk_none
];
239 unsigned char tree_contains_struct
[MAX_TREE_CODES
][64];
241 /* Number of operands for each OpenMP clause. */
242 unsigned const char omp_clause_num_ops
[] =
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 4, /* OMP_CLAUSE_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 2, /* OMP_CLAUSE_ALIGNED */
254 1, /* OMP_CLAUSE_DEPEND */
255 1, /* OMP_CLAUSE_UNIFORM */
256 2, /* OMP_CLAUSE_FROM */
257 2, /* OMP_CLAUSE_TO */
258 2, /* OMP_CLAUSE_MAP */
259 1, /* OMP_CLAUSE__LOOPTEMP_ */
260 1, /* OMP_CLAUSE_IF */
261 1, /* OMP_CLAUSE_NUM_THREADS */
262 1, /* OMP_CLAUSE_SCHEDULE */
263 0, /* OMP_CLAUSE_NOWAIT */
264 0, /* OMP_CLAUSE_ORDERED */
265 0, /* OMP_CLAUSE_DEFAULT */
266 3, /* OMP_CLAUSE_COLLAPSE */
267 0, /* OMP_CLAUSE_UNTIED */
268 1, /* OMP_CLAUSE_FINAL */
269 0, /* OMP_CLAUSE_MERGEABLE */
270 1, /* OMP_CLAUSE_DEVICE */
271 1, /* OMP_CLAUSE_DIST_SCHEDULE */
272 0, /* OMP_CLAUSE_INBRANCH */
273 0, /* OMP_CLAUSE_NOTINBRANCH */
274 1, /* OMP_CLAUSE_NUM_TEAMS */
275 1, /* OMP_CLAUSE_THREAD_LIMIT */
276 0, /* OMP_CLAUSE_PROC_BIND */
277 1, /* OMP_CLAUSE_SAFELEN */
278 1, /* OMP_CLAUSE_SIMDLEN */
279 0, /* OMP_CLAUSE_FOR */
280 0, /* OMP_CLAUSE_PARALLEL */
281 0, /* OMP_CLAUSE_SECTIONS */
282 0, /* OMP_CLAUSE_TASKGROUP */
283 1, /* OMP_CLAUSE__SIMDUID_ */
286 const char * const omp_clause_code_name
[] =
331 /* Return the tree node structure used by tree code CODE. */
333 static inline enum tree_node_structure_enum
334 tree_node_structure_for_code (enum tree_code code
)
336 switch (TREE_CODE_CLASS (code
))
338 case tcc_declaration
:
343 return TS_FIELD_DECL
;
349 return TS_LABEL_DECL
;
351 return TS_RESULT_DECL
;
352 case DEBUG_EXPR_DECL
:
355 return TS_CONST_DECL
;
359 return TS_FUNCTION_DECL
;
360 case TRANSLATION_UNIT_DECL
:
361 return TS_TRANSLATION_UNIT_DECL
;
363 return TS_DECL_NON_COMMON
;
367 return TS_TYPE_NON_COMMON
;
376 default: /* tcc_constant and tcc_exceptional */
381 /* tcc_constant cases. */
382 case VOID_CST
: return TS_TYPED
;
383 case INTEGER_CST
: return TS_INT_CST
;
384 case REAL_CST
: return TS_REAL_CST
;
385 case FIXED_CST
: return TS_FIXED_CST
;
386 case COMPLEX_CST
: return TS_COMPLEX
;
387 case VECTOR_CST
: return TS_VECTOR
;
388 case STRING_CST
: return TS_STRING
;
389 /* tcc_exceptional cases. */
390 case ERROR_MARK
: return TS_COMMON
;
391 case IDENTIFIER_NODE
: return TS_IDENTIFIER
;
392 case TREE_LIST
: return TS_LIST
;
393 case TREE_VEC
: return TS_VEC
;
394 case SSA_NAME
: return TS_SSA_NAME
;
395 case PLACEHOLDER_EXPR
: return TS_COMMON
;
396 case STATEMENT_LIST
: return TS_STATEMENT_LIST
;
397 case BLOCK
: return TS_BLOCK
;
398 case CONSTRUCTOR
: return TS_CONSTRUCTOR
;
399 case TREE_BINFO
: return TS_BINFO
;
400 case OMP_CLAUSE
: return TS_OMP_CLAUSE
;
401 case OPTIMIZATION_NODE
: return TS_OPTIMIZATION
;
402 case TARGET_OPTION_NODE
: return TS_TARGET_OPTION
;
410 /* Initialize tree_contains_struct to describe the hierarchy of tree
414 initialize_tree_contains_struct (void)
418 for (i
= ERROR_MARK
; i
< LAST_AND_UNUSED_TREE_CODE
; i
++)
421 enum tree_node_structure_enum ts_code
;
423 code
= (enum tree_code
) i
;
424 ts_code
= tree_node_structure_for_code (code
);
426 /* Mark the TS structure itself. */
427 tree_contains_struct
[code
][ts_code
] = 1;
429 /* Mark all the structures that TS is derived from. */
447 case TS_STATEMENT_LIST
:
448 MARK_TS_TYPED (code
);
452 case TS_DECL_MINIMAL
:
458 case TS_OPTIMIZATION
:
459 case TS_TARGET_OPTION
:
460 MARK_TS_COMMON (code
);
463 case TS_TYPE_WITH_LANG_SPECIFIC
:
464 MARK_TS_TYPE_COMMON (code
);
467 case TS_TYPE_NON_COMMON
:
468 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code
);
472 MARK_TS_DECL_MINIMAL (code
);
477 MARK_TS_DECL_COMMON (code
);
480 case TS_DECL_NON_COMMON
:
481 MARK_TS_DECL_WITH_VIS (code
);
484 case TS_DECL_WITH_VIS
:
488 MARK_TS_DECL_WRTL (code
);
492 MARK_TS_DECL_COMMON (code
);
496 MARK_TS_DECL_WITH_VIS (code
);
500 case TS_FUNCTION_DECL
:
501 MARK_TS_DECL_NON_COMMON (code
);
504 case TS_TRANSLATION_UNIT_DECL
:
505 MARK_TS_DECL_COMMON (code
);
513 /* Basic consistency checks for attributes used in fold. */
514 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_NON_COMMON
]);
515 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_NON_COMMON
]);
516 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_COMMON
]);
517 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_COMMON
]);
518 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_COMMON
]);
519 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_COMMON
]);
520 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_COMMON
]);
521 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_COMMON
]);
522 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_COMMON
]);
523 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_COMMON
]);
524 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_COMMON
]);
525 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WRTL
]);
526 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_WRTL
]);
527 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_WRTL
]);
528 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WRTL
]);
529 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_WRTL
]);
530 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_DECL_MINIMAL
]);
531 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_MINIMAL
]);
532 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_DECL_MINIMAL
]);
533 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_DECL_MINIMAL
]);
534 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_MINIMAL
]);
535 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_MINIMAL
]);
536 gcc_assert (tree_contains_struct
[TRANSLATION_UNIT_DECL
][TS_DECL_MINIMAL
]);
537 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_DECL_MINIMAL
]);
538 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_DECL_MINIMAL
]);
539 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_DECL_WITH_VIS
]);
540 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_DECL_WITH_VIS
]);
541 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_DECL_WITH_VIS
]);
542 gcc_assert (tree_contains_struct
[VAR_DECL
][TS_VAR_DECL
]);
543 gcc_assert (tree_contains_struct
[FIELD_DECL
][TS_FIELD_DECL
]);
544 gcc_assert (tree_contains_struct
[PARM_DECL
][TS_PARM_DECL
]);
545 gcc_assert (tree_contains_struct
[LABEL_DECL
][TS_LABEL_DECL
]);
546 gcc_assert (tree_contains_struct
[RESULT_DECL
][TS_RESULT_DECL
]);
547 gcc_assert (tree_contains_struct
[CONST_DECL
][TS_CONST_DECL
]);
548 gcc_assert (tree_contains_struct
[TYPE_DECL
][TS_TYPE_DECL
]);
549 gcc_assert (tree_contains_struct
[FUNCTION_DECL
][TS_FUNCTION_DECL
]);
550 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_MINIMAL
]);
551 gcc_assert (tree_contains_struct
[IMPORTED_DECL
][TS_DECL_COMMON
]);
552 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_MINIMAL
]);
553 gcc_assert (tree_contains_struct
[NAMELIST_DECL
][TS_DECL_COMMON
]);
562 /* Initialize the hash table of types. */
563 type_hash_table
= htab_create_ggc (TYPE_HASH_INITIAL_SIZE
, type_hash_hash
,
566 debug_expr_for_decl
= htab_create_ggc (512, tree_decl_map_hash
,
567 tree_decl_map_eq
, 0);
569 value_expr_for_decl
= htab_create_ggc (512, tree_decl_map_hash
,
570 tree_decl_map_eq
, 0);
572 int_cst_hash_table
= htab_create_ggc (1024, int_cst_hash_hash
,
573 int_cst_hash_eq
, NULL
);
575 int_cst_node
= make_int_cst (1, 1);
577 cl_option_hash_table
= htab_create_ggc (64, cl_option_hash_hash
,
578 cl_option_hash_eq
, NULL
);
580 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
581 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
583 /* Initialize the tree_contains_struct array. */
584 initialize_tree_contains_struct ();
585 lang_hooks
.init_ts ();
589 /* The name of the object as the assembler will see it (but before any
590 translations made by ASM_OUTPUT_LABELREF). Often this is the same
591 as DECL_NAME. It is an IDENTIFIER_NODE. */
593 decl_assembler_name (tree decl
)
595 if (!DECL_ASSEMBLER_NAME_SET_P (decl
))
596 lang_hooks
.set_decl_assembler_name (decl
);
597 return DECL_WITH_VIS_CHECK (decl
)->decl_with_vis
.assembler_name
;
600 /* When the target supports COMDAT groups, this indicates which group the
601 DECL is associated with. This can be either an IDENTIFIER_NODE or a
602 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
604 decl_comdat_group (const_tree node
)
606 struct symtab_node
*snode
= symtab_get_node (node
);
609 return snode
->get_comdat_group ();
612 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
614 decl_comdat_group_id (const_tree node
)
616 struct symtab_node
*snode
= symtab_get_node (node
);
619 return snode
->get_comdat_group_id ();
622 /* When the target supports named section, return its name as IDENTIFIER_NODE
623 or NULL if it is in no section. */
625 decl_section_name (const_tree node
)
627 struct symtab_node
*snode
= symtab_get_node (node
);
630 return snode
->get_section ();
633 /* Set section section name of NODE to VALUE (that is expected to
634 be identifier node) */
636 set_decl_section_name (tree node
, const char *value
)
638 struct symtab_node
*snode
;
642 snode
= symtab_get_node (node
);
646 else if (TREE_CODE (node
) == VAR_DECL
)
647 snode
= varpool_node_for_decl (node
);
649 snode
= cgraph_get_create_node (node
);
650 snode
->set_section (value
);
653 /* Return TLS model of a variable NODE. */
655 decl_tls_model (const_tree node
)
657 struct varpool_node
*snode
= varpool_get_node (node
);
659 return TLS_MODEL_NONE
;
660 return snode
->tls_model
;
663 /* Set TLS model of variable NODE to MODEL. */
665 set_decl_tls_model (tree node
, enum tls_model model
)
667 struct varpool_node
*vnode
;
669 if (model
== TLS_MODEL_NONE
)
671 vnode
= varpool_get_node (node
);
676 vnode
= varpool_node_for_decl (node
);
677 vnode
->tls_model
= model
;
680 /* Compute the number of bytes occupied by a tree with code CODE.
681 This function cannot be used for nodes that have variable sizes,
682 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
684 tree_code_size (enum tree_code code
)
686 switch (TREE_CODE_CLASS (code
))
688 case tcc_declaration
: /* A decl node */
693 return sizeof (struct tree_field_decl
);
695 return sizeof (struct tree_parm_decl
);
697 return sizeof (struct tree_var_decl
);
699 return sizeof (struct tree_label_decl
);
701 return sizeof (struct tree_result_decl
);
703 return sizeof (struct tree_const_decl
);
705 return sizeof (struct tree_type_decl
);
707 return sizeof (struct tree_function_decl
);
708 case DEBUG_EXPR_DECL
:
709 return sizeof (struct tree_decl_with_rtl
);
710 case TRANSLATION_UNIT_DECL
:
711 return sizeof (struct tree_translation_unit_decl
);
715 return sizeof (struct tree_decl_non_common
);
717 return lang_hooks
.tree_size (code
);
721 case tcc_type
: /* a type node */
722 return sizeof (struct tree_type_non_common
);
724 case tcc_reference
: /* a reference */
725 case tcc_expression
: /* an expression */
726 case tcc_statement
: /* an expression with side effects */
727 case tcc_comparison
: /* a comparison expression */
728 case tcc_unary
: /* a unary arithmetic expression */
729 case tcc_binary
: /* a binary arithmetic expression */
730 return (sizeof (struct tree_exp
)
731 + (TREE_CODE_LENGTH (code
) - 1) * sizeof (tree
));
733 case tcc_constant
: /* a constant */
736 case VOID_CST
: return sizeof (struct tree_typed
);
737 case INTEGER_CST
: gcc_unreachable ();
738 case REAL_CST
: return sizeof (struct tree_real_cst
);
739 case FIXED_CST
: return sizeof (struct tree_fixed_cst
);
740 case COMPLEX_CST
: return sizeof (struct tree_complex
);
741 case VECTOR_CST
: return sizeof (struct tree_vector
);
742 case STRING_CST
: gcc_unreachable ();
744 return lang_hooks
.tree_size (code
);
747 case tcc_exceptional
: /* something random, like an identifier. */
750 case IDENTIFIER_NODE
: return lang_hooks
.identifier_size
;
751 case TREE_LIST
: return sizeof (struct tree_list
);
754 case PLACEHOLDER_EXPR
: return sizeof (struct tree_common
);
757 case OMP_CLAUSE
: gcc_unreachable ();
759 case SSA_NAME
: return sizeof (struct tree_ssa_name
);
761 case STATEMENT_LIST
: return sizeof (struct tree_statement_list
);
762 case BLOCK
: return sizeof (struct tree_block
);
763 case CONSTRUCTOR
: return sizeof (struct tree_constructor
);
764 case OPTIMIZATION_NODE
: return sizeof (struct tree_optimization_option
);
765 case TARGET_OPTION_NODE
: return sizeof (struct tree_target_option
);
768 return lang_hooks
.tree_size (code
);
776 /* Compute the number of bytes occupied by NODE. This routine only
777 looks at TREE_CODE, except for those nodes that have variable sizes. */
779 tree_size (const_tree node
)
781 const enum tree_code code
= TREE_CODE (node
);
785 return (sizeof (struct tree_int_cst
)
786 + (TREE_INT_CST_EXT_NUNITS (node
) - 1) * sizeof (HOST_WIDE_INT
));
789 return (offsetof (struct tree_binfo
, base_binfos
)
791 ::embedded_size (BINFO_N_BASE_BINFOS (node
)));
794 return (sizeof (struct tree_vec
)
795 + (TREE_VEC_LENGTH (node
) - 1) * sizeof (tree
));
798 return (sizeof (struct tree_vector
)
799 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node
)) - 1) * sizeof (tree
));
802 return TREE_STRING_LENGTH (node
) + offsetof (struct tree_string
, str
) + 1;
805 return (sizeof (struct tree_omp_clause
)
806 + (omp_clause_num_ops
[OMP_CLAUSE_CODE (node
)] - 1)
810 if (TREE_CODE_CLASS (code
) == tcc_vl_exp
)
811 return (sizeof (struct tree_exp
)
812 + (VL_EXP_OPERAND_LENGTH (node
) - 1) * sizeof (tree
));
814 return tree_code_size (code
);
818 /* Record interesting allocation statistics for a tree node with CODE
822 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED
,
823 size_t length ATTRIBUTE_UNUSED
)
825 enum tree_code_class type
= TREE_CODE_CLASS (code
);
828 if (!GATHER_STATISTICS
)
833 case tcc_declaration
: /* A decl node */
837 case tcc_type
: /* a type node */
841 case tcc_statement
: /* an expression with side effects */
845 case tcc_reference
: /* a reference */
849 case tcc_expression
: /* an expression */
850 case tcc_comparison
: /* a comparison expression */
851 case tcc_unary
: /* a unary arithmetic expression */
852 case tcc_binary
: /* a binary arithmetic expression */
856 case tcc_constant
: /* a constant */
860 case tcc_exceptional
: /* something random, like an identifier. */
863 case IDENTIFIER_NODE
:
876 kind
= ssa_name_kind
;
888 kind
= omp_clause_kind
;
905 tree_code_counts
[(int) code
]++;
906 tree_node_counts
[(int) kind
]++;
907 tree_node_sizes
[(int) kind
] += length
;
910 /* Allocate and return a new UID from the DECL_UID namespace. */
913 allocate_decl_uid (void)
915 return next_decl_uid
++;
918 /* Return a newly allocated node of code CODE. For decl and type
919 nodes, some other fields are initialized. The rest of the node is
920 initialized to zero. This function cannot be used for TREE_VEC,
921 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
924 Achoo! I got a code in the node. */
927 make_node_stat (enum tree_code code MEM_STAT_DECL
)
930 enum tree_code_class type
= TREE_CODE_CLASS (code
);
931 size_t length
= tree_code_size (code
);
933 record_node_allocation_statistics (code
, length
);
935 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
936 TREE_SET_CODE (t
, code
);
941 TREE_SIDE_EFFECTS (t
) = 1;
944 case tcc_declaration
:
945 if (CODE_CONTAINS_STRUCT (code
, TS_DECL_COMMON
))
947 if (code
== FUNCTION_DECL
)
949 DECL_ALIGN (t
) = FUNCTION_BOUNDARY
;
950 DECL_MODE (t
) = FUNCTION_MODE
;
955 DECL_SOURCE_LOCATION (t
) = input_location
;
956 if (TREE_CODE (t
) == DEBUG_EXPR_DECL
)
957 DECL_UID (t
) = --next_debug_decl_uid
;
960 DECL_UID (t
) = allocate_decl_uid ();
961 SET_DECL_PT_UID (t
, -1);
963 if (TREE_CODE (t
) == LABEL_DECL
)
964 LABEL_DECL_UID (t
) = -1;
969 TYPE_UID (t
) = next_type_uid
++;
970 TYPE_ALIGN (t
) = BITS_PER_UNIT
;
971 TYPE_USER_ALIGN (t
) = 0;
972 TYPE_MAIN_VARIANT (t
) = t
;
973 TYPE_CANONICAL (t
) = t
;
975 /* Default to no attributes for type, but let target change that. */
976 TYPE_ATTRIBUTES (t
) = NULL_TREE
;
977 targetm
.set_default_type_attributes (t
);
979 /* We have not yet computed the alias set for this type. */
980 TYPE_ALIAS_SET (t
) = -1;
984 TREE_CONSTANT (t
) = 1;
993 case PREDECREMENT_EXPR
:
994 case PREINCREMENT_EXPR
:
995 case POSTDECREMENT_EXPR
:
996 case POSTINCREMENT_EXPR
:
997 /* All of these have side-effects, no matter what their
999 TREE_SIDE_EFFECTS (t
) = 1;
1008 /* Other classes need no special treatment. */
1015 /* Return a new node with the same contents as NODE except that its
1016 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1019 copy_node_stat (tree node MEM_STAT_DECL
)
1022 enum tree_code code
= TREE_CODE (node
);
1025 gcc_assert (code
!= STATEMENT_LIST
);
1027 length
= tree_size (node
);
1028 record_node_allocation_statistics (code
, length
);
1029 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
1030 memcpy (t
, node
, length
);
1032 if (CODE_CONTAINS_STRUCT (code
, TS_COMMON
))
1034 TREE_ASM_WRITTEN (t
) = 0;
1035 TREE_VISITED (t
) = 0;
1037 if (TREE_CODE_CLASS (code
) == tcc_declaration
)
1039 if (code
== DEBUG_EXPR_DECL
)
1040 DECL_UID (t
) = --next_debug_decl_uid
;
1043 DECL_UID (t
) = allocate_decl_uid ();
1044 if (DECL_PT_UID_SET_P (node
))
1045 SET_DECL_PT_UID (t
, DECL_PT_UID (node
));
1047 if ((TREE_CODE (node
) == PARM_DECL
|| TREE_CODE (node
) == VAR_DECL
)
1048 && DECL_HAS_VALUE_EXPR_P (node
))
1050 SET_DECL_VALUE_EXPR (t
, DECL_VALUE_EXPR (node
));
1051 DECL_HAS_VALUE_EXPR_P (t
) = 1;
1053 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1054 if (TREE_CODE (node
) == VAR_DECL
)
1056 DECL_HAS_DEBUG_EXPR_P (t
) = 0;
1057 t
->decl_with_vis
.symtab_node
= NULL
;
1059 if (TREE_CODE (node
) == VAR_DECL
&& DECL_HAS_INIT_PRIORITY_P (node
))
1061 SET_DECL_INIT_PRIORITY (t
, DECL_INIT_PRIORITY (node
));
1062 DECL_HAS_INIT_PRIORITY_P (t
) = 1;
1064 if (TREE_CODE (node
) == FUNCTION_DECL
)
1066 DECL_STRUCT_FUNCTION (t
) = NULL
;
1067 t
->decl_with_vis
.symtab_node
= NULL
;
1070 else if (TREE_CODE_CLASS (code
) == tcc_type
)
1072 TYPE_UID (t
) = next_type_uid
++;
1073 /* The following is so that the debug code for
1074 the copy is different from the original type.
1075 The two statements usually duplicate each other
1076 (because they clear fields of the same union),
1077 but the optimizer should catch that. */
1078 TYPE_SYMTAB_POINTER (t
) = 0;
1079 TYPE_SYMTAB_ADDRESS (t
) = 0;
1081 /* Do not copy the values cache. */
1082 if (TYPE_CACHED_VALUES_P (t
))
1084 TYPE_CACHED_VALUES_P (t
) = 0;
1085 TYPE_CACHED_VALUES (t
) = NULL_TREE
;
1092 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1093 For example, this can copy a list made of TREE_LIST nodes. */
1096 copy_list (tree list
)
1104 head
= prev
= copy_node (list
);
1105 next
= TREE_CHAIN (list
);
1108 TREE_CHAIN (prev
) = copy_node (next
);
1109 prev
= TREE_CHAIN (prev
);
1110 next
= TREE_CHAIN (next
);
1116 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1117 INTEGER_CST with value CST and type TYPE. */
1120 get_int_cst_ext_nunits (tree type
, const wide_int
&cst
)
1122 gcc_checking_assert (cst
.get_precision () == TYPE_PRECISION (type
));
1123 /* We need an extra zero HWI if CST is an unsigned integer with its
1124 upper bit set, and if CST occupies a whole number of HWIs. */
1125 if (TYPE_UNSIGNED (type
)
1127 && (cst
.get_precision () % HOST_BITS_PER_WIDE_INT
) == 0)
1128 return cst
.get_precision () / HOST_BITS_PER_WIDE_INT
+ 1;
1129 return cst
.get_len ();
1132 /* Return a new INTEGER_CST with value CST and type TYPE. */
1135 build_new_int_cst (tree type
, const wide_int
&cst
)
1137 unsigned int len
= cst
.get_len ();
1138 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1139 tree nt
= make_int_cst (len
, ext_len
);
1144 TREE_INT_CST_ELT (nt
, ext_len
) = 0;
1145 for (unsigned int i
= len
; i
< ext_len
; ++i
)
1146 TREE_INT_CST_ELT (nt
, i
) = -1;
1148 else if (TYPE_UNSIGNED (type
)
1149 && cst
.get_precision () < len
* HOST_BITS_PER_WIDE_INT
)
1152 TREE_INT_CST_ELT (nt
, len
)
1153 = zext_hwi (cst
.elt (len
),
1154 cst
.get_precision () % HOST_BITS_PER_WIDE_INT
);
1157 for (unsigned int i
= 0; i
< len
; i
++)
1158 TREE_INT_CST_ELT (nt
, i
) = cst
.elt (i
);
1159 TREE_TYPE (nt
) = type
;
1163 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1166 build_int_cst (tree type
, HOST_WIDE_INT low
)
1168 /* Support legacy code. */
1170 type
= integer_type_node
;
1172 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1176 build_int_cstu (tree type
, unsigned HOST_WIDE_INT cst
)
1178 return wide_int_to_tree (type
, wi::uhwi (cst
, TYPE_PRECISION (type
)));
1181 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1184 build_int_cst_type (tree type
, HOST_WIDE_INT low
)
1187 return wide_int_to_tree (type
, wi::shwi (low
, TYPE_PRECISION (type
)));
1190 /* Constructs tree in type TYPE from with value given by CST. Signedness
1191 of CST is assumed to be the same as the signedness of TYPE. */
1194 double_int_to_tree (tree type
, double_int cst
)
1196 return wide_int_to_tree (type
, widest_int::from (cst
, TYPE_SIGN (type
)));
1199 /* We force the wide_int CST to the range of the type TYPE by sign or
1200 zero extending it. OVERFLOWABLE indicates if we are interested in
1201 overflow of the value, when >0 we are only interested in signed
1202 overflow, for <0 we are interested in any overflow. OVERFLOWED
1203 indicates whether overflow has already occurred. CONST_OVERFLOWED
1204 indicates whether constant overflow has already occurred. We force
1205 T's value to be within range of T's type (by setting to 0 or 1 all
1206 the bits outside the type's range). We set TREE_OVERFLOWED if,
1207 OVERFLOWED is nonzero,
1208 or OVERFLOWABLE is >0 and signed overflow occurs
1209 or OVERFLOWABLE is <0 and any overflow occurs
1210 We return a new tree node for the extended wide_int. The node
1211 is shared if no overflow flags are set. */
1215 force_fit_type (tree type
, const wide_int_ref
&cst
,
1216 int overflowable
, bool overflowed
)
1218 signop sign
= TYPE_SIGN (type
);
1220 /* If we need to set overflow flags, return a new unshared node. */
1221 if (overflowed
|| !wi::fits_to_tree_p (cst
, type
))
1225 || (overflowable
> 0 && sign
== SIGNED
))
1227 wide_int tmp
= wide_int::from (cst
, TYPE_PRECISION (type
), sign
);
1228 tree t
= build_new_int_cst (type
, tmp
);
1229 TREE_OVERFLOW (t
) = 1;
1234 /* Else build a shared node. */
1235 return wide_int_to_tree (type
, cst
);
1238 /* These are the hash table functions for the hash table of INTEGER_CST
1239 nodes of a sizetype. */
1241 /* Return the hash code code X, an INTEGER_CST. */
1244 int_cst_hash_hash (const void *x
)
1246 const_tree
const t
= (const_tree
) x
;
1247 hashval_t code
= htab_hash_pointer (TREE_TYPE (t
));
1250 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
1251 code
^= TREE_INT_CST_ELT (t
, i
);
1256 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1257 is the same as that given by *Y, which is the same. */
1260 int_cst_hash_eq (const void *x
, const void *y
)
1262 const_tree
const xt
= (const_tree
) x
;
1263 const_tree
const yt
= (const_tree
) y
;
1265 if (TREE_TYPE (xt
) != TREE_TYPE (yt
)
1266 || TREE_INT_CST_NUNITS (xt
) != TREE_INT_CST_NUNITS (yt
)
1267 || TREE_INT_CST_EXT_NUNITS (xt
) != TREE_INT_CST_EXT_NUNITS (yt
))
1270 for (int i
= 0; i
< TREE_INT_CST_NUNITS (xt
); i
++)
1271 if (TREE_INT_CST_ELT (xt
, i
) != TREE_INT_CST_ELT (yt
, i
))
1277 /* Create an INT_CST node of TYPE and value CST.
1278 The returned node is always shared. For small integers we use a
1279 per-type vector cache, for larger ones we use a single hash table.
1280 The value is extended from its precision according to the sign of
1281 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1282 the upper bits and ensures that hashing and value equality based
1283 upon the underlying HOST_WIDE_INTs works without masking. */
1286 wide_int_to_tree (tree type
, const wide_int_ref
&pcst
)
1293 unsigned int prec
= TYPE_PRECISION (type
);
1294 signop sgn
= TYPE_SIGN (type
);
1296 /* Verify that everything is canonical. */
1297 int l
= pcst
.get_len ();
1300 if (pcst
.elt (l
- 1) == 0)
1301 gcc_checking_assert (pcst
.elt (l
- 2) < 0);
1302 if (pcst
.elt (l
- 1) == (HOST_WIDE_INT
) -1)
1303 gcc_checking_assert (pcst
.elt (l
- 2) >= 0);
1306 wide_int cst
= wide_int::from (pcst
, prec
, sgn
);
1307 unsigned int ext_len
= get_int_cst_ext_nunits (type
, cst
);
1311 /* We just need to store a single HOST_WIDE_INT. */
1313 if (TYPE_UNSIGNED (type
))
1314 hwi
= cst
.to_uhwi ();
1316 hwi
= cst
.to_shwi ();
1318 switch (TREE_CODE (type
))
1321 gcc_assert (hwi
== 0);
1325 case REFERENCE_TYPE
:
1326 /* Cache NULL pointer. */
1335 /* Cache false or true. */
1343 if (TYPE_SIGN (type
) == UNSIGNED
)
1346 limit
= INTEGER_SHARE_LIMIT
;
1347 if (IN_RANGE (hwi
, 0, INTEGER_SHARE_LIMIT
- 1))
1352 /* Cache [-1, N). */
1353 limit
= INTEGER_SHARE_LIMIT
+ 1;
1354 if (IN_RANGE (hwi
, -1, INTEGER_SHARE_LIMIT
- 1))
1368 /* Look for it in the type's vector of small shared ints. */
1369 if (!TYPE_CACHED_VALUES_P (type
))
1371 TYPE_CACHED_VALUES_P (type
) = 1;
1372 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1375 t
= TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
);
1377 /* Make sure no one is clobbering the shared constant. */
1378 gcc_checking_assert (TREE_TYPE (t
) == type
1379 && TREE_INT_CST_NUNITS (t
) == 1
1380 && TREE_INT_CST_OFFSET_NUNITS (t
) == 1
1381 && TREE_INT_CST_EXT_NUNITS (t
) == 1
1382 && TREE_INT_CST_ELT (t
, 0) == hwi
);
1385 /* Create a new shared int. */
1386 t
= build_new_int_cst (type
, cst
);
1387 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1392 /* Use the cache of larger shared ints, using int_cst_node as
1396 TREE_INT_CST_ELT (int_cst_node
, 0) = hwi
;
1397 TREE_TYPE (int_cst_node
) = type
;
1399 slot
= htab_find_slot (int_cst_hash_table
, int_cst_node
, INSERT
);
1403 /* Insert this one into the hash table. */
1406 /* Make a new node for next time round. */
1407 int_cst_node
= make_int_cst (1, 1);
1413 /* The value either hashes properly or we drop it on the floor
1414 for the gc to take care of. There will not be enough of them
1418 tree nt
= build_new_int_cst (type
, cst
);
1419 slot
= htab_find_slot (int_cst_hash_table
, nt
, INSERT
);
1423 /* Insert this one into the hash table. */
1433 cache_integer_cst (tree t
)
1435 tree type
= TREE_TYPE (t
);
1438 int prec
= TYPE_PRECISION (type
);
1440 gcc_assert (!TREE_OVERFLOW (t
));
1442 switch (TREE_CODE (type
))
1445 gcc_assert (integer_zerop (t
));
1449 case REFERENCE_TYPE
:
1450 /* Cache NULL pointer. */
1451 if (integer_zerop (t
))
1459 /* Cache false or true. */
1461 if (wi::ltu_p (t
, 2))
1462 ix
= TREE_INT_CST_ELT (t
, 0);
1467 if (TYPE_UNSIGNED (type
))
1470 limit
= INTEGER_SHARE_LIMIT
;
1472 /* This is a little hokie, but if the prec is smaller than
1473 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1474 obvious test will not get the correct answer. */
1475 if (prec
< HOST_BITS_PER_WIDE_INT
)
1477 if (tree_to_uhwi (t
) < (unsigned HOST_WIDE_INT
) INTEGER_SHARE_LIMIT
)
1478 ix
= tree_to_uhwi (t
);
1480 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1481 ix
= tree_to_uhwi (t
);
1486 limit
= INTEGER_SHARE_LIMIT
+ 1;
1488 if (integer_minus_onep (t
))
1490 else if (!wi::neg_p (t
))
1492 if (prec
< HOST_BITS_PER_WIDE_INT
)
1494 if (tree_to_shwi (t
) < INTEGER_SHARE_LIMIT
)
1495 ix
= tree_to_shwi (t
) + 1;
1497 else if (wi::ltu_p (t
, INTEGER_SHARE_LIMIT
))
1498 ix
= tree_to_shwi (t
) + 1;
1512 /* Look for it in the type's vector of small shared ints. */
1513 if (!TYPE_CACHED_VALUES_P (type
))
1515 TYPE_CACHED_VALUES_P (type
) = 1;
1516 TYPE_CACHED_VALUES (type
) = make_tree_vec (limit
);
1519 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) == NULL_TREE
);
1520 TREE_VEC_ELT (TYPE_CACHED_VALUES (type
), ix
) = t
;
1524 /* Use the cache of larger shared ints. */
1527 slot
= htab_find_slot (int_cst_hash_table
, t
, INSERT
);
1528 /* If there is already an entry for the number verify it's the
1531 gcc_assert (wi::eq_p (tree (*slot
), t
));
1533 /* Otherwise insert this one into the hash table. */
1539 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1540 and the rest are zeros. */
1543 build_low_bits_mask (tree type
, unsigned bits
)
1545 gcc_assert (bits
<= TYPE_PRECISION (type
));
1547 return wide_int_to_tree (type
, wi::mask (bits
, false,
1548 TYPE_PRECISION (type
)));
1551 /* Checks that X is integer constant that can be expressed in (unsigned)
1552 HOST_WIDE_INT without loss of precision. */
1555 cst_and_fits_in_hwi (const_tree x
)
1557 if (TREE_CODE (x
) != INTEGER_CST
)
1560 if (TYPE_PRECISION (TREE_TYPE (x
)) > HOST_BITS_PER_WIDE_INT
)
1563 return TREE_INT_CST_NUNITS (x
) == 1;
1566 /* Build a newly constructed TREE_VEC node of length LEN. */
1569 make_vector_stat (unsigned len MEM_STAT_DECL
)
1572 unsigned length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vector
);
1574 record_node_allocation_statistics (VECTOR_CST
, length
);
1576 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
1578 TREE_SET_CODE (t
, VECTOR_CST
);
1579 TREE_CONSTANT (t
) = 1;
1584 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1585 are in a list pointed to by VALS. */
1588 build_vector_stat (tree type
, tree
*vals MEM_STAT_DECL
)
1592 tree v
= make_vector (TYPE_VECTOR_SUBPARTS (type
));
1593 TREE_TYPE (v
) = type
;
1595 /* Iterate through elements and check for overflow. */
1596 for (cnt
= 0; cnt
< TYPE_VECTOR_SUBPARTS (type
); ++cnt
)
1598 tree value
= vals
[cnt
];
1600 VECTOR_CST_ELT (v
, cnt
) = value
;
1602 /* Don't crash if we get an address constant. */
1603 if (!CONSTANT_CLASS_P (value
))
1606 over
|= TREE_OVERFLOW (value
);
1609 TREE_OVERFLOW (v
) = over
;
1613 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1614 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1617 build_vector_from_ctor (tree type
, vec
<constructor_elt
, va_gc
> *v
)
1619 tree
*vec
= XALLOCAVEC (tree
, TYPE_VECTOR_SUBPARTS (type
));
1620 unsigned HOST_WIDE_INT idx
;
1623 FOR_EACH_CONSTRUCTOR_VALUE (v
, idx
, value
)
1625 for (; idx
< TYPE_VECTOR_SUBPARTS (type
); ++idx
)
1626 vec
[idx
] = build_zero_cst (TREE_TYPE (type
));
1628 return build_vector (type
, vec
);
1631 /* Build a vector of type VECTYPE where all the elements are SCs. */
1633 build_vector_from_val (tree vectype
, tree sc
)
1635 int i
, nunits
= TYPE_VECTOR_SUBPARTS (vectype
);
1637 if (sc
== error_mark_node
)
1640 /* Verify that the vector type is suitable for SC. Note that there
1641 is some inconsistency in the type-system with respect to restrict
1642 qualifications of pointers. Vector types always have a main-variant
1643 element type and the qualification is applied to the vector-type.
1644 So TREE_TYPE (vector-type) does not return a properly qualified
1645 vector element-type. */
1646 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc
)),
1647 TREE_TYPE (vectype
)));
1649 if (CONSTANT_CLASS_P (sc
))
1651 tree
*v
= XALLOCAVEC (tree
, nunits
);
1652 for (i
= 0; i
< nunits
; ++i
)
1654 return build_vector (vectype
, v
);
1658 vec
<constructor_elt
, va_gc
> *v
;
1659 vec_alloc (v
, nunits
);
1660 for (i
= 0; i
< nunits
; ++i
)
1661 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, sc
);
1662 return build_constructor (vectype
, v
);
1666 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1667 are in the vec pointed to by VALS. */
1669 build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *vals
)
1671 tree c
= make_node (CONSTRUCTOR
);
1673 constructor_elt
*elt
;
1674 bool constant_p
= true;
1675 bool side_effects_p
= false;
1677 TREE_TYPE (c
) = type
;
1678 CONSTRUCTOR_ELTS (c
) = vals
;
1680 FOR_EACH_VEC_SAFE_ELT (vals
, i
, elt
)
1682 /* Mostly ctors will have elts that don't have side-effects, so
1683 the usual case is to scan all the elements. Hence a single
1684 loop for both const and side effects, rather than one loop
1685 each (with early outs). */
1686 if (!TREE_CONSTANT (elt
->value
))
1688 if (TREE_SIDE_EFFECTS (elt
->value
))
1689 side_effects_p
= true;
1692 TREE_SIDE_EFFECTS (c
) = side_effects_p
;
1693 TREE_CONSTANT (c
) = constant_p
;
1698 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1701 build_constructor_single (tree type
, tree index
, tree value
)
1703 vec
<constructor_elt
, va_gc
> *v
;
1704 constructor_elt elt
= {index
, value
};
1707 v
->quick_push (elt
);
1709 return build_constructor (type
, v
);
1713 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1714 are in a list pointed to by VALS. */
1716 build_constructor_from_list (tree type
, tree vals
)
1719 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1723 vec_alloc (v
, list_length (vals
));
1724 for (t
= vals
; t
; t
= TREE_CHAIN (t
))
1725 CONSTRUCTOR_APPEND_ELT (v
, TREE_PURPOSE (t
), TREE_VALUE (t
));
1728 return build_constructor (type
, v
);
1731 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1732 of elements, provided as index/value pairs. */
1735 build_constructor_va (tree type
, int nelts
, ...)
1737 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1740 va_start (p
, nelts
);
1741 vec_alloc (v
, nelts
);
1744 tree index
= va_arg (p
, tree
);
1745 tree value
= va_arg (p
, tree
);
1746 CONSTRUCTOR_APPEND_ELT (v
, index
, value
);
1749 return build_constructor (type
, v
);
1752 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1755 build_fixed (tree type
, FIXED_VALUE_TYPE f
)
1758 FIXED_VALUE_TYPE
*fp
;
1760 v
= make_node (FIXED_CST
);
1761 fp
= ggc_alloc
<fixed_value
> ();
1762 memcpy (fp
, &f
, sizeof (FIXED_VALUE_TYPE
));
1764 TREE_TYPE (v
) = type
;
1765 TREE_FIXED_CST_PTR (v
) = fp
;
1769 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1772 build_real (tree type
, REAL_VALUE_TYPE d
)
1775 REAL_VALUE_TYPE
*dp
;
1778 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1779 Consider doing it via real_convert now. */
1781 v
= make_node (REAL_CST
);
1782 dp
= ggc_alloc
<real_value
> ();
1783 memcpy (dp
, &d
, sizeof (REAL_VALUE_TYPE
));
1785 TREE_TYPE (v
) = type
;
1786 TREE_REAL_CST_PTR (v
) = dp
;
1787 TREE_OVERFLOW (v
) = overflow
;
1791 /* Return a new REAL_CST node whose type is TYPE
1792 and whose value is the integer value of the INTEGER_CST node I. */
1795 real_value_from_int_cst (const_tree type
, const_tree i
)
1799 /* Clear all bits of the real value type so that we can later do
1800 bitwise comparisons to see if two values are the same. */
1801 memset (&d
, 0, sizeof d
);
1803 real_from_integer (&d
, type
? TYPE_MODE (type
) : VOIDmode
, i
,
1804 TYPE_SIGN (TREE_TYPE (i
)));
1808 /* Given a tree representing an integer constant I, return a tree
1809 representing the same value as a floating-point constant of type TYPE. */
1812 build_real_from_int_cst (tree type
, const_tree i
)
1815 int overflow
= TREE_OVERFLOW (i
);
1817 v
= build_real (type
, real_value_from_int_cst (type
, i
));
1819 TREE_OVERFLOW (v
) |= overflow
;
1823 /* Return a newly constructed STRING_CST node whose value is
1824 the LEN characters at STR.
1825 Note that for a C string literal, LEN should include the trailing NUL.
1826 The TREE_TYPE is not initialized. */
1829 build_string (int len
, const char *str
)
1834 /* Do not waste bytes provided by padding of struct tree_string. */
1835 length
= len
+ offsetof (struct tree_string
, str
) + 1;
1837 record_node_allocation_statistics (STRING_CST
, length
);
1839 s
= (tree
) ggc_internal_alloc (length
);
1841 memset (s
, 0, sizeof (struct tree_typed
));
1842 TREE_SET_CODE (s
, STRING_CST
);
1843 TREE_CONSTANT (s
) = 1;
1844 TREE_STRING_LENGTH (s
) = len
;
1845 memcpy (s
->string
.str
, str
, len
);
1846 s
->string
.str
[len
] = '\0';
1851 /* Return a newly constructed COMPLEX_CST node whose value is
1852 specified by the real and imaginary parts REAL and IMAG.
1853 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1854 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1857 build_complex (tree type
, tree real
, tree imag
)
1859 tree t
= make_node (COMPLEX_CST
);
1861 TREE_REALPART (t
) = real
;
1862 TREE_IMAGPART (t
) = imag
;
1863 TREE_TYPE (t
) = type
? type
: build_complex_type (TREE_TYPE (real
));
1864 TREE_OVERFLOW (t
) = TREE_OVERFLOW (real
) | TREE_OVERFLOW (imag
);
1868 /* Return a constant of arithmetic type TYPE which is the
1869 multiplicative identity of the set TYPE. */
1872 build_one_cst (tree type
)
1874 switch (TREE_CODE (type
))
1876 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1877 case POINTER_TYPE
: case REFERENCE_TYPE
:
1879 return build_int_cst (type
, 1);
1882 return build_real (type
, dconst1
);
1884 case FIXED_POINT_TYPE
:
1885 /* We can only generate 1 for accum types. */
1886 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
1887 return build_fixed (type
, FCONST1 (TYPE_MODE (type
)));
1891 tree scalar
= build_one_cst (TREE_TYPE (type
));
1893 return build_vector_from_val (type
, scalar
);
1897 return build_complex (type
,
1898 build_one_cst (TREE_TYPE (type
)),
1899 build_zero_cst (TREE_TYPE (type
)));
1906 /* Return an integer of type TYPE containing all 1's in as much precision as
1907 it contains, or a complex or vector whose subparts are such integers. */
1910 build_all_ones_cst (tree type
)
1912 if (TREE_CODE (type
) == COMPLEX_TYPE
)
1914 tree scalar
= build_all_ones_cst (TREE_TYPE (type
));
1915 return build_complex (type
, scalar
, scalar
);
1918 return build_minus_one_cst (type
);
1921 /* Return a constant of arithmetic type TYPE which is the
1922 opposite of the multiplicative identity of the set TYPE. */
1925 build_minus_one_cst (tree type
)
1927 switch (TREE_CODE (type
))
1929 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1930 case POINTER_TYPE
: case REFERENCE_TYPE
:
1932 return build_int_cst (type
, -1);
1935 return build_real (type
, dconstm1
);
1937 case FIXED_POINT_TYPE
:
1938 /* We can only generate 1 for accum types. */
1939 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type
)));
1940 return build_fixed (type
, fixed_from_double_int (double_int_minus_one
,
1945 tree scalar
= build_minus_one_cst (TREE_TYPE (type
));
1947 return build_vector_from_val (type
, scalar
);
1951 return build_complex (type
,
1952 build_minus_one_cst (TREE_TYPE (type
)),
1953 build_zero_cst (TREE_TYPE (type
)));
1960 /* Build 0 constant of type TYPE. This is used by constructor folding
1961 and thus the constant should be represented in memory by
1965 build_zero_cst (tree type
)
1967 switch (TREE_CODE (type
))
1969 case INTEGER_TYPE
: case ENUMERAL_TYPE
: case BOOLEAN_TYPE
:
1970 case POINTER_TYPE
: case REFERENCE_TYPE
:
1971 case OFFSET_TYPE
: case NULLPTR_TYPE
:
1972 return build_int_cst (type
, 0);
1975 return build_real (type
, dconst0
);
1977 case FIXED_POINT_TYPE
:
1978 return build_fixed (type
, FCONST0 (TYPE_MODE (type
)));
1982 tree scalar
= build_zero_cst (TREE_TYPE (type
));
1984 return build_vector_from_val (type
, scalar
);
1989 tree zero
= build_zero_cst (TREE_TYPE (type
));
1991 return build_complex (type
, zero
, zero
);
1995 if (!AGGREGATE_TYPE_P (type
))
1996 return fold_convert (type
, integer_zero_node
);
1997 return build_constructor (type
, NULL
);
2002 /* Build a BINFO with LEN language slots. */
2005 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL
)
2008 size_t length
= (offsetof (struct tree_binfo
, base_binfos
)
2009 + vec
<tree
, va_gc
>::embedded_size (base_binfos
));
2011 record_node_allocation_statistics (TREE_BINFO
, length
);
2013 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
2015 memset (t
, 0, offsetof (struct tree_binfo
, base_binfos
));
2017 TREE_SET_CODE (t
, TREE_BINFO
);
2019 BINFO_BASE_BINFOS (t
)->embedded_init (base_binfos
);
2024 /* Create a CASE_LABEL_EXPR tree node and return it. */
2027 build_case_label (tree low_value
, tree high_value
, tree label_decl
)
2029 tree t
= make_node (CASE_LABEL_EXPR
);
2031 TREE_TYPE (t
) = void_type_node
;
2032 SET_EXPR_LOCATION (t
, DECL_SOURCE_LOCATION (label_decl
));
2034 CASE_LOW (t
) = low_value
;
2035 CASE_HIGH (t
) = high_value
;
2036 CASE_LABEL (t
) = label_decl
;
2037 CASE_CHAIN (t
) = NULL_TREE
;
2042 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2043 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2044 The latter determines the length of the HOST_WIDE_INT vector. */
2047 make_int_cst_stat (int len
, int ext_len MEM_STAT_DECL
)
2050 int length
= ((ext_len
- 1) * sizeof (HOST_WIDE_INT
)
2051 + sizeof (struct tree_int_cst
));
2054 record_node_allocation_statistics (INTEGER_CST
, length
);
2056 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2058 TREE_SET_CODE (t
, INTEGER_CST
);
2059 TREE_INT_CST_NUNITS (t
) = len
;
2060 TREE_INT_CST_EXT_NUNITS (t
) = ext_len
;
2061 /* to_offset can only be applied to trees that are offset_int-sized
2062 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2063 must be exactly the precision of offset_int and so LEN is correct. */
2064 if (ext_len
<= OFFSET_INT_ELTS
)
2065 TREE_INT_CST_OFFSET_NUNITS (t
) = ext_len
;
2067 TREE_INT_CST_OFFSET_NUNITS (t
) = len
;
2069 TREE_CONSTANT (t
) = 1;
2074 /* Build a newly constructed TREE_VEC node of length LEN. */
2077 make_tree_vec_stat (int len MEM_STAT_DECL
)
2080 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2082 record_node_allocation_statistics (TREE_VEC
, length
);
2084 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
2086 TREE_SET_CODE (t
, TREE_VEC
);
2087 TREE_VEC_LENGTH (t
) = len
;
2092 /* Grow a TREE_VEC node to new length LEN. */
2095 grow_tree_vec_stat (tree v
, int len MEM_STAT_DECL
)
2097 gcc_assert (TREE_CODE (v
) == TREE_VEC
);
2099 int oldlen
= TREE_VEC_LENGTH (v
);
2100 gcc_assert (len
> oldlen
);
2102 int oldlength
= (oldlen
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2103 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_vec
);
2105 record_node_allocation_statistics (TREE_VEC
, length
- oldlength
);
2107 v
= (tree
) ggc_realloc (v
, length PASS_MEM_STAT
);
2109 TREE_VEC_LENGTH (v
) = len
;
2114 /* Return 1 if EXPR is the integer constant zero or a complex constant
2118 integer_zerop (const_tree expr
)
2122 switch (TREE_CODE (expr
))
2125 return wi::eq_p (expr
, 0);
2127 return (integer_zerop (TREE_REALPART (expr
))
2128 && integer_zerop (TREE_IMAGPART (expr
)));
2132 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2133 if (!integer_zerop (VECTOR_CST_ELT (expr
, i
)))
2142 /* Return 1 if EXPR is the integer constant one or the corresponding
2143 complex constant. */
2146 integer_onep (const_tree expr
)
2150 switch (TREE_CODE (expr
))
2153 return wi::eq_p (wi::to_widest (expr
), 1);
2155 return (integer_onep (TREE_REALPART (expr
))
2156 && integer_zerop (TREE_IMAGPART (expr
)));
2160 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2161 if (!integer_onep (VECTOR_CST_ELT (expr
, i
)))
2170 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2171 it contains, or a complex or vector whose subparts are such integers. */
2174 integer_all_onesp (const_tree expr
)
2178 if (TREE_CODE (expr
) == COMPLEX_CST
2179 && integer_all_onesp (TREE_REALPART (expr
))
2180 && integer_all_onesp (TREE_IMAGPART (expr
)))
2183 else if (TREE_CODE (expr
) == VECTOR_CST
)
2186 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2187 if (!integer_all_onesp (VECTOR_CST_ELT (expr
, i
)))
2192 else if (TREE_CODE (expr
) != INTEGER_CST
)
2195 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr
)), UNSIGNED
) == expr
;
2198 /* Return 1 if EXPR is the integer constant minus one. */
2201 integer_minus_onep (const_tree expr
)
2205 if (TREE_CODE (expr
) == COMPLEX_CST
)
2206 return (integer_all_onesp (TREE_REALPART (expr
))
2207 && integer_zerop (TREE_IMAGPART (expr
)));
2209 return integer_all_onesp (expr
);
2212 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2216 integer_pow2p (const_tree expr
)
2220 if (TREE_CODE (expr
) == COMPLEX_CST
2221 && integer_pow2p (TREE_REALPART (expr
))
2222 && integer_zerop (TREE_IMAGPART (expr
)))
2225 if (TREE_CODE (expr
) != INTEGER_CST
)
2228 return wi::popcount (expr
) == 1;
2231 /* Return 1 if EXPR is an integer constant other than zero or a
2232 complex constant other than zero. */
2235 integer_nonzerop (const_tree expr
)
2239 return ((TREE_CODE (expr
) == INTEGER_CST
2240 && !wi::eq_p (expr
, 0))
2241 || (TREE_CODE (expr
) == COMPLEX_CST
2242 && (integer_nonzerop (TREE_REALPART (expr
))
2243 || integer_nonzerop (TREE_IMAGPART (expr
)))));
2246 /* Return 1 if EXPR is the fixed-point constant zero. */
2249 fixed_zerop (const_tree expr
)
2251 return (TREE_CODE (expr
) == FIXED_CST
2252 && TREE_FIXED_CST (expr
).data
.is_zero ());
2255 /* Return the power of two represented by a tree node known to be a
2259 tree_log2 (const_tree expr
)
2263 if (TREE_CODE (expr
) == COMPLEX_CST
)
2264 return tree_log2 (TREE_REALPART (expr
));
2266 return wi::exact_log2 (expr
);
2269 /* Similar, but return the largest integer Y such that 2 ** Y is less
2270 than or equal to EXPR. */
2273 tree_floor_log2 (const_tree expr
)
2277 if (TREE_CODE (expr
) == COMPLEX_CST
)
2278 return tree_log2 (TREE_REALPART (expr
));
2280 return wi::floor_log2 (expr
);
2283 /* Return number of known trailing zero bits in EXPR, or, if the value of
2284 EXPR is known to be zero, the precision of it's type. */
2287 tree_ctz (const_tree expr
)
2289 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr
))
2290 && !POINTER_TYPE_P (TREE_TYPE (expr
)))
2293 unsigned int ret1
, ret2
, prec
= TYPE_PRECISION (TREE_TYPE (expr
));
2294 switch (TREE_CODE (expr
))
2297 ret1
= wi::ctz (expr
);
2298 return MIN (ret1
, prec
);
2300 ret1
= wi::ctz (get_nonzero_bits (expr
));
2301 return MIN (ret1
, prec
);
2308 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2311 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2312 return MIN (ret1
, ret2
);
2313 case POINTER_PLUS_EXPR
:
2314 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2315 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2316 /* Second operand is sizetype, which could be in theory
2317 wider than pointer's precision. Make sure we never
2318 return more than prec. */
2319 ret2
= MIN (ret2
, prec
);
2320 return MIN (ret1
, ret2
);
2322 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2323 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2324 return MAX (ret1
, ret2
);
2326 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2327 ret2
= tree_ctz (TREE_OPERAND (expr
, 1));
2328 return MIN (ret1
+ ret2
, prec
);
2330 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2331 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2332 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2334 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2335 return MIN (ret1
+ ret2
, prec
);
2339 if (tree_fits_uhwi_p (TREE_OPERAND (expr
, 1))
2340 && (tree_to_uhwi (TREE_OPERAND (expr
, 1)) < prec
))
2342 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2343 ret2
= tree_to_uhwi (TREE_OPERAND (expr
, 1));
2348 case TRUNC_DIV_EXPR
:
2350 case FLOOR_DIV_EXPR
:
2351 case ROUND_DIV_EXPR
:
2352 case EXACT_DIV_EXPR
:
2353 if (TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
2354 && tree_int_cst_sgn (TREE_OPERAND (expr
, 1)) == 1)
2356 int l
= tree_log2 (TREE_OPERAND (expr
, 1));
2359 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2367 ret1
= tree_ctz (TREE_OPERAND (expr
, 0));
2368 if (ret1
&& ret1
== TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr
, 0))))
2370 return MIN (ret1
, prec
);
2372 return tree_ctz (TREE_OPERAND (expr
, 0));
2374 ret1
= tree_ctz (TREE_OPERAND (expr
, 1));
2377 ret2
= tree_ctz (TREE_OPERAND (expr
, 2));
2378 return MIN (ret1
, ret2
);
2380 return tree_ctz (TREE_OPERAND (expr
, 1));
2382 ret1
= get_pointer_alignment (CONST_CAST_TREE (expr
));
2383 if (ret1
> BITS_PER_UNIT
)
2385 ret1
= ctz_hwi (ret1
/ BITS_PER_UNIT
);
2386 return MIN (ret1
, prec
);
2394 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2395 decimal float constants, so don't return 1 for them. */
2398 real_zerop (const_tree expr
)
2402 switch (TREE_CODE (expr
))
2405 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconst0
)
2406 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2408 return real_zerop (TREE_REALPART (expr
))
2409 && real_zerop (TREE_IMAGPART (expr
));
2413 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2414 if (!real_zerop (VECTOR_CST_ELT (expr
, i
)))
2423 /* Return 1 if EXPR is the real constant one in real or complex form.
2424 Trailing zeroes matter for decimal float constants, so don't return
2428 real_onep (const_tree expr
)
2432 switch (TREE_CODE (expr
))
2435 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconst1
)
2436 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2438 return real_onep (TREE_REALPART (expr
))
2439 && real_zerop (TREE_IMAGPART (expr
));
2443 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2444 if (!real_onep (VECTOR_CST_ELT (expr
, i
)))
2453 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2454 matter for decimal float constants, so don't return 1 for them. */
2457 real_minus_onep (const_tree expr
)
2461 switch (TREE_CODE (expr
))
2464 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), dconstm1
)
2465 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr
))));
2467 return real_minus_onep (TREE_REALPART (expr
))
2468 && real_zerop (TREE_IMAGPART (expr
));
2472 for (i
= 0; i
< VECTOR_CST_NELTS (expr
); ++i
)
2473 if (!real_minus_onep (VECTOR_CST_ELT (expr
, i
)))
2482 /* Nonzero if EXP is a constant or a cast of a constant. */
2485 really_constant_p (const_tree exp
)
2487 /* This is not quite the same as STRIP_NOPS. It does more. */
2488 while (CONVERT_EXPR_P (exp
)
2489 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
2490 exp
= TREE_OPERAND (exp
, 0);
2491 return TREE_CONSTANT (exp
);
2494 /* Return first list element whose TREE_VALUE is ELEM.
2495 Return 0 if ELEM is not in LIST. */
2498 value_member (tree elem
, tree list
)
2502 if (elem
== TREE_VALUE (list
))
2504 list
= TREE_CHAIN (list
);
2509 /* Return first list element whose TREE_PURPOSE is ELEM.
2510 Return 0 if ELEM is not in LIST. */
2513 purpose_member (const_tree elem
, tree list
)
2517 if (elem
== TREE_PURPOSE (list
))
2519 list
= TREE_CHAIN (list
);
2524 /* Return true if ELEM is in V. */
2527 vec_member (const_tree elem
, vec
<tree
, va_gc
> *v
)
2531 FOR_EACH_VEC_SAFE_ELT (v
, ix
, t
)
2537 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2541 chain_index (int idx
, tree chain
)
2543 for (; chain
&& idx
> 0; --idx
)
2544 chain
= TREE_CHAIN (chain
);
2548 /* Return nonzero if ELEM is part of the chain CHAIN. */
2551 chain_member (const_tree elem
, const_tree chain
)
2557 chain
= DECL_CHAIN (chain
);
2563 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2564 We expect a null pointer to mark the end of the chain.
2565 This is the Lisp primitive `length'. */
2568 list_length (const_tree t
)
2571 #ifdef ENABLE_TREE_CHECKING
2579 #ifdef ENABLE_TREE_CHECKING
2582 gcc_assert (p
!= q
);
2590 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2591 UNION_TYPE TYPE, or NULL_TREE if none. */
2594 first_field (const_tree type
)
2596 tree t
= TYPE_FIELDS (type
);
2597 while (t
&& TREE_CODE (t
) != FIELD_DECL
)
2602 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2603 by modifying the last node in chain 1 to point to chain 2.
2604 This is the Lisp primitive `nconc'. */
2607 chainon (tree op1
, tree op2
)
2616 for (t1
= op1
; TREE_CHAIN (t1
); t1
= TREE_CHAIN (t1
))
2618 TREE_CHAIN (t1
) = op2
;
2620 #ifdef ENABLE_TREE_CHECKING
2623 for (t2
= op2
; t2
; t2
= TREE_CHAIN (t2
))
2624 gcc_assert (t2
!= t1
);
2631 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2634 tree_last (tree chain
)
2638 while ((next
= TREE_CHAIN (chain
)))
2643 /* Reverse the order of elements in the chain T,
2644 and return the new head of the chain (old last element). */
2649 tree prev
= 0, decl
, next
;
2650 for (decl
= t
; decl
; decl
= next
)
2652 /* We shouldn't be using this function to reverse BLOCK chains; we
2653 have blocks_nreverse for that. */
2654 gcc_checking_assert (TREE_CODE (decl
) != BLOCK
);
2655 next
= TREE_CHAIN (decl
);
2656 TREE_CHAIN (decl
) = prev
;
2662 /* Return a newly created TREE_LIST node whose
2663 purpose and value fields are PARM and VALUE. */
2666 build_tree_list_stat (tree parm
, tree value MEM_STAT_DECL
)
2668 tree t
= make_node_stat (TREE_LIST PASS_MEM_STAT
);
2669 TREE_PURPOSE (t
) = parm
;
2670 TREE_VALUE (t
) = value
;
2674 /* Build a chain of TREE_LIST nodes from a vector. */
2677 build_tree_list_vec_stat (const vec
<tree
, va_gc
> *vec MEM_STAT_DECL
)
2679 tree ret
= NULL_TREE
;
2683 FOR_EACH_VEC_SAFE_ELT (vec
, i
, t
)
2685 *pp
= build_tree_list_stat (NULL
, t PASS_MEM_STAT
);
2686 pp
= &TREE_CHAIN (*pp
);
2691 /* Return a newly created TREE_LIST node whose
2692 purpose and value fields are PURPOSE and VALUE
2693 and whose TREE_CHAIN is CHAIN. */
2696 tree_cons_stat (tree purpose
, tree value
, tree chain MEM_STAT_DECL
)
2700 node
= ggc_alloc_tree_node_stat (sizeof (struct tree_list
) PASS_MEM_STAT
);
2701 memset (node
, 0, sizeof (struct tree_common
));
2703 record_node_allocation_statistics (TREE_LIST
, sizeof (struct tree_list
));
2705 TREE_SET_CODE (node
, TREE_LIST
);
2706 TREE_CHAIN (node
) = chain
;
2707 TREE_PURPOSE (node
) = purpose
;
2708 TREE_VALUE (node
) = value
;
2712 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2716 ctor_to_vec (tree ctor
)
2718 vec
<tree
, va_gc
> *vec
;
2719 vec_alloc (vec
, CONSTRUCTOR_NELTS (ctor
));
2723 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor
), ix
, val
)
2724 vec
->quick_push (val
);
2729 /* Return the size nominally occupied by an object of type TYPE
2730 when it resides in memory. The value is measured in units of bytes,
2731 and its data type is that normally used for type sizes
2732 (which is the first type created by make_signed_type or
2733 make_unsigned_type). */
2736 size_in_bytes (const_tree type
)
2740 if (type
== error_mark_node
)
2741 return integer_zero_node
;
2743 type
= TYPE_MAIN_VARIANT (type
);
2744 t
= TYPE_SIZE_UNIT (type
);
2748 lang_hooks
.types
.incomplete_type_error (NULL_TREE
, type
);
2749 return size_zero_node
;
2755 /* Return the size of TYPE (in bytes) as a wide integer
2756 or return -1 if the size can vary or is larger than an integer. */
2759 int_size_in_bytes (const_tree type
)
2763 if (type
== error_mark_node
)
2766 type
= TYPE_MAIN_VARIANT (type
);
2767 t
= TYPE_SIZE_UNIT (type
);
2769 if (t
&& tree_fits_uhwi_p (t
))
2770 return TREE_INT_CST_LOW (t
);
2775 /* Return the maximum size of TYPE (in bytes) as a wide integer
2776 or return -1 if the size can vary or is larger than an integer. */
2779 max_int_size_in_bytes (const_tree type
)
2781 HOST_WIDE_INT size
= -1;
2784 /* If this is an array type, check for a possible MAX_SIZE attached. */
2786 if (TREE_CODE (type
) == ARRAY_TYPE
)
2788 size_tree
= TYPE_ARRAY_MAX_SIZE (type
);
2790 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2791 size
= tree_to_uhwi (size_tree
);
2794 /* If we still haven't been able to get a size, see if the language
2795 can compute a maximum size. */
2799 size_tree
= lang_hooks
.types
.max_size (type
);
2801 if (size_tree
&& tree_fits_uhwi_p (size_tree
))
2802 size
= tree_to_uhwi (size_tree
);
2808 /* Return the bit position of FIELD, in bits from the start of the record.
2809 This is a tree of type bitsizetype. */
2812 bit_position (const_tree field
)
2814 return bit_from_pos (DECL_FIELD_OFFSET (field
),
2815 DECL_FIELD_BIT_OFFSET (field
));
2818 /* Likewise, but return as an integer. It must be representable in
2819 that way (since it could be a signed value, we don't have the
2820 option of returning -1 like int_size_in_byte can. */
2823 int_bit_position (const_tree field
)
2825 return tree_to_shwi (bit_position (field
));
2828 /* Return the byte position of FIELD, in bytes from the start of the record.
2829 This is a tree of type sizetype. */
2832 byte_position (const_tree field
)
2834 return byte_from_pos (DECL_FIELD_OFFSET (field
),
2835 DECL_FIELD_BIT_OFFSET (field
));
2838 /* Likewise, but return as an integer. It must be representable in
2839 that way (since it could be a signed value, we don't have the
2840 option of returning -1 like int_size_in_byte can. */
2843 int_byte_position (const_tree field
)
2845 return tree_to_shwi (byte_position (field
));
2848 /* Return the strictest alignment, in bits, that T is known to have. */
2851 expr_align (const_tree t
)
2853 unsigned int align0
, align1
;
2855 switch (TREE_CODE (t
))
2857 CASE_CONVERT
: case NON_LVALUE_EXPR
:
2858 /* If we have conversions, we know that the alignment of the
2859 object must meet each of the alignments of the types. */
2860 align0
= expr_align (TREE_OPERAND (t
, 0));
2861 align1
= TYPE_ALIGN (TREE_TYPE (t
));
2862 return MAX (align0
, align1
);
2864 case SAVE_EXPR
: case COMPOUND_EXPR
: case MODIFY_EXPR
:
2865 case INIT_EXPR
: case TARGET_EXPR
: case WITH_CLEANUP_EXPR
:
2866 case CLEANUP_POINT_EXPR
:
2867 /* These don't change the alignment of an object. */
2868 return expr_align (TREE_OPERAND (t
, 0));
2871 /* The best we can do is say that the alignment is the least aligned
2873 align0
= expr_align (TREE_OPERAND (t
, 1));
2874 align1
= expr_align (TREE_OPERAND (t
, 2));
2875 return MIN (align0
, align1
);
2877 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2878 meaningfully, it's always 1. */
2879 case LABEL_DECL
: case CONST_DECL
:
2880 case VAR_DECL
: case PARM_DECL
: case RESULT_DECL
:
2882 gcc_assert (DECL_ALIGN (t
) != 0);
2883 return DECL_ALIGN (t
);
2889 /* Otherwise take the alignment from that of the type. */
2890 return TYPE_ALIGN (TREE_TYPE (t
));
2893 /* Return, as a tree node, the number of elements for TYPE (which is an
2894 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2897 array_type_nelts (const_tree type
)
2899 tree index_type
, min
, max
;
2901 /* If they did it with unspecified bounds, then we should have already
2902 given an error about it before we got here. */
2903 if (! TYPE_DOMAIN (type
))
2904 return error_mark_node
;
2906 index_type
= TYPE_DOMAIN (type
);
2907 min
= TYPE_MIN_VALUE (index_type
);
2908 max
= TYPE_MAX_VALUE (index_type
);
2910 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2912 return error_mark_node
;
2914 return (integer_zerop (min
)
2916 : fold_build2 (MINUS_EXPR
, TREE_TYPE (max
), max
, min
));
2919 /* If arg is static -- a reference to an object in static storage -- then
2920 return the object. This is not the same as the C meaning of `static'.
2921 If arg isn't static, return NULL. */
2926 switch (TREE_CODE (arg
))
2929 /* Nested functions are static, even though taking their address will
2930 involve a trampoline as we unnest the nested function and create
2931 the trampoline on the tree level. */
2935 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
2936 && ! DECL_THREAD_LOCAL_P (arg
)
2937 && ! DECL_DLLIMPORT_P (arg
)
2941 return ((TREE_STATIC (arg
) || DECL_EXTERNAL (arg
))
2945 return TREE_STATIC (arg
) ? arg
: NULL
;
2952 /* If the thing being referenced is not a field, then it is
2953 something language specific. */
2954 gcc_assert (TREE_CODE (TREE_OPERAND (arg
, 1)) == FIELD_DECL
);
2956 /* If we are referencing a bitfield, we can't evaluate an
2957 ADDR_EXPR at compile time and so it isn't a constant. */
2958 if (DECL_BIT_FIELD (TREE_OPERAND (arg
, 1)))
2961 return staticp (TREE_OPERAND (arg
, 0));
2967 return TREE_CONSTANT (TREE_OPERAND (arg
, 0)) ? arg
: NULL
;
2970 case ARRAY_RANGE_REF
:
2971 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg
))) == INTEGER_CST
2972 && TREE_CODE (TREE_OPERAND (arg
, 1)) == INTEGER_CST
)
2973 return staticp (TREE_OPERAND (arg
, 0));
2977 case COMPOUND_LITERAL_EXPR
:
2978 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg
)) ? arg
: NULL
;
2988 /* Return whether OP is a DECL whose address is function-invariant. */
2991 decl_address_invariant_p (const_tree op
)
2993 /* The conditions below are slightly less strict than the one in
2996 switch (TREE_CODE (op
))
3005 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3006 || DECL_THREAD_LOCAL_P (op
)
3007 || DECL_CONTEXT (op
) == current_function_decl
3008 || decl_function_context (op
) == current_function_decl
)
3013 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3014 || decl_function_context (op
) == current_function_decl
)
3025 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3028 decl_address_ip_invariant_p (const_tree op
)
3030 /* The conditions below are slightly less strict than the one in
3033 switch (TREE_CODE (op
))
3041 if (((TREE_STATIC (op
) || DECL_EXTERNAL (op
))
3042 && !DECL_DLLIMPORT_P (op
))
3043 || DECL_THREAD_LOCAL_P (op
))
3048 if ((TREE_STATIC (op
) || DECL_EXTERNAL (op
)))
3060 /* Return true if T is function-invariant (internal function, does
3061 not handle arithmetic; that's handled in skip_simple_arithmetic and
3062 tree_invariant_p). */
3064 static bool tree_invariant_p (tree t
);
3067 tree_invariant_p_1 (tree t
)
3071 if (TREE_CONSTANT (t
)
3072 || (TREE_READONLY (t
) && !TREE_SIDE_EFFECTS (t
)))
3075 switch (TREE_CODE (t
))
3081 op
= TREE_OPERAND (t
, 0);
3082 while (handled_component_p (op
))
3084 switch (TREE_CODE (op
))
3087 case ARRAY_RANGE_REF
:
3088 if (!tree_invariant_p (TREE_OPERAND (op
, 1))
3089 || TREE_OPERAND (op
, 2) != NULL_TREE
3090 || TREE_OPERAND (op
, 3) != NULL_TREE
)
3095 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
3101 op
= TREE_OPERAND (op
, 0);
3104 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
3113 /* Return true if T is function-invariant. */
3116 tree_invariant_p (tree t
)
3118 tree inner
= skip_simple_arithmetic (t
);
3119 return tree_invariant_p_1 (inner
);
3122 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3123 Do this to any expression which may be used in more than one place,
3124 but must be evaluated only once.
3126 Normally, expand_expr would reevaluate the expression each time.
3127 Calling save_expr produces something that is evaluated and recorded
3128 the first time expand_expr is called on it. Subsequent calls to
3129 expand_expr just reuse the recorded value.
3131 The call to expand_expr that generates code that actually computes
3132 the value is the first call *at compile time*. Subsequent calls
3133 *at compile time* generate code to use the saved value.
3134 This produces correct result provided that *at run time* control
3135 always flows through the insns made by the first expand_expr
3136 before reaching the other places where the save_expr was evaluated.
3137 You, the caller of save_expr, must make sure this is so.
3139 Constants, and certain read-only nodes, are returned with no
3140 SAVE_EXPR because that is safe. Expressions containing placeholders
3141 are not touched; see tree.def for an explanation of what these
3145 save_expr (tree expr
)
3147 tree t
= fold (expr
);
3150 /* If the tree evaluates to a constant, then we don't want to hide that
3151 fact (i.e. this allows further folding, and direct checks for constants).
3152 However, a read-only object that has side effects cannot be bypassed.
3153 Since it is no problem to reevaluate literals, we just return the
3155 inner
= skip_simple_arithmetic (t
);
3156 if (TREE_CODE (inner
) == ERROR_MARK
)
3159 if (tree_invariant_p_1 (inner
))
3162 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3163 it means that the size or offset of some field of an object depends on
3164 the value within another field.
3166 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3167 and some variable since it would then need to be both evaluated once and
3168 evaluated more than once. Front-ends must assure this case cannot
3169 happen by surrounding any such subexpressions in their own SAVE_EXPR
3170 and forcing evaluation at the proper time. */
3171 if (contains_placeholder_p (inner
))
3174 t
= build1 (SAVE_EXPR
, TREE_TYPE (expr
), t
);
3175 SET_EXPR_LOCATION (t
, EXPR_LOCATION (expr
));
3177 /* This expression might be placed ahead of a jump to ensure that the
3178 value was computed on both sides of the jump. So make sure it isn't
3179 eliminated as dead. */
3180 TREE_SIDE_EFFECTS (t
) = 1;
3184 /* Look inside EXPR into any simple arithmetic operations. Return the
3185 outermost non-arithmetic or non-invariant node. */
3188 skip_simple_arithmetic (tree expr
)
3190 /* We don't care about whether this can be used as an lvalue in this
3192 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3193 expr
= TREE_OPERAND (expr
, 0);
3195 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3196 a constant, it will be more efficient to not make another SAVE_EXPR since
3197 it will allow better simplification and GCSE will be able to merge the
3198 computations if they actually occur. */
3201 if (UNARY_CLASS_P (expr
))
3202 expr
= TREE_OPERAND (expr
, 0);
3203 else if (BINARY_CLASS_P (expr
))
3205 if (tree_invariant_p (TREE_OPERAND (expr
, 1)))
3206 expr
= TREE_OPERAND (expr
, 0);
3207 else if (tree_invariant_p (TREE_OPERAND (expr
, 0)))
3208 expr
= TREE_OPERAND (expr
, 1);
3219 /* Look inside EXPR into simple arithmetic operations involving constants.
3220 Return the outermost non-arithmetic or non-constant node. */
3223 skip_simple_constant_arithmetic (tree expr
)
3225 while (TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3226 expr
= TREE_OPERAND (expr
, 0);
3230 if (UNARY_CLASS_P (expr
))
3231 expr
= TREE_OPERAND (expr
, 0);
3232 else if (BINARY_CLASS_P (expr
))
3234 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
3235 expr
= TREE_OPERAND (expr
, 0);
3236 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
3237 expr
= TREE_OPERAND (expr
, 1);
3248 /* Return which tree structure is used by T. */
3250 enum tree_node_structure_enum
3251 tree_node_structure (const_tree t
)
3253 const enum tree_code code
= TREE_CODE (t
);
3254 return tree_node_structure_for_code (code
);
3257 /* Set various status flags when building a CALL_EXPR object T. */
3260 process_call_operands (tree t
)
3262 bool side_effects
= TREE_SIDE_EFFECTS (t
);
3263 bool read_only
= false;
3264 int i
= call_expr_flags (t
);
3266 /* Calls have side-effects, except those to const or pure functions. */
3267 if ((i
& ECF_LOOPING_CONST_OR_PURE
) || !(i
& (ECF_CONST
| ECF_PURE
)))
3268 side_effects
= true;
3269 /* Propagate TREE_READONLY of arguments for const functions. */
3273 if (!side_effects
|| read_only
)
3274 for (i
= 1; i
< TREE_OPERAND_LENGTH (t
); i
++)
3276 tree op
= TREE_OPERAND (t
, i
);
3277 if (op
&& TREE_SIDE_EFFECTS (op
))
3278 side_effects
= true;
3279 if (op
&& !TREE_READONLY (op
) && !CONSTANT_CLASS_P (op
))
3283 TREE_SIDE_EFFECTS (t
) = side_effects
;
3284 TREE_READONLY (t
) = read_only
;
3287 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3288 size or offset that depends on a field within a record. */
3291 contains_placeholder_p (const_tree exp
)
3293 enum tree_code code
;
3298 code
= TREE_CODE (exp
);
3299 if (code
== PLACEHOLDER_EXPR
)
3302 switch (TREE_CODE_CLASS (code
))
3305 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3306 position computations since they will be converted into a
3307 WITH_RECORD_EXPR involving the reference, which will assume
3308 here will be valid. */
3309 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3311 case tcc_exceptional
:
3312 if (code
== TREE_LIST
)
3313 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp
))
3314 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp
)));
3319 case tcc_comparison
:
3320 case tcc_expression
:
3324 /* Ignoring the first operand isn't quite right, but works best. */
3325 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1));
3328 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3329 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1))
3330 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 2)));
3333 /* The save_expr function never wraps anything containing
3334 a PLACEHOLDER_EXPR. */
3341 switch (TREE_CODE_LENGTH (code
))
3344 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0));
3346 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 0))
3347 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp
, 1)));
3358 const_call_expr_arg_iterator iter
;
3359 FOR_EACH_CONST_CALL_EXPR_ARG (arg
, iter
, exp
)
3360 if (CONTAINS_PLACEHOLDER_P (arg
))
3374 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3375 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3379 type_contains_placeholder_1 (const_tree type
)
3381 /* If the size contains a placeholder or the parent type (component type in
3382 the case of arrays) type involves a placeholder, this type does. */
3383 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type
))
3384 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type
))
3385 || (!POINTER_TYPE_P (type
)
3387 && type_contains_placeholder_p (TREE_TYPE (type
))))
3390 /* Now do type-specific checks. Note that the last part of the check above
3391 greatly limits what we have to do below. */
3392 switch (TREE_CODE (type
))
3400 case REFERENCE_TYPE
:
3409 case FIXED_POINT_TYPE
:
3410 /* Here we just check the bounds. */
3411 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type
))
3412 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type
)));
3415 /* We have already checked the component type above, so just check the
3417 return type_contains_placeholder_p (TYPE_DOMAIN (type
));
3421 case QUAL_UNION_TYPE
:
3425 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
3426 if (TREE_CODE (field
) == FIELD_DECL
3427 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field
))
3428 || (TREE_CODE (type
) == QUAL_UNION_TYPE
3429 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field
)))
3430 || type_contains_placeholder_p (TREE_TYPE (field
))))
3441 /* Wrapper around above function used to cache its result. */
3444 type_contains_placeholder_p (tree type
)
3448 /* If the contains_placeholder_bits field has been initialized,
3449 then we know the answer. */
3450 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) > 0)
3451 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) - 1;
3453 /* Indicate that we've seen this type node, and the answer is false.
3454 This is what we want to return if we run into recursion via fields. */
3455 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = 1;
3457 /* Compute the real value. */
3458 result
= type_contains_placeholder_1 (type
);
3460 /* Store the real value. */
3461 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type
) = result
+ 1;
3466 /* Push tree EXP onto vector QUEUE if it is not already present. */
3469 push_without_duplicates (tree exp
, vec
<tree
> *queue
)
3474 FOR_EACH_VEC_ELT (*queue
, i
, iter
)
3475 if (simple_cst_equal (iter
, exp
) == 1)
3479 queue
->safe_push (exp
);
3482 /* Given a tree EXP, find all occurrences of references to fields
3483 in a PLACEHOLDER_EXPR and place them in vector REFS without
3484 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3485 we assume here that EXP contains only arithmetic expressions
3486 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3490 find_placeholder_in_expr (tree exp
, vec
<tree
> *refs
)
3492 enum tree_code code
= TREE_CODE (exp
);
3496 /* We handle TREE_LIST and COMPONENT_REF separately. */
3497 if (code
== TREE_LIST
)
3499 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), refs
);
3500 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), refs
);
3502 else if (code
== COMPONENT_REF
)
3504 for (inner
= TREE_OPERAND (exp
, 0);
3505 REFERENCE_CLASS_P (inner
);
3506 inner
= TREE_OPERAND (inner
, 0))
3509 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
)
3510 push_without_duplicates (exp
, refs
);
3512 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), refs
);
3515 switch (TREE_CODE_CLASS (code
))
3520 case tcc_declaration
:
3521 /* Variables allocated to static storage can stay. */
3522 if (!TREE_STATIC (exp
))
3523 push_without_duplicates (exp
, refs
);
3526 case tcc_expression
:
3527 /* This is the pattern built in ada/make_aligning_type. */
3528 if (code
== ADDR_EXPR
3529 && TREE_CODE (TREE_OPERAND (exp
, 0)) == PLACEHOLDER_EXPR
)
3531 push_without_duplicates (exp
, refs
);
3535 /* Fall through... */
3537 case tcc_exceptional
:
3540 case tcc_comparison
:
3542 for (i
= 0; i
< TREE_CODE_LENGTH (code
); i
++)
3543 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3547 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3548 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, i
), refs
);
3556 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3557 return a tree with all occurrences of references to F in a
3558 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3559 CONST_DECLs. Note that we assume here that EXP contains only
3560 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3561 occurring only in their argument list. */
3564 substitute_in_expr (tree exp
, tree f
, tree r
)
3566 enum tree_code code
= TREE_CODE (exp
);
3567 tree op0
, op1
, op2
, op3
;
3570 /* We handle TREE_LIST and COMPONENT_REF separately. */
3571 if (code
== TREE_LIST
)
3573 op0
= SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp
), f
, r
);
3574 op1
= SUBSTITUTE_IN_EXPR (TREE_VALUE (exp
), f
, r
);
3575 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3578 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3580 else if (code
== COMPONENT_REF
)
3584 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3585 and it is the right field, replace it with R. */
3586 for (inner
= TREE_OPERAND (exp
, 0);
3587 REFERENCE_CLASS_P (inner
);
3588 inner
= TREE_OPERAND (inner
, 0))
3592 op1
= TREE_OPERAND (exp
, 1);
3594 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& op1
== f
)
3597 /* If this expression hasn't been completed let, leave it alone. */
3598 if (TREE_CODE (inner
) == PLACEHOLDER_EXPR
&& !TREE_TYPE (inner
))
3601 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3602 if (op0
== TREE_OPERAND (exp
, 0))
3606 = fold_build3 (COMPONENT_REF
, TREE_TYPE (exp
), op0
, op1
, NULL_TREE
);
3609 switch (TREE_CODE_CLASS (code
))
3614 case tcc_declaration
:
3620 case tcc_expression
:
3624 /* Fall through... */
3626 case tcc_exceptional
:
3629 case tcc_comparison
:
3631 switch (TREE_CODE_LENGTH (code
))
3637 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3638 if (op0
== TREE_OPERAND (exp
, 0))
3641 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3645 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3646 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3648 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3651 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3655 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3656 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3657 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3659 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3660 && op2
== TREE_OPERAND (exp
, 2))
3663 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3667 op0
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 0), f
, r
);
3668 op1
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 1), f
, r
);
3669 op2
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 2), f
, r
);
3670 op3
= SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp
, 3), f
, r
);
3672 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3673 && op2
== TREE_OPERAND (exp
, 2)
3674 && op3
== TREE_OPERAND (exp
, 3))
3678 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3690 new_tree
= NULL_TREE
;
3692 /* If we are trying to replace F with a constant, inline back
3693 functions which do nothing else than computing a value from
3694 the arguments they are passed. This makes it possible to
3695 fold partially or entirely the replacement expression. */
3696 if (CONSTANT_CLASS_P (r
) && code
== CALL_EXPR
)
3698 tree t
= maybe_inline_call_in_expr (exp
);
3700 return SUBSTITUTE_IN_EXPR (t
, f
, r
);
3703 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3705 tree op
= TREE_OPERAND (exp
, i
);
3706 tree new_op
= SUBSTITUTE_IN_EXPR (op
, f
, r
);
3710 new_tree
= copy_node (exp
);
3711 TREE_OPERAND (new_tree
, i
) = new_op
;
3717 new_tree
= fold (new_tree
);
3718 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3719 process_call_operands (new_tree
);
3730 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3732 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3733 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3738 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3739 for it within OBJ, a tree that is an object or a chain of references. */
3742 substitute_placeholder_in_expr (tree exp
, tree obj
)
3744 enum tree_code code
= TREE_CODE (exp
);
3745 tree op0
, op1
, op2
, op3
;
3748 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3749 in the chain of OBJ. */
3750 if (code
== PLACEHOLDER_EXPR
)
3752 tree need_type
= TYPE_MAIN_VARIANT (TREE_TYPE (exp
));
3755 for (elt
= obj
; elt
!= 0;
3756 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3757 || TREE_CODE (elt
) == COND_EXPR
)
3758 ? TREE_OPERAND (elt
, 1)
3759 : (REFERENCE_CLASS_P (elt
)
3760 || UNARY_CLASS_P (elt
)
3761 || BINARY_CLASS_P (elt
)
3762 || VL_EXP_CLASS_P (elt
)
3763 || EXPRESSION_CLASS_P (elt
))
3764 ? TREE_OPERAND (elt
, 0) : 0))
3765 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt
)) == need_type
)
3768 for (elt
= obj
; elt
!= 0;
3769 elt
= ((TREE_CODE (elt
) == COMPOUND_EXPR
3770 || TREE_CODE (elt
) == COND_EXPR
)
3771 ? TREE_OPERAND (elt
, 1)
3772 : (REFERENCE_CLASS_P (elt
)
3773 || UNARY_CLASS_P (elt
)
3774 || BINARY_CLASS_P (elt
)
3775 || VL_EXP_CLASS_P (elt
)
3776 || EXPRESSION_CLASS_P (elt
))
3777 ? TREE_OPERAND (elt
, 0) : 0))
3778 if (POINTER_TYPE_P (TREE_TYPE (elt
))
3779 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt
)))
3781 return fold_build1 (INDIRECT_REF
, need_type
, elt
);
3783 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3784 survives until RTL generation, there will be an error. */
3788 /* TREE_LIST is special because we need to look at TREE_VALUE
3789 and TREE_CHAIN, not TREE_OPERANDS. */
3790 else if (code
== TREE_LIST
)
3792 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp
), obj
);
3793 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp
), obj
);
3794 if (op0
== TREE_CHAIN (exp
) && op1
== TREE_VALUE (exp
))
3797 return tree_cons (TREE_PURPOSE (exp
), op1
, op0
);
3800 switch (TREE_CODE_CLASS (code
))
3803 case tcc_declaration
:
3806 case tcc_exceptional
:
3809 case tcc_comparison
:
3810 case tcc_expression
:
3813 switch (TREE_CODE_LENGTH (code
))
3819 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3820 if (op0
== TREE_OPERAND (exp
, 0))
3823 new_tree
= fold_build1 (code
, TREE_TYPE (exp
), op0
);
3827 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3828 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3830 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1))
3833 new_tree
= fold_build2 (code
, TREE_TYPE (exp
), op0
, op1
);
3837 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3838 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3839 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
3841 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3842 && op2
== TREE_OPERAND (exp
, 2))
3845 new_tree
= fold_build3 (code
, TREE_TYPE (exp
), op0
, op1
, op2
);
3849 op0
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 0), obj
);
3850 op1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 1), obj
);
3851 op2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 2), obj
);
3852 op3
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp
, 3), obj
);
3854 if (op0
== TREE_OPERAND (exp
, 0) && op1
== TREE_OPERAND (exp
, 1)
3855 && op2
== TREE_OPERAND (exp
, 2)
3856 && op3
== TREE_OPERAND (exp
, 3))
3860 = fold (build4 (code
, TREE_TYPE (exp
), op0
, op1
, op2
, op3
));
3872 new_tree
= NULL_TREE
;
3874 for (i
= 1; i
< TREE_OPERAND_LENGTH (exp
); i
++)
3876 tree op
= TREE_OPERAND (exp
, i
);
3877 tree new_op
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (op
, obj
);
3881 new_tree
= copy_node (exp
);
3882 TREE_OPERAND (new_tree
, i
) = new_op
;
3888 new_tree
= fold (new_tree
);
3889 if (TREE_CODE (new_tree
) == CALL_EXPR
)
3890 process_call_operands (new_tree
);
3901 TREE_READONLY (new_tree
) |= TREE_READONLY (exp
);
3903 if (code
== INDIRECT_REF
|| code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
3904 TREE_THIS_NOTRAP (new_tree
) |= TREE_THIS_NOTRAP (exp
);
3910 /* Subroutine of stabilize_reference; this is called for subtrees of
3911 references. Any expression with side-effects must be put in a SAVE_EXPR
3912 to ensure that it is only evaluated once.
3914 We don't put SAVE_EXPR nodes around everything, because assigning very
3915 simple expressions to temporaries causes us to miss good opportunities
3916 for optimizations. Among other things, the opportunity to fold in the
3917 addition of a constant into an addressing mode often gets lost, e.g.
3918 "y[i+1] += x;". In general, we take the approach that we should not make
3919 an assignment unless we are forced into it - i.e., that any non-side effect
3920 operator should be allowed, and that cse should take care of coalescing
3921 multiple utterances of the same expression should that prove fruitful. */
3924 stabilize_reference_1 (tree e
)
3927 enum tree_code code
= TREE_CODE (e
);
3929 /* We cannot ignore const expressions because it might be a reference
3930 to a const array but whose index contains side-effects. But we can
3931 ignore things that are actual constant or that already have been
3932 handled by this function. */
3934 if (tree_invariant_p (e
))
3937 switch (TREE_CODE_CLASS (code
))
3939 case tcc_exceptional
:
3941 case tcc_declaration
:
3942 case tcc_comparison
:
3944 case tcc_expression
:
3947 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3948 so that it will only be evaluated once. */
3949 /* The reference (r) and comparison (<) classes could be handled as
3950 below, but it is generally faster to only evaluate them once. */
3951 if (TREE_SIDE_EFFECTS (e
))
3952 return save_expr (e
);
3956 /* Constants need no processing. In fact, we should never reach
3961 /* Division is slow and tends to be compiled with jumps,
3962 especially the division by powers of 2 that is often
3963 found inside of an array reference. So do it just once. */
3964 if (code
== TRUNC_DIV_EXPR
|| code
== TRUNC_MOD_EXPR
3965 || code
== FLOOR_DIV_EXPR
|| code
== FLOOR_MOD_EXPR
3966 || code
== CEIL_DIV_EXPR
|| code
== CEIL_MOD_EXPR
3967 || code
== ROUND_DIV_EXPR
|| code
== ROUND_MOD_EXPR
)
3968 return save_expr (e
);
3969 /* Recursively stabilize each operand. */
3970 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)),
3971 stabilize_reference_1 (TREE_OPERAND (e
, 1)));
3975 /* Recursively stabilize each operand. */
3976 result
= build_nt (code
, stabilize_reference_1 (TREE_OPERAND (e
, 0)));
3983 TREE_TYPE (result
) = TREE_TYPE (e
);
3984 TREE_READONLY (result
) = TREE_READONLY (e
);
3985 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (e
);
3986 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
3991 /* Stabilize a reference so that we can use it any number of times
3992 without causing its operands to be evaluated more than once.
3993 Returns the stabilized reference. This works by means of save_expr,
3994 so see the caveats in the comments about save_expr.
3996 Also allows conversion expressions whose operands are references.
3997 Any other kind of expression is returned unchanged. */
4000 stabilize_reference (tree ref
)
4003 enum tree_code code
= TREE_CODE (ref
);
4010 /* No action is needed in this case. */
4015 case FIX_TRUNC_EXPR
:
4016 result
= build_nt (code
, stabilize_reference (TREE_OPERAND (ref
, 0)));
4020 result
= build_nt (INDIRECT_REF
,
4021 stabilize_reference_1 (TREE_OPERAND (ref
, 0)));
4025 result
= build_nt (COMPONENT_REF
,
4026 stabilize_reference (TREE_OPERAND (ref
, 0)),
4027 TREE_OPERAND (ref
, 1), NULL_TREE
);
4031 result
= build_nt (BIT_FIELD_REF
,
4032 stabilize_reference (TREE_OPERAND (ref
, 0)),
4033 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
4037 result
= build_nt (ARRAY_REF
,
4038 stabilize_reference (TREE_OPERAND (ref
, 0)),
4039 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4040 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4043 case ARRAY_RANGE_REF
:
4044 result
= build_nt (ARRAY_RANGE_REF
,
4045 stabilize_reference (TREE_OPERAND (ref
, 0)),
4046 stabilize_reference_1 (TREE_OPERAND (ref
, 1)),
4047 TREE_OPERAND (ref
, 2), TREE_OPERAND (ref
, 3));
4051 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4052 it wouldn't be ignored. This matters when dealing with
4054 return stabilize_reference_1 (ref
);
4056 /* If arg isn't a kind of lvalue we recognize, make no change.
4057 Caller should recognize the error for an invalid lvalue. */
4062 return error_mark_node
;
4065 TREE_TYPE (result
) = TREE_TYPE (ref
);
4066 TREE_READONLY (result
) = TREE_READONLY (ref
);
4067 TREE_SIDE_EFFECTS (result
) = TREE_SIDE_EFFECTS (ref
);
4068 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
4073 /* Low-level constructors for expressions. */
4075 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4076 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4079 recompute_tree_invariant_for_addr_expr (tree t
)
4082 bool tc
= true, se
= false;
4084 /* We started out assuming this address is both invariant and constant, but
4085 does not have side effects. Now go down any handled components and see if
4086 any of them involve offsets that are either non-constant or non-invariant.
4087 Also check for side-effects.
4089 ??? Note that this code makes no attempt to deal with the case where
4090 taking the address of something causes a copy due to misalignment. */
4092 #define UPDATE_FLAGS(NODE) \
4093 do { tree _node = (NODE); \
4094 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4095 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4097 for (node
= TREE_OPERAND (t
, 0); handled_component_p (node
);
4098 node
= TREE_OPERAND (node
, 0))
4100 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4101 array reference (probably made temporarily by the G++ front end),
4102 so ignore all the operands. */
4103 if ((TREE_CODE (node
) == ARRAY_REF
4104 || TREE_CODE (node
) == ARRAY_RANGE_REF
)
4105 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node
, 0))) == ARRAY_TYPE
)
4107 UPDATE_FLAGS (TREE_OPERAND (node
, 1));
4108 if (TREE_OPERAND (node
, 2))
4109 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4110 if (TREE_OPERAND (node
, 3))
4111 UPDATE_FLAGS (TREE_OPERAND (node
, 3));
4113 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4114 FIELD_DECL, apparently. The G++ front end can put something else
4115 there, at least temporarily. */
4116 else if (TREE_CODE (node
) == COMPONENT_REF
4117 && TREE_CODE (TREE_OPERAND (node
, 1)) == FIELD_DECL
)
4119 if (TREE_OPERAND (node
, 2))
4120 UPDATE_FLAGS (TREE_OPERAND (node
, 2));
4124 node
= lang_hooks
.expr_to_decl (node
, &tc
, &se
);
4126 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4127 the address, since &(*a)->b is a form of addition. If it's a constant, the
4128 address is constant too. If it's a decl, its address is constant if the
4129 decl is static. Everything else is not constant and, furthermore,
4130 taking the address of a volatile variable is not volatile. */
4131 if (TREE_CODE (node
) == INDIRECT_REF
4132 || TREE_CODE (node
) == MEM_REF
)
4133 UPDATE_FLAGS (TREE_OPERAND (node
, 0));
4134 else if (CONSTANT_CLASS_P (node
))
4136 else if (DECL_P (node
))
4137 tc
&= (staticp (node
) != NULL_TREE
);
4141 se
|= TREE_SIDE_EFFECTS (node
);
4145 TREE_CONSTANT (t
) = tc
;
4146 TREE_SIDE_EFFECTS (t
) = se
;
4150 /* Build an expression of code CODE, data type TYPE, and operands as
4151 specified. Expressions and reference nodes can be created this way.
4152 Constants, decls, types and misc nodes cannot be.
4154 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4155 enough for all extant tree codes. */
4158 build0_stat (enum tree_code code
, tree tt MEM_STAT_DECL
)
4162 gcc_assert (TREE_CODE_LENGTH (code
) == 0);
4164 t
= make_node_stat (code PASS_MEM_STAT
);
4171 build1_stat (enum tree_code code
, tree type
, tree node MEM_STAT_DECL
)
4173 int length
= sizeof (struct tree_exp
);
4176 record_node_allocation_statistics (code
, length
);
4178 gcc_assert (TREE_CODE_LENGTH (code
) == 1);
4180 t
= ggc_alloc_tree_node_stat (length PASS_MEM_STAT
);
4182 memset (t
, 0, sizeof (struct tree_common
));
4184 TREE_SET_CODE (t
, code
);
4186 TREE_TYPE (t
) = type
;
4187 SET_EXPR_LOCATION (t
, UNKNOWN_LOCATION
);
4188 TREE_OPERAND (t
, 0) = node
;
4189 if (node
&& !TYPE_P (node
))
4191 TREE_SIDE_EFFECTS (t
) = TREE_SIDE_EFFECTS (node
);
4192 TREE_READONLY (t
) = TREE_READONLY (node
);
4195 if (TREE_CODE_CLASS (code
) == tcc_statement
)
4196 TREE_SIDE_EFFECTS (t
) = 1;
4200 /* All of these have side-effects, no matter what their
4202 TREE_SIDE_EFFECTS (t
) = 1;
4203 TREE_READONLY (t
) = 0;
4207 /* Whether a dereference is readonly has nothing to do with whether
4208 its operand is readonly. */
4209 TREE_READONLY (t
) = 0;
4214 recompute_tree_invariant_for_addr_expr (t
);
4218 if ((TREE_CODE_CLASS (code
) == tcc_unary
|| code
== VIEW_CONVERT_EXPR
)
4219 && node
&& !TYPE_P (node
)
4220 && TREE_CONSTANT (node
))
4221 TREE_CONSTANT (t
) = 1;
4222 if (TREE_CODE_CLASS (code
) == tcc_reference
4223 && node
&& TREE_THIS_VOLATILE (node
))
4224 TREE_THIS_VOLATILE (t
) = 1;
/* Store operand N of expression T and fold the local flags
   SIDE_EFFECTS, READ_ONLY and CONSTANT from that operand.  Only used
   by the buildN_stat functions below; T, side_effects, read_only and
   constant must be in scope at the expansion site.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)
4247 build2_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1 MEM_STAT_DECL
)
4249 bool constant
, read_only
, side_effects
;
4252 gcc_assert (TREE_CODE_LENGTH (code
) == 2);
4254 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
|| code
== MULT_EXPR
)
4255 && arg0
&& arg1
&& tt
&& POINTER_TYPE_P (tt
)
4256 /* When sizetype precision doesn't match that of pointers
4257 we need to be able to build explicit extensions or truncations
4258 of the offset argument. */
4259 && TYPE_PRECISION (sizetype
) == TYPE_PRECISION (tt
))
4260 gcc_assert (TREE_CODE (arg0
) == INTEGER_CST
4261 && TREE_CODE (arg1
) == INTEGER_CST
);
4263 if (code
== POINTER_PLUS_EXPR
&& arg0
&& arg1
&& tt
)
4264 gcc_assert (POINTER_TYPE_P (tt
) && POINTER_TYPE_P (TREE_TYPE (arg0
))
4265 && ptrofftype_p (TREE_TYPE (arg1
)));
4267 t
= make_node_stat (code PASS_MEM_STAT
);
4270 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4271 result based on those same flags for the arguments. But if the
4272 arguments aren't really even `tree' expressions, we shouldn't be trying
4275 /* Expressions without side effects may be constant if their
4276 arguments are as well. */
4277 constant
= (TREE_CODE_CLASS (code
) == tcc_comparison
4278 || TREE_CODE_CLASS (code
) == tcc_binary
);
4280 side_effects
= TREE_SIDE_EFFECTS (t
);
4285 TREE_READONLY (t
) = read_only
;
4286 TREE_CONSTANT (t
) = constant
;
4287 TREE_SIDE_EFFECTS (t
) = side_effects
;
4288 TREE_THIS_VOLATILE (t
)
4289 = (TREE_CODE_CLASS (code
) == tcc_reference
4290 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4297 build3_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4298 tree arg2 MEM_STAT_DECL
)
4300 bool constant
, read_only
, side_effects
;
4303 gcc_assert (TREE_CODE_LENGTH (code
) == 3);
4304 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4306 t
= make_node_stat (code PASS_MEM_STAT
);
4311 /* As a special exception, if COND_EXPR has NULL branches, we
4312 assume that it is a gimple statement and always consider
4313 it to have side effects. */
4314 if (code
== COND_EXPR
4315 && tt
== void_type_node
4316 && arg1
== NULL_TREE
4317 && arg2
== NULL_TREE
)
4318 side_effects
= true;
4320 side_effects
= TREE_SIDE_EFFECTS (t
);
4326 if (code
== COND_EXPR
)
4327 TREE_READONLY (t
) = read_only
;
4329 TREE_SIDE_EFFECTS (t
) = side_effects
;
4330 TREE_THIS_VOLATILE (t
)
4331 = (TREE_CODE_CLASS (code
) == tcc_reference
4332 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4338 build4_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4339 tree arg2
, tree arg3 MEM_STAT_DECL
)
4341 bool constant
, read_only
, side_effects
;
4344 gcc_assert (TREE_CODE_LENGTH (code
) == 4);
4346 t
= make_node_stat (code PASS_MEM_STAT
);
4349 side_effects
= TREE_SIDE_EFFECTS (t
);
4356 TREE_SIDE_EFFECTS (t
) = side_effects
;
4357 TREE_THIS_VOLATILE (t
)
4358 = (TREE_CODE_CLASS (code
) == tcc_reference
4359 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4365 build5_stat (enum tree_code code
, tree tt
, tree arg0
, tree arg1
,
4366 tree arg2
, tree arg3
, tree arg4 MEM_STAT_DECL
)
4368 bool constant
, read_only
, side_effects
;
4371 gcc_assert (TREE_CODE_LENGTH (code
) == 5);
4373 t
= make_node_stat (code PASS_MEM_STAT
);
4376 side_effects
= TREE_SIDE_EFFECTS (t
);
4384 TREE_SIDE_EFFECTS (t
) = side_effects
;
4385 TREE_THIS_VOLATILE (t
)
4386 = (TREE_CODE_CLASS (code
) == tcc_reference
4387 && arg0
&& TREE_THIS_VOLATILE (arg0
));
4392 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4393 on the pointer PTR. */
4396 build_simple_mem_ref_loc (location_t loc
, tree ptr
)
4398 HOST_WIDE_INT offset
= 0;
4399 tree ptype
= TREE_TYPE (ptr
);
4401 /* For convenience allow addresses that collapse to a simple base
4403 if (TREE_CODE (ptr
) == ADDR_EXPR
4404 && (handled_component_p (TREE_OPERAND (ptr
, 0))
4405 || TREE_CODE (TREE_OPERAND (ptr
, 0)) == MEM_REF
))
4407 ptr
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &offset
);
4409 ptr
= build_fold_addr_expr (ptr
);
4410 gcc_assert (is_gimple_reg (ptr
) || is_gimple_min_invariant (ptr
));
4412 tem
= build2 (MEM_REF
, TREE_TYPE (ptype
),
4413 ptr
, build_int_cst (ptype
, offset
));
4414 SET_EXPR_LOCATION (tem
, loc
);
4418 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4421 mem_ref_offset (const_tree t
)
4423 return offset_int::from (TREE_OPERAND (t
, 1), SIGNED
);
4426 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4427 offsetted by OFFSET units. */
4430 build_invariant_address (tree type
, tree base
, HOST_WIDE_INT offset
)
4432 tree ref
= fold_build2 (MEM_REF
, TREE_TYPE (type
),
4433 build_fold_addr_expr (base
),
4434 build_int_cst (ptr_type_node
, offset
));
4435 tree addr
= build1 (ADDR_EXPR
, type
, ref
);
4436 recompute_tree_invariant_for_addr_expr (addr
);
4440 /* Similar except don't specify the TREE_TYPE
4441 and leave the TREE_SIDE_EFFECTS as 0.
4442 It is permissible for arguments to be null,
4443 or even garbage if their values do not matter. */
4446 build_nt (enum tree_code code
, ...)
4453 gcc_assert (TREE_CODE_CLASS (code
) != tcc_vl_exp
);
4457 t
= make_node (code
);
4458 length
= TREE_CODE_LENGTH (code
);
4460 for (i
= 0; i
< length
; i
++)
4461 TREE_OPERAND (t
, i
) = va_arg (p
, tree
);
4467 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4471 build_nt_call_vec (tree fn
, vec
<tree
, va_gc
> *args
)
4476 ret
= build_vl_exp (CALL_EXPR
, vec_safe_length (args
) + 3);
4477 CALL_EXPR_FN (ret
) = fn
;
4478 CALL_EXPR_STATIC_CHAIN (ret
) = NULL_TREE
;
4479 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
4480 CALL_EXPR_ARG (ret
, ix
) = t
;
4484 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4485 We do NOT enter this node in any sort of symbol table.
4487 LOC is the location of the decl.
4489 layout_decl is used to set up the decl's storage layout.
4490 Other slots are initialized to 0 or null pointers. */
4493 build_decl_stat (location_t loc
, enum tree_code code
, tree name
,
4494 tree type MEM_STAT_DECL
)
4498 t
= make_node_stat (code PASS_MEM_STAT
);
4499 DECL_SOURCE_LOCATION (t
) = loc
;
4501 /* if (type == error_mark_node)
4502 type = integer_type_node; */
4503 /* That is not done, deliberately, so that having error_mark_node
4504 as the type can suppress useless errors in the use of this variable. */
4506 DECL_NAME (t
) = name
;
4507 TREE_TYPE (t
) = type
;
4509 if (code
== VAR_DECL
|| code
== PARM_DECL
|| code
== RESULT_DECL
)
4515 /* Builds and returns function declaration with NAME and TYPE. */
4518 build_fn_decl (const char *name
, tree type
)
4520 tree id
= get_identifier (name
);
4521 tree decl
= build_decl (input_location
, FUNCTION_DECL
, id
, type
);
4523 DECL_EXTERNAL (decl
) = 1;
4524 TREE_PUBLIC (decl
) = 1;
4525 DECL_ARTIFICIAL (decl
) = 1;
4526 TREE_NOTHROW (decl
) = 1;
4531 vec
<tree
, va_gc
> *all_translation_units
;
4533 /* Builds a new translation-unit decl with name NAME, queues it in the
4534 global list of translation-unit decls and returns it. */
4537 build_translation_unit_decl (tree name
)
4539 tree tu
= build_decl (UNKNOWN_LOCATION
, TRANSLATION_UNIT_DECL
,
4541 TRANSLATION_UNIT_LANGUAGE (tu
) = lang_hooks
.name
;
4542 vec_safe_push (all_translation_units
, tu
);
4547 /* BLOCK nodes are used to represent the structure of binding contours
4548 and declarations, once those contours have been exited and their contents
4549 compiled. This information is used for outputting debugging info. */
4552 build_block (tree vars
, tree subblocks
, tree supercontext
, tree chain
)
4554 tree block
= make_node (BLOCK
);
4556 BLOCK_VARS (block
) = vars
;
4557 BLOCK_SUBBLOCKS (block
) = subblocks
;
4558 BLOCK_SUPERCONTEXT (block
) = supercontext
;
4559 BLOCK_CHAIN (block
) = chain
;
4564 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4566 LOC is the location to use in tree T. */
4569 protected_set_expr_location (tree t
, location_t loc
)
4571 if (t
&& CAN_HAVE_LOCATION_P (t
))
4572 SET_EXPR_LOCATION (t
, loc
);
4575 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4579 build_decl_attribute_variant (tree ddecl
, tree attribute
)
4581 DECL_ATTRIBUTES (ddecl
) = attribute
;
/* Borrowed from hashtab.c iterative_hash implementation.  Mixes the
   three integer lvalues A, B and C in place (Bob Jenkins' mix); every
   intermediate is masked to 32 bits so behavior is host-independent.  */
#define mix(a,b,c) \
{ \
  a -= b; a -= c; a ^= (c>>13); \
  b -= c; b -= a; b ^= (a<< 8); \
  c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
  a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
  b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
  c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
  a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
  b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
  c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
}
4600 /* Produce good hash value combining VAL and VAL2. */
4602 iterative_hash_hashval_t (hashval_t val
, hashval_t val2
)
4604 /* the golden ratio; an arbitrary value. */
4605 hashval_t a
= 0x9e3779b9;
4611 /* Produce good hash value combining VAL and VAL2. */
4613 iterative_hash_host_wide_int (HOST_WIDE_INT val
, hashval_t val2
)
4615 if (sizeof (HOST_WIDE_INT
) == sizeof (hashval_t
))
4616 return iterative_hash_hashval_t (val
, val2
);
4619 hashval_t a
= (hashval_t
) val
;
4620 /* Avoid warnings about shifting of more than the width of the type on
4621 hosts that won't execute this path. */
4623 hashval_t b
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 8 + zero
));
4625 if (sizeof (HOST_WIDE_INT
) > 2 * sizeof (hashval_t
))
4627 hashval_t a
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 16 + zero
));
4628 hashval_t b
= (hashval_t
) (val
>> (sizeof (hashval_t
) * 24 + zero
));
4635 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4636 is ATTRIBUTE and its qualifiers are QUALS.
4638 Record such modified types already made so we don't make duplicates. */
4641 build_type_attribute_qual_variant (tree ttype
, tree attribute
, int quals
)
4643 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype
), attribute
))
4645 hashval_t hashcode
= 0;
4649 enum tree_code code
= TREE_CODE (ttype
);
4651 /* Building a distinct copy of a tagged type is inappropriate; it
4652 causes breakage in code that expects there to be a one-to-one
4653 relationship between a struct and its fields.
4654 build_duplicate_type is another solution (as used in
4655 handle_transparent_union_attribute), but that doesn't play well
4656 with the stronger C++ type identity model. */
4657 if (TREE_CODE (ttype
) == RECORD_TYPE
4658 || TREE_CODE (ttype
) == UNION_TYPE
4659 || TREE_CODE (ttype
) == QUAL_UNION_TYPE
4660 || TREE_CODE (ttype
) == ENUMERAL_TYPE
)
4662 warning (OPT_Wattributes
,
4663 "ignoring attributes applied to %qT after definition",
4664 TYPE_MAIN_VARIANT (ttype
));
4665 return build_qualified_type (ttype
, quals
);
4668 ttype
= build_qualified_type (ttype
, TYPE_UNQUALIFIED
);
4669 ntype
= build_distinct_type_copy (ttype
);
4671 TYPE_ATTRIBUTES (ntype
) = attribute
;
4673 hashcode
= iterative_hash_object (code
, hashcode
);
4674 if (TREE_TYPE (ntype
))
4675 hashcode
= iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype
)),
4677 hashcode
= attribute_hash_list (attribute
, hashcode
);
4679 switch (TREE_CODE (ntype
))
4682 hashcode
= type_hash_list (TYPE_ARG_TYPES (ntype
), hashcode
);
4685 if (TYPE_DOMAIN (ntype
))
4686 hashcode
= iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype
)),
4690 t
= TYPE_MAX_VALUE (ntype
);
4691 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
4692 hashcode
= iterative_hash_object (TREE_INT_CST_ELT (t
, i
), hashcode
);
4695 case FIXED_POINT_TYPE
:
4697 unsigned int precision
= TYPE_PRECISION (ntype
);
4698 hashcode
= iterative_hash_object (precision
, hashcode
);
4705 ntype
= type_hash_canon (hashcode
, ntype
);
4707 /* If the target-dependent attributes make NTYPE different from
4708 its canonical type, we will need to use structural equality
4709 checks for this type. */
4710 if (TYPE_STRUCTURAL_EQUALITY_P (ttype
)
4711 || !comp_type_attributes (ntype
, ttype
))
4712 SET_TYPE_STRUCTURAL_EQUALITY (ntype
);
4713 else if (TYPE_CANONICAL (ntype
) == ntype
)
4714 TYPE_CANONICAL (ntype
) = TYPE_CANONICAL (ttype
);
4716 ttype
= build_qualified_type (ntype
, quals
);
4718 else if (TYPE_QUALS (ttype
) != quals
)
4719 ttype
= build_qualified_type (ttype
, quals
);
4724 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4728 omp_declare_simd_clauses_equal (tree clauses1
, tree clauses2
)
4731 for (cl1
= clauses1
, cl2
= clauses2
;
4733 cl1
= OMP_CLAUSE_CHAIN (cl1
), cl2
= OMP_CLAUSE_CHAIN (cl2
))
4735 if (OMP_CLAUSE_CODE (cl1
) != OMP_CLAUSE_CODE (cl2
))
4737 if (OMP_CLAUSE_CODE (cl1
) != OMP_CLAUSE_SIMDLEN
)
4739 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1
),
4740 OMP_CLAUSE_DECL (cl2
)) != 1)
4743 switch (OMP_CLAUSE_CODE (cl1
))
4745 case OMP_CLAUSE_ALIGNED
:
4746 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1
),
4747 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2
)) != 1)
4750 case OMP_CLAUSE_LINEAR
:
4751 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1
),
4752 OMP_CLAUSE_LINEAR_STEP (cl2
)) != 1)
4755 case OMP_CLAUSE_SIMDLEN
:
4756 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1
),
4757 OMP_CLAUSE_SIMDLEN_EXPR (cl2
)) != 1)
4766 /* Compare two constructor-element-type constants. Return 1 if the lists
4767 are known to be equal; otherwise return 0. */
4770 simple_cst_list_equal (const_tree l1
, const_tree l2
)
4772 while (l1
!= NULL_TREE
&& l2
!= NULL_TREE
)
4774 if (simple_cst_equal (TREE_VALUE (l1
), TREE_VALUE (l2
)) != 1)
4777 l1
= TREE_CHAIN (l1
);
4778 l2
= TREE_CHAIN (l2
);
4784 /* Compare two attributes for their value identity. Return true if the
4785 attribute values are known to be equal; otherwise return false.
4789 attribute_value_equal (const_tree attr1
, const_tree attr2
)
4791 if (TREE_VALUE (attr1
) == TREE_VALUE (attr2
))
4794 if (TREE_VALUE (attr1
) != NULL_TREE
4795 && TREE_CODE (TREE_VALUE (attr1
)) == TREE_LIST
4796 && TREE_VALUE (attr2
) != NULL
4797 && TREE_CODE (TREE_VALUE (attr2
)) == TREE_LIST
)
4798 return (simple_cst_list_equal (TREE_VALUE (attr1
),
4799 TREE_VALUE (attr2
)) == 1);
4801 if ((flag_openmp
|| flag_openmp_simd
)
4802 && TREE_VALUE (attr1
) && TREE_VALUE (attr2
)
4803 && TREE_CODE (TREE_VALUE (attr1
)) == OMP_CLAUSE
4804 && TREE_CODE (TREE_VALUE (attr2
)) == OMP_CLAUSE
)
4805 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1
),
4806 TREE_VALUE (attr2
));
4808 return (simple_cst_equal (TREE_VALUE (attr1
), TREE_VALUE (attr2
)) == 1);
4811 /* Return 0 if the attributes for two types are incompatible, 1 if they
4812 are compatible, and 2 if they are nearly compatible (which causes a
4813 warning to be generated). */
4815 comp_type_attributes (const_tree type1
, const_tree type2
)
4817 const_tree a1
= TYPE_ATTRIBUTES (type1
);
4818 const_tree a2
= TYPE_ATTRIBUTES (type2
);
4823 for (a
= a1
; a
!= NULL_TREE
; a
= TREE_CHAIN (a
))
4825 const struct attribute_spec
*as
;
4828 as
= lookup_attribute_spec (get_attribute_name (a
));
4829 if (!as
|| as
->affects_type_identity
== false)
4832 attr
= lookup_attribute (as
->name
, CONST_CAST_TREE (a2
));
4833 if (!attr
|| !attribute_value_equal (a
, attr
))
4838 for (a
= a2
; a
!= NULL_TREE
; a
= TREE_CHAIN (a
))
4840 const struct attribute_spec
*as
;
4842 as
= lookup_attribute_spec (get_attribute_name (a
));
4843 if (!as
|| as
->affects_type_identity
== false)
4846 if (!lookup_attribute (as
->name
, CONST_CAST_TREE (a1
)))
4848 /* We don't need to compare trees again, as we did this
4849 already in first loop. */
4851 /* All types - affecting identity - are equal, so
4852 there is no need to call target hook for comparison. */
4856 /* As some type combinations - like default calling-convention - might
4857 be compatible, we have to call the target hook to get the final result. */
4858 return targetm
.comp_type_attributes (type1
, type2
);
4861 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4864 Record such modified types already made so we don't make duplicates. */
4867 build_type_attribute_variant (tree ttype
, tree attribute
)
4869 return build_type_attribute_qual_variant (ttype
, attribute
,
4870 TYPE_QUALS (ttype
));
4874 /* Reset the expression *EXPR_P, a size or position.
4876 ??? We could reset all non-constant sizes or positions. But it's cheap
4877 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4879 We need to reset self-referential sizes or positions because they cannot
4880 be gimplified and thus can contain a CALL_EXPR after the gimplification
4881 is finished, which will run afoul of LTO streaming. And they need to be
4882 reset to something essentially dummy but not constant, so as to preserve
4883 the properties of the object they are attached to. */
4886 free_lang_data_in_one_sizepos (tree
*expr_p
)
4888 tree expr
= *expr_p
;
4889 if (CONTAINS_PLACEHOLDER_P (expr
))
4890 *expr_p
= build0 (PLACEHOLDER_EXPR
, TREE_TYPE (expr
));
4894 /* Reset all the fields in a binfo node BINFO. We only keep
4895 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4898 free_lang_data_in_binfo (tree binfo
)
4903 gcc_assert (TREE_CODE (binfo
) == TREE_BINFO
);
4905 BINFO_VIRTUALS (binfo
) = NULL_TREE
;
4906 BINFO_BASE_ACCESSES (binfo
) = NULL
;
4907 BINFO_INHERITANCE_CHAIN (binfo
) = NULL_TREE
;
4908 BINFO_SUBVTT_INDEX (binfo
) = NULL_TREE
;
4910 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo
), i
, t
)
4911 free_lang_data_in_binfo (t
);
4915 /* Reset all language specific information still present in TYPE. */
4918 free_lang_data_in_type (tree type
)
4920 gcc_assert (TYPE_P (type
));
4922 /* Give the FE a chance to remove its own data first. */
4923 lang_hooks
.free_lang_data (type
);
4925 TREE_LANG_FLAG_0 (type
) = 0;
4926 TREE_LANG_FLAG_1 (type
) = 0;
4927 TREE_LANG_FLAG_2 (type
) = 0;
4928 TREE_LANG_FLAG_3 (type
) = 0;
4929 TREE_LANG_FLAG_4 (type
) = 0;
4930 TREE_LANG_FLAG_5 (type
) = 0;
4931 TREE_LANG_FLAG_6 (type
) = 0;
4933 if (TREE_CODE (type
) == FUNCTION_TYPE
)
4935 /* Remove the const and volatile qualifiers from arguments. The
4936 C++ front end removes them, but the C front end does not,
4937 leading to false ODR violation errors when merging two
4938 instances of the same function signature compiled by
4939 different front ends. */
4942 for (p
= TYPE_ARG_TYPES (type
); p
; p
= TREE_CHAIN (p
))
4944 tree arg_type
= TREE_VALUE (p
);
4946 if (TYPE_READONLY (arg_type
) || TYPE_VOLATILE (arg_type
))
4948 int quals
= TYPE_QUALS (arg_type
)
4950 & ~TYPE_QUAL_VOLATILE
;
4951 TREE_VALUE (p
) = build_qualified_type (arg_type
, quals
);
4952 free_lang_data_in_type (TREE_VALUE (p
));
4957 /* Remove members that are not actually FIELD_DECLs from the field
4958 list of an aggregate. These occur in C++. */
4959 if (RECORD_OR_UNION_TYPE_P (type
))
4963 /* Note that TYPE_FIELDS can be shared across distinct
4964 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4965 to be removed, we cannot set its TREE_CHAIN to NULL.
4966 Otherwise, we would not be able to find all the other fields
4967 in the other instances of this TREE_TYPE.
4969 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4971 member
= TYPE_FIELDS (type
);
4974 if (TREE_CODE (member
) == FIELD_DECL
4975 || TREE_CODE (member
) == TYPE_DECL
)
4978 TREE_CHAIN (prev
) = member
;
4980 TYPE_FIELDS (type
) = member
;
4984 member
= TREE_CHAIN (member
);
4988 TREE_CHAIN (prev
) = NULL_TREE
;
4990 TYPE_FIELDS (type
) = NULL_TREE
;
4992 TYPE_METHODS (type
) = NULL_TREE
;
4993 if (TYPE_BINFO (type
))
4994 free_lang_data_in_binfo (TYPE_BINFO (type
));
4998 /* For non-aggregate types, clear out the language slot (which
4999 overloads TYPE_BINFO). */
5000 TYPE_LANG_SLOT_1 (type
) = NULL_TREE
;
5002 if (INTEGRAL_TYPE_P (type
)
5003 || SCALAR_FLOAT_TYPE_P (type
)
5004 || FIXED_POINT_TYPE_P (type
))
5006 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type
));
5007 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type
));
5011 free_lang_data_in_one_sizepos (&TYPE_SIZE (type
));
5012 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type
));
5014 if (TYPE_CONTEXT (type
)
5015 && TREE_CODE (TYPE_CONTEXT (type
)) == BLOCK
)
5017 tree ctx
= TYPE_CONTEXT (type
);
5020 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5022 while (ctx
&& TREE_CODE (ctx
) == BLOCK
);
5023 TYPE_CONTEXT (type
) = ctx
;
5028 /* Return true if DECL may need an assembler name to be set. */
5031 need_assembler_name_p (tree decl
)
5033 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5034 if (TREE_CODE (decl
) != FUNCTION_DECL
5035 && TREE_CODE (decl
) != VAR_DECL
)
5038 /* If DECL already has its assembler name set, it does not need a
5040 if (!HAS_DECL_ASSEMBLER_NAME_P (decl
)
5041 || DECL_ASSEMBLER_NAME_SET_P (decl
))
5044 /* Abstract decls do not need an assembler name. */
5045 if (DECL_ABSTRACT (decl
))
5048 /* For VAR_DECLs, only static, public and external symbols need an
5050 if (TREE_CODE (decl
) == VAR_DECL
5051 && !TREE_STATIC (decl
)
5052 && !TREE_PUBLIC (decl
)
5053 && !DECL_EXTERNAL (decl
))
5056 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5058 /* Do not set assembler name on builtins. Allow RTL expansion to
5059 decide whether to expand inline or via a regular call. */
5060 if (DECL_BUILT_IN (decl
)
5061 && DECL_BUILT_IN_CLASS (decl
) != BUILT_IN_FRONTEND
)
5064 /* Functions represented in the callgraph need an assembler name. */
5065 if (cgraph_get_node (decl
) != NULL
)
5068 /* Unused and not public functions don't need an assembler name. */
5069 if (!TREE_USED (decl
) && !TREE_PUBLIC (decl
))
5077 /* Reset all language specific information still present in symbol
5081 free_lang_data_in_decl (tree decl
)
5083 gcc_assert (DECL_P (decl
));
5085 /* Give the FE a chance to remove its own data first. */
5086 lang_hooks
.free_lang_data (decl
);
5088 TREE_LANG_FLAG_0 (decl
) = 0;
5089 TREE_LANG_FLAG_1 (decl
) = 0;
5090 TREE_LANG_FLAG_2 (decl
) = 0;
5091 TREE_LANG_FLAG_3 (decl
) = 0;
5092 TREE_LANG_FLAG_4 (decl
) = 0;
5093 TREE_LANG_FLAG_5 (decl
) = 0;
5094 TREE_LANG_FLAG_6 (decl
) = 0;
5096 free_lang_data_in_one_sizepos (&DECL_SIZE (decl
));
5097 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl
));
5098 if (TREE_CODE (decl
) == FIELD_DECL
)
5100 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl
));
5101 if (TREE_CODE (DECL_CONTEXT (decl
)) == QUAL_UNION_TYPE
)
5102 DECL_QUALIFIER (decl
) = NULL_TREE
;
5105 if (TREE_CODE (decl
) == FUNCTION_DECL
)
5107 struct cgraph_node
*node
;
5108 if (!(node
= cgraph_get_node (decl
))
5109 || (!node
->definition
&& !node
->clones
))
5112 cgraph_release_function_body (node
);
5115 release_function_body (decl
);
5116 DECL_ARGUMENTS (decl
) = NULL
;
5117 DECL_RESULT (decl
) = NULL
;
5118 DECL_INITIAL (decl
) = error_mark_node
;
5121 if (gimple_has_body_p (decl
))
5125 /* If DECL has a gimple body, then the context for its
5126 arguments must be DECL. Otherwise, it doesn't really
5127 matter, as we will not be emitting any code for DECL. In
5128 general, there may be other instances of DECL created by
5129 the front end and since PARM_DECLs are generally shared,
5130 their DECL_CONTEXT changes as the replicas of DECL are
5131 created. The only time where DECL_CONTEXT is important
5132 is for the FUNCTION_DECLs that have a gimple body (since
5133 the PARM_DECL will be used in the function's body). */
5134 for (t
= DECL_ARGUMENTS (decl
); t
; t
= TREE_CHAIN (t
))
5135 DECL_CONTEXT (t
) = decl
;
5138 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5139 At this point, it is not needed anymore. */
5140 DECL_SAVED_TREE (decl
) = NULL_TREE
;
5142 /* Clear the abstract origin if it refers to a method. Otherwise
5143 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5144 origin will not be output correctly. */
5145 if (DECL_ABSTRACT_ORIGIN (decl
)
5146 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))
5147 && RECORD_OR_UNION_TYPE_P
5148 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl
))))
5149 DECL_ABSTRACT_ORIGIN (decl
) = NULL_TREE
;
5151 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5152 DECL_VINDEX referring to itself into a vtable slot number as it
5153 should. Happens with functions that are copied and then forgotten
5154 about. Just clear it, it won't matter anymore. */
5155 if (DECL_VINDEX (decl
) && !tree_fits_shwi_p (DECL_VINDEX (decl
)))
5156 DECL_VINDEX (decl
) = NULL_TREE
;
5158 else if (TREE_CODE (decl
) == VAR_DECL
)
5160 if ((DECL_EXTERNAL (decl
)
5161 && (!TREE_STATIC (decl
) || !TREE_READONLY (decl
)))
5162 || (decl_function_context (decl
) && !TREE_STATIC (decl
)))
5163 DECL_INITIAL (decl
) = NULL_TREE
;
5165 else if (TREE_CODE (decl
) == TYPE_DECL
5166 || TREE_CODE (decl
) == FIELD_DECL
)
5167 DECL_INITIAL (decl
) = NULL_TREE
;
5168 else if (TREE_CODE (decl
) == TRANSLATION_UNIT_DECL
5169 && DECL_INITIAL (decl
)
5170 && TREE_CODE (DECL_INITIAL (decl
)) == BLOCK
)
5172 /* Strip builtins from the translation-unit BLOCK. We still have targets
5173 without builtin_decl_explicit support and also builtins are shared
5174 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5175 tree
*nextp
= &BLOCK_VARS (DECL_INITIAL (decl
));
5179 if (TREE_CODE (var
) == FUNCTION_DECL
5180 && DECL_BUILT_IN (var
))
5181 *nextp
= TREE_CHAIN (var
);
5183 nextp
= &TREE_CHAIN (var
);
5189 /* Data used when collecting DECLs and TYPEs for language data removal. */
5191 struct free_lang_data_d
5193 /* Worklist to avoid excessive recursion. */
5196 /* Set of traversed objects. Used to avoid duplicate visits. */
5197 struct pointer_set_t
*pset
;
5199 /* Array of symbols to process with free_lang_data_in_decl. */
5202 /* Array of types to process with free_lang_data_in_type. */
5207 /* Save all language fields needed to generate proper debug information
5208 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5211 save_debug_info_for_decl (tree t
)
5213 /*struct saved_debug_info_d *sdi;*/
5215 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& DECL_P (t
));
5217 /* FIXME. Partial implementation for saving debug info removed. */
5221 /* Save all language fields needed to generate proper debug information
5222 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5225 save_debug_info_for_type (tree t
)
5227 /*struct saved_debug_info_d *sdi;*/
5229 gcc_assert (debug_info_level
> DINFO_LEVEL_TERSE
&& t
&& TYPE_P (t
));
5231 /* FIXME. Partial implementation for saving debug info removed. */
5235 /* Add type or decl T to one of the list of tree nodes that need their
5236 language data removed. The lists are held inside FLD. */
5239 add_tree_to_fld_list (tree t
, struct free_lang_data_d
*fld
)
5243 fld
->decls
.safe_push (t
);
5244 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5245 save_debug_info_for_decl (t
);
5247 else if (TYPE_P (t
))
5249 fld
->types
.safe_push (t
);
5250 if (debug_info_level
> DINFO_LEVEL_TERSE
)
5251 save_debug_info_for_type (t
);
5257 /* Push tree node T into FLD->WORKLIST. */
5260 fld_worklist_push (tree t
, struct free_lang_data_d
*fld
)
5262 if (t
&& !is_lang_specific (t
) && !pointer_set_contains (fld
->pset
, t
))
5263 fld
->worklist
.safe_push ((t
));
5267 /* Operand callback helper for free_lang_data_in_node. *TP is the
5268 subtree operand being considered. */
5271 find_decls_types_r (tree
*tp
, int *ws
, void *data
)
5274 struct free_lang_data_d
*fld
= (struct free_lang_data_d
*) data
;
5276 if (TREE_CODE (t
) == TREE_LIST
)
5279 /* Language specific nodes will be removed, so there is no need
5280 to gather anything under them. */
5281 if (is_lang_specific (t
))
5289 /* Note that walk_tree does not traverse every possible field in
5290 decls, so we have to do our own traversals here. */
5291 add_tree_to_fld_list (t
, fld
);
5293 fld_worklist_push (DECL_NAME (t
), fld
);
5294 fld_worklist_push (DECL_CONTEXT (t
), fld
);
5295 fld_worklist_push (DECL_SIZE (t
), fld
);
5296 fld_worklist_push (DECL_SIZE_UNIT (t
), fld
);
5298 /* We are going to remove everything under DECL_INITIAL for
5299 TYPE_DECLs. No point walking them. */
5300 if (TREE_CODE (t
) != TYPE_DECL
)
5301 fld_worklist_push (DECL_INITIAL (t
), fld
);
5303 fld_worklist_push (DECL_ATTRIBUTES (t
), fld
);
5304 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t
), fld
);
5306 if (TREE_CODE (t
) == FUNCTION_DECL
)
5308 fld_worklist_push (DECL_ARGUMENTS (t
), fld
);
5309 fld_worklist_push (DECL_RESULT (t
), fld
);
5311 else if (TREE_CODE (t
) == TYPE_DECL
)
5313 fld_worklist_push (DECL_ORIGINAL_TYPE (t
), fld
);
5315 else if (TREE_CODE (t
) == FIELD_DECL
)
5317 fld_worklist_push (DECL_FIELD_OFFSET (t
), fld
);
5318 fld_worklist_push (DECL_BIT_FIELD_TYPE (t
), fld
);
5319 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t
), fld
);
5320 fld_worklist_push (DECL_FCONTEXT (t
), fld
);
5323 if ((TREE_CODE (t
) == VAR_DECL
|| TREE_CODE (t
) == PARM_DECL
)
5324 && DECL_HAS_VALUE_EXPR_P (t
))
5325 fld_worklist_push (DECL_VALUE_EXPR (t
), fld
);
5327 if (TREE_CODE (t
) != FIELD_DECL
5328 && TREE_CODE (t
) != TYPE_DECL
)
5329 fld_worklist_push (TREE_CHAIN (t
), fld
);
5332 else if (TYPE_P (t
))
5334 /* Note that walk_tree does not traverse every possible field in
5335 types, so we have to do our own traversals here. */
5336 add_tree_to_fld_list (t
, fld
);
5338 if (!RECORD_OR_UNION_TYPE_P (t
))
5339 fld_worklist_push (TYPE_CACHED_VALUES (t
), fld
);
5340 fld_worklist_push (TYPE_SIZE (t
), fld
);
5341 fld_worklist_push (TYPE_SIZE_UNIT (t
), fld
);
5342 fld_worklist_push (TYPE_ATTRIBUTES (t
), fld
);
5343 fld_worklist_push (TYPE_POINTER_TO (t
), fld
);
5344 fld_worklist_push (TYPE_REFERENCE_TO (t
), fld
);
5345 fld_worklist_push (TYPE_NAME (t
), fld
);
5346 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5347 them and thus do not and want not to reach unused pointer types
5349 if (!POINTER_TYPE_P (t
))
5350 fld_worklist_push (TYPE_MINVAL (t
), fld
);
5351 if (!RECORD_OR_UNION_TYPE_P (t
))
5352 fld_worklist_push (TYPE_MAXVAL (t
), fld
);
5353 fld_worklist_push (TYPE_MAIN_VARIANT (t
), fld
);
5354 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5355 do not and want not to reach unused variants this way. */
5356 if (TYPE_CONTEXT (t
))
5358 tree ctx
= TYPE_CONTEXT (t
);
5359 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5360 So push that instead. */
5361 while (ctx
&& TREE_CODE (ctx
) == BLOCK
)
5362 ctx
= BLOCK_SUPERCONTEXT (ctx
);
5363 fld_worklist_push (ctx
, fld
);
5365 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5366 and want not to reach unused types this way. */
5368 if (RECORD_OR_UNION_TYPE_P (t
) && TYPE_BINFO (t
))
5372 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t
)), i
, tem
)
5373 fld_worklist_push (TREE_TYPE (tem
), fld
);
5374 tem
= BINFO_VIRTUALS (TYPE_BINFO (t
));
5376 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5377 && TREE_CODE (tem
) == TREE_LIST
)
5380 fld_worklist_push (TREE_VALUE (tem
), fld
);
5381 tem
= TREE_CHAIN (tem
);
5385 if (RECORD_OR_UNION_TYPE_P (t
))
5388 /* Push all TYPE_FIELDS - there can be interleaving interesting
5389 and non-interesting things. */
5390 tem
= TYPE_FIELDS (t
);
5393 if (TREE_CODE (tem
) == FIELD_DECL
5394 || TREE_CODE (tem
) == TYPE_DECL
)
5395 fld_worklist_push (tem
, fld
);
5396 tem
= TREE_CHAIN (tem
);
5400 fld_worklist_push (TYPE_STUB_DECL (t
), fld
);
5403 else if (TREE_CODE (t
) == BLOCK
)
5406 for (tem
= BLOCK_VARS (t
); tem
; tem
= TREE_CHAIN (tem
))
5407 fld_worklist_push (tem
, fld
);
5408 for (tem
= BLOCK_SUBBLOCKS (t
); tem
; tem
= BLOCK_CHAIN (tem
))
5409 fld_worklist_push (tem
, fld
);
5410 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t
), fld
);
5413 if (TREE_CODE (t
) != IDENTIFIER_NODE
5414 && CODE_CONTAINS_STRUCT (TREE_CODE (t
), TS_TYPED
))
5415 fld_worklist_push (TREE_TYPE (t
), fld
);
5421 /* Find decls and types in T. */
5424 find_decls_types (tree t
, struct free_lang_data_d
*fld
)
5428 if (!pointer_set_contains (fld
->pset
, t
))
5429 walk_tree (&t
, find_decls_types_r
, fld
, fld
->pset
);
5430 if (fld
->worklist
.is_empty ())
5432 t
= fld
->worklist
.pop ();
5436 /* Translate all the types in LIST with the corresponding runtime
5440 get_eh_types_for_runtime (tree list
)
5444 if (list
== NULL_TREE
)
5447 head
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5449 list
= TREE_CHAIN (list
);
5452 tree n
= build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list
)));
5453 TREE_CHAIN (prev
) = n
;
5454 prev
= TREE_CHAIN (prev
);
5455 list
= TREE_CHAIN (list
);
5462 /* Find decls and types referenced in EH region R and store them in
5463 FLD->DECLS and FLD->TYPES. */
5466 find_decls_types_in_eh_region (eh_region r
, struct free_lang_data_d
*fld
)
5477 /* The types referenced in each catch must first be changed to the
5478 EH types used at runtime. This removes references to FE types
5480 for (c
= r
->u
.eh_try
.first_catch
; c
; c
= c
->next_catch
)
5482 c
->type_list
= get_eh_types_for_runtime (c
->type_list
);
5483 walk_tree (&c
->type_list
, find_decls_types_r
, fld
, fld
->pset
);
5488 case ERT_ALLOWED_EXCEPTIONS
:
5489 r
->u
.allowed
.type_list
5490 = get_eh_types_for_runtime (r
->u
.allowed
.type_list
);
5491 walk_tree (&r
->u
.allowed
.type_list
, find_decls_types_r
, fld
, fld
->pset
);
5494 case ERT_MUST_NOT_THROW
:
5495 walk_tree (&r
->u
.must_not_throw
.failure_decl
,
5496 find_decls_types_r
, fld
, fld
->pset
);
5502 /* Find decls and types referenced in cgraph node N and store them in
5503 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5504 look for *every* kind of DECL and TYPE node reachable from N,
5505 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5506 NAMESPACE_DECLs, etc). */
5509 find_decls_types_in_node (struct cgraph_node
*n
, struct free_lang_data_d
*fld
)
5512 struct function
*fn
;
5516 find_decls_types (n
->decl
, fld
);
5518 if (!gimple_has_body_p (n
->decl
))
5521 gcc_assert (current_function_decl
== NULL_TREE
&& cfun
== NULL
);
5523 fn
= DECL_STRUCT_FUNCTION (n
->decl
);
5525 /* Traverse locals. */
5526 FOR_EACH_LOCAL_DECL (fn
, ix
, t
)
5527 find_decls_types (t
, fld
);
5529 /* Traverse EH regions in FN. */
5532 FOR_ALL_EH_REGION_FN (r
, fn
)
5533 find_decls_types_in_eh_region (r
, fld
);
5536 /* Traverse every statement in FN. */
5537 FOR_EACH_BB_FN (bb
, fn
)
5539 gimple_stmt_iterator si
;
5542 for (si
= gsi_start_phis (bb
); !gsi_end_p (si
); gsi_next (&si
))
5544 gimple phi
= gsi_stmt (si
);
5546 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
5548 tree
*arg_p
= gimple_phi_arg_def_ptr (phi
, i
);
5549 find_decls_types (*arg_p
, fld
);
5553 for (si
= gsi_start_bb (bb
); !gsi_end_p (si
); gsi_next (&si
))
5555 gimple stmt
= gsi_stmt (si
);
5557 if (is_gimple_call (stmt
))
5558 find_decls_types (gimple_call_fntype (stmt
), fld
);
5560 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
5562 tree arg
= gimple_op (stmt
, i
);
5563 find_decls_types (arg
, fld
);
5570 /* Find decls and types referenced in varpool node N and store them in
5571 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5572 look for *every* kind of DECL and TYPE node reachable from N,
5573 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5574 NAMESPACE_DECLs, etc). */
5577 find_decls_types_in_var (varpool_node
*v
, struct free_lang_data_d
*fld
)
5579 find_decls_types (v
->decl
, fld
);
5582 /* If T needs an assembler name, have one created for it. */
5585 assign_assembler_name_if_neeeded (tree t
)
5587 if (need_assembler_name_p (t
))
5589 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5590 diagnostics that use input_location to show locus
5591 information. The problem here is that, at this point,
5592 input_location is generally anchored to the end of the file
5593 (since the parser is long gone), so we don't have a good
5594 position to pin it to.
5596 To alleviate this problem, this uses the location of T's
5597 declaration. Examples of this are
5598 testsuite/g++.dg/template/cond2.C and
5599 testsuite/g++.dg/template/pr35240.C. */
5600 location_t saved_location
= input_location
;
5601 input_location
= DECL_SOURCE_LOCATION (t
);
5603 decl_assembler_name (t
);
5605 input_location
= saved_location
;
5610 /* Free language specific information for every operand and expression
5611 in every node of the call graph. This process operates in three stages:
5613 1- Every callgraph node and varpool node is traversed looking for
5614 decls and types embedded in them. This is a more exhaustive
5615 search than that done by find_referenced_vars, because it will
5616 also collect individual fields, decls embedded in types, etc.
5618 2- All the decls found are sent to free_lang_data_in_decl.
5620 3- All the types found are sent to free_lang_data_in_type.
5622 The ordering between decls and types is important because
5623 free_lang_data_in_decl sets assembler names, which includes
5624 mangling. So types cannot be freed up until assembler names have
5628 free_lang_data_in_cgraph (void)
5630 struct cgraph_node
*n
;
5632 struct free_lang_data_d fld
;
5637 /* Initialize sets and arrays to store referenced decls and types. */
5638 fld
.pset
= pointer_set_create ();
5639 fld
.worklist
.create (0);
5640 fld
.decls
.create (100);
5641 fld
.types
.create (100);
5643 /* Find decls and types in the body of every function in the callgraph. */
5644 FOR_EACH_FUNCTION (n
)
5645 find_decls_types_in_node (n
, &fld
);
5647 FOR_EACH_VEC_SAFE_ELT (alias_pairs
, i
, p
)
5648 find_decls_types (p
->decl
, &fld
);
5650 /* Find decls and types in every varpool symbol. */
5651 FOR_EACH_VARIABLE (v
)
5652 find_decls_types_in_var (v
, &fld
);
5654 /* Set the assembler name on every decl found. We need to do this
5655 now because free_lang_data_in_decl will invalidate data needed
5656 for mangling. This breaks mangling on interdependent decls. */
5657 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5658 assign_assembler_name_if_neeeded (t
);
5660 /* Traverse every decl found freeing its language data. */
5661 FOR_EACH_VEC_ELT (fld
.decls
, i
, t
)
5662 free_lang_data_in_decl (t
);
5664 /* Traverse every type found freeing its language data. */
5665 FOR_EACH_VEC_ELT (fld
.types
, i
, t
)
5666 free_lang_data_in_type (t
);
5668 pointer_set_destroy (fld
.pset
);
5669 fld
.worklist
.release ();
5670 fld
.decls
.release ();
5671 fld
.types
.release ();
5675 /* Free resources that are used by FE but are not needed once they are done. */
5678 free_lang_data (void)
5682 /* If we are the LTO frontend we have freed lang-specific data already. */
5684 || !flag_generate_lto
)
5687 /* Allocate and assign alias sets to the standard integer types
5688 while the slots are still in the way the frontends generated them. */
5689 for (i
= 0; i
< itk_none
; ++i
)
5690 if (integer_types
[i
])
5691 TYPE_ALIAS_SET (integer_types
[i
]) = get_alias_set (integer_types
[i
]);
5693 /* Traverse the IL resetting language specific information for
5694 operands, expressions, etc. */
5695 free_lang_data_in_cgraph ();
5697 /* Create gimple variants for common types. */
5698 ptrdiff_type_node
= integer_type_node
;
5699 fileptr_type_node
= ptr_type_node
;
5701 /* Reset some langhooks. Do not reset types_compatible_p, it may
5702 still be used indirectly via the get_alias_set langhook. */
5703 lang_hooks
.dwarf_name
= lhd_dwarf_name
;
5704 lang_hooks
.decl_printable_name
= gimple_decl_printable_name
;
5705 /* We do not want the default decl_assembler_name implementation,
5706 rather if we have fixed everything we want a wrapper around it
5707 asserting that all non-local symbols already got their assembler
5708 name and only produce assembler names for local symbols. Or rather
5709 make sure we never call decl_assembler_name on local symbols and
5710 devise a separate, middle-end private scheme for it. */
5712 /* Reset diagnostic machinery. */
5713 tree_diagnostics_defaults (global_dc
);
5721 const pass_data pass_data_ipa_free_lang_data
=
5723 SIMPLE_IPA_PASS
, /* type */
5724 "*free_lang_data", /* name */
5725 OPTGROUP_NONE
, /* optinfo_flags */
5726 TV_IPA_FREE_LANG_DATA
, /* tv_id */
5727 0, /* properties_required */
5728 0, /* properties_provided */
5729 0, /* properties_destroyed */
5730 0, /* todo_flags_start */
5731 0, /* todo_flags_finish */
5734 class pass_ipa_free_lang_data
: public simple_ipa_opt_pass
5737 pass_ipa_free_lang_data (gcc::context
*ctxt
)
5738 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data
, ctxt
)
5741 /* opt_pass methods: */
5742 virtual unsigned int execute (function
*) { return free_lang_data (); }
5744 }; // class pass_ipa_free_lang_data
5748 simple_ipa_opt_pass
*
5749 make_pass_ipa_free_lang_data (gcc::context
*ctxt
)
5751 return new pass_ipa_free_lang_data (ctxt
);
5754 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5755 ATTR_NAME. Also used internally by remove_attribute(). */
5757 private_is_attribute_p (const char *attr_name
, size_t attr_len
, const_tree ident
)
5759 size_t ident_len
= IDENTIFIER_LENGTH (ident
);
5761 if (ident_len
== attr_len
)
5763 if (strcmp (attr_name
, IDENTIFIER_POINTER (ident
)) == 0)
5766 else if (ident_len
== attr_len
+ 4)
5768 /* There is the possibility that ATTR is 'text' and IDENT is
5770 const char *p
= IDENTIFIER_POINTER (ident
);
5771 if (p
[0] == '_' && p
[1] == '_'
5772 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5773 && strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5780 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5781 of ATTR_NAME, and LIST is not NULL_TREE. */
5783 private_lookup_attribute (const char *attr_name
, size_t attr_len
, tree list
)
5787 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5789 if (ident_len
== attr_len
)
5791 if (!strcmp (attr_name
,
5792 IDENTIFIER_POINTER (get_attribute_name (list
))))
5795 /* TODO: If we made sure that attributes were stored in the
5796 canonical form without '__...__' (ie, as in 'text' as opposed
5797 to '__text__') then we could avoid the following case. */
5798 else if (ident_len
== attr_len
+ 4)
5800 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5801 if (p
[0] == '_' && p
[1] == '_'
5802 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5803 && strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5806 list
= TREE_CHAIN (list
);
5812 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5813 return a pointer to the attribute's list first element if the attribute
5814 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5818 private_lookup_attribute_by_prefix (const char *attr_name
, size_t attr_len
,
5823 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5825 if (attr_len
> ident_len
)
5827 list
= TREE_CHAIN (list
);
5831 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5833 if (strncmp (attr_name
, p
, attr_len
) == 0)
5836 /* TODO: If we made sure that attributes were stored in the
5837 canonical form without '__...__' (ie, as in 'text' as opposed
5838 to '__text__') then we could avoid the following case. */
5839 if (p
[0] == '_' && p
[1] == '_' &&
5840 strncmp (attr_name
, p
+ 2, attr_len
) == 0)
5843 list
= TREE_CHAIN (list
);
5850 /* A variant of lookup_attribute() that can be used with an identifier
5851 as the first argument, and where the identifier can be either
5852 'text' or '__text__'.
5854 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5855 return a pointer to the attribute's list element if the attribute
5856 is part of the list, or NULL_TREE if not found. If the attribute
5857 appears more than once, this only returns the first occurrence; the
5858 TREE_CHAIN of the return value should be passed back in if further
5859 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5860 can be in the form 'text' or '__text__'. */
5862 lookup_ident_attribute (tree attr_identifier
, tree list
)
5864 gcc_checking_assert (TREE_CODE (attr_identifier
) == IDENTIFIER_NODE
);
5868 gcc_checking_assert (TREE_CODE (get_attribute_name (list
))
5869 == IDENTIFIER_NODE
);
5871 /* Identifiers can be compared directly for equality. */
5872 if (attr_identifier
== get_attribute_name (list
))
5875 /* If they are not equal, they may still be one in the form
5876 'text' while the other one is in the form '__text__'. TODO:
5877 If we were storing attributes in normalized 'text' form, then
5878 this could all go away and we could take full advantage of
5879 the fact that we're comparing identifiers. :-) */
5881 size_t attr_len
= IDENTIFIER_LENGTH (attr_identifier
);
5882 size_t ident_len
= IDENTIFIER_LENGTH (get_attribute_name (list
));
5884 if (ident_len
== attr_len
+ 4)
5886 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5887 const char *q
= IDENTIFIER_POINTER (attr_identifier
);
5888 if (p
[0] == '_' && p
[1] == '_'
5889 && p
[ident_len
- 2] == '_' && p
[ident_len
- 1] == '_'
5890 && strncmp (q
, p
+ 2, attr_len
) == 0)
5893 else if (ident_len
+ 4 == attr_len
)
5895 const char *p
= IDENTIFIER_POINTER (get_attribute_name (list
));
5896 const char *q
= IDENTIFIER_POINTER (attr_identifier
);
5897 if (q
[0] == '_' && q
[1] == '_'
5898 && q
[attr_len
- 2] == '_' && q
[attr_len
- 1] == '_'
5899 && strncmp (q
+ 2, p
, ident_len
) == 0)
5903 list
= TREE_CHAIN (list
);
5909 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5913 remove_attribute (const char *attr_name
, tree list
)
5916 size_t attr_len
= strlen (attr_name
);
5918 gcc_checking_assert (attr_name
[0] != '_');
5920 for (p
= &list
; *p
; )
5923 /* TODO: If we were storing attributes in normalized form, here
5924 we could use a simple strcmp(). */
5925 if (private_is_attribute_p (attr_name
, attr_len
, get_attribute_name (l
)))
5926 *p
= TREE_CHAIN (l
);
5928 p
= &TREE_CHAIN (l
);
5934 /* Return an attribute list that is the union of a1 and a2. */
5937 merge_attributes (tree a1
, tree a2
)
5941 /* Either one unset? Take the set one. */
5943 if ((attributes
= a1
) == 0)
5946 /* One that completely contains the other? Take it. */
5948 else if (a2
!= 0 && ! attribute_list_contained (a1
, a2
))
5950 if (attribute_list_contained (a2
, a1
))
5954 /* Pick the longest list, and hang on the other list. */
5956 if (list_length (a1
) < list_length (a2
))
5957 attributes
= a2
, a2
= a1
;
5959 for (; a2
!= 0; a2
= TREE_CHAIN (a2
))
5962 for (a
= lookup_ident_attribute (get_attribute_name (a2
),
5964 a
!= NULL_TREE
&& !attribute_value_equal (a
, a2
);
5965 a
= lookup_ident_attribute (get_attribute_name (a2
),
5970 a1
= copy_node (a2
);
5971 TREE_CHAIN (a1
) = attributes
;
5980 /* Given types T1 and T2, merge their attributes and return
5984 merge_type_attributes (tree t1
, tree t2
)
5986 return merge_attributes (TYPE_ATTRIBUTES (t1
),
5987 TYPE_ATTRIBUTES (t2
));
5990 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5994 merge_decl_attributes (tree olddecl
, tree newdecl
)
5996 return merge_attributes (DECL_ATTRIBUTES (olddecl
),
5997 DECL_ATTRIBUTES (newdecl
));
#if TARGET_DLLIMPORT_DECL_ATTRIBUTES

/* Specialization of merge_decl_attributes for various Windows targets.

   This handles the following situation:

     __declspec (dllimport) int foo;
     int foo;

   The second instance of `foo' nullifies the dllimport.  */

tree
merge_dllimport_decl_attributes (tree old, tree new_tree)
{
  tree a;
  int delete_dllimport_p = 1;

  /* What we need to do here is remove from `old' dllimport if it doesn't
     appear in `new'.  dllimport behaves like extern: if a declaration is
     marked dllimport and a definition appears later, then the object
     is not dllimport'd.  We also remove a `new' dllimport if the old list
     contains dllexport:  dllexport always overrides dllimport, regardless
     of the order of declaration.  */
  if (!VAR_OR_FUNCTION_DECL_P (new_tree))
    delete_dllimport_p = 0;
  else if (DECL_DLLIMPORT_P (new_tree)
	   && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
    {
      DECL_DLLIMPORT_P (new_tree) = 0;
      warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
	       "dllimport ignored", new_tree);
    }
  else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
    {
      /* Warn about overriding a symbol that has already been used, e.g.:
	   extern int __attribute__ ((dllimport)) foo;
	   int* bar () {return &foo;}
	   int foo;  */
      if (TREE_USED (old))
	{
	  warning (0, "%q+D redeclared without dllimport attribute "
		   "after being referenced with dll linkage", new_tree);
	  /* If we have used a variable's address with dllimport linkage,
	     keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
	     decl may already have had TREE_CONSTANT computed.
	     We still remove the attribute so that assembler code refers
	     to '&foo rather than '_imp__foo'.  */
	  if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
	    DECL_DLLIMPORT_P (new_tree) = 1;
	}

      /* Let an inline definition silently override the external reference,
	 but otherwise warn about attribute inconsistency.  */
      else if (TREE_CODE (new_tree) == VAR_DECL
	       || !DECL_DECLARED_INLINE_P (new_tree))
	warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
		 "previous dllimport ignored", new_tree);
    }
  else
    delete_dllimport_p = 0;

  a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));

  if (delete_dllimport_p)
    a = remove_attribute ("dllimport", a);

  return a;
}

/* Handle a "dllimport" or "dllexport" attribute; arguments as in
   struct attribute_spec.handler.  */

tree
handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
		      bool *no_add_attrs)
{
  tree node = *pnode;
  bool is_dllimport;

  /* These attributes may apply to structure and union types being created,
     but otherwise should pass to the declaration involved.  */
  if (!DECL_P (node))
    {
      if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
		   | (int) ATTR_FLAG_ARRAY_NEXT))
	{
	  *no_add_attrs = true;
	  return tree_cons (name, args, NULL_TREE);
	}
      if (TREE_CODE (node) == RECORD_TYPE
	  || TREE_CODE (node) == UNION_TYPE)
	{
	  node = TYPE_NAME (node);
	  if (!node)
	    return NULL_TREE;
	}
      else
	{
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}
    }

  if (TREE_CODE (node) != FUNCTION_DECL
      && TREE_CODE (node) != VAR_DECL
      && TREE_CODE (node) != TYPE_DECL)
    {
      *no_add_attrs = true;
      warning (OPT_Wattributes, "%qE attribute ignored",
	       name);
      return NULL_TREE;
    }

  if (TREE_CODE (node) == TYPE_DECL
      && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
      && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
    {
      *no_add_attrs = true;
      warning (OPT_Wattributes, "%qE attribute ignored",
	       name);
      return NULL_TREE;
    }

  is_dllimport = is_attribute_p ("dllimport", name);

  /* Report error on dllimport ambiguities seen now before they cause
     any damage.  */
  if (is_dllimport)
    {
      /* Honor any target-specific overrides. */
      if (!targetm.valid_dllimport_attribute_p (node))
	*no_add_attrs = true;

      else if (TREE_CODE (node) == FUNCTION_DECL
	       && DECL_DECLARED_INLINE_P (node))
	{
	  warning (OPT_Wattributes, "inline function %q+D declared as "
		   " dllimport: attribute ignored", node);
	  *no_add_attrs = true;
	}
      /* Like MS, treat definition of dllimported variables and
	 non-inlined functions on declaration as syntax errors. */
      else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
	{
	  error ("function %q+D definition is marked dllimport", node);
	  *no_add_attrs = true;
	}

      else if (TREE_CODE (node) == VAR_DECL)
	{
	  if (DECL_INITIAL (node))
	    {
	      error ("variable %q+D definition is marked dllimport",
		     node);
	      *no_add_attrs = true;
	    }

	  /* `extern' needn't be specified with dllimport.
	     Specify `extern' now and hope for the best.  Sigh.  */
	  DECL_EXTERNAL (node) = 1;
	  /* Also, implicitly give dllimport'd variables declared within
	     a function global scope, unless declared static.  */
	  if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
	    TREE_PUBLIC (node) = 1;
	}

      if (*no_add_attrs == false)
	DECL_DLLIMPORT_P (node) = 1;
    }
  else if (TREE_CODE (node) == FUNCTION_DECL
	   && DECL_DECLARED_INLINE_P (node)
	   && flag_keep_inline_dllexport)
    /* An exported function, even if inline, must be emitted.  */
    DECL_EXTERNAL (node) = 0;

  /*  Report error if symbol is not accessible at global scope.  */
  if (!TREE_PUBLIC (node)
      && (TREE_CODE (node) == VAR_DECL
	  || TREE_CODE (node) == FUNCTION_DECL))
    {
      error ("external linkage required for symbol %q+D because of "
	     "%qE attribute", node, name);
      *no_add_attrs = true;
    }

  /* A dllexport'd entity must have default visibility so that other
     program units (shared libraries or the main executable) can see
     it.  A dllimport'd entity must have default visibility so that
     the linker knows that undefined references within this program
     unit can be resolved by the dynamic linker.  */
  if (!*no_add_attrs)
    {
      if (DECL_VISIBILITY_SPECIFIED (node)
	  && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
	error ("%qE implies default visibility, but %qD has already "
	       "been declared with a different visibility",
	       name, node);
      DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (node) = 1;
    }

  return NULL_TREE;
}

#endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES  */
6209 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6210 of the various TYPE_QUAL values. */
6213 set_type_quals (tree type
, int type_quals
)
6215 TYPE_READONLY (type
) = (type_quals
& TYPE_QUAL_CONST
) != 0;
6216 TYPE_VOLATILE (type
) = (type_quals
& TYPE_QUAL_VOLATILE
) != 0;
6217 TYPE_RESTRICT (type
) = (type_quals
& TYPE_QUAL_RESTRICT
) != 0;
6218 TYPE_ATOMIC (type
) = (type_quals
& TYPE_QUAL_ATOMIC
) != 0;
6219 TYPE_ADDR_SPACE (type
) = DECODE_QUAL_ADDR_SPACE (type_quals
);
6222 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6225 check_qualified_type (const_tree cand
, const_tree base
, int type_quals
)
6227 return (TYPE_QUALS (cand
) == type_quals
6228 && TYPE_NAME (cand
) == TYPE_NAME (base
)
6229 /* Apparently this is needed for Objective-C. */
6230 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6231 /* Check alignment. */
6232 && TYPE_ALIGN (cand
) == TYPE_ALIGN (base
)
6233 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6234 TYPE_ATTRIBUTES (base
)));
6237 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6240 check_aligned_type (const_tree cand
, const_tree base
, unsigned int align
)
6242 return (TYPE_QUALS (cand
) == TYPE_QUALS (base
)
6243 && TYPE_NAME (cand
) == TYPE_NAME (base
)
6244 /* Apparently this is needed for Objective-C. */
6245 && TYPE_CONTEXT (cand
) == TYPE_CONTEXT (base
)
6246 /* Check alignment. */
6247 && TYPE_ALIGN (cand
) == align
6248 && attribute_list_equal (TYPE_ATTRIBUTES (cand
),
6249 TYPE_ATTRIBUTES (base
)));
6252 /* This function checks to see if TYPE matches the size one of the built-in
6253 atomic types, and returns that core atomic type. */
6256 find_atomic_core_type (tree type
)
6258 tree base_atomic_type
;
6260 /* Only handle complete types. */
6261 if (TYPE_SIZE (type
) == NULL_TREE
)
6264 HOST_WIDE_INT type_size
= tree_to_uhwi (TYPE_SIZE (type
));
6268 base_atomic_type
= atomicQI_type_node
;
6272 base_atomic_type
= atomicHI_type_node
;
6276 base_atomic_type
= atomicSI_type_node
;
6280 base_atomic_type
= atomicDI_type_node
;
6284 base_atomic_type
= atomicTI_type_node
;
6288 base_atomic_type
= NULL_TREE
;
6291 return base_atomic_type
;
6294 /* Return a version of the TYPE, qualified as indicated by the
6295 TYPE_QUALS, if one exists. If no qualified version exists yet,
6296 return NULL_TREE. */
6299 get_qualified_type (tree type
, int type_quals
)
6303 if (TYPE_QUALS (type
) == type_quals
)
6306 /* Search the chain of variants to see if there is already one there just
6307 like the one we need to have. If so, use that existing one. We must
6308 preserve the TYPE_NAME, since there is code that depends on this. */
6309 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6310 if (check_qualified_type (t
, type
, type_quals
))
6316 /* Like get_qualified_type, but creates the type if it does not
6317 exist. This function never returns NULL_TREE. */
6320 build_qualified_type (tree type
, int type_quals
)
6324 /* See if we already have the appropriate qualified variant. */
6325 t
= get_qualified_type (type
, type_quals
);
6327 /* If not, build it. */
6330 t
= build_variant_type_copy (type
);
6331 set_type_quals (t
, type_quals
);
6333 if (((type_quals
& TYPE_QUAL_ATOMIC
) == TYPE_QUAL_ATOMIC
))
6335 /* See if this object can map to a basic atomic type. */
6336 tree atomic_type
= find_atomic_core_type (type
);
6339 /* Ensure the alignment of this type is compatible with
6340 the required alignment of the atomic type. */
6341 if (TYPE_ALIGN (atomic_type
) > TYPE_ALIGN (t
))
6342 TYPE_ALIGN (t
) = TYPE_ALIGN (atomic_type
);
6346 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6347 /* Propagate structural equality. */
6348 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6349 else if (TYPE_CANONICAL (type
) != type
)
6350 /* Build the underlying canonical type, since it is different
6353 tree c
= build_qualified_type (TYPE_CANONICAL (type
), type_quals
);
6354 TYPE_CANONICAL (t
) = TYPE_CANONICAL (c
);
6357 /* T is its own canonical type. */
6358 TYPE_CANONICAL (t
) = t
;
6365 /* Create a variant of type T with alignment ALIGN. */
6368 build_aligned_type (tree type
, unsigned int align
)
6372 if (TYPE_PACKED (type
)
6373 || TYPE_ALIGN (type
) == align
)
6376 for (t
= TYPE_MAIN_VARIANT (type
); t
; t
= TYPE_NEXT_VARIANT (t
))
6377 if (check_aligned_type (t
, type
, align
))
6380 t
= build_variant_type_copy (type
);
6381 TYPE_ALIGN (t
) = align
;
6386 /* Create a new distinct copy of TYPE. The new type is made its own
6387 MAIN_VARIANT. If TYPE requires structural equality checks, the
6388 resulting type requires structural equality checks; otherwise, its
6389 TYPE_CANONICAL points to itself. */
6392 build_distinct_type_copy (tree type
)
6394 tree t
= copy_node (type
);
6396 TYPE_POINTER_TO (t
) = 0;
6397 TYPE_REFERENCE_TO (t
) = 0;
6399 /* Set the canonical type either to a new equivalence class, or
6400 propagate the need for structural equality checks. */
6401 if (TYPE_STRUCTURAL_EQUALITY_P (type
))
6402 SET_TYPE_STRUCTURAL_EQUALITY (t
);
6404 TYPE_CANONICAL (t
) = t
;
6406 /* Make it its own variant. */
6407 TYPE_MAIN_VARIANT (t
) = t
;
6408 TYPE_NEXT_VARIANT (t
) = 0;
6410 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6411 whose TREE_TYPE is not t. This can also happen in the Ada
6412 frontend when using subtypes. */
6417 /* Create a new variant of TYPE, equivalent but distinct. This is so
6418 the caller can modify it. TYPE_CANONICAL for the return type will
6419 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6420 are considered equal by the language itself (or that both types
6421 require structural equality checks). */
6424 build_variant_type_copy (tree type
)
6426 tree t
, m
= TYPE_MAIN_VARIANT (type
);
6428 t
= build_distinct_type_copy (type
);
6430 /* Since we're building a variant, assume that it is a non-semantic
6431 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6432 TYPE_CANONICAL (t
) = TYPE_CANONICAL (type
);
6434 /* Add the new type to the chain of variants of TYPE. */
6435 TYPE_NEXT_VARIANT (t
) = TYPE_NEXT_VARIANT (m
);
6436 TYPE_NEXT_VARIANT (m
) = t
;
6437 TYPE_MAIN_VARIANT (t
) = m
;
6442 /* Return true if the from tree in both tree maps are equal. */
6445 tree_map_base_eq (const void *va
, const void *vb
)
6447 const struct tree_map_base
*const a
= (const struct tree_map_base
*) va
,
6448 *const b
= (const struct tree_map_base
*) vb
;
6449 return (a
->from
== b
->from
);
6452 /* Hash a from tree in a tree_base_map. */
6455 tree_map_base_hash (const void *item
)
6457 return htab_hash_pointer (((const struct tree_map_base
*)item
)->from
);
6460 /* Return true if this tree map structure is marked for garbage collection
6461 purposes. We simply return true if the from tree is marked, so that this
6462 structure goes away when the from tree goes away. */
6465 tree_map_base_marked_p (const void *p
)
6467 return ggc_marked_p (((const struct tree_map_base
*) p
)->from
);
6470 /* Hash a from tree in a tree_map. */
6473 tree_map_hash (const void *item
)
6475 return (((const struct tree_map
*) item
)->hash
);
6478 /* Hash a from tree in a tree_decl_map. */
6481 tree_decl_map_hash (const void *item
)
6483 return DECL_UID (((const struct tree_decl_map
*) item
)->base
.from
);
6486 /* Return the initialization priority for DECL. */
6489 decl_init_priority_lookup (tree decl
)
6491 symtab_node
*snode
= symtab_get_node (decl
);
6494 return DEFAULT_INIT_PRIORITY
;
6496 snode
->get_init_priority ();
6499 /* Return the finalization priority for DECL. */
6502 decl_fini_priority_lookup (tree decl
)
6504 cgraph_node
*node
= cgraph_get_node (decl
);
6507 return DEFAULT_INIT_PRIORITY
;
6509 node
->get_fini_priority ();
6512 /* Set the initialization priority for DECL to PRIORITY. */
6515 decl_init_priority_insert (tree decl
, priority_type priority
)
6517 struct symtab_node
*snode
;
6519 if (priority
== DEFAULT_INIT_PRIORITY
)
6521 snode
= symtab_get_node (decl
);
6525 else if (TREE_CODE (decl
) == VAR_DECL
)
6526 snode
= varpool_node_for_decl (decl
);
6528 snode
= cgraph_get_create_node (decl
);
6529 snode
->set_init_priority (priority
);
6532 /* Set the finalization priority for DECL to PRIORITY. */
6535 decl_fini_priority_insert (tree decl
, priority_type priority
)
6537 struct cgraph_node
*node
;
6539 if (priority
== DEFAULT_INIT_PRIORITY
)
6541 node
= cgraph_get_node (decl
);
6546 node
= cgraph_get_create_node (decl
);
6547 node
->set_fini_priority (priority
);
6550 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6553 print_debug_expr_statistics (void)
6555 fprintf (stderr
, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6556 (long) htab_size (debug_expr_for_decl
),
6557 (long) htab_elements (debug_expr_for_decl
),
6558 htab_collisions (debug_expr_for_decl
));
6561 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6564 print_value_expr_statistics (void)
6566 fprintf (stderr
, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6567 (long) htab_size (value_expr_for_decl
),
6568 (long) htab_elements (value_expr_for_decl
),
6569 htab_collisions (value_expr_for_decl
));
6572 /* Lookup a debug expression for FROM, and return it if we find one. */
6575 decl_debug_expr_lookup (tree from
)
6577 struct tree_decl_map
*h
, in
;
6578 in
.base
.from
= from
;
6580 h
= (struct tree_decl_map
*)
6581 htab_find_with_hash (debug_expr_for_decl
, &in
, DECL_UID (from
));
6587 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6590 decl_debug_expr_insert (tree from
, tree to
)
6592 struct tree_decl_map
*h
;
6595 h
= ggc_alloc
<tree_decl_map
> ();
6596 h
->base
.from
= from
;
6598 loc
= htab_find_slot_with_hash (debug_expr_for_decl
, h
, DECL_UID (from
),
6600 *(struct tree_decl_map
**) loc
= h
;
6603 /* Lookup a value expression for FROM, and return it if we find one. */
6606 decl_value_expr_lookup (tree from
)
6608 struct tree_decl_map
*h
, in
;
6609 in
.base
.from
= from
;
6611 h
= (struct tree_decl_map
*)
6612 htab_find_with_hash (value_expr_for_decl
, &in
, DECL_UID (from
));
6618 /* Insert a mapping FROM->TO in the value expression hashtable. */
6621 decl_value_expr_insert (tree from
, tree to
)
6623 struct tree_decl_map
*h
;
6626 h
= ggc_alloc
<tree_decl_map
> ();
6627 h
->base
.from
= from
;
6629 loc
= htab_find_slot_with_hash (value_expr_for_decl
, h
, DECL_UID (from
),
6631 *(struct tree_decl_map
**) loc
= h
;
6634 /* Lookup a vector of debug arguments for FROM, and return it if we
6638 decl_debug_args_lookup (tree from
)
6640 struct tree_vec_map
*h
, in
;
6642 if (!DECL_HAS_DEBUG_ARGS_P (from
))
6644 gcc_checking_assert (debug_args_for_decl
!= NULL
);
6645 in
.base
.from
= from
;
6646 h
= (struct tree_vec_map
*)
6647 htab_find_with_hash (debug_args_for_decl
, &in
, DECL_UID (from
));
6653 /* Insert a mapping FROM->empty vector of debug arguments in the value
6654 expression hashtable. */
6657 decl_debug_args_insert (tree from
)
6659 struct tree_vec_map
*h
;
6662 if (DECL_HAS_DEBUG_ARGS_P (from
))
6663 return decl_debug_args_lookup (from
);
6664 if (debug_args_for_decl
== NULL
)
6665 debug_args_for_decl
= htab_create_ggc (64, tree_vec_map_hash
,
6666 tree_vec_map_eq
, 0);
6667 h
= ggc_alloc
<tree_vec_map
> ();
6668 h
->base
.from
= from
;
6670 loc
= htab_find_slot_with_hash (debug_args_for_decl
, h
, DECL_UID (from
),
6672 *(struct tree_vec_map
**) loc
= h
;
6673 DECL_HAS_DEBUG_ARGS_P (from
) = 1;
6677 /* Hashing of types so that we don't make duplicates.
6678 The entry point is `type_hash_canon'. */
6680 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6681 with types in the TREE_VALUE slots), by adding the hash codes
6682 of the individual types. */
6685 type_hash_list (const_tree list
, hashval_t hashcode
)
6689 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
6690 if (TREE_VALUE (tail
) != error_mark_node
)
6691 hashcode
= iterative_hash_object (TYPE_HASH (TREE_VALUE (tail
)),
6697 /* These are the Hashtable callback functions. */
6699 /* Returns true iff the types are equivalent. */
6702 type_hash_eq (const void *va
, const void *vb
)
6704 const struct type_hash
*const a
= (const struct type_hash
*) va
,
6705 *const b
= (const struct type_hash
*) vb
;
6707 /* First test the things that are the same for all types. */
6708 if (a
->hash
!= b
->hash
6709 || TREE_CODE (a
->type
) != TREE_CODE (b
->type
)
6710 || TREE_TYPE (a
->type
) != TREE_TYPE (b
->type
)
6711 || !attribute_list_equal (TYPE_ATTRIBUTES (a
->type
),
6712 TYPE_ATTRIBUTES (b
->type
))
6713 || (TREE_CODE (a
->type
) != COMPLEX_TYPE
6714 && TYPE_NAME (a
->type
) != TYPE_NAME (b
->type
)))
6717 /* Be careful about comparing arrays before and after the element type
6718 has been completed; don't compare TYPE_ALIGN unless both types are
6720 if (COMPLETE_TYPE_P (a
->type
) && COMPLETE_TYPE_P (b
->type
)
6721 && (TYPE_ALIGN (a
->type
) != TYPE_ALIGN (b
->type
)
6722 || TYPE_MODE (a
->type
) != TYPE_MODE (b
->type
)))
6725 switch (TREE_CODE (a
->type
))
6730 case REFERENCE_TYPE
:
6735 return TYPE_VECTOR_SUBPARTS (a
->type
) == TYPE_VECTOR_SUBPARTS (b
->type
);
6738 if (TYPE_VALUES (a
->type
) != TYPE_VALUES (b
->type
)
6739 && !(TYPE_VALUES (a
->type
)
6740 && TREE_CODE (TYPE_VALUES (a
->type
)) == TREE_LIST
6741 && TYPE_VALUES (b
->type
)
6742 && TREE_CODE (TYPE_VALUES (b
->type
)) == TREE_LIST
6743 && type_list_equal (TYPE_VALUES (a
->type
),
6744 TYPE_VALUES (b
->type
))))
6747 /* ... fall through ... */
6752 if (TYPE_PRECISION (a
->type
) != TYPE_PRECISION (b
->type
))
6754 return ((TYPE_MAX_VALUE (a
->type
) == TYPE_MAX_VALUE (b
->type
)
6755 || tree_int_cst_equal (TYPE_MAX_VALUE (a
->type
),
6756 TYPE_MAX_VALUE (b
->type
)))
6757 && (TYPE_MIN_VALUE (a
->type
) == TYPE_MIN_VALUE (b
->type
)
6758 || tree_int_cst_equal (TYPE_MIN_VALUE (a
->type
),
6759 TYPE_MIN_VALUE (b
->type
))));
6761 case FIXED_POINT_TYPE
:
6762 return TYPE_SATURATING (a
->type
) == TYPE_SATURATING (b
->type
);
6765 return TYPE_OFFSET_BASETYPE (a
->type
) == TYPE_OFFSET_BASETYPE (b
->type
);
6768 if (TYPE_METHOD_BASETYPE (a
->type
) == TYPE_METHOD_BASETYPE (b
->type
)
6769 && (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6770 || (TYPE_ARG_TYPES (a
->type
)
6771 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6772 && TYPE_ARG_TYPES (b
->type
)
6773 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6774 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6775 TYPE_ARG_TYPES (b
->type
)))))
6779 return TYPE_DOMAIN (a
->type
) == TYPE_DOMAIN (b
->type
);
6783 case QUAL_UNION_TYPE
:
6784 return (TYPE_FIELDS (a
->type
) == TYPE_FIELDS (b
->type
)
6785 || (TYPE_FIELDS (a
->type
)
6786 && TREE_CODE (TYPE_FIELDS (a
->type
)) == TREE_LIST
6787 && TYPE_FIELDS (b
->type
)
6788 && TREE_CODE (TYPE_FIELDS (b
->type
)) == TREE_LIST
6789 && type_list_equal (TYPE_FIELDS (a
->type
),
6790 TYPE_FIELDS (b
->type
))));
6793 if (TYPE_ARG_TYPES (a
->type
) == TYPE_ARG_TYPES (b
->type
)
6794 || (TYPE_ARG_TYPES (a
->type
)
6795 && TREE_CODE (TYPE_ARG_TYPES (a
->type
)) == TREE_LIST
6796 && TYPE_ARG_TYPES (b
->type
)
6797 && TREE_CODE (TYPE_ARG_TYPES (b
->type
)) == TREE_LIST
6798 && type_list_equal (TYPE_ARG_TYPES (a
->type
),
6799 TYPE_ARG_TYPES (b
->type
))))
6807 if (lang_hooks
.types
.type_hash_eq
!= NULL
)
6808 return lang_hooks
.types
.type_hash_eq (a
->type
, b
->type
);
6813 /* Return the cached hash value. */
6816 type_hash_hash (const void *item
)
6818 return ((const struct type_hash
*) item
)->hash
;
6821 /* Look in the type hash table for a type isomorphic to TYPE.
6822 If one is found, return it. Otherwise return 0. */
6825 type_hash_lookup (hashval_t hashcode
, tree type
)
6827 struct type_hash
*h
, in
;
6829 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6830 must call that routine before comparing TYPE_ALIGNs. */
6836 h
= (struct type_hash
*) htab_find_with_hash (type_hash_table
, &in
,
6843 /* Add an entry to the type-hash-table
6844 for a type TYPE whose hash code is HASHCODE. */
6847 type_hash_add (hashval_t hashcode
, tree type
)
6849 struct type_hash
*h
;
6852 h
= ggc_alloc
<type_hash
> ();
6855 loc
= htab_find_slot_with_hash (type_hash_table
, h
, hashcode
, INSERT
);
6859 /* Given TYPE, and HASHCODE its hash code, return the canonical
6860 object for an identical type if one already exists.
6861 Otherwise, return TYPE, and record it as the canonical object.
6863 To use this function, first create a type of the sort you want.
6864 Then compute its hash code from the fields of the type that
6865 make it different from other similar types.
6866 Then call this function and use the value. */
6869 type_hash_canon (unsigned int hashcode
, tree type
)
6873 /* The hash table only contains main variants, so ensure that's what we're
6875 gcc_assert (TYPE_MAIN_VARIANT (type
) == type
);
6877 /* See if the type is in the hash table already. If so, return it.
6878 Otherwise, add the type. */
6879 t1
= type_hash_lookup (hashcode
, type
);
6882 if (GATHER_STATISTICS
)
6884 tree_code_counts
[(int) TREE_CODE (type
)]--;
6885 tree_node_counts
[(int) t_kind
]--;
6886 tree_node_sizes
[(int) t_kind
] -= sizeof (struct tree_type_non_common
);
6892 type_hash_add (hashcode
, type
);
6897 /* See if the data pointed to by the type hash table is marked. We consider
6898 it marked if the type is marked or if a debug type number or symbol
6899 table entry has been made for the type. */
6902 type_hash_marked_p (const void *p
)
6904 const_tree
const type
= ((const struct type_hash
*) p
)->type
;
6906 return ggc_marked_p (type
);
6910 print_type_hash_statistics (void)
6912 fprintf (stderr
, "Type hash: size %ld, %ld elements, %f collisions\n",
6913 (long) htab_size (type_hash_table
),
6914 (long) htab_elements (type_hash_table
),
6915 htab_collisions (type_hash_table
));
6918 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6919 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6920 by adding the hash codes of the individual attributes. */
6923 attribute_hash_list (const_tree list
, hashval_t hashcode
)
6927 for (tail
= list
; tail
; tail
= TREE_CHAIN (tail
))
6928 /* ??? Do we want to add in TREE_VALUE too? */
6929 hashcode
= iterative_hash_object
6930 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail
)), hashcode
);
6934 /* Given two lists of attributes, return true if list l2 is
6935 equivalent to l1. */
6938 attribute_list_equal (const_tree l1
, const_tree l2
)
6943 return attribute_list_contained (l1
, l2
)
6944 && attribute_list_contained (l2
, l1
);
6947 /* Given two lists of attributes, return true if list L2 is
6948 completely contained within L1. */
6949 /* ??? This would be faster if attribute names were stored in a canonicalized
6950 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6951 must be used to show these elements are equivalent (which they are). */
6952 /* ??? It's not clear that attributes with arguments will always be handled
6956 attribute_list_contained (const_tree l1
, const_tree l2
)
6960 /* First check the obvious, maybe the lists are identical. */
6964 /* Maybe the lists are similar. */
6965 for (t1
= l1
, t2
= l2
;
6967 && get_attribute_name (t1
) == get_attribute_name (t2
)
6968 && TREE_VALUE (t1
) == TREE_VALUE (t2
);
6969 t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
6972 /* Maybe the lists are equal. */
6973 if (t1
== 0 && t2
== 0)
6976 for (; t2
!= 0; t2
= TREE_CHAIN (t2
))
6979 /* This CONST_CAST is okay because lookup_attribute does not
6980 modify its argument and the return value is assigned to a
6982 for (attr
= lookup_ident_attribute (get_attribute_name (t2
),
6983 CONST_CAST_TREE (l1
));
6984 attr
!= NULL_TREE
&& !attribute_value_equal (t2
, attr
);
6985 attr
= lookup_ident_attribute (get_attribute_name (t2
),
6989 if (attr
== NULL_TREE
)
6996 /* Given two lists of types
6997 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6998 return 1 if the lists contain the same types in the same order.
6999 Also, the TREE_PURPOSEs must match. */
7002 type_list_equal (const_tree l1
, const_tree l2
)
7006 for (t1
= l1
, t2
= l2
; t1
&& t2
; t1
= TREE_CHAIN (t1
), t2
= TREE_CHAIN (t2
))
7007 if (TREE_VALUE (t1
) != TREE_VALUE (t2
)
7008 || (TREE_PURPOSE (t1
) != TREE_PURPOSE (t2
)
7009 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1
), TREE_PURPOSE (t2
))
7010 && (TREE_TYPE (TREE_PURPOSE (t1
))
7011 == TREE_TYPE (TREE_PURPOSE (t2
))))))
7017 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7018 given by TYPE. If the argument list accepts variable arguments,
7019 then this function counts only the ordinary arguments. */
7022 type_num_arguments (const_tree type
)
7027 for (t
= TYPE_ARG_TYPES (type
); t
; t
= TREE_CHAIN (t
))
7028 /* If the function does not take a variable number of arguments,
7029 the last element in the list will have type `void'. */
7030 if (VOID_TYPE_P (TREE_VALUE (t
)))
7038 /* Nonzero if integer constants T1 and T2
7039 represent the same constant value. */
7042 tree_int_cst_equal (const_tree t1
, const_tree t2
)
7047 if (t1
== 0 || t2
== 0)
7050 if (TREE_CODE (t1
) == INTEGER_CST
7051 && TREE_CODE (t2
) == INTEGER_CST
7052 && wi::to_widest (t1
) == wi::to_widest (t2
))
7058 /* Return true if T is an INTEGER_CST whose numerical value (extended
7059 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7062 tree_fits_shwi_p (const_tree t
)
7064 return (t
!= NULL_TREE
7065 && TREE_CODE (t
) == INTEGER_CST
7066 && wi::fits_shwi_p (wi::to_widest (t
)));
7069 /* Return true if T is an INTEGER_CST whose numerical value (extended
7070 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7073 tree_fits_uhwi_p (const_tree t
)
7075 return (t
!= NULL_TREE
7076 && TREE_CODE (t
) == INTEGER_CST
7077 && wi::fits_uhwi_p (wi::to_widest (t
)));
7080 /* T is an INTEGER_CST whose numerical value (extended according to
7081 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7085 tree_to_shwi (const_tree t
)
7087 gcc_assert (tree_fits_shwi_p (t
));
7088 return TREE_INT_CST_LOW (t
);
7091 /* T is an INTEGER_CST whose numerical value (extended according to
7092 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7095 unsigned HOST_WIDE_INT
7096 tree_to_uhwi (const_tree t
)
7098 gcc_assert (tree_fits_uhwi_p (t
));
7099 return TREE_INT_CST_LOW (t
);
7102 /* Return the most significant (sign) bit of T. */
7105 tree_int_cst_sign_bit (const_tree t
)
7107 unsigned bitno
= TYPE_PRECISION (TREE_TYPE (t
)) - 1;
7109 return wi::extract_uhwi (t
, bitno
, 1);
7112 /* Return an indication of the sign of the integer constant T.
7113 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7114 Note that -1 will never be returned if T's type is unsigned. */
7117 tree_int_cst_sgn (const_tree t
)
7119 if (wi::eq_p (t
, 0))
7121 else if (TYPE_UNSIGNED (TREE_TYPE (t
)))
7123 else if (wi::neg_p (t
))
7129 /* Return the minimum number of bits needed to represent VALUE in a
7130 signed or unsigned type, UNSIGNEDP says which. */
7133 tree_int_cst_min_precision (tree value
, signop sgn
)
7135 /* If the value is negative, compute its negative minus 1. The latter
7136 adjustment is because the absolute value of the largest negative value
7137 is one larger than the largest positive value. This is equivalent to
7138 a bit-wise negation, so use that operation instead. */
7140 if (tree_int_cst_sgn (value
) < 0)
7141 value
= fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (value
), value
);
7143 /* Return the number of bits needed, taking into account the fact
7144 that we need one more bit for a signed than unsigned type.
7145 If value is 0 or -1, the minimum precision is 1 no matter
7146 whether unsignedp is true or false. */
7148 if (integer_zerop (value
))
7151 return tree_floor_log2 (value
) + 1 + (sgn
== SIGNED
? 1 : 0) ;
7154 /* Return truthvalue of whether T1 is the same tree structure as T2.
7155 Return 1 if they are the same.
7156 Return 0 if they are understandably different.
7157 Return -1 if either contains tree structure not understood by
7161 simple_cst_equal (const_tree t1
, const_tree t2
)
7163 enum tree_code code1
, code2
;
7169 if (t1
== 0 || t2
== 0)
7172 code1
= TREE_CODE (t1
);
7173 code2
= TREE_CODE (t2
);
7175 if (CONVERT_EXPR_CODE_P (code1
) || code1
== NON_LVALUE_EXPR
)
7177 if (CONVERT_EXPR_CODE_P (code2
)
7178 || code2
== NON_LVALUE_EXPR
)
7179 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7181 return simple_cst_equal (TREE_OPERAND (t1
, 0), t2
);
7184 else if (CONVERT_EXPR_CODE_P (code2
)
7185 || code2
== NON_LVALUE_EXPR
)
7186 return simple_cst_equal (t1
, TREE_OPERAND (t2
, 0));
7194 return wi::to_widest (t1
) == wi::to_widest (t2
);
7197 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1
), TREE_REAL_CST (t2
));
7200 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1
), TREE_FIXED_CST (t2
));
7203 return (TREE_STRING_LENGTH (t1
) == TREE_STRING_LENGTH (t2
)
7204 && ! memcmp (TREE_STRING_POINTER (t1
), TREE_STRING_POINTER (t2
),
7205 TREE_STRING_LENGTH (t1
)));
7209 unsigned HOST_WIDE_INT idx
;
7210 vec
<constructor_elt
, va_gc
> *v1
= CONSTRUCTOR_ELTS (t1
);
7211 vec
<constructor_elt
, va_gc
> *v2
= CONSTRUCTOR_ELTS (t2
);
7213 if (vec_safe_length (v1
) != vec_safe_length (v2
))
7216 for (idx
= 0; idx
< vec_safe_length (v1
); ++idx
)
7217 /* ??? Should we handle also fields here? */
7218 if (!simple_cst_equal ((*v1
)[idx
].value
, (*v2
)[idx
].value
))
7224 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7227 cmp
= simple_cst_equal (CALL_EXPR_FN (t1
), CALL_EXPR_FN (t2
));
7230 if (call_expr_nargs (t1
) != call_expr_nargs (t2
))
7233 const_tree arg1
, arg2
;
7234 const_call_expr_arg_iterator iter1
, iter2
;
7235 for (arg1
= first_const_call_expr_arg (t1
, &iter1
),
7236 arg2
= first_const_call_expr_arg (t2
, &iter2
);
7238 arg1
= next_const_call_expr_arg (&iter1
),
7239 arg2
= next_const_call_expr_arg (&iter2
))
7241 cmp
= simple_cst_equal (arg1
, arg2
);
7245 return arg1
== arg2
;
7249 /* Special case: if either target is an unallocated VAR_DECL,
7250 it means that it's going to be unified with whatever the
7251 TARGET_EXPR is really supposed to initialize, so treat it
7252 as being equivalent to anything. */
7253 if ((TREE_CODE (TREE_OPERAND (t1
, 0)) == VAR_DECL
7254 && DECL_NAME (TREE_OPERAND (t1
, 0)) == NULL_TREE
7255 && !DECL_RTL_SET_P (TREE_OPERAND (t1
, 0)))
7256 || (TREE_CODE (TREE_OPERAND (t2
, 0)) == VAR_DECL
7257 && DECL_NAME (TREE_OPERAND (t2
, 0)) == NULL_TREE
7258 && !DECL_RTL_SET_P (TREE_OPERAND (t2
, 0))))
7261 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7266 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t2
, 1));
7268 case WITH_CLEANUP_EXPR
:
7269 cmp
= simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7273 return simple_cst_equal (TREE_OPERAND (t1
, 1), TREE_OPERAND (t1
, 1));
7276 if (TREE_OPERAND (t1
, 1) == TREE_OPERAND (t2
, 1))
7277 return simple_cst_equal (TREE_OPERAND (t1
, 0), TREE_OPERAND (t2
, 0));
7291 /* This general rule works for most tree codes. All exceptions should be
7292 handled above. If this is a language-specific tree code, we can't
7293 trust what might be in the operand, so say we don't know
7295 if ((int) code1
>= (int) LAST_AND_UNUSED_TREE_CODE
)
7298 switch (TREE_CODE_CLASS (code1
))
7302 case tcc_comparison
:
7303 case tcc_expression
:
7307 for (i
= 0; i
< TREE_CODE_LENGTH (code1
); i
++)
7309 cmp
= simple_cst_equal (TREE_OPERAND (t1
, i
), TREE_OPERAND (t2
, i
));
7321 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7322 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7323 than U, respectively. */
7326 compare_tree_int (const_tree t
, unsigned HOST_WIDE_INT u
)
7328 if (tree_int_cst_sgn (t
) < 0)
7330 else if (!tree_fits_uhwi_p (t
))
7332 else if (TREE_INT_CST_LOW (t
) == u
)
7334 else if (TREE_INT_CST_LOW (t
) < u
)
7340 /* Return true if SIZE represents a constant size that is in bounds of
7341 what the middle-end and the backend accepts (covering not more than
7342 half of the address-space). */
7345 valid_constant_size_p (const_tree size
)
7347 if (! tree_fits_uhwi_p (size
)
7348 || TREE_OVERFLOW (size
)
7349 || tree_int_cst_sign_bit (size
) != 0)
7354 /* Return the precision of the type, or for a complex or vector type the
7355 precision of the type of its elements. */
7358 element_precision (const_tree type
)
7360 enum tree_code code
= TREE_CODE (type
);
7361 if (code
== COMPLEX_TYPE
|| code
== VECTOR_TYPE
)
7362 type
= TREE_TYPE (type
);
7364 return TYPE_PRECISION (type
);
7367 /* Return true if CODE represents an associative tree code. Otherwise
7370 associative_tree_code (enum tree_code code
)
7389 /* Return true if CODE represents a commutative tree code. Otherwise
7392 commutative_tree_code (enum tree_code code
)
7398 case MULT_HIGHPART_EXPR
:
7406 case UNORDERED_EXPR
:
7410 case TRUTH_AND_EXPR
:
7411 case TRUTH_XOR_EXPR
:
7413 case WIDEN_MULT_EXPR
:
7414 case VEC_WIDEN_MULT_HI_EXPR
:
7415 case VEC_WIDEN_MULT_LO_EXPR
:
7416 case VEC_WIDEN_MULT_EVEN_EXPR
:
7417 case VEC_WIDEN_MULT_ODD_EXPR
:
7426 /* Return true if CODE represents a ternary tree code for which the
7427 first two operands are commutative. Otherwise return false. */
7429 commutative_ternary_tree_code (enum tree_code code
)
7433 case WIDEN_MULT_PLUS_EXPR
:
7434 case WIDEN_MULT_MINUS_EXPR
:
7443 /* Generate a hash value for an expression. This can be used iteratively
7444 by passing a previous result as the VAL argument.
7446 This function is intended to produce the same hash for expressions which
7447 would compare equal using operand_equal_p. */
7450 iterative_hash_expr (const_tree t
, hashval_t val
)
7453 enum tree_code code
;
7454 enum tree_code_class tclass
;
7457 return iterative_hash_hashval_t (0, val
);
7459 code
= TREE_CODE (t
);
7463 /* Alas, constants aren't shared, so we can't rely on pointer
7466 return iterative_hash_hashval_t (0, val
);
7468 for (i
= 0; i
< TREE_INT_CST_NUNITS (t
); i
++)
7469 val
= iterative_hash_host_wide_int (TREE_INT_CST_ELT (t
, i
), val
);
7473 unsigned int val2
= real_hash (TREE_REAL_CST_PTR (t
));
7475 return iterative_hash_hashval_t (val2
, val
);
7479 unsigned int val2
= fixed_hash (TREE_FIXED_CST_PTR (t
));
7481 return iterative_hash_hashval_t (val2
, val
);
7484 return iterative_hash (TREE_STRING_POINTER (t
),
7485 TREE_STRING_LENGTH (t
), val
);
7487 val
= iterative_hash_expr (TREE_REALPART (t
), val
);
7488 return iterative_hash_expr (TREE_IMAGPART (t
), val
);
7492 for (i
= 0; i
< VECTOR_CST_NELTS (t
); ++i
)
7493 val
= iterative_hash_expr (VECTOR_CST_ELT (t
, i
), val
);
7497 /* We can just compare by pointer. */
7498 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t
), val
);
7499 case PLACEHOLDER_EXPR
:
7500 /* The node itself doesn't matter. */
7503 /* A list of expressions, for a CALL_EXPR or as the elements of a
7505 for (; t
; t
= TREE_CHAIN (t
))
7506 val
= iterative_hash_expr (TREE_VALUE (t
), val
);
7510 unsigned HOST_WIDE_INT idx
;
7512 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t
), idx
, field
, value
)
7514 val
= iterative_hash_expr (field
, val
);
7515 val
= iterative_hash_expr (value
, val
);
7520 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7521 Otherwise nodes that compare equal according to operand_equal_p might
7522 get different hash codes. However, don't do this for machine specific
7523 or front end builtins, since the function code is overloaded in those
7525 if (DECL_BUILT_IN_CLASS (t
) == BUILT_IN_NORMAL
7526 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t
)))
7528 t
= builtin_decl_explicit (DECL_FUNCTION_CODE (t
));
7529 code
= TREE_CODE (t
);
7533 tclass
= TREE_CODE_CLASS (code
);
7535 if (tclass
== tcc_declaration
)
7537 /* DECL's have a unique ID */
7538 val
= iterative_hash_host_wide_int (DECL_UID (t
), val
);
7542 gcc_assert (IS_EXPR_CODE_CLASS (tclass
));
7544 val
= iterative_hash_object (code
, val
);
7546 /* Don't hash the type, that can lead to having nodes which
7547 compare equal according to operand_equal_p, but which
7548 have different hash codes. */
7549 if (CONVERT_EXPR_CODE_P (code
)
7550 || code
== NON_LVALUE_EXPR
)
7552 /* Make sure to include signness in the hash computation. */
7553 val
+= TYPE_UNSIGNED (TREE_TYPE (t
));
7554 val
= iterative_hash_expr (TREE_OPERAND (t
, 0), val
);
7557 else if (commutative_tree_code (code
))
7559 /* It's a commutative expression. We want to hash it the same
7560 however it appears. We do this by first hashing both operands
7561 and then rehashing based on the order of their independent
7563 hashval_t one
= iterative_hash_expr (TREE_OPERAND (t
, 0), 0);
7564 hashval_t two
= iterative_hash_expr (TREE_OPERAND (t
, 1), 0);
7568 t
= one
, one
= two
, two
= t
;
7570 val
= iterative_hash_hashval_t (one
, val
);
7571 val
= iterative_hash_hashval_t (two
, val
);
7574 for (i
= TREE_OPERAND_LENGTH (t
) - 1; i
>= 0; --i
)
7575 val
= iterative_hash_expr (TREE_OPERAND (t
, i
), val
);
7581 /* Constructors for pointer, array and function types.
7582 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7583 constructed by language-dependent code, not here.) */
7585 /* Construct, lay out and return the type of pointers to TO_TYPE with
7586 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7587 reference all of memory. If such a type has already been
7588 constructed, reuse it. */
7591 build_pointer_type_for_mode (tree to_type
, enum machine_mode mode
,
7596 if (to_type
== error_mark_node
)
7597 return error_mark_node
;
7599 /* If the pointed-to type has the may_alias attribute set, force
7600 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7601 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7602 can_alias_all
= true;
7604 /* In some cases, languages will have things that aren't a POINTER_TYPE
7605 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7606 In that case, return that type without regard to the rest of our
7609 ??? This is a kludge, but consistent with the way this function has
7610 always operated and there doesn't seem to be a good way to avoid this
7612 if (TYPE_POINTER_TO (to_type
) != 0
7613 && TREE_CODE (TYPE_POINTER_TO (to_type
)) != POINTER_TYPE
)
7614 return TYPE_POINTER_TO (to_type
);
7616 /* First, if we already have a type for pointers to TO_TYPE and it's
7617 the proper mode, use it. */
7618 for (t
= TYPE_POINTER_TO (to_type
); t
; t
= TYPE_NEXT_PTR_TO (t
))
7619 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7622 t
= make_node (POINTER_TYPE
);
7624 TREE_TYPE (t
) = to_type
;
7625 SET_TYPE_MODE (t
, mode
);
7626 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7627 TYPE_NEXT_PTR_TO (t
) = TYPE_POINTER_TO (to_type
);
7628 TYPE_POINTER_TO (to_type
) = t
;
7630 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
))
7631 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7632 else if (TYPE_CANONICAL (to_type
) != to_type
)
7634 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type
),
7635 mode
, can_alias_all
);
7637 /* Lay out the type. This function has many callers that are concerned
7638 with expression-construction, and this simplifies them all. */
7644 /* By default build pointers in ptr_mode. */
7647 build_pointer_type (tree to_type
)
7649 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7650 : TYPE_ADDR_SPACE (to_type
);
7651 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7652 return build_pointer_type_for_mode (to_type
, pointer_mode
, false);
7655 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7658 build_reference_type_for_mode (tree to_type
, enum machine_mode mode
,
7663 if (to_type
== error_mark_node
)
7664 return error_mark_node
;
7666 /* If the pointed-to type has the may_alias attribute set, force
7667 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7668 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type
)))
7669 can_alias_all
= true;
7671 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7672 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7673 In that case, return that type without regard to the rest of our
7676 ??? This is a kludge, but consistent with the way this function has
7677 always operated and there doesn't seem to be a good way to avoid this
7679 if (TYPE_REFERENCE_TO (to_type
) != 0
7680 && TREE_CODE (TYPE_REFERENCE_TO (to_type
)) != REFERENCE_TYPE
)
7681 return TYPE_REFERENCE_TO (to_type
);
7683 /* First, if we already have a type for pointers to TO_TYPE and it's
7684 the proper mode, use it. */
7685 for (t
= TYPE_REFERENCE_TO (to_type
); t
; t
= TYPE_NEXT_REF_TO (t
))
7686 if (TYPE_MODE (t
) == mode
&& TYPE_REF_CAN_ALIAS_ALL (t
) == can_alias_all
)
7689 t
= make_node (REFERENCE_TYPE
);
7691 TREE_TYPE (t
) = to_type
;
7692 SET_TYPE_MODE (t
, mode
);
7693 TYPE_REF_CAN_ALIAS_ALL (t
) = can_alias_all
;
7694 TYPE_NEXT_REF_TO (t
) = TYPE_REFERENCE_TO (to_type
);
7695 TYPE_REFERENCE_TO (to_type
) = t
;
7697 if (TYPE_STRUCTURAL_EQUALITY_P (to_type
))
7698 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7699 else if (TYPE_CANONICAL (to_type
) != to_type
)
7701 = build_reference_type_for_mode (TYPE_CANONICAL (to_type
),
7702 mode
, can_alias_all
);
7710 /* Build the node for the type of references-to-TO_TYPE by default
7714 build_reference_type (tree to_type
)
7716 addr_space_t as
= to_type
== error_mark_node
? ADDR_SPACE_GENERIC
7717 : TYPE_ADDR_SPACE (to_type
);
7718 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7719 return build_reference_type_for_mode (to_type
, pointer_mode
, false);
7722 #define MAX_INT_CACHED_PREC \
7723 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7724 static GTY(()) tree nonstandard_integer_type_cache
[2 * MAX_INT_CACHED_PREC
+ 2];
7726 /* Builds a signed or unsigned integer type of precision PRECISION.
7727 Used for C bitfields whose precision does not match that of
7728 built-in target types. */
7730 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision
,
7736 unsignedp
= MAX_INT_CACHED_PREC
+ 1;
7738 if (precision
<= MAX_INT_CACHED_PREC
)
7740 itype
= nonstandard_integer_type_cache
[precision
+ unsignedp
];
7745 itype
= make_node (INTEGER_TYPE
);
7746 TYPE_PRECISION (itype
) = precision
;
7749 fixup_unsigned_type (itype
);
7751 fixup_signed_type (itype
);
7754 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype
)))
7755 ret
= type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype
)), itype
);
7756 if (precision
<= MAX_INT_CACHED_PREC
)
7757 nonstandard_integer_type_cache
[precision
+ unsignedp
] = ret
;
7762 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7763 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7764 is true, reuse such a type that has already been constructed. */
7767 build_range_type_1 (tree type
, tree lowval
, tree highval
, bool shared
)
7769 tree itype
= make_node (INTEGER_TYPE
);
7770 hashval_t hashcode
= 0;
7772 TREE_TYPE (itype
) = type
;
7774 TYPE_MIN_VALUE (itype
) = fold_convert (type
, lowval
);
7775 TYPE_MAX_VALUE (itype
) = highval
? fold_convert (type
, highval
) : NULL
;
7777 TYPE_PRECISION (itype
) = TYPE_PRECISION (type
);
7778 SET_TYPE_MODE (itype
, TYPE_MODE (type
));
7779 TYPE_SIZE (itype
) = TYPE_SIZE (type
);
7780 TYPE_SIZE_UNIT (itype
) = TYPE_SIZE_UNIT (type
);
7781 TYPE_ALIGN (itype
) = TYPE_ALIGN (type
);
7782 TYPE_USER_ALIGN (itype
) = TYPE_USER_ALIGN (type
);
7787 if ((TYPE_MIN_VALUE (itype
)
7788 && TREE_CODE (TYPE_MIN_VALUE (itype
)) != INTEGER_CST
)
7789 || (TYPE_MAX_VALUE (itype
)
7790 && TREE_CODE (TYPE_MAX_VALUE (itype
)) != INTEGER_CST
))
7792 /* Since we cannot reliably merge this type, we need to compare it using
7793 structural equality checks. */
7794 SET_TYPE_STRUCTURAL_EQUALITY (itype
);
7798 hashcode
= iterative_hash_expr (TYPE_MIN_VALUE (itype
), hashcode
);
7799 hashcode
= iterative_hash_expr (TYPE_MAX_VALUE (itype
), hashcode
);
7800 hashcode
= iterative_hash_hashval_t (TYPE_HASH (type
), hashcode
);
7801 itype
= type_hash_canon (hashcode
, itype
);
7806 /* Wrapper around build_range_type_1 with SHARED set to true. */
7809 build_range_type (tree type
, tree lowval
, tree highval
)
7811 return build_range_type_1 (type
, lowval
, highval
, true);
7814 /* Wrapper around build_range_type_1 with SHARED set to false. */
7817 build_nonshared_range_type (tree type
, tree lowval
, tree highval
)
7819 return build_range_type_1 (type
, lowval
, highval
, false);
7822 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7823 MAXVAL should be the maximum value in the domain
7824 (one less than the length of the array).
7826 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7827 We don't enforce this limit, that is up to caller (e.g. language front end).
7828 The limit exists because the result is a signed type and we don't handle
7829 sizes that use more than one HOST_WIDE_INT. */
7832 build_index_type (tree maxval
)
7834 return build_range_type (sizetype
, size_zero_node
, maxval
);
7837 /* Return true if the debug information for TYPE, a subtype, should be emitted
7838 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7839 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7840 debug info and doesn't reflect the source code. */
7843 subrange_type_for_debug_p (const_tree type
, tree
*lowval
, tree
*highval
)
7845 tree base_type
= TREE_TYPE (type
), low
, high
;
7847 /* Subrange types have a base type which is an integral type. */
7848 if (!INTEGRAL_TYPE_P (base_type
))
7851 /* Get the real bounds of the subtype. */
7852 if (lang_hooks
.types
.get_subrange_bounds
)
7853 lang_hooks
.types
.get_subrange_bounds (type
, &low
, &high
);
7856 low
= TYPE_MIN_VALUE (type
);
7857 high
= TYPE_MAX_VALUE (type
);
7860 /* If the type and its base type have the same representation and the same
7861 name, then the type is not a subrange but a copy of the base type. */
7862 if ((TREE_CODE (base_type
) == INTEGER_TYPE
7863 || TREE_CODE (base_type
) == BOOLEAN_TYPE
)
7864 && int_size_in_bytes (type
) == int_size_in_bytes (base_type
)
7865 && tree_int_cst_equal (low
, TYPE_MIN_VALUE (base_type
))
7866 && tree_int_cst_equal (high
, TYPE_MAX_VALUE (base_type
))
7867 && TYPE_IDENTIFIER (type
) == TYPE_IDENTIFIER (base_type
))
7877 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7878 and number of elements specified by the range of values of INDEX_TYPE.
7879 If SHARED is true, reuse such a type that has already been constructed. */
7882 build_array_type_1 (tree elt_type
, tree index_type
, bool shared
)
7886 if (TREE_CODE (elt_type
) == FUNCTION_TYPE
)
7888 error ("arrays of functions are not meaningful");
7889 elt_type
= integer_type_node
;
7892 t
= make_node (ARRAY_TYPE
);
7893 TREE_TYPE (t
) = elt_type
;
7894 TYPE_DOMAIN (t
) = index_type
;
7895 TYPE_ADDR_SPACE (t
) = TYPE_ADDR_SPACE (elt_type
);
7898 /* If the element type is incomplete at this point we get marked for
7899 structural equality. Do not record these types in the canonical
7901 if (TYPE_STRUCTURAL_EQUALITY_P (t
))
7906 hashval_t hashcode
= iterative_hash_object (TYPE_HASH (elt_type
), 0);
7908 hashcode
= iterative_hash_object (TYPE_HASH (index_type
), hashcode
);
7909 t
= type_hash_canon (hashcode
, t
);
7912 if (TYPE_CANONICAL (t
) == t
)
7914 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type
)
7915 || (index_type
&& TYPE_STRUCTURAL_EQUALITY_P (index_type
)))
7916 SET_TYPE_STRUCTURAL_EQUALITY (t
);
7917 else if (TYPE_CANONICAL (elt_type
) != elt_type
7918 || (index_type
&& TYPE_CANONICAL (index_type
) != index_type
))
7920 = build_array_type_1 (TYPE_CANONICAL (elt_type
),
7922 ? TYPE_CANONICAL (index_type
) : NULL_TREE
,
7929 /* Wrapper around build_array_type_1 with SHARED set to true. */
7932 build_array_type (tree elt_type
, tree index_type
)
7934 return build_array_type_1 (elt_type
, index_type
, true);
7937 /* Wrapper around build_array_type_1 with SHARED set to false. */
7940 build_nonshared_array_type (tree elt_type
, tree index_type
)
7942 return build_array_type_1 (elt_type
, index_type
, false);
7945 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7949 build_array_type_nelts (tree elt_type
, unsigned HOST_WIDE_INT nelts
)
7951 return build_array_type (elt_type
, build_index_type (size_int (nelts
- 1)));
7954 /* Recursively examines the array elements of TYPE, until a non-array
7955 element type is found. */
7958 strip_array_types (tree type
)
7960 while (TREE_CODE (type
) == ARRAY_TYPE
)
7961 type
= TREE_TYPE (type
);
7966 /* Computes the canonical argument types from the argument type list
7969 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7970 on entry to this function, or if any of the ARGTYPES are
7973 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7974 true on entry to this function, or if any of the ARGTYPES are
7977 Returns a canonical argument list, which may be ARGTYPES when the
7978 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7979 true) or would not differ from ARGTYPES. */
7982 maybe_canonicalize_argtypes (tree argtypes
,
7983 bool *any_structural_p
,
7984 bool *any_noncanonical_p
)
7987 bool any_noncanonical_argtypes_p
= false;
7989 for (arg
= argtypes
; arg
&& !(*any_structural_p
); arg
= TREE_CHAIN (arg
))
7991 if (!TREE_VALUE (arg
) || TREE_VALUE (arg
) == error_mark_node
)
7992 /* Fail gracefully by stating that the type is structural. */
7993 *any_structural_p
= true;
7994 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg
)))
7995 *any_structural_p
= true;
7996 else if (TYPE_CANONICAL (TREE_VALUE (arg
)) != TREE_VALUE (arg
)
7997 || TREE_PURPOSE (arg
))
7998 /* If the argument has a default argument, we consider it
7999 non-canonical even though the type itself is canonical.
8000 That way, different variants of function and method types
8001 with default arguments will all point to the variant with
8002 no defaults as their canonical type. */
8003 any_noncanonical_argtypes_p
= true;
8006 if (*any_structural_p
)
8009 if (any_noncanonical_argtypes_p
)
8011 /* Build the canonical list of argument types. */
8012 tree canon_argtypes
= NULL_TREE
;
8013 bool is_void
= false;
8015 for (arg
= argtypes
; arg
; arg
= TREE_CHAIN (arg
))
8017 if (arg
== void_list_node
)
8020 canon_argtypes
= tree_cons (NULL_TREE
,
8021 TYPE_CANONICAL (TREE_VALUE (arg
)),
8025 canon_argtypes
= nreverse (canon_argtypes
);
8027 canon_argtypes
= chainon (canon_argtypes
, void_list_node
);
8029 /* There is a non-canonical type. */
8030 *any_noncanonical_p
= true;
8031 return canon_argtypes
;
8034 /* The canonical argument types are the same as ARGTYPES. */
8038 /* Construct, lay out and return
8039 the type of functions returning type VALUE_TYPE
8040 given arguments of types ARG_TYPES.
8041 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8042 are data type nodes for the arguments of the function.
8043 If such a type has already been constructed, reuse it. */
8046 build_function_type (tree value_type
, tree arg_types
)
8049 hashval_t hashcode
= 0;
8050 bool any_structural_p
, any_noncanonical_p
;
8051 tree canon_argtypes
;
8053 if (TREE_CODE (value_type
) == FUNCTION_TYPE
)
8055 error ("function return type cannot be function");
8056 value_type
= integer_type_node
;
8059 /* Make a node of the sort we want. */
8060 t
= make_node (FUNCTION_TYPE
);
8061 TREE_TYPE (t
) = value_type
;
8062 TYPE_ARG_TYPES (t
) = arg_types
;
8064 /* If we already have such a type, use the old one. */
8065 hashcode
= iterative_hash_object (TYPE_HASH (value_type
), hashcode
);
8066 hashcode
= type_hash_list (arg_types
, hashcode
);
8067 t
= type_hash_canon (hashcode
, t
);
8069 /* Set up the canonical type. */
8070 any_structural_p
= TYPE_STRUCTURAL_EQUALITY_P (value_type
);
8071 any_noncanonical_p
= TYPE_CANONICAL (value_type
) != value_type
;
8072 canon_argtypes
= maybe_canonicalize_argtypes (arg_types
,
8074 &any_noncanonical_p
);
8075 if (any_structural_p
)
8076 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8077 else if (any_noncanonical_p
)
8078 TYPE_CANONICAL (t
) = build_function_type (TYPE_CANONICAL (value_type
),
8081 if (!COMPLETE_TYPE_P (t
))
8086 /* Build a function type. The RETURN_TYPE is the type returned by the
8087 function. If VAARGS is set, no void_type_node is appended to the
8088 the list. ARGP must be always be terminated be a NULL_TREE. */
8091 build_function_type_list_1 (bool vaargs
, tree return_type
, va_list argp
)
8095 t
= va_arg (argp
, tree
);
8096 for (args
= NULL_TREE
; t
!= NULL_TREE
; t
= va_arg (argp
, tree
))
8097 args
= tree_cons (NULL_TREE
, t
, args
);
8102 if (args
!= NULL_TREE
)
8103 args
= nreverse (args
);
8104 gcc_assert (last
!= void_list_node
);
8106 else if (args
== NULL_TREE
)
8107 args
= void_list_node
;
8111 args
= nreverse (args
);
8112 TREE_CHAIN (last
) = void_list_node
;
8114 args
= build_function_type (return_type
, args
);
8119 /* Build a function type. The RETURN_TYPE is the type returned by the
8120 function. If additional arguments are provided, they are
8121 additional argument types. The list of argument types must always
8122 be terminated by NULL_TREE. */
8125 build_function_type_list (tree return_type
, ...)
8130 va_start (p
, return_type
);
8131 args
= build_function_type_list_1 (false, return_type
, p
);
8136 /* Build a variable argument function type. The RETURN_TYPE is the
8137 type returned by the function. If additional arguments are provided,
8138 they are additional argument types. The list of argument types must
8139 always be terminated by NULL_TREE. */
8142 build_varargs_function_type_list (tree return_type
, ...)
8147 va_start (p
, return_type
);
8148 args
= build_function_type_list_1 (true, return_type
, p
);
8154 /* Build a function type. RETURN_TYPE is the type returned by the
8155 function; VAARGS indicates whether the function takes varargs. The
8156 function takes N named arguments, the types of which are provided in
8160 build_function_type_array_1 (bool vaargs
, tree return_type
, int n
,
8164 tree t
= vaargs
? NULL_TREE
: void_list_node
;
8166 for (i
= n
- 1; i
>= 0; i
--)
8167 t
= tree_cons (NULL_TREE
, arg_types
[i
], t
);
8169 return build_function_type (return_type
, t
);
8172 /* Build a function type. RETURN_TYPE is the type returned by the
8173 function. The function takes N named arguments, the types of which
8174 are provided in ARG_TYPES. */
8177 build_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8179 return build_function_type_array_1 (false, return_type
, n
, arg_types
);
8182 /* Build a variable argument function type. RETURN_TYPE is the type
8183 returned by the function. The function takes N named arguments, the
8184 types of which are provided in ARG_TYPES. */
8187 build_varargs_function_type_array (tree return_type
, int n
, tree
*arg_types
)
8189 return build_function_type_array_1 (true, return_type
, n
, arg_types
);
8192 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8193 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8194 for the method. An implicit additional parameter (of type
8195 pointer-to-BASETYPE) is added to the ARGTYPES. */
8198 build_method_type_directly (tree basetype
,
8205 bool any_structural_p
, any_noncanonical_p
;
8206 tree canon_argtypes
;
8208 /* Make a node of the sort we want. */
8209 t
= make_node (METHOD_TYPE
);
8211 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8212 TREE_TYPE (t
) = rettype
;
8213 ptype
= build_pointer_type (basetype
);
8215 /* The actual arglist for this function includes a "hidden" argument
8216 which is "this". Put it into the list of argument types. */
8217 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
8218 TYPE_ARG_TYPES (t
) = argtypes
;
8220 /* If we already have such a type, use the old one. */
8221 hashcode
= iterative_hash_object (TYPE_HASH (basetype
), hashcode
);
8222 hashcode
= iterative_hash_object (TYPE_HASH (rettype
), hashcode
);
8223 hashcode
= type_hash_list (argtypes
, hashcode
);
8224 t
= type_hash_canon (hashcode
, t
);
8226 /* Set up the canonical type. */
8228 = (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8229 || TYPE_STRUCTURAL_EQUALITY_P (rettype
));
8231 = (TYPE_CANONICAL (basetype
) != basetype
8232 || TYPE_CANONICAL (rettype
) != rettype
);
8233 canon_argtypes
= maybe_canonicalize_argtypes (TREE_CHAIN (argtypes
),
8235 &any_noncanonical_p
);
8236 if (any_structural_p
)
8237 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8238 else if (any_noncanonical_p
)
8240 = build_method_type_directly (TYPE_CANONICAL (basetype
),
8241 TYPE_CANONICAL (rettype
),
8243 if (!COMPLETE_TYPE_P (t
))
8249 /* Construct, lay out and return the type of methods belonging to class
8250 BASETYPE and whose arguments and values are described by TYPE.
8251 If that type exists already, reuse it.
8252 TYPE must be a FUNCTION_TYPE node. */
8255 build_method_type (tree basetype
, tree type
)
8257 gcc_assert (TREE_CODE (type
) == FUNCTION_TYPE
);
8259 return build_method_type_directly (basetype
,
8261 TYPE_ARG_TYPES (type
));
8264 /* Construct, lay out and return the type of offsets to a value
8265 of type TYPE, within an object of type BASETYPE.
8266 If a suitable offset type exists already, reuse it. */
8269 build_offset_type (tree basetype
, tree type
)
8272 hashval_t hashcode
= 0;
8274 /* Make a node of the sort we want. */
8275 t
= make_node (OFFSET_TYPE
);
8277 TYPE_OFFSET_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
8278 TREE_TYPE (t
) = type
;
8280 /* If we already have such a type, use the old one. */
8281 hashcode
= iterative_hash_object (TYPE_HASH (basetype
), hashcode
);
8282 hashcode
= iterative_hash_object (TYPE_HASH (type
), hashcode
);
8283 t
= type_hash_canon (hashcode
, t
);
8285 if (!COMPLETE_TYPE_P (t
))
8288 if (TYPE_CANONICAL (t
) == t
)
8290 if (TYPE_STRUCTURAL_EQUALITY_P (basetype
)
8291 || TYPE_STRUCTURAL_EQUALITY_P (type
))
8292 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8293 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)) != basetype
8294 || TYPE_CANONICAL (type
) != type
)
8296 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype
)),
8297 TYPE_CANONICAL (type
));
8303 /* Create a complex type whose components are COMPONENT_TYPE. */
8306 build_complex_type (tree component_type
)
8311 gcc_assert (INTEGRAL_TYPE_P (component_type
)
8312 || SCALAR_FLOAT_TYPE_P (component_type
)
8313 || FIXED_POINT_TYPE_P (component_type
));
8315 /* Make a node of the sort we want. */
8316 t
= make_node (COMPLEX_TYPE
);
8318 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (component_type
);
8320 /* If we already have such a type, use the old one. */
8321 hashcode
= iterative_hash_object (TYPE_HASH (component_type
), 0);
8322 t
= type_hash_canon (hashcode
, t
);
8324 if (!COMPLETE_TYPE_P (t
))
8327 if (TYPE_CANONICAL (t
) == t
)
8329 if (TYPE_STRUCTURAL_EQUALITY_P (component_type
))
8330 SET_TYPE_STRUCTURAL_EQUALITY (t
);
8331 else if (TYPE_CANONICAL (component_type
) != component_type
)
8333 = build_complex_type (TYPE_CANONICAL (component_type
));
8336 /* We need to create a name, since complex is a fundamental type. */
8337 if (! TYPE_NAME (t
))
8340 if (component_type
== char_type_node
)
8341 name
= "complex char";
8342 else if (component_type
== signed_char_type_node
)
8343 name
= "complex signed char";
8344 else if (component_type
== unsigned_char_type_node
)
8345 name
= "complex unsigned char";
8346 else if (component_type
== short_integer_type_node
)
8347 name
= "complex short int";
8348 else if (component_type
== short_unsigned_type_node
)
8349 name
= "complex short unsigned int";
8350 else if (component_type
== integer_type_node
)
8351 name
= "complex int";
8352 else if (component_type
== unsigned_type_node
)
8353 name
= "complex unsigned int";
8354 else if (component_type
== long_integer_type_node
)
8355 name
= "complex long int";
8356 else if (component_type
== long_unsigned_type_node
)
8357 name
= "complex long unsigned int";
8358 else if (component_type
== long_long_integer_type_node
)
8359 name
= "complex long long int";
8360 else if (component_type
== long_long_unsigned_type_node
)
8361 name
= "complex long long unsigned int";
8366 TYPE_NAME (t
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
8367 get_identifier (name
), t
);
8370 return build_qualified_type (t
, TYPE_QUALS (component_type
));
8373 /* If TYPE is a real or complex floating-point type and the target
8374 does not directly support arithmetic on TYPE then return the wider
8375 type to be used for arithmetic on TYPE. Otherwise, return
8379 excess_precision_type (tree type
)
8381 if (flag_excess_precision
!= EXCESS_PRECISION_FAST
)
8383 int flt_eval_method
= TARGET_FLT_EVAL_METHOD
;
8384 switch (TREE_CODE (type
))
8387 switch (flt_eval_method
)
8390 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
))
8391 return double_type_node
;
8394 if (TYPE_MODE (type
) == TYPE_MODE (float_type_node
)
8395 || TYPE_MODE (type
) == TYPE_MODE (double_type_node
))
8396 return long_double_type_node
;
8403 if (TREE_CODE (TREE_TYPE (type
)) != REAL_TYPE
)
8405 switch (flt_eval_method
)
8408 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
))
8409 return complex_double_type_node
;
8412 if (TYPE_MODE (TREE_TYPE (type
)) == TYPE_MODE (float_type_node
)
8413 || (TYPE_MODE (TREE_TYPE (type
))
8414 == TYPE_MODE (double_type_node
)))
8415 return complex_long_double_type_node
;
8428 /* Return OP, stripped of any conversions to wider types as much as is safe.
8429 Converting the value back to OP's type makes a value equivalent to OP.
8431 If FOR_TYPE is nonzero, we return a value which, if converted to
8432 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8434 OP must have integer, real or enumeral type. Pointers are not allowed!
8436 There are some cases where the obvious value we could return
8437 would regenerate to OP if converted to OP's type,
8438 but would not extend like OP to wider types.
8439 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8440 For example, if OP is (unsigned short)(signed char)-1,
8441 we avoid returning (signed char)-1 if FOR_TYPE is int,
8442 even though extending that to an unsigned short would regenerate OP,
8443 since the result of extending (signed char)-1 to (int)
8444 is different from (int) OP. */
8447 get_unwidened (tree op
, tree for_type
)
8449 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8450 tree type
= TREE_TYPE (op
);
8452 = TYPE_PRECISION (for_type
!= 0 ? for_type
: type
);
8454 = (for_type
!= 0 && for_type
!= type
8455 && final_prec
> TYPE_PRECISION (type
)
8456 && TYPE_UNSIGNED (type
));
8459 while (CONVERT_EXPR_P (op
))
8463 /* TYPE_PRECISION on vector types has different meaning
8464 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8465 so avoid them here. */
8466 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op
, 0))) == VECTOR_TYPE
)
8469 bitschange
= TYPE_PRECISION (TREE_TYPE (op
))
8470 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0)));
8472 /* Truncations are many-one so cannot be removed.
8473 Unless we are later going to truncate down even farther. */
8475 && final_prec
> TYPE_PRECISION (TREE_TYPE (op
)))
8478 /* See what's inside this conversion. If we decide to strip it,
8480 op
= TREE_OPERAND (op
, 0);
8482 /* If we have not stripped any zero-extensions (uns is 0),
8483 we can strip any kind of extension.
8484 If we have previously stripped a zero-extension,
8485 only zero-extensions can safely be stripped.
8486 Any extension can be stripped if the bits it would produce
8487 are all going to be discarded later by truncating to FOR_TYPE. */
8491 if (! uns
|| final_prec
<= TYPE_PRECISION (TREE_TYPE (op
)))
8493 /* TYPE_UNSIGNED says whether this is a zero-extension.
8494 Let's avoid computing it if it does not affect WIN
8495 and if UNS will not be needed again. */
8497 || CONVERT_EXPR_P (op
))
8498 && TYPE_UNSIGNED (TREE_TYPE (op
)))
8506 /* If we finally reach a constant see if it fits in for_type and
8507 in that case convert it. */
8509 && TREE_CODE (win
) == INTEGER_CST
8510 && TREE_TYPE (win
) != for_type
8511 && int_fits_type_p (win
, for_type
))
8512 win
= fold_convert (for_type
, win
);
8517 /* Return OP or a simpler expression for a narrower value
8518 which can be sign-extended or zero-extended to give back OP.
8519 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8520 or 0 if the value should be sign-extended. */
8523 get_narrower (tree op
, int *unsignedp_ptr
)
8528 bool integral_p
= INTEGRAL_TYPE_P (TREE_TYPE (op
));
8530 while (TREE_CODE (op
) == NOP_EXPR
)
8533 = (TYPE_PRECISION (TREE_TYPE (op
))
8534 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op
, 0))));
8536 /* Truncations are many-one so cannot be removed. */
8540 /* See what's inside this conversion. If we decide to strip it,
8545 op
= TREE_OPERAND (op
, 0);
8546 /* An extension: the outermost one can be stripped,
8547 but remember whether it is zero or sign extension. */
8549 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8550 /* Otherwise, if a sign extension has been stripped,
8551 only sign extensions can now be stripped;
8552 if a zero extension has been stripped, only zero-extensions. */
8553 else if (uns
!= TYPE_UNSIGNED (TREE_TYPE (op
)))
8557 else /* bitschange == 0 */
8559 /* A change in nominal type can always be stripped, but we must
8560 preserve the unsignedness. */
8562 uns
= TYPE_UNSIGNED (TREE_TYPE (op
));
8564 op
= TREE_OPERAND (op
, 0);
8565 /* Keep trying to narrow, but don't assign op to win if it
8566 would turn an integral type into something else. */
8567 if (INTEGRAL_TYPE_P (TREE_TYPE (op
)) != integral_p
)
8574 if (TREE_CODE (op
) == COMPONENT_REF
8575 /* Since type_for_size always gives an integer type. */
8576 && TREE_CODE (TREE_TYPE (op
)) != REAL_TYPE
8577 && TREE_CODE (TREE_TYPE (op
)) != FIXED_POINT_TYPE
8578 /* Ensure field is laid out already. */
8579 && DECL_SIZE (TREE_OPERAND (op
, 1)) != 0
8580 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op
, 1))))
8582 unsigned HOST_WIDE_INT innerprec
8583 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op
, 1)));
8584 int unsignedp
= (DECL_UNSIGNED (TREE_OPERAND (op
, 1))
8585 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op
, 1))));
8586 tree type
= lang_hooks
.types
.type_for_size (innerprec
, unsignedp
);
8588 /* We can get this structure field in a narrower type that fits it,
8589 but the resulting extension to its nominal type (a fullword type)
8590 must satisfy the same conditions as for other extensions.
8592 Do this only for fields that are aligned (not bit-fields),
8593 because when bit-field insns will be used there is no
8594 advantage in doing this. */
8596 if (innerprec
< TYPE_PRECISION (TREE_TYPE (op
))
8597 && ! DECL_BIT_FIELD (TREE_OPERAND (op
, 1))
8598 && (first
|| uns
== DECL_UNSIGNED (TREE_OPERAND (op
, 1)))
8602 uns
= DECL_UNSIGNED (TREE_OPERAND (op
, 1));
8603 win
= fold_convert (type
, op
);
8607 *unsignedp_ptr
= uns
;
8611 /* Returns true if integer constant C has a value that is permissible
8612 for type TYPE (an INTEGER_TYPE). */
8615 int_fits_type_p (const_tree c
, const_tree type
)
8617 tree type_low_bound
, type_high_bound
;
8618 bool ok_for_low_bound
, ok_for_high_bound
;
8619 signop sgn_c
= TYPE_SIGN (TREE_TYPE (c
));
8622 type_low_bound
= TYPE_MIN_VALUE (type
);
8623 type_high_bound
= TYPE_MAX_VALUE (type
);
8625 /* If at least one bound of the type is a constant integer, we can check
8626 ourselves and maybe make a decision. If no such decision is possible, but
8627 this type is a subtype, try checking against that. Otherwise, use
8628 fits_to_tree_p, which checks against the precision.
8630 Compute the status for each possibly constant bound, and return if we see
8631 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8632 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8633 for "constant known to fit". */
8635 /* Check if c >= type_low_bound. */
8636 if (type_low_bound
&& TREE_CODE (type_low_bound
) == INTEGER_CST
)
8638 if (tree_int_cst_lt (c
, type_low_bound
))
8640 ok_for_low_bound
= true;
8643 ok_for_low_bound
= false;
8645 /* Check if c <= type_high_bound. */
8646 if (type_high_bound
&& TREE_CODE (type_high_bound
) == INTEGER_CST
)
8648 if (tree_int_cst_lt (type_high_bound
, c
))
8650 ok_for_high_bound
= true;
8653 ok_for_high_bound
= false;
8655 /* If the constant fits both bounds, the result is known. */
8656 if (ok_for_low_bound
&& ok_for_high_bound
)
8659 /* Perform some generic filtering which may allow making a decision
8660 even if the bounds are not constant. First, negative integers
8661 never fit in unsigned types, */
8662 if (TYPE_UNSIGNED (type
) && sgn_c
== SIGNED
&& wi::neg_p (c
))
8665 /* Second, narrower types always fit in wider ones. */
8666 if (TYPE_PRECISION (type
) > TYPE_PRECISION (TREE_TYPE (c
)))
8669 /* Third, unsigned integers with top bit set never fit signed types. */
8670 if (!TYPE_UNSIGNED (type
) && sgn_c
== UNSIGNED
)
8672 int prec
= GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c
))) - 1;
8673 if (prec
< TYPE_PRECISION (TREE_TYPE (c
)))
8675 /* When a tree_cst is converted to a wide-int, the precision
8676 is taken from the type. However, if the precision of the
8677 mode underneath the type is smaller than that, it is
8678 possible that the value will not fit. The test below
8679 fails if any bit is set between the sign bit of the
8680 underlying mode and the top bit of the type. */
8681 if (wi::ne_p (wi::zext (c
, prec
- 1), c
))
8684 else if (wi::neg_p (c
))
8688 /* If we haven't been able to decide at this point, there nothing more we
8689 can check ourselves here. Look at the base type if we have one and it
8690 has the same precision. */
8691 if (TREE_CODE (type
) == INTEGER_TYPE
8692 && TREE_TYPE (type
) != 0
8693 && TYPE_PRECISION (type
) == TYPE_PRECISION (TREE_TYPE (type
)))
8695 type
= TREE_TYPE (type
);
8699 /* Or to fits_to_tree_p, if nothing else. */
8700 return wi::fits_to_tree_p (c
, type
);
8703 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8704 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8705 represented (assuming two's-complement arithmetic) within the bit
8706 precision of the type are returned instead. */
8709 get_type_static_bounds (const_tree type
, mpz_t min
, mpz_t max
)
8711 if (!POINTER_TYPE_P (type
) && TYPE_MIN_VALUE (type
)
8712 && TREE_CODE (TYPE_MIN_VALUE (type
)) == INTEGER_CST
)
8713 wi::to_mpz (TYPE_MIN_VALUE (type
), min
, TYPE_SIGN (type
));
8716 if (TYPE_UNSIGNED (type
))
8717 mpz_set_ui (min
, 0);
8720 wide_int mn
= wi::min_value (TYPE_PRECISION (type
), SIGNED
);
8721 wi::to_mpz (mn
, min
, SIGNED
);
8725 if (!POINTER_TYPE_P (type
) && TYPE_MAX_VALUE (type
)
8726 && TREE_CODE (TYPE_MAX_VALUE (type
)) == INTEGER_CST
)
8727 wi::to_mpz (TYPE_MAX_VALUE (type
), max
, TYPE_SIGN (type
));
8730 wide_int mn
= wi::max_value (TYPE_PRECISION (type
), TYPE_SIGN (type
));
8731 wi::to_mpz (mn
, max
, TYPE_SIGN (type
));
8735 /* Return true if VAR is an automatic variable defined in function FN. */
8738 auto_var_in_fn_p (const_tree var
, const_tree fn
)
8740 return (DECL_P (var
) && DECL_CONTEXT (var
) == fn
8741 && ((((TREE_CODE (var
) == VAR_DECL
&& ! DECL_EXTERNAL (var
))
8742 || TREE_CODE (var
) == PARM_DECL
)
8743 && ! TREE_STATIC (var
))
8744 || TREE_CODE (var
) == LABEL_DECL
8745 || TREE_CODE (var
) == RESULT_DECL
));
8748 /* Subprogram of following function. Called by walk_tree.
8750 Return *TP if it is an automatic variable or parameter of the
8751 function passed in as DATA. */
8754 find_var_from_fn (tree
*tp
, int *walk_subtrees
, void *data
)
8756 tree fn
= (tree
) data
;
8761 else if (DECL_P (*tp
)
8762 && auto_var_in_fn_p (*tp
, fn
))
8768 /* Returns true if T is, contains, or refers to a type with variable
8769 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8770 arguments, but not the return type. If FN is nonzero, only return
8771 true if a modifier of the type or position of FN is a variable or
8772 parameter inside FN.
8774 This concept is more general than that of C99 'variably modified types':
8775 in C99, a struct type is never variably modified because a VLA may not
8776 appear as a structure member. However, in GNU C code like:
8778 struct S { int i[f()]; };
8780 is valid, and other languages may define similar constructs. */
8783 variably_modified_type_p (tree type
, tree fn
)
8787 /* Test if T is either variable (if FN is zero) or an expression containing
8788 a variable in FN. If TYPE isn't gimplified, return true also if
8789 gimplify_one_sizepos would gimplify the expression into a local
8791 #define RETURN_TRUE_IF_VAR(T) \
8792 do { tree _t = (T); \
8793 if (_t != NULL_TREE \
8794 && _t != error_mark_node \
8795 && TREE_CODE (_t) != INTEGER_CST \
8796 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8798 || (!TYPE_SIZES_GIMPLIFIED (type) \
8799 && !is_gimple_sizepos (_t)) \
8800 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8801 return true; } while (0)
8803 if (type
== error_mark_node
)
8806 /* If TYPE itself has variable size, it is variably modified. */
8807 RETURN_TRUE_IF_VAR (TYPE_SIZE (type
));
8808 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type
));
8810 switch (TREE_CODE (type
))
8813 case REFERENCE_TYPE
:
8815 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8821 /* If TYPE is a function type, it is variably modified if the
8822 return type is variably modified. */
8823 if (variably_modified_type_p (TREE_TYPE (type
), fn
))
8829 case FIXED_POINT_TYPE
:
8832 /* Scalar types are variably modified if their end points
8834 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type
));
8835 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type
));
8840 case QUAL_UNION_TYPE
:
8841 /* We can't see if any of the fields are variably-modified by the
8842 definition we normally use, since that would produce infinite
8843 recursion via pointers. */
8844 /* This is variably modified if some field's type is. */
8845 for (t
= TYPE_FIELDS (type
); t
; t
= DECL_CHAIN (t
))
8846 if (TREE_CODE (t
) == FIELD_DECL
)
8848 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t
));
8849 RETURN_TRUE_IF_VAR (DECL_SIZE (t
));
8850 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t
));
8852 if (TREE_CODE (type
) == QUAL_UNION_TYPE
)
8853 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t
));
8858 /* Do not call ourselves to avoid infinite recursion. This is
8859 variably modified if the element type is. */
8860 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type
)));
8861 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type
)));
8868 /* The current language may have other cases to check, but in general,
8869 all other types are not variably modified. */
8870 return lang_hooks
.tree_inlining
.var_mod_type_p (type
, fn
);
8872 #undef RETURN_TRUE_IF_VAR
8875 /* Given a DECL or TYPE, return the scope in which it was declared, or
8876 NULL_TREE if there is no containing scope. */
8879 get_containing_scope (const_tree t
)
8881 return (TYPE_P (t
) ? TYPE_CONTEXT (t
) : DECL_CONTEXT (t
));
8884 /* Return the innermost context enclosing DECL that is
8885 a FUNCTION_DECL, or zero if none. */
8888 decl_function_context (const_tree decl
)
8892 if (TREE_CODE (decl
) == ERROR_MARK
)
8895 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8896 where we look up the function at runtime. Such functions always take
8897 a first argument of type 'pointer to real context'.
8899 C++ should really be fixed to use DECL_CONTEXT for the real context,
8900 and use something else for the "virtual context". */
8901 else if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_VINDEX (decl
))
8904 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl
)))));
8906 context
= DECL_CONTEXT (decl
);
8908 while (context
&& TREE_CODE (context
) != FUNCTION_DECL
)
8910 if (TREE_CODE (context
) == BLOCK
)
8911 context
= BLOCK_SUPERCONTEXT (context
);
8913 context
= get_containing_scope (context
);
8919 /* Return the innermost context enclosing DECL that is
8920 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8921 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8924 decl_type_context (const_tree decl
)
8926 tree context
= DECL_CONTEXT (decl
);
8929 switch (TREE_CODE (context
))
8931 case NAMESPACE_DECL
:
8932 case TRANSLATION_UNIT_DECL
:
8937 case QUAL_UNION_TYPE
:
8942 context
= DECL_CONTEXT (context
);
8946 context
= BLOCK_SUPERCONTEXT (context
);
8956 /* CALL is a CALL_EXPR. Return the declaration for the function
8957 called, or NULL_TREE if the called function cannot be
8961 get_callee_fndecl (const_tree call
)
8965 if (call
== error_mark_node
)
8966 return error_mark_node
;
8968 /* It's invalid to call this function with anything but a
8970 gcc_assert (TREE_CODE (call
) == CALL_EXPR
);
8972 /* The first operand to the CALL is the address of the function
8974 addr
= CALL_EXPR_FN (call
);
8976 /* If there is no function, return early. */
8977 if (addr
== NULL_TREE
)
8982 /* If this is a readonly function pointer, extract its initial value. */
8983 if (DECL_P (addr
) && TREE_CODE (addr
) != FUNCTION_DECL
8984 && TREE_READONLY (addr
) && ! TREE_THIS_VOLATILE (addr
)
8985 && DECL_INITIAL (addr
))
8986 addr
= DECL_INITIAL (addr
);
8988 /* If the address is just `&f' for some function `f', then we know
8989 that `f' is being called. */
8990 if (TREE_CODE (addr
) == ADDR_EXPR
8991 && TREE_CODE (TREE_OPERAND (addr
, 0)) == FUNCTION_DECL
)
8992 return TREE_OPERAND (addr
, 0);
8994 /* We couldn't figure out what was being called. */
8998 /* Print debugging information about tree nodes generated during the compile,
8999 and any language-specific information. */
9002 dump_tree_statistics (void)
9004 if (GATHER_STATISTICS
)
9007 int total_nodes
, total_bytes
;
9008 fprintf (stderr
, "Kind Nodes Bytes\n");
9009 fprintf (stderr
, "---------------------------------------\n");
9010 total_nodes
= total_bytes
= 0;
9011 for (i
= 0; i
< (int) all_kinds
; i
++)
9013 fprintf (stderr
, "%-20s %7d %10d\n", tree_node_kind_names
[i
],
9014 tree_node_counts
[i
], tree_node_sizes
[i
]);
9015 total_nodes
+= tree_node_counts
[i
];
9016 total_bytes
+= tree_node_sizes
[i
];
9018 fprintf (stderr
, "---------------------------------------\n");
9019 fprintf (stderr
, "%-20s %7d %10d\n", "Total", total_nodes
, total_bytes
);
9020 fprintf (stderr
, "---------------------------------------\n");
9021 fprintf (stderr
, "Code Nodes\n");
9022 fprintf (stderr
, "----------------------------\n");
9023 for (i
= 0; i
< (int) MAX_TREE_CODES
; i
++)
9024 fprintf (stderr
, "%-20s %7d\n", get_tree_code_name ((enum tree_code
) i
),
9025 tree_code_counts
[i
]);
9026 fprintf (stderr
, "----------------------------\n");
9027 ssanames_print_statistics ();
9028 phinodes_print_statistics ();
9031 fprintf (stderr
, "(No per-node statistics)\n");
9033 print_type_hash_statistics ();
9034 print_debug_expr_statistics ();
9035 print_value_expr_statistics ();
9036 lang_hooks
.print_statistics ();
#define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"

/* Generate a crc32 of the low BITS bits of VALUE, folded into CHKSUM.
   (The original comment said "of a byte", which is only true for the
   crc32_byte wrapper; this routine handles an arbitrary bit count.)
   Uses the standard CRC-32 polynomial 0x04c11db7, MSB-first.  */

static unsigned
crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
{
  unsigned ix;

  for (ix = bits; ix--; value <<= 1)
    {
      unsigned feedback;

      /* Feed the polynomial back in when the top bits of the shift
	 register and the input disagree.  */
      feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
      chksum <<= 1;
      chksum ^= feedback;
    }

  return chksum;
}
/* Generate a crc32 of a 32-bit unsigned.  */

unsigned
crc32_unsigned (unsigned chksum, unsigned value)
{
  return crc32_unsigned_bits (chksum, value, 32);
}
/* Generate a crc32 of a byte.  */

unsigned
crc32_byte (unsigned chksum, char byte)
{
  /* Shift the byte into the top of the 32-bit register before folding.  */
  return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
}
/* Generate a crc32 of a string.  The terminating NUL byte is included
   in the checksum (the loop tests *string++ after hashing).  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  do
    {
      chksum = crc32_byte (chksum, *string);
    }
  while (*string++);

  return chksum;
}
/* P is a string that will be used in a symbol.  Mask out any characters
   that are not valid in that context.  Replaces each invalid character
   with '_' in place.  */

void
clean_symbol_name (char *p)
{
  for (; *p; p++)
    if (! (ISALNUM (*p)
#ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
	   || *p == '$'
#endif
#ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
	   || *p == '.'
#endif
	   ))
      *p = '_';
}
9106 /* Generate a name for a special-purpose function.
9107 The generated name may need to be unique across the whole link.
9108 Changes to this function may also require corresponding changes to
9109 xstrdup_mask_random.
9110 TYPE is some string to identify the purpose of this function to the
9111 linker or collect2; it must start with an uppercase letter,
9113 I - for constructors
9115 N - for C++ anonymous namespaces
9116 F - for DWARF unwind frame information. */
9119 get_file_function_name (const char *type
)
9125 /* If we already have a name we know to be unique, just use that. */
9126 if (first_global_object_name
)
9127 p
= q
= ASTRDUP (first_global_object_name
);
9128 /* If the target is handling the constructors/destructors, they
9129 will be local to this file and the name is only necessary for
9131 We also assign sub_I and sub_D sufixes to constructors called from
9132 the global static constructors. These are always local. */
9133 else if (((type
[0] == 'I' || type
[0] == 'D') && targetm
.have_ctors_dtors
)
9134 || (strncmp (type
, "sub_", 4) == 0
9135 && (type
[4] == 'I' || type
[4] == 'D')))
9137 const char *file
= main_input_filename
;
9139 file
= LOCATION_FILE (input_location
);
9140 /* Just use the file's basename, because the full pathname
9141 might be quite long. */
9142 p
= q
= ASTRDUP (lbasename (file
));
9146 /* Otherwise, the name must be unique across the entire link.
9147 We don't have anything that we know to be unique to this translation
9148 unit, so use what we do have and throw in some randomness. */
9150 const char *name
= weak_global_object_name
;
9151 const char *file
= main_input_filename
;
9156 file
= LOCATION_FILE (input_location
);
9158 len
= strlen (file
);
9159 q
= (char *) alloca (9 + 17 + len
+ 1);
9160 memcpy (q
, file
, len
+ 1);
9162 snprintf (q
+ len
, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX
,
9163 crc32_string (0, name
), get_random_seed (false));
9168 clean_symbol_name (q
);
9169 buf
= (char *) alloca (sizeof (FILE_FUNCTION_FORMAT
) + strlen (p
)
9172 /* Set up the name of the file-level functions we may need.
9173 Use a global object (which is already required to be unique over
9174 the program) rather than the file name (which imposes extra
9176 sprintf (buf
, FILE_FUNCTION_FORMAT
, type
, p
);
9178 return get_identifier (buf
);
9181 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9183 /* Complain that the tree code of NODE does not match the expected 0
9184 terminated list of trailing codes. The trailing code list can be
9185 empty, for a more vague error message. FILE, LINE, and FUNCTION
9186 are of the caller. */
9189 tree_check_failed (const_tree node
, const char *file
,
9190 int line
, const char *function
, ...)
9194 unsigned length
= 0;
9195 enum tree_code code
;
9197 va_start (args
, function
);
9198 while ((code
= (enum tree_code
) va_arg (args
, int)))
9199 length
+= 4 + strlen (get_tree_code_name (code
));
9204 va_start (args
, function
);
9205 length
+= strlen ("expected ");
9206 buffer
= tmp
= (char *) alloca (length
);
9208 while ((code
= (enum tree_code
) va_arg (args
, int)))
9210 const char *prefix
= length
? " or " : "expected ";
9212 strcpy (tmp
+ length
, prefix
);
9213 length
+= strlen (prefix
);
9214 strcpy (tmp
+ length
, get_tree_code_name (code
));
9215 length
+= strlen (get_tree_code_name (code
));
9220 buffer
= "unexpected node";
9222 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9223 buffer
, get_tree_code_name (TREE_CODE (node
)),
9224 function
, trim_filename (file
), line
);
9227 /* Complain that the tree code of NODE does match the expected 0
9228 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9232 tree_not_check_failed (const_tree node
, const char *file
,
9233 int line
, const char *function
, ...)
9237 unsigned length
= 0;
9238 enum tree_code code
;
9240 va_start (args
, function
);
9241 while ((code
= (enum tree_code
) va_arg (args
, int)))
9242 length
+= 4 + strlen (get_tree_code_name (code
));
9244 va_start (args
, function
);
9245 buffer
= (char *) alloca (length
);
9247 while ((code
= (enum tree_code
) va_arg (args
, int)))
9251 strcpy (buffer
+ length
, " or ");
9254 strcpy (buffer
+ length
, get_tree_code_name (code
));
9255 length
+= strlen (get_tree_code_name (code
));
9259 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9260 buffer
, get_tree_code_name (TREE_CODE (node
)),
9261 function
, trim_filename (file
), line
);
9264 /* Similar to tree_check_failed, except that we check for a class of tree
9265 code, given in CL. */
9268 tree_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9269 const char *file
, int line
, const char *function
)
9272 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9273 TREE_CODE_CLASS_STRING (cl
),
9274 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9275 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9278 /* Similar to tree_check_failed, except that instead of specifying a
9279 dozen codes, use the knowledge that they're all sequential. */
9282 tree_range_check_failed (const_tree node
, const char *file
, int line
,
9283 const char *function
, enum tree_code c1
,
9287 unsigned length
= 0;
9290 for (c
= c1
; c
<= c2
; ++c
)
9291 length
+= 4 + strlen (get_tree_code_name ((enum tree_code
) c
));
9293 length
+= strlen ("expected ");
9294 buffer
= (char *) alloca (length
);
9297 for (c
= c1
; c
<= c2
; ++c
)
9299 const char *prefix
= length
? " or " : "expected ";
9301 strcpy (buffer
+ length
, prefix
);
9302 length
+= strlen (prefix
);
9303 strcpy (buffer
+ length
, get_tree_code_name ((enum tree_code
) c
));
9304 length
+= strlen (get_tree_code_name ((enum tree_code
) c
));
9307 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9308 buffer
, get_tree_code_name (TREE_CODE (node
)),
9309 function
, trim_filename (file
), line
);
9313 /* Similar to tree_check_failed, except that we check that a tree does
9314 not have the specified code, given in CL. */
9317 tree_not_class_check_failed (const_tree node
, const enum tree_code_class cl
,
9318 const char *file
, int line
, const char *function
)
9321 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9322 TREE_CODE_CLASS_STRING (cl
),
9323 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node
))),
9324 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
9328 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9331 omp_clause_check_failed (const_tree node
, const char *file
, int line
,
9332 const char *function
, enum omp_clause_code code
)
9334 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9335 omp_clause_code_name
[code
], get_tree_code_name (TREE_CODE (node
)),
9336 function
, trim_filename (file
), line
);
9340 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9343 omp_clause_range_check_failed (const_tree node
, const char *file
, int line
,
9344 const char *function
, enum omp_clause_code c1
,
9345 enum omp_clause_code c2
)
9348 unsigned length
= 0;
9351 for (c
= c1
; c
<= c2
; ++c
)
9352 length
+= 4 + strlen (omp_clause_code_name
[c
]);
9354 length
+= strlen ("expected ");
9355 buffer
= (char *) alloca (length
);
9358 for (c
= c1
; c
<= c2
; ++c
)
9360 const char *prefix
= length
? " or " : "expected ";
9362 strcpy (buffer
+ length
, prefix
);
9363 length
+= strlen (prefix
);
9364 strcpy (buffer
+ length
, omp_clause_code_name
[c
]);
9365 length
+= strlen (omp_clause_code_name
[c
]);
9368 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9369 buffer
, omp_clause_code_name
[TREE_CODE (node
)],
9370 function
, trim_filename (file
), line
);
9374 #undef DEFTREESTRUCT
9375 #define DEFTREESTRUCT(VAL, NAME) NAME,
9377 static const char *ts_enum_names
[] = {
9378 #include "treestruct.def"
9380 #undef DEFTREESTRUCT
9382 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9384 /* Similar to tree_class_check_failed, except that we check for
9385 whether CODE contains the tree structure identified by EN. */
9388 tree_contains_struct_check_failed (const_tree node
,
9389 const enum tree_node_structure_enum en
,
9390 const char *file
, int line
,
9391 const char *function
)
9394 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9396 get_tree_code_name (TREE_CODE (node
)), function
, trim_filename (file
), line
);
/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) element vector.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}
9424 /* Similar to above, except that the check is for the bounds of the operand
9425 vector of an expression node EXP. */
9428 tree_operand_check_failed (int idx
, const_tree exp
, const char *file
,
9429 int line
, const char *function
)
9431 enum tree_code code
= TREE_CODE (exp
);
9433 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9434 idx
+ 1, get_tree_code_name (code
), TREE_OPERAND_LENGTH (exp
),
9435 function
, trim_filename (file
), line
);
9438 /* Similar to above, except that the check is for the number of
9439 operands of an OMP_CLAUSE node. */
9442 omp_clause_operand_check_failed (int idx
, const_tree t
, const char *file
,
9443 int line
, const char *function
)
9446 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9447 "in %s, at %s:%d", idx
+ 1, omp_clause_code_name
[OMP_CLAUSE_CODE (t
)],
9448 omp_clause_num_ops
[OMP_CLAUSE_CODE (t
)], function
,
9449 trim_filename (file
), line
);
9451 #endif /* ENABLE_TREE_CHECKING */
9453 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9454 and mapped to the machine mode MODE. Initialize its fields and build
9455 the information necessary for debugging output. */
9458 make_vector_type (tree innertype
, int nunits
, enum machine_mode mode
)
9461 hashval_t hashcode
= 0;
9463 t
= make_node (VECTOR_TYPE
);
9464 TREE_TYPE (t
) = TYPE_MAIN_VARIANT (innertype
);
9465 SET_TYPE_VECTOR_SUBPARTS (t
, nunits
);
9466 SET_TYPE_MODE (t
, mode
);
9468 if (TYPE_STRUCTURAL_EQUALITY_P (innertype
))
9469 SET_TYPE_STRUCTURAL_EQUALITY (t
);
9470 else if (TYPE_CANONICAL (innertype
) != innertype
9471 || mode
!= VOIDmode
)
9473 = make_vector_type (TYPE_CANONICAL (innertype
), nunits
, VOIDmode
);
9477 hashcode
= iterative_hash_host_wide_int (VECTOR_TYPE
, hashcode
);
9478 hashcode
= iterative_hash_host_wide_int (nunits
, hashcode
);
9479 hashcode
= iterative_hash_host_wide_int (mode
, hashcode
);
9480 hashcode
= iterative_hash_object (TYPE_HASH (TREE_TYPE (t
)), hashcode
);
9481 t
= type_hash_canon (hashcode
, t
);
9483 /* We have built a main variant, based on the main variant of the
9484 inner type. Use it to build the variant we return. */
9485 if ((TYPE_ATTRIBUTES (innertype
) || TYPE_QUALS (innertype
))
9486 && TREE_TYPE (t
) != innertype
)
9487 return build_type_attribute_qual_variant (t
,
9488 TYPE_ATTRIBUTES (innertype
),
9489 TYPE_QUALS (innertype
));
9495 make_or_reuse_type (unsigned size
, int unsignedp
)
9497 if (size
== INT_TYPE_SIZE
)
9498 return unsignedp
? unsigned_type_node
: integer_type_node
;
9499 if (size
== CHAR_TYPE_SIZE
)
9500 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
9501 if (size
== SHORT_TYPE_SIZE
)
9502 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
9503 if (size
== LONG_TYPE_SIZE
)
9504 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
9505 if (size
== LONG_LONG_TYPE_SIZE
)
9506 return (unsignedp
? long_long_unsigned_type_node
9507 : long_long_integer_type_node
);
9508 if (size
== 128 && int128_integer_type_node
)
9509 return (unsignedp
? int128_unsigned_type_node
9510 : int128_integer_type_node
);
9513 return make_unsigned_type (size
);
9515 return make_signed_type (size
);
9518 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9521 make_or_reuse_fract_type (unsigned size
, int unsignedp
, int satp
)
9525 if (size
== SHORT_FRACT_TYPE_SIZE
)
9526 return unsignedp
? sat_unsigned_short_fract_type_node
9527 : sat_short_fract_type_node
;
9528 if (size
== FRACT_TYPE_SIZE
)
9529 return unsignedp
? sat_unsigned_fract_type_node
: sat_fract_type_node
;
9530 if (size
== LONG_FRACT_TYPE_SIZE
)
9531 return unsignedp
? sat_unsigned_long_fract_type_node
9532 : sat_long_fract_type_node
;
9533 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9534 return unsignedp
? sat_unsigned_long_long_fract_type_node
9535 : sat_long_long_fract_type_node
;
9539 if (size
== SHORT_FRACT_TYPE_SIZE
)
9540 return unsignedp
? unsigned_short_fract_type_node
9541 : short_fract_type_node
;
9542 if (size
== FRACT_TYPE_SIZE
)
9543 return unsignedp
? unsigned_fract_type_node
: fract_type_node
;
9544 if (size
== LONG_FRACT_TYPE_SIZE
)
9545 return unsignedp
? unsigned_long_fract_type_node
9546 : long_fract_type_node
;
9547 if (size
== LONG_LONG_FRACT_TYPE_SIZE
)
9548 return unsignedp
? unsigned_long_long_fract_type_node
9549 : long_long_fract_type_node
;
9552 return make_fract_type (size
, unsignedp
, satp
);
9555 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9558 make_or_reuse_accum_type (unsigned size
, int unsignedp
, int satp
)
9562 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9563 return unsignedp
? sat_unsigned_short_accum_type_node
9564 : sat_short_accum_type_node
;
9565 if (size
== ACCUM_TYPE_SIZE
)
9566 return unsignedp
? sat_unsigned_accum_type_node
: sat_accum_type_node
;
9567 if (size
== LONG_ACCUM_TYPE_SIZE
)
9568 return unsignedp
? sat_unsigned_long_accum_type_node
9569 : sat_long_accum_type_node
;
9570 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9571 return unsignedp
? sat_unsigned_long_long_accum_type_node
9572 : sat_long_long_accum_type_node
;
9576 if (size
== SHORT_ACCUM_TYPE_SIZE
)
9577 return unsignedp
? unsigned_short_accum_type_node
9578 : short_accum_type_node
;
9579 if (size
== ACCUM_TYPE_SIZE
)
9580 return unsignedp
? unsigned_accum_type_node
: accum_type_node
;
9581 if (size
== LONG_ACCUM_TYPE_SIZE
)
9582 return unsignedp
? unsigned_long_accum_type_node
9583 : long_accum_type_node
;
9584 if (size
== LONG_LONG_ACCUM_TYPE_SIZE
)
9585 return unsignedp
? unsigned_long_long_accum_type_node
9586 : long_long_accum_type_node
;
9589 return make_accum_type (size
, unsignedp
, satp
);
9593 /* Create an atomic variant node for TYPE. This routine is called
9594 during initialization of data types to create the 5 basic atomic
9595 types. The generic build_variant_type function requires these to
9596 already be set up in order to function properly, so cannot be
9597 called from there. If ALIGN is non-zero, then ensure alignment is
9598 overridden to this value. */
9601 build_atomic_base (tree type
, unsigned int align
)
9605 /* Make sure its not already registered. */
9606 if ((t
= get_qualified_type (type
, TYPE_QUAL_ATOMIC
)))
9609 t
= build_variant_type_copy (type
);
9610 set_type_quals (t
, TYPE_QUAL_ATOMIC
);
9613 TYPE_ALIGN (t
) = align
;
9618 /* Create nodes for all integer types (and error_mark_node) using the sizes
9619 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9620 SHORT_DOUBLE specifies whether double should be of the same precision
9624 build_common_tree_nodes (bool signed_char
, bool short_double
)
9626 error_mark_node
= make_node (ERROR_MARK
);
9627 TREE_TYPE (error_mark_node
) = error_mark_node
;
9629 initialize_sizetypes ();
9631 /* Define both `signed char' and `unsigned char'. */
9632 signed_char_type_node
= make_signed_type (CHAR_TYPE_SIZE
);
9633 TYPE_STRING_FLAG (signed_char_type_node
) = 1;
9634 unsigned_char_type_node
= make_unsigned_type (CHAR_TYPE_SIZE
);
9635 TYPE_STRING_FLAG (unsigned_char_type_node
) = 1;
9637 /* Define `char', which is like either `signed char' or `unsigned char'
9638 but not the same as either. */
9641 ? make_signed_type (CHAR_TYPE_SIZE
)
9642 : make_unsigned_type (CHAR_TYPE_SIZE
));
9643 TYPE_STRING_FLAG (char_type_node
) = 1;
9645 short_integer_type_node
= make_signed_type (SHORT_TYPE_SIZE
);
9646 short_unsigned_type_node
= make_unsigned_type (SHORT_TYPE_SIZE
);
9647 integer_type_node
= make_signed_type (INT_TYPE_SIZE
);
9648 unsigned_type_node
= make_unsigned_type (INT_TYPE_SIZE
);
9649 long_integer_type_node
= make_signed_type (LONG_TYPE_SIZE
);
9650 long_unsigned_type_node
= make_unsigned_type (LONG_TYPE_SIZE
);
9651 long_long_integer_type_node
= make_signed_type (LONG_LONG_TYPE_SIZE
);
9652 long_long_unsigned_type_node
= make_unsigned_type (LONG_LONG_TYPE_SIZE
);
9653 #if HOST_BITS_PER_WIDE_INT >= 64
9654 /* TODO: This isn't correct, but as logic depends at the moment on
9655 host's instead of target's wide-integer.
9656 If there is a target not supporting TImode, but has an 128-bit
9657 integer-scalar register, this target check needs to be adjusted. */
9658 if (targetm
.scalar_mode_supported_p (TImode
))
9660 int128_integer_type_node
= make_signed_type (128);
9661 int128_unsigned_type_node
= make_unsigned_type (128);
9665 /* Define a boolean type. This type only represents boolean values but
9666 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9667 boolean_type_node
= make_unsigned_type (BOOL_TYPE_SIZE
);
9668 TREE_SET_CODE (boolean_type_node
, BOOLEAN_TYPE
);
9669 TYPE_PRECISION (boolean_type_node
) = 1;
9670 TYPE_MAX_VALUE (boolean_type_node
) = build_int_cst (boolean_type_node
, 1);
9672 /* Define what type to use for size_t. */
9673 if (strcmp (SIZE_TYPE
, "unsigned int") == 0)
9674 size_type_node
= unsigned_type_node
;
9675 else if (strcmp (SIZE_TYPE
, "long unsigned int") == 0)
9676 size_type_node
= long_unsigned_type_node
;
9677 else if (strcmp (SIZE_TYPE
, "long long unsigned int") == 0)
9678 size_type_node
= long_long_unsigned_type_node
;
9679 else if (strcmp (SIZE_TYPE
, "short unsigned int") == 0)
9680 size_type_node
= short_unsigned_type_node
;
9684 /* Fill in the rest of the sized types. Reuse existing type nodes
9686 intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 0);
9687 intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 0);
9688 intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 0);
9689 intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 0);
9690 intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 0);
9692 unsigned_intQI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (QImode
), 1);
9693 unsigned_intHI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (HImode
), 1);
9694 unsigned_intSI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (SImode
), 1);
9695 unsigned_intDI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (DImode
), 1);
9696 unsigned_intTI_type_node
= make_or_reuse_type (GET_MODE_BITSIZE (TImode
), 1);
9698 /* Don't call build_qualified type for atomics. That routine does
9699 special processing for atomics, and until they are initialized
9700 it's better not to make that call.
9702 Check to see if there is a target override for atomic types. */
9704 atomicQI_type_node
= build_atomic_base (unsigned_intQI_type_node
,
9705 targetm
.atomic_align_for_mode (QImode
));
9706 atomicHI_type_node
= build_atomic_base (unsigned_intHI_type_node
,
9707 targetm
.atomic_align_for_mode (HImode
));
9708 atomicSI_type_node
= build_atomic_base (unsigned_intSI_type_node
,
9709 targetm
.atomic_align_for_mode (SImode
));
9710 atomicDI_type_node
= build_atomic_base (unsigned_intDI_type_node
,
9711 targetm
.atomic_align_for_mode (DImode
));
9712 atomicTI_type_node
= build_atomic_base (unsigned_intTI_type_node
,
9713 targetm
.atomic_align_for_mode (TImode
));
9715 access_public_node
= get_identifier ("public");
9716 access_protected_node
= get_identifier ("protected");
9717 access_private_node
= get_identifier ("private");
9719 /* Define these next since types below may used them. */
9720 integer_zero_node
= build_int_cst (integer_type_node
, 0);
9721 integer_one_node
= build_int_cst (integer_type_node
, 1);
9722 integer_three_node
= build_int_cst (integer_type_node
, 3);
9723 integer_minus_one_node
= build_int_cst (integer_type_node
, -1);
9725 size_zero_node
= size_int (0);
9726 size_one_node
= size_int (1);
9727 bitsize_zero_node
= bitsize_int (0);
9728 bitsize_one_node
= bitsize_int (1);
9729 bitsize_unit_node
= bitsize_int (BITS_PER_UNIT
);
9731 boolean_false_node
= TYPE_MIN_VALUE (boolean_type_node
);
9732 boolean_true_node
= TYPE_MAX_VALUE (boolean_type_node
);
9734 void_type_node
= make_node (VOID_TYPE
);
9735 layout_type (void_type_node
);
9737 /* We are not going to have real types in C with less than byte alignment,
9738 so we might as well not have any types that claim to have it. */
9739 TYPE_ALIGN (void_type_node
) = BITS_PER_UNIT
;
9740 TYPE_USER_ALIGN (void_type_node
) = 0;
9742 void_node
= make_node (VOID_CST
);
9743 TREE_TYPE (void_node
) = void_type_node
;
9745 null_pointer_node
= build_int_cst (build_pointer_type (void_type_node
), 0);
9746 layout_type (TREE_TYPE (null_pointer_node
));
9748 ptr_type_node
= build_pointer_type (void_type_node
);
9750 = build_pointer_type (build_type_variant (void_type_node
, 1, 0));
9751 fileptr_type_node
= ptr_type_node
;
9753 pointer_sized_int_node
= build_nonstandard_integer_type (POINTER_SIZE
, 1);
9755 float_type_node
= make_node (REAL_TYPE
);
9756 TYPE_PRECISION (float_type_node
) = FLOAT_TYPE_SIZE
;
9757 layout_type (float_type_node
);
9759 double_type_node
= make_node (REAL_TYPE
);
9761 TYPE_PRECISION (double_type_node
) = FLOAT_TYPE_SIZE
;
9763 TYPE_PRECISION (double_type_node
) = DOUBLE_TYPE_SIZE
;
9764 layout_type (double_type_node
);
9766 long_double_type_node
= make_node (REAL_TYPE
);
9767 TYPE_PRECISION (long_double_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
9768 layout_type (long_double_type_node
);
9770 float_ptr_type_node
= build_pointer_type (float_type_node
);
9771 double_ptr_type_node
= build_pointer_type (double_type_node
);
9772 long_double_ptr_type_node
= build_pointer_type (long_double_type_node
);
9773 integer_ptr_type_node
= build_pointer_type (integer_type_node
);
9775 /* Fixed size integer types. */
9776 uint16_type_node
= build_nonstandard_integer_type (16, true);
9777 uint32_type_node
= build_nonstandard_integer_type (32, true);
9778 uint64_type_node
= build_nonstandard_integer_type (64, true);
9780 /* Decimal float types. */
9781 dfloat32_type_node
= make_node (REAL_TYPE
);
9782 TYPE_PRECISION (dfloat32_type_node
) = DECIMAL32_TYPE_SIZE
;
9783 layout_type (dfloat32_type_node
);
9784 SET_TYPE_MODE (dfloat32_type_node
, SDmode
);
9785 dfloat32_ptr_type_node
= build_pointer_type (dfloat32_type_node
);
9787 dfloat64_type_node
= make_node (REAL_TYPE
);
9788 TYPE_PRECISION (dfloat64_type_node
) = DECIMAL64_TYPE_SIZE
;
9789 layout_type (dfloat64_type_node
);
9790 SET_TYPE_MODE (dfloat64_type_node
, DDmode
);
9791 dfloat64_ptr_type_node
= build_pointer_type (dfloat64_type_node
);
9793 dfloat128_type_node
= make_node (REAL_TYPE
);
9794 TYPE_PRECISION (dfloat128_type_node
) = DECIMAL128_TYPE_SIZE
;
9795 layout_type (dfloat128_type_node
);
9796 SET_TYPE_MODE (dfloat128_type_node
, TDmode
);
9797 dfloat128_ptr_type_node
= build_pointer_type (dfloat128_type_node
);
9799 complex_integer_type_node
= build_complex_type (integer_type_node
);
9800 complex_float_type_node
= build_complex_type (float_type_node
);
9801 complex_double_type_node
= build_complex_type (double_type_node
);
9802 complex_long_double_type_node
= build_complex_type (long_double_type_node
);
9804 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9805 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9806 sat_ ## KIND ## _type_node = \
9807 make_sat_signed_ ## KIND ## _type (SIZE); \
9808 sat_unsigned_ ## KIND ## _type_node = \
9809 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9810 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9811 unsigned_ ## KIND ## _type_node = \
9812 make_unsigned_ ## KIND ## _type (SIZE);
9814 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9815 sat_ ## WIDTH ## KIND ## _type_node = \
9816 make_sat_signed_ ## KIND ## _type (SIZE); \
9817 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9818 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9819 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9820 unsigned_ ## WIDTH ## KIND ## _type_node = \
9821 make_unsigned_ ## KIND ## _type (SIZE);
9823 /* Make fixed-point type nodes based on four different widths. */
9824 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9825 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9826 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9827 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9828 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9830 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9831 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9832 NAME ## _type_node = \
9833 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9834 u ## NAME ## _type_node = \
9835 make_or_reuse_unsigned_ ## KIND ## _type \
9836 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9837 sat_ ## NAME ## _type_node = \
9838 make_or_reuse_sat_signed_ ## KIND ## _type \
9839 (GET_MODE_BITSIZE (MODE ## mode)); \
9840 sat_u ## NAME ## _type_node = \
9841 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9842 (GET_MODE_BITSIZE (U ## MODE ## mode));
9844 /* Fixed-point type and mode nodes. */
9845 MAKE_FIXED_TYPE_NODE_FAMILY (fract
, FRACT
)
9846 MAKE_FIXED_TYPE_NODE_FAMILY (accum
, ACCUM
)
9847 MAKE_FIXED_MODE_NODE (fract
, qq
, QQ
)
9848 MAKE_FIXED_MODE_NODE (fract
, hq
, HQ
)
9849 MAKE_FIXED_MODE_NODE (fract
, sq
, SQ
)
9850 MAKE_FIXED_MODE_NODE (fract
, dq
, DQ
)
9851 MAKE_FIXED_MODE_NODE (fract
, tq
, TQ
)
9852 MAKE_FIXED_MODE_NODE (accum
, ha
, HA
)
9853 MAKE_FIXED_MODE_NODE (accum
, sa
, SA
)
9854 MAKE_FIXED_MODE_NODE (accum
, da
, DA
)
9855 MAKE_FIXED_MODE_NODE (accum
, ta
, TA
)
9858 tree t
= targetm
.build_builtin_va_list ();
9860 /* Many back-ends define record types without setting TYPE_NAME.
9861 If we copied the record type here, we'd keep the original
9862 record type without a name. This breaks name mangling. So,
9863 don't copy record types and let c_common_nodes_and_builtins()
9864 declare the type to be __builtin_va_list. */
9865 if (TREE_CODE (t
) != RECORD_TYPE
)
9866 t
= build_variant_type_copy (t
);
9868 va_list_type_node
= t
;
9872 /* Modify DECL for given flags.
9873 TM_PURE attribute is set only on types, so the function will modify
9874 DECL's type when ECF_TM_PURE is used. */
9877 set_call_expr_flags (tree decl
, int flags
)
9879 if (flags
& ECF_NOTHROW
)
9880 TREE_NOTHROW (decl
) = 1;
9881 if (flags
& ECF_CONST
)
9882 TREE_READONLY (decl
) = 1;
9883 if (flags
& ECF_PURE
)
9884 DECL_PURE_P (decl
) = 1;
9885 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
9886 DECL_LOOPING_CONST_OR_PURE_P (decl
) = 1;
9887 if (flags
& ECF_NOVOPS
)
9888 DECL_IS_NOVOPS (decl
) = 1;
9889 if (flags
& ECF_NORETURN
)
9890 TREE_THIS_VOLATILE (decl
) = 1;
9891 if (flags
& ECF_MALLOC
)
9892 DECL_IS_MALLOC (decl
) = 1;
9893 if (flags
& ECF_RETURNS_TWICE
)
9894 DECL_IS_RETURNS_TWICE (decl
) = 1;
9895 if (flags
& ECF_LEAF
)
9896 DECL_ATTRIBUTES (decl
) = tree_cons (get_identifier ("leaf"),
9897 NULL
, DECL_ATTRIBUTES (decl
));
9898 if ((flags
& ECF_TM_PURE
) && flag_tm
)
9899 apply_tm_attr (decl
, get_identifier ("transaction_pure"));
9900 /* Looping const or pure is implied by noreturn.
9901 There is currently no way to declare looping const or looping pure alone. */
9902 gcc_assert (!(flags
& ECF_LOOPING_CONST_OR_PURE
)
9903 || ((flags
& ECF_NORETURN
) && (flags
& (ECF_CONST
| ECF_PURE
))));
9907 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9910 local_define_builtin (const char *name
, tree type
, enum built_in_function code
,
9911 const char *library_name
, int ecf_flags
)
9915 decl
= add_builtin_function (name
, type
, code
, BUILT_IN_NORMAL
,
9916 library_name
, NULL_TREE
);
9917 set_call_expr_flags (decl
, ecf_flags
);
9919 set_builtin_decl (code
, decl
, true);
9922 /* Call this function after instantiating all builtins that the language
9923 front end cares about. This will build the rest of the builtins that
9924 are relied upon by the tree optimizers and the middle-end. */
9927 build_common_builtin_nodes (void)
9932 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE
))
9934 ftype
= build_function_type (void_type_node
, void_list_node
);
9935 local_define_builtin ("__builtin_unreachable", ftype
, BUILT_IN_UNREACHABLE
,
9936 "__builtin_unreachable",
9937 ECF_NOTHROW
| ECF_LEAF
| ECF_NORETURN
9938 | ECF_CONST
| ECF_LEAF
);
9941 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
)
9942 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9944 ftype
= build_function_type_list (ptr_type_node
,
9945 ptr_type_node
, const_ptr_type_node
,
9946 size_type_node
, NULL_TREE
);
9948 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY
))
9949 local_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
9950 "memcpy", ECF_NOTHROW
| ECF_LEAF
);
9951 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE
))
9952 local_define_builtin ("__builtin_memmove", ftype
, BUILT_IN_MEMMOVE
,
9953 "memmove", ECF_NOTHROW
| ECF_LEAF
);
9956 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP
))
9958 ftype
= build_function_type_list (integer_type_node
, const_ptr_type_node
,
9959 const_ptr_type_node
, size_type_node
,
9961 local_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
9962 "memcmp", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
9965 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET
))
9967 ftype
= build_function_type_list (ptr_type_node
,
9968 ptr_type_node
, integer_type_node
,
9969 size_type_node
, NULL_TREE
);
9970 local_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
9971 "memset", ECF_NOTHROW
| ECF_LEAF
);
9974 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA
))
9976 ftype
= build_function_type_list (ptr_type_node
,
9977 size_type_node
, NULL_TREE
);
9978 local_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
9979 "alloca", ECF_MALLOC
| ECF_NOTHROW
| ECF_LEAF
);
9982 ftype
= build_function_type_list (ptr_type_node
, size_type_node
,
9983 size_type_node
, NULL_TREE
);
9984 local_define_builtin ("__builtin_alloca_with_align", ftype
,
9985 BUILT_IN_ALLOCA_WITH_ALIGN
, "alloca",
9986 ECF_MALLOC
| ECF_NOTHROW
| ECF_LEAF
);
9988 /* If we're checking the stack, `alloca' can throw. */
9989 if (flag_stack_check
)
9991 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA
)) = 0;
9992 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN
)) = 0;
9995 ftype
= build_function_type_list (void_type_node
,
9996 ptr_type_node
, ptr_type_node
,
9997 ptr_type_node
, NULL_TREE
);
9998 local_define_builtin ("__builtin_init_trampoline", ftype
,
9999 BUILT_IN_INIT_TRAMPOLINE
,
10000 "__builtin_init_trampoline", ECF_NOTHROW
| ECF_LEAF
);
10001 local_define_builtin ("__builtin_init_heap_trampoline", ftype
,
10002 BUILT_IN_INIT_HEAP_TRAMPOLINE
,
10003 "__builtin_init_heap_trampoline",
10004 ECF_NOTHROW
| ECF_LEAF
);
10006 ftype
= build_function_type_list (ptr_type_node
, ptr_type_node
, NULL_TREE
);
10007 local_define_builtin ("__builtin_adjust_trampoline", ftype
,
10008 BUILT_IN_ADJUST_TRAMPOLINE
,
10009 "__builtin_adjust_trampoline",
10010 ECF_CONST
| ECF_NOTHROW
);
10012 ftype
= build_function_type_list (void_type_node
,
10013 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10014 local_define_builtin ("__builtin_nonlocal_goto", ftype
,
10015 BUILT_IN_NONLOCAL_GOTO
,
10016 "__builtin_nonlocal_goto",
10017 ECF_NORETURN
| ECF_NOTHROW
);
10019 ftype
= build_function_type_list (void_type_node
,
10020 ptr_type_node
, ptr_type_node
, NULL_TREE
);
10021 local_define_builtin ("__builtin_setjmp_setup", ftype
,
10022 BUILT_IN_SETJMP_SETUP
,
10023 "__builtin_setjmp_setup", ECF_NOTHROW
);
10025 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10026 local_define_builtin ("__builtin_setjmp_receiver", ftype
,
10027 BUILT_IN_SETJMP_RECEIVER
,
10028 "__builtin_setjmp_receiver", ECF_NOTHROW
| ECF_LEAF
);
10030 ftype
= build_function_type_list (ptr_type_node
, NULL_TREE
);
10031 local_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
10032 "__builtin_stack_save", ECF_NOTHROW
| ECF_LEAF
);
10034 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10035 local_define_builtin ("__builtin_stack_restore", ftype
,
10036 BUILT_IN_STACK_RESTORE
,
10037 "__builtin_stack_restore", ECF_NOTHROW
| ECF_LEAF
);
10039 /* If there's a possibility that we might use the ARM EABI, build the
10040 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10041 if (targetm
.arm_eabi_unwinder
)
10043 ftype
= build_function_type_list (void_type_node
, NULL_TREE
);
10044 local_define_builtin ("__builtin_cxa_end_cleanup", ftype
,
10045 BUILT_IN_CXA_END_CLEANUP
,
10046 "__cxa_end_cleanup", ECF_NORETURN
| ECF_LEAF
);
10049 ftype
= build_function_type_list (void_type_node
, ptr_type_node
, NULL_TREE
);
10050 local_define_builtin ("__builtin_unwind_resume", ftype
,
10051 BUILT_IN_UNWIND_RESUME
,
10052 ((targetm_common
.except_unwind_info (&global_options
)
10054 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10057 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS
) == NULL_TREE
)
10059 ftype
= build_function_type_list (ptr_type_node
, integer_type_node
,
10061 local_define_builtin ("__builtin_return_address", ftype
,
10062 BUILT_IN_RETURN_ADDRESS
,
10063 "__builtin_return_address",
10067 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
)
10068 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10070 ftype
= build_function_type_list (void_type_node
, ptr_type_node
,
10071 ptr_type_node
, NULL_TREE
);
10072 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER
))
10073 local_define_builtin ("__cyg_profile_func_enter", ftype
,
10074 BUILT_IN_PROFILE_FUNC_ENTER
,
10075 "__cyg_profile_func_enter", 0);
10076 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT
))
10077 local_define_builtin ("__cyg_profile_func_exit", ftype
,
10078 BUILT_IN_PROFILE_FUNC_EXIT
,
10079 "__cyg_profile_func_exit", 0);
10082 /* The exception object and filter values from the runtime. The argument
10083 must be zero before exception lowering, i.e. from the front end. After
10084 exception lowering, it will be the region number for the exception
10085 landing pad. These functions are PURE instead of CONST to prevent
10086 them from being hoisted past the exception edge that will initialize
10087 its value in the landing pad. */
10088 ftype
= build_function_type_list (ptr_type_node
,
10089 integer_type_node
, NULL_TREE
);
10090 ecf_flags
= ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
;
10091 /* Only use TM_PURE if we we have TM language support. */
10092 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1
))
10093 ecf_flags
|= ECF_TM_PURE
;
10094 local_define_builtin ("__builtin_eh_pointer", ftype
, BUILT_IN_EH_POINTER
,
10095 "__builtin_eh_pointer", ecf_flags
);
10097 tmp
= lang_hooks
.types
.type_for_mode (targetm
.eh_return_filter_mode (), 0);
10098 ftype
= build_function_type_list (tmp
, integer_type_node
, NULL_TREE
);
10099 local_define_builtin ("__builtin_eh_filter", ftype
, BUILT_IN_EH_FILTER
,
10100 "__builtin_eh_filter", ECF_PURE
| ECF_NOTHROW
| ECF_LEAF
);
10102 ftype
= build_function_type_list (void_type_node
,
10103 integer_type_node
, integer_type_node
,
10105 local_define_builtin ("__builtin_eh_copy_values", ftype
,
10106 BUILT_IN_EH_COPY_VALUES
,
10107 "__builtin_eh_copy_values", ECF_NOTHROW
);
10109 /* Complex multiplication and division. These are handled as builtins
10110 rather than optabs because emit_library_call_value doesn't support
10111 complex. Further, we can do slightly better with folding these
10112 beasties if the real and complex parts of the arguments are separate. */
10116 for (mode
= MIN_MODE_COMPLEX_FLOAT
; mode
<= MAX_MODE_COMPLEX_FLOAT
; ++mode
)
10118 char mode_name_buf
[4], *q
;
10120 enum built_in_function mcode
, dcode
;
10121 tree type
, inner_type
;
10122 const char *prefix
= "__";
10124 if (targetm
.libfunc_gnu_prefix
)
10127 type
= lang_hooks
.types
.type_for_mode ((enum machine_mode
) mode
, 0);
10130 inner_type
= TREE_TYPE (type
);
10132 ftype
= build_function_type_list (type
, inner_type
, inner_type
,
10133 inner_type
, inner_type
, NULL_TREE
);
10135 mcode
= ((enum built_in_function
)
10136 (BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10137 dcode
= ((enum built_in_function
)
10138 (BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
));
10140 for (p
= GET_MODE_NAME (mode
), q
= mode_name_buf
; *p
; p
++, q
++)
10144 built_in_names
[mcode
] = concat (prefix
, "mul", mode_name_buf
, "3",
10146 local_define_builtin (built_in_names
[mcode
], ftype
, mcode
,
10147 built_in_names
[mcode
],
10148 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10150 built_in_names
[dcode
] = concat (prefix
, "div", mode_name_buf
, "3",
10152 local_define_builtin (built_in_names
[dcode
], ftype
, dcode
,
10153 built_in_names
[dcode
],
10154 ECF_CONST
| ECF_NOTHROW
| ECF_LEAF
);
10159 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10162 If we requested a pointer to a vector, build up the pointers that
10163 we stripped off while looking for the inner type. Similarly for
10164 return values from functions.
10166 The argument TYPE is the top of the chain, and BOTTOM is the
10167 new type which we will point to. */
10170 reconstruct_complex_type (tree type
, tree bottom
)
10174 if (TREE_CODE (type
) == POINTER_TYPE
)
10176 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10177 outer
= build_pointer_type_for_mode (inner
, TYPE_MODE (type
),
10178 TYPE_REF_CAN_ALIAS_ALL (type
));
10180 else if (TREE_CODE (type
) == REFERENCE_TYPE
)
10182 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10183 outer
= build_reference_type_for_mode (inner
, TYPE_MODE (type
),
10184 TYPE_REF_CAN_ALIAS_ALL (type
));
10186 else if (TREE_CODE (type
) == ARRAY_TYPE
)
10188 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10189 outer
= build_array_type (inner
, TYPE_DOMAIN (type
));
10191 else if (TREE_CODE (type
) == FUNCTION_TYPE
)
10193 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10194 outer
= build_function_type (inner
, TYPE_ARG_TYPES (type
));
10196 else if (TREE_CODE (type
) == METHOD_TYPE
)
10198 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10199 /* The build_method_type_directly() routine prepends 'this' to argument list,
10200 so we must compensate by getting rid of it. */
10202 = build_method_type_directly
10203 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type
))),
10205 TREE_CHAIN (TYPE_ARG_TYPES (type
)));
10207 else if (TREE_CODE (type
) == OFFSET_TYPE
)
10209 inner
= reconstruct_complex_type (TREE_TYPE (type
), bottom
);
10210 outer
= build_offset_type (TYPE_OFFSET_BASETYPE (type
), inner
);
10215 return build_type_attribute_qual_variant (outer
, TYPE_ATTRIBUTES (type
),
10216 TYPE_QUALS (type
));
10219 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10222 build_vector_type_for_mode (tree innertype
, enum machine_mode mode
)
10226 switch (GET_MODE_CLASS (mode
))
10228 case MODE_VECTOR_INT
:
10229 case MODE_VECTOR_FLOAT
:
10230 case MODE_VECTOR_FRACT
:
10231 case MODE_VECTOR_UFRACT
:
10232 case MODE_VECTOR_ACCUM
:
10233 case MODE_VECTOR_UACCUM
:
10234 nunits
= GET_MODE_NUNITS (mode
);
10238 /* Check that there are no leftover bits. */
10239 gcc_assert (GET_MODE_BITSIZE (mode
)
10240 % TREE_INT_CST_LOW (TYPE_SIZE (innertype
)) == 0);
10242 nunits
= GET_MODE_BITSIZE (mode
)
10243 / TREE_INT_CST_LOW (TYPE_SIZE (innertype
));
10247 gcc_unreachable ();
10250 return make_vector_type (innertype
, nunits
, mode
);
10253 /* Similarly, but takes the inner type and number of units, which must be
10257 build_vector_type (tree innertype
, int nunits
)
10259 return make_vector_type (innertype
, nunits
, VOIDmode
);
10262 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10265 build_opaque_vector_type (tree innertype
, int nunits
)
10267 tree t
= make_vector_type (innertype
, nunits
, VOIDmode
);
10269 /* We always build the non-opaque variant before the opaque one,
10270 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10271 cand
= TYPE_NEXT_VARIANT (t
);
10273 && TYPE_VECTOR_OPAQUE (cand
)
10274 && check_qualified_type (cand
, t
, TYPE_QUALS (t
)))
10276 /* Othewise build a variant type and make sure to queue it after
10277 the non-opaque type. */
10278 cand
= build_distinct_type_copy (t
);
10279 TYPE_VECTOR_OPAQUE (cand
) = true;
10280 TYPE_CANONICAL (cand
) = TYPE_CANONICAL (t
);
10281 TYPE_NEXT_VARIANT (cand
) = TYPE_NEXT_VARIANT (t
);
10282 TYPE_NEXT_VARIANT (t
) = cand
;
10283 TYPE_MAIN_VARIANT (cand
) = TYPE_MAIN_VARIANT (t
);
10288 /* Given an initializer INIT, return TRUE if INIT is zero or some
10289 aggregate of zeros. Otherwise return FALSE. */
10291 initializer_zerop (const_tree init
)
10297 switch (TREE_CODE (init
))
10300 return integer_zerop (init
);
10303 /* ??? Note that this is not correct for C4X float formats. There,
10304 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10305 negative exponent. */
10306 return real_zerop (init
)
10307 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init
));
10310 return fixed_zerop (init
);
10313 return integer_zerop (init
)
10314 || (real_zerop (init
)
10315 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init
)))
10316 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init
))));
10321 for (i
= 0; i
< VECTOR_CST_NELTS (init
); ++i
)
10322 if (!initializer_zerop (VECTOR_CST_ELT (init
, i
)))
10329 unsigned HOST_WIDE_INT idx
;
10331 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init
), idx
, elt
)
10332 if (!initializer_zerop (elt
))
10341 /* We need to loop through all elements to handle cases like
10342 "\0" and "\0foobar". */
10343 for (i
= 0; i
< TREE_STRING_LENGTH (init
); ++i
)
10344 if (TREE_STRING_POINTER (init
)[i
] != '\0')
10355 /* Check if vector VEC consists of all the equal elements and
10356 that the number of elements corresponds to the type of VEC.
10357 The function returns first element of the vector
10358 or NULL_TREE if the vector is not uniform. */
10360 uniform_vector_p (const_tree vec
)
10365 if (vec
== NULL_TREE
)
10368 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec
)));
10370 if (TREE_CODE (vec
) == VECTOR_CST
)
10372 first
= VECTOR_CST_ELT (vec
, 0);
10373 for (i
= 1; i
< VECTOR_CST_NELTS (vec
); ++i
)
10374 if (!operand_equal_p (first
, VECTOR_CST_ELT (vec
, i
), 0))
10380 else if (TREE_CODE (vec
) == CONSTRUCTOR
)
10382 first
= error_mark_node
;
10384 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec
), i
, t
)
10391 if (!operand_equal_p (first
, t
, 0))
10394 if (i
!= TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec
)))
10403 /* Build an empty statement at location LOC. */
10406 build_empty_stmt (location_t loc
)
10408 tree t
= build1 (NOP_EXPR
, void_type_node
, size_zero_node
);
10409 SET_EXPR_LOCATION (t
, loc
);
10414 /* Build an OpenMP clause with code CODE. LOC is the location of the
10418 build_omp_clause (location_t loc
, enum omp_clause_code code
)
10423 length
= omp_clause_num_ops
[code
];
10424 size
= (sizeof (struct tree_omp_clause
) + (length
- 1) * sizeof (tree
));
10426 record_node_allocation_statistics (OMP_CLAUSE
, size
);
10428 t
= (tree
) ggc_internal_alloc (size
);
10429 memset (t
, 0, size
);
10430 TREE_SET_CODE (t
, OMP_CLAUSE
);
10431 OMP_CLAUSE_SET_CODE (t
, code
);
10432 OMP_CLAUSE_LOCATION (t
) = loc
;
10437 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10438 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10439 Except for the CODE and operand count field, other storage for the
10440 object is initialized to zeros. */
10443 build_vl_exp_stat (enum tree_code code
, int len MEM_STAT_DECL
)
10446 int length
= (len
- 1) * sizeof (tree
) + sizeof (struct tree_exp
);
10448 gcc_assert (TREE_CODE_CLASS (code
) == tcc_vl_exp
);
10449 gcc_assert (len
>= 1);
10451 record_node_allocation_statistics (code
, length
);
10453 t
= ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT
);
10455 TREE_SET_CODE (t
, code
);
10457 /* Can't use TREE_OPERAND to store the length because if checking is
10458 enabled, it will try to check the length before we store it. :-P */
10459 t
->exp
.operands
[0] = build_int_cst (sizetype
, len
);
10464 /* Helper function for build_call_* functions; build a CALL_EXPR with
10465 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10466 the argument slots. */
10469 build_call_1 (tree return_type
, tree fn
, int nargs
)
10473 t
= build_vl_exp (CALL_EXPR
, nargs
+ 3);
10474 TREE_TYPE (t
) = return_type
;
10475 CALL_EXPR_FN (t
) = fn
;
10476 CALL_EXPR_STATIC_CHAIN (t
) = NULL
;
10481 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10482 FN and a null static chain slot. NARGS is the number of call arguments
10483 which are specified as "..." arguments. */
10486 build_call_nary (tree return_type
, tree fn
, int nargs
, ...)
10490 va_start (args
, nargs
);
10491 ret
= build_call_valist (return_type
, fn
, nargs
, args
);
10496 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10497 FN and a null static chain slot. NARGS is the number of call arguments
10498 which are specified as a va_list ARGS. */
10501 build_call_valist (tree return_type
, tree fn
, int nargs
, va_list args
)
10506 t
= build_call_1 (return_type
, fn
, nargs
);
10507 for (i
= 0; i
< nargs
; i
++)
10508 CALL_EXPR_ARG (t
, i
) = va_arg (args
, tree
);
10509 process_call_operands (t
);
10513 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10514 FN and a null static chain slot. NARGS is the number of call arguments
10515 which are specified as a tree array ARGS. */
10518 build_call_array_loc (location_t loc
, tree return_type
, tree fn
,
10519 int nargs
, const tree
*args
)
10524 t
= build_call_1 (return_type
, fn
, nargs
);
10525 for (i
= 0; i
< nargs
; i
++)
10526 CALL_EXPR_ARG (t
, i
) = args
[i
];
10527 process_call_operands (t
);
10528 SET_EXPR_LOCATION (t
, loc
);
10532 /* Like build_call_array, but takes a vec. */
10535 build_call_vec (tree return_type
, tree fn
, vec
<tree
, va_gc
> *args
)
10540 ret
= build_call_1 (return_type
, fn
, vec_safe_length (args
));
10541 FOR_EACH_VEC_SAFE_ELT (args
, ix
, t
)
10542 CALL_EXPR_ARG (ret
, ix
) = t
;
10543 process_call_operands (ret
);
10547 /* Conveniently construct a function call expression. FNDECL names the
10548 function to be called and N arguments are passed in the array
10552 build_call_expr_loc_array (location_t loc
, tree fndecl
, int n
, tree
*argarray
)
10554 tree fntype
= TREE_TYPE (fndecl
);
10555 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10557 return fold_builtin_call_array (loc
, TREE_TYPE (fntype
), fn
, n
, argarray
);
10560 /* Conveniently construct a function call expression. FNDECL names the
10561 function to be called and the arguments are passed in the vector
10565 build_call_expr_loc_vec (location_t loc
, tree fndecl
, vec
<tree
, va_gc
> *vec
)
10567 return build_call_expr_loc_array (loc
, fndecl
, vec_safe_length (vec
),
10568 vec_safe_address (vec
));
10572 /* Conveniently construct a function call expression. FNDECL names the
10573 function to be called, N is the number of arguments, and the "..."
10574 parameters are the argument expressions. */
10577 build_call_expr_loc (location_t loc
, tree fndecl
, int n
, ...)
10580 tree
*argarray
= XALLOCAVEC (tree
, n
);
10584 for (i
= 0; i
< n
; i
++)
10585 argarray
[i
] = va_arg (ap
, tree
);
10587 return build_call_expr_loc_array (loc
, fndecl
, n
, argarray
);
10590 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10591 varargs macros aren't supported by all bootstrap compilers. */
10594 build_call_expr (tree fndecl
, int n
, ...)
10597 tree
*argarray
= XALLOCAVEC (tree
, n
);
10601 for (i
= 0; i
< n
; i
++)
10602 argarray
[i
] = va_arg (ap
, tree
);
10604 return build_call_expr_loc_array (UNKNOWN_LOCATION
, fndecl
, n
, argarray
);
10607 /* Build internal call expression. This is just like CALL_EXPR, except
10608 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10609 internal function. */
10612 build_call_expr_internal_loc (location_t loc
, enum internal_fn ifn
,
10613 tree type
, int n
, ...)
10618 tree fn
= build_call_1 (type
, NULL_TREE
, n
);
10620 for (i
= 0; i
< n
; i
++)
10621 CALL_EXPR_ARG (fn
, i
) = va_arg (ap
, tree
);
10623 SET_EXPR_LOCATION (fn
, loc
);
10624 CALL_EXPR_IFN (fn
) = ifn
;
10628 /* Create a new constant string literal and return a char* pointer to it.
10629 The STRING_CST value is the LEN characters at STR. */
10631 build_string_literal (int len
, const char *str
)
10633 tree t
, elem
, index
, type
;
10635 t
= build_string (len
, str
);
10636 elem
= build_type_variant (char_type_node
, 1, 0);
10637 index
= build_index_type (size_int (len
- 1));
10638 type
= build_array_type (elem
, index
);
10639 TREE_TYPE (t
) = type
;
10640 TREE_CONSTANT (t
) = 1;
10641 TREE_READONLY (t
) = 1;
10642 TREE_STATIC (t
) = 1;
10644 type
= build_pointer_type (elem
);
10645 t
= build1 (ADDR_EXPR
, type
,
10646 build4 (ARRAY_REF
, elem
,
10647 t
, integer_zero_node
, NULL_TREE
, NULL_TREE
));
10653 /* Return true if T (assumed to be a DECL) must be assigned a memory
10657 needs_to_live_in_memory (const_tree t
)
10659 return (TREE_ADDRESSABLE (t
)
10660 || is_global_var (t
)
10661 || (TREE_CODE (t
) == RESULT_DECL
10662 && !DECL_BY_REFERENCE (t
)
10663 && aggregate_value_p (t
, current_function_decl
)));
10666 /* Return value of a constant X and sign-extend it. */
10669 int_cst_value (const_tree x
)
10671 unsigned bits
= TYPE_PRECISION (TREE_TYPE (x
));
10672 unsigned HOST_WIDE_INT val
= TREE_INT_CST_LOW (x
);
10674 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10675 gcc_assert (cst_and_fits_in_hwi (x
));
10677 if (bits
< HOST_BITS_PER_WIDE_INT
)
10679 bool negative
= ((val
>> (bits
- 1)) & 1) != 0;
10681 val
|= (~(unsigned HOST_WIDE_INT
) 0) << (bits
- 1) << 1;
10683 val
&= ~((~(unsigned HOST_WIDE_INT
) 0) << (bits
- 1) << 1);
10689 /* If TYPE is an integral or pointer type, return an integer type with
10690 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10691 if TYPE is already an integer type of signedness UNSIGNEDP. */
10694 signed_or_unsigned_type_for (int unsignedp
, tree type
)
10696 if (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
) == unsignedp
)
10699 if (TREE_CODE (type
) == VECTOR_TYPE
)
10701 tree inner
= TREE_TYPE (type
);
10702 tree inner2
= signed_or_unsigned_type_for (unsignedp
, inner
);
10705 if (inner
== inner2
)
10707 return build_vector_type (inner2
, TYPE_VECTOR_SUBPARTS (type
));
10710 if (!INTEGRAL_TYPE_P (type
)
10711 && !POINTER_TYPE_P (type
)
10712 && TREE_CODE (type
) != OFFSET_TYPE
)
10715 return build_nonstandard_integer_type (TYPE_PRECISION (type
), unsignedp
);
10718 /* If TYPE is an integral or pointer type, return an integer type with
10719 the same precision which is unsigned, or itself if TYPE is already an
10720 unsigned integer type. */
10723 unsigned_type_for (tree type
)
10725 return signed_or_unsigned_type_for (1, type
);
10728 /* If TYPE is an integral or pointer type, return an integer type with
10729 the same precision which is signed, or itself if TYPE is already a
10730 signed integer type. */
10733 signed_type_for (tree type
)
10735 return signed_or_unsigned_type_for (0, type
);
10738 /* If TYPE is a vector type, return a signed integer vector type with the
10739 same width and number of subparts. Otherwise return boolean_type_node. */
10742 truth_type_for (tree type
)
10744 if (TREE_CODE (type
) == VECTOR_TYPE
)
10746 tree elem
= lang_hooks
.types
.type_for_size
10747 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))), 0);
10748 return build_opaque_vector_type (elem
, TYPE_VECTOR_SUBPARTS (type
));
10751 return boolean_type_node
;
10754 /* Returns the largest value obtainable by casting something in INNER type to
10758 upper_bound_in_type (tree outer
, tree inner
)
10760 unsigned int det
= 0;
10761 unsigned oprec
= TYPE_PRECISION (outer
);
10762 unsigned iprec
= TYPE_PRECISION (inner
);
10765 /* Compute a unique number for every combination. */
10766 det
|= (oprec
> iprec
) ? 4 : 0;
10767 det
|= TYPE_UNSIGNED (outer
) ? 2 : 0;
10768 det
|= TYPE_UNSIGNED (inner
) ? 1 : 0;
10770 /* Determine the exponent to use. */
10775 /* oprec <= iprec, outer: signed, inner: don't care. */
10780 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10784 /* oprec > iprec, outer: signed, inner: signed. */
10788 /* oprec > iprec, outer: signed, inner: unsigned. */
10792 /* oprec > iprec, outer: unsigned, inner: signed. */
10796 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10800 gcc_unreachable ();
10803 return wide_int_to_tree (outer
,
10804 wi::mask (prec
, false, TYPE_PRECISION (outer
)));
10807 /* Returns the smallest value obtainable by casting something in INNER type to
10811 lower_bound_in_type (tree outer
, tree inner
)
10813 unsigned oprec
= TYPE_PRECISION (outer
);
10814 unsigned iprec
= TYPE_PRECISION (inner
);
10816 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10818 if (TYPE_UNSIGNED (outer
)
10819 /* If we are widening something of an unsigned type, OUTER type
10820 contains all values of INNER type. In particular, both INNER
10821 and OUTER types have zero in common. */
10822 || (oprec
> iprec
&& TYPE_UNSIGNED (inner
)))
10823 return build_int_cst (outer
, 0);
10826 /* If we are widening a signed type to another signed type, we
10827 want to obtain -2^^(iprec-1). If we are keeping the
10828 precision or narrowing to a signed type, we want to obtain
10830 unsigned prec
= oprec
> iprec
? iprec
: oprec
;
10831 return wide_int_to_tree (outer
,
10832 wi::mask (prec
- 1, true,
10833 TYPE_PRECISION (outer
)));
10837 /* Return nonzero if two operands that are suitable for PHI nodes are
10838 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10839 SSA_NAME or invariant. Note that this is strictly an optimization.
10840 That is, callers of this function can directly call operand_equal_p
10841 and get the same result, only slower. */
10844 operand_equal_for_phi_arg_p (const_tree arg0
, const_tree arg1
)
10848 if (TREE_CODE (arg0
) == SSA_NAME
|| TREE_CODE (arg1
) == SSA_NAME
)
10850 return operand_equal_p (arg0
, arg1
, 0);
10853 /* Returns number of zeros at the end of binary representation of X. */
10856 num_ending_zeros (const_tree x
)
10858 return build_int_cst (TREE_TYPE (x
), wi::ctz (x
));
10862 #define WALK_SUBTREE(NODE) \
10865 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10871 /* This is a subroutine of walk_tree that walks field of TYPE that are to
10872 be walked whenever a type is seen in the tree. Rest of operands and return
10873 value are as for walk_tree. */
10876 walk_type_fields (tree type
, walk_tree_fn func
, void *data
,
10877 struct pointer_set_t
*pset
, walk_tree_lh lh
)
10879 tree result
= NULL_TREE
;
10881 switch (TREE_CODE (type
))
10884 case REFERENCE_TYPE
:
10886 /* We have to worry about mutually recursive pointers. These can't
10887 be written in C. They can in Ada. It's pathological, but
10888 there's an ACATS test (c38102a) that checks it. Deal with this
10889 by checking if we're pointing to another pointer, that one
10890 points to another pointer, that one does too, and we have no htab.
10891 If so, get a hash table. We check three levels deep to avoid
10892 the cost of the hash table if we don't need one. */
10893 if (POINTER_TYPE_P (TREE_TYPE (type
))
10894 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type
)))
10895 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type
))))
10898 result
= walk_tree_without_duplicates (&TREE_TYPE (type
),
10906 /* ... fall through ... */
10909 WALK_SUBTREE (TREE_TYPE (type
));
10913 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type
));
10915 /* Fall through. */
10917 case FUNCTION_TYPE
:
10918 WALK_SUBTREE (TREE_TYPE (type
));
10922 /* We never want to walk into default arguments. */
10923 for (arg
= TYPE_ARG_TYPES (type
); arg
; arg
= TREE_CHAIN (arg
))
10924 WALK_SUBTREE (TREE_VALUE (arg
));
10929 /* Don't follow this nodes's type if a pointer for fear that
10930 we'll have infinite recursion. If we have a PSET, then we
10933 || (!POINTER_TYPE_P (TREE_TYPE (type
))
10934 && TREE_CODE (TREE_TYPE (type
)) != OFFSET_TYPE
))
10935 WALK_SUBTREE (TREE_TYPE (type
));
10936 WALK_SUBTREE (TYPE_DOMAIN (type
));
10940 WALK_SUBTREE (TREE_TYPE (type
));
10941 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type
));
10951 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10952 called with the DATA and the address of each sub-tree. If FUNC returns a
10953 non-NULL value, the traversal is stopped, and the value returned by FUNC
10954 is returned. If PSET is non-NULL it is used to record the nodes visited,
10955 and to avoid visiting a node more than once. */
10958 walk_tree_1 (tree
*tp
, walk_tree_fn func
, void *data
,
10959 struct pointer_set_t
*pset
, walk_tree_lh lh
)
10961 enum tree_code code
;
10965 #define WALK_SUBTREE_TAIL(NODE) \
10969 goto tail_recurse; \
10974 /* Skip empty subtrees. */
10978 /* Don't walk the same tree twice, if the user has requested
10979 that we avoid doing so. */
10980 if (pset
&& pointer_set_insert (pset
, *tp
))
10983 /* Call the function. */
10985 result
= (*func
) (tp
, &walk_subtrees
, data
);
10987 /* If we found something, return it. */
10991 code
= TREE_CODE (*tp
);
10993 /* Even if we didn't, FUNC may have decided that there was nothing
10994 interesting below this point in the tree. */
10995 if (!walk_subtrees
)
10997 /* But we still need to check our siblings. */
10998 if (code
== TREE_LIST
)
10999 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11000 else if (code
== OMP_CLAUSE
)
11001 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11008 result
= (*lh
) (tp
, &walk_subtrees
, func
, data
, pset
);
11009 if (result
|| !walk_subtrees
)
11016 case IDENTIFIER_NODE
:
11023 case PLACEHOLDER_EXPR
:
11027 /* None of these have subtrees other than those already walked
11032 WALK_SUBTREE (TREE_VALUE (*tp
));
11033 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp
));
11038 int len
= TREE_VEC_LENGTH (*tp
);
11043 /* Walk all elements but the first. */
11045 WALK_SUBTREE (TREE_VEC_ELT (*tp
, len
));
11047 /* Now walk the first one as a tail call. */
11048 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp
, 0));
11052 WALK_SUBTREE (TREE_REALPART (*tp
));
11053 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp
));
11057 unsigned HOST_WIDE_INT idx
;
11058 constructor_elt
*ce
;
11060 for (idx
= 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp
), idx
, &ce
);
11062 WALK_SUBTREE (ce
->value
);
11067 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, 0));
11072 for (decl
= BIND_EXPR_VARS (*tp
); decl
; decl
= DECL_CHAIN (decl
))
11074 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11075 into declarations that are just mentioned, rather than
11076 declared; they don't really belong to this part of the tree.
11077 And, we can see cycles: the initializer for a declaration
11078 can refer to the declaration itself. */
11079 WALK_SUBTREE (DECL_INITIAL (decl
));
11080 WALK_SUBTREE (DECL_SIZE (decl
));
11081 WALK_SUBTREE (DECL_SIZE_UNIT (decl
));
11083 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp
));
11086 case STATEMENT_LIST
:
11088 tree_stmt_iterator i
;
11089 for (i
= tsi_start (*tp
); !tsi_end_p (i
); tsi_next (&i
))
11090 WALK_SUBTREE (*tsi_stmt_ptr (i
));
11095 switch (OMP_CLAUSE_CODE (*tp
))
11097 case OMP_CLAUSE_PRIVATE
:
11098 case OMP_CLAUSE_SHARED
:
11099 case OMP_CLAUSE_FIRSTPRIVATE
:
11100 case OMP_CLAUSE_COPYIN
:
11101 case OMP_CLAUSE_COPYPRIVATE
:
11102 case OMP_CLAUSE_FINAL
:
11103 case OMP_CLAUSE_IF
:
11104 case OMP_CLAUSE_NUM_THREADS
:
11105 case OMP_CLAUSE_SCHEDULE
:
11106 case OMP_CLAUSE_UNIFORM
:
11107 case OMP_CLAUSE_DEPEND
:
11108 case OMP_CLAUSE_NUM_TEAMS
:
11109 case OMP_CLAUSE_THREAD_LIMIT
:
11110 case OMP_CLAUSE_DEVICE
:
11111 case OMP_CLAUSE_DIST_SCHEDULE
:
11112 case OMP_CLAUSE_SAFELEN
:
11113 case OMP_CLAUSE_SIMDLEN
:
11114 case OMP_CLAUSE__LOOPTEMP_
:
11115 case OMP_CLAUSE__SIMDUID_
:
11116 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 0));
11119 case OMP_CLAUSE_NOWAIT
:
11120 case OMP_CLAUSE_ORDERED
:
11121 case OMP_CLAUSE_DEFAULT
:
11122 case OMP_CLAUSE_UNTIED
:
11123 case OMP_CLAUSE_MERGEABLE
:
11124 case OMP_CLAUSE_PROC_BIND
:
11125 case OMP_CLAUSE_INBRANCH
:
11126 case OMP_CLAUSE_NOTINBRANCH
:
11127 case OMP_CLAUSE_FOR
:
11128 case OMP_CLAUSE_PARALLEL
:
11129 case OMP_CLAUSE_SECTIONS
:
11130 case OMP_CLAUSE_TASKGROUP
:
11131 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11133 case OMP_CLAUSE_LASTPRIVATE
:
11134 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11135 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp
));
11136 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11138 case OMP_CLAUSE_COLLAPSE
:
11141 for (i
= 0; i
< 3; i
++)
11142 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11143 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11146 case OMP_CLAUSE_LINEAR
:
11147 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11148 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp
));
11149 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp
));
11150 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11152 case OMP_CLAUSE_ALIGNED
:
11153 case OMP_CLAUSE_FROM
:
11154 case OMP_CLAUSE_TO
:
11155 case OMP_CLAUSE_MAP
:
11156 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp
));
11157 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, 1));
11158 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11160 case OMP_CLAUSE_REDUCTION
:
11163 for (i
= 0; i
< 4; i
++)
11164 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp
, i
));
11165 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp
));
11169 gcc_unreachable ();
11177 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11178 But, we only want to walk once. */
11179 len
= (TREE_OPERAND (*tp
, 3) == TREE_OPERAND (*tp
, 1)) ? 2 : 3;
11180 for (i
= 0; i
< len
; ++i
)
11181 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11182 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
));
11186 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11187 defining. We only want to walk into these fields of a type in this
11188 case and not in the general case of a mere reference to the type.
11190 The criterion is as follows: if the field can be an expression, it
11191 must be walked only here. This should be in keeping with the fields
11192 that are directly gimplified in gimplify_type_sizes in order for the
11193 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11194 variable-sized types.
11196 Note that DECLs get walked as part of processing the BIND_EXPR. */
11197 if (TREE_CODE (DECL_EXPR_DECL (*tp
)) == TYPE_DECL
)
11199 tree
*type_p
= &TREE_TYPE (DECL_EXPR_DECL (*tp
));
11200 if (TREE_CODE (*type_p
) == ERROR_MARK
)
11203 /* Call the function for the type. See if it returns anything or
11204 doesn't want us to continue. If we are to continue, walk both
11205 the normal fields and those for the declaration case. */
11206 result
= (*func
) (type_p
, &walk_subtrees
, data
);
11207 if (result
|| !walk_subtrees
)
11210 /* But do not walk a pointed-to type since it may itself need to
11211 be walked in the declaration case if it isn't anonymous. */
11212 if (!POINTER_TYPE_P (*type_p
))
11214 result
= walk_type_fields (*type_p
, func
, data
, pset
, lh
);
11219 /* If this is a record type, also walk the fields. */
11220 if (RECORD_OR_UNION_TYPE_P (*type_p
))
11224 for (field
= TYPE_FIELDS (*type_p
); field
;
11225 field
= DECL_CHAIN (field
))
11227 /* We'd like to look at the type of the field, but we can
11228 easily get infinite recursion. So assume it's pointed
11229 to elsewhere in the tree. Also, ignore things that
11231 if (TREE_CODE (field
) != FIELD_DECL
)
11234 WALK_SUBTREE (DECL_FIELD_OFFSET (field
));
11235 WALK_SUBTREE (DECL_SIZE (field
));
11236 WALK_SUBTREE (DECL_SIZE_UNIT (field
));
11237 if (TREE_CODE (*type_p
) == QUAL_UNION_TYPE
)
11238 WALK_SUBTREE (DECL_QUALIFIER (field
));
11242 /* Same for scalar types. */
11243 else if (TREE_CODE (*type_p
) == BOOLEAN_TYPE
11244 || TREE_CODE (*type_p
) == ENUMERAL_TYPE
11245 || TREE_CODE (*type_p
) == INTEGER_TYPE
11246 || TREE_CODE (*type_p
) == FIXED_POINT_TYPE
11247 || TREE_CODE (*type_p
) == REAL_TYPE
)
11249 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p
));
11250 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p
));
11253 WALK_SUBTREE (TYPE_SIZE (*type_p
));
11254 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p
));
11259 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code
)))
11263 /* Walk over all the sub-trees of this operand. */
11264 len
= TREE_OPERAND_LENGTH (*tp
);
11266 /* Go through the subtrees. We need to do this in forward order so
11267 that the scope of a FOR_EXPR is handled properly. */
11270 for (i
= 0; i
< len
- 1; ++i
)
11271 WALK_SUBTREE (TREE_OPERAND (*tp
, i
));
11272 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp
, len
- 1));
11275 /* If this is a type, walk the needed fields in the type. */
11276 else if (TYPE_P (*tp
))
11277 return walk_type_fields (*tp
, func
, data
, pset
, lh
);
11281 /* We didn't find what we were looking for. */
11284 #undef WALK_SUBTREE_TAIL
11286 #undef WALK_SUBTREE
11288 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11291 walk_tree_without_duplicates_1 (tree
*tp
, walk_tree_fn func
, void *data
,
11295 struct pointer_set_t
*pset
;
11297 pset
= pointer_set_create ();
11298 result
= walk_tree_1 (tp
, func
, data
, pset
, lh
);
11299 pointer_set_destroy (pset
);
11305 tree_block (tree t
)
11307 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11309 if (IS_EXPR_CODE_CLASS (c
))
11310 return LOCATION_BLOCK (t
->exp
.locus
);
11311 gcc_unreachable ();
11316 tree_set_block (tree t
, tree b
)
11318 const enum tree_code_class c
= TREE_CODE_CLASS (TREE_CODE (t
));
11320 if (IS_EXPR_CODE_CLASS (c
))
11323 t
->exp
.locus
= COMBINE_LOCATION_DATA (line_table
, t
->exp
.locus
, b
);
11325 t
->exp
.locus
= LOCATION_LOCUS (t
->exp
.locus
);
11328 gcc_unreachable ();
11331 /* Create a nameless artificial label and put it in the current
11332 function context. The label has a location of LOC. Returns the
11333 newly created label. */
11336 create_artificial_label (location_t loc
)
11338 tree lab
= build_decl (loc
,
11339 LABEL_DECL
, NULL_TREE
, void_type_node
);
11341 DECL_ARTIFICIAL (lab
) = 1;
11342 DECL_IGNORED_P (lab
) = 1;
11343 DECL_CONTEXT (lab
) = current_function_decl
;
11347 /* Given a tree, try to return a useful variable name that we can use
11348 to prefix a temporary that is being assigned the value of the tree.
11349 I.E. given <temp> = &A, return A. */
11354 tree stripped_decl
;
11357 STRIP_NOPS (stripped_decl
);
11358 if (DECL_P (stripped_decl
) && DECL_NAME (stripped_decl
))
11359 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl
));
11360 else if (TREE_CODE (stripped_decl
) == SSA_NAME
)
11362 tree name
= SSA_NAME_IDENTIFIER (stripped_decl
);
11365 return IDENTIFIER_POINTER (name
);
11369 switch (TREE_CODE (stripped_decl
))
11372 return get_name (TREE_OPERAND (stripped_decl
, 0));
11379 /* Return true if TYPE has a variable argument list. */
11382 stdarg_p (const_tree fntype
)
11384 function_args_iterator args_iter
;
11385 tree n
= NULL_TREE
, t
;
11390 FOREACH_FUNCTION_ARGS (fntype
, t
, args_iter
)
11395 return n
!= NULL_TREE
&& n
!= void_type_node
;
11398 /* Return true if TYPE has a prototype. */
11401 prototype_p (tree fntype
)
11405 gcc_assert (fntype
!= NULL_TREE
);
11407 t
= TYPE_ARG_TYPES (fntype
);
11408 return (t
!= NULL_TREE
);
11411 /* If BLOCK is inlined from an __attribute__((__artificial__))
11412 routine, return pointer to location from where it has been
11415 block_nonartificial_location (tree block
)
11417 location_t
*ret
= NULL
;
11419 while (block
&& TREE_CODE (block
) == BLOCK
11420 && BLOCK_ABSTRACT_ORIGIN (block
))
11422 tree ao
= BLOCK_ABSTRACT_ORIGIN (block
);
11424 while (TREE_CODE (ao
) == BLOCK
11425 && BLOCK_ABSTRACT_ORIGIN (ao
)
11426 && BLOCK_ABSTRACT_ORIGIN (ao
) != ao
)
11427 ao
= BLOCK_ABSTRACT_ORIGIN (ao
);
11429 if (TREE_CODE (ao
) == FUNCTION_DECL
)
11431 /* If AO is an artificial inline, point RET to the
11432 call site locus at which it has been inlined and continue
11433 the loop, in case AO's caller is also an artificial
11435 if (DECL_DECLARED_INLINE_P (ao
)
11436 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao
)))
11437 ret
= &BLOCK_SOURCE_LOCATION (block
);
11441 else if (TREE_CODE (ao
) != BLOCK
)
11444 block
= BLOCK_SUPERCONTEXT (block
);
11450 /* If EXP is inlined from an __attribute__((__artificial__))
11451 function, return the location of the original call expression. */
11454 tree_nonartificial_location (tree exp
)
11456 location_t
*loc
= block_nonartificial_location (TREE_BLOCK (exp
));
11461 return EXPR_LOCATION (exp
);
11465 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
11468 /* Return the hash code code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11471 cl_option_hash_hash (const void *x
)
11473 const_tree
const t
= (const_tree
) x
;
11477 hashval_t hash
= 0;
11479 if (TREE_CODE (t
) == OPTIMIZATION_NODE
)
11481 p
= (const char *)TREE_OPTIMIZATION (t
);
11482 len
= sizeof (struct cl_optimization
);
11485 else if (TREE_CODE (t
) == TARGET_OPTION_NODE
)
11487 p
= (const char *)TREE_TARGET_OPTION (t
);
11488 len
= sizeof (struct cl_target_option
);
11492 gcc_unreachable ();
11494 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11496 for (i
= 0; i
< len
; i
++)
11498 hash
= (hash
<< 4) ^ ((i
<< 2) | p
[i
]);
11503 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11504 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11508 cl_option_hash_eq (const void *x
, const void *y
)
11510 const_tree
const xt
= (const_tree
) x
;
11511 const_tree
const yt
= (const_tree
) y
;
11516 if (TREE_CODE (xt
) != TREE_CODE (yt
))
11519 if (TREE_CODE (xt
) == OPTIMIZATION_NODE
)
11521 xp
= (const char *)TREE_OPTIMIZATION (xt
);
11522 yp
= (const char *)TREE_OPTIMIZATION (yt
);
11523 len
= sizeof (struct cl_optimization
);
11526 else if (TREE_CODE (xt
) == TARGET_OPTION_NODE
)
11528 xp
= (const char *)TREE_TARGET_OPTION (xt
);
11529 yp
= (const char *)TREE_TARGET_OPTION (yt
);
11530 len
= sizeof (struct cl_target_option
);
11534 gcc_unreachable ();
11536 return (memcmp (xp
, yp
, len
) == 0);
11539 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11542 build_optimization_node (struct gcc_options
*opts
)
11547 /* Use the cache of optimization nodes. */
11549 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node
),
11552 slot
= htab_find_slot (cl_option_hash_table
, cl_optimization_node
, INSERT
);
11556 /* Insert this one into the hash table. */
11557 t
= cl_optimization_node
;
11560 /* Make a new node for next time round. */
11561 cl_optimization_node
= make_node (OPTIMIZATION_NODE
);
11567 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11570 build_target_option_node (struct gcc_options
*opts
)
11575 /* Use the cache of optimization nodes. */
11577 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node
),
11580 slot
= htab_find_slot (cl_option_hash_table
, cl_target_option_node
, INSERT
);
11584 /* Insert this one into the hash table. */
11585 t
= cl_target_option_node
;
11588 /* Make a new node for next time round. */
11589 cl_target_option_node
= make_node (TARGET_OPTION_NODE
);
11595 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11596 Called through htab_traverse. */
11599 prepare_target_option_node_for_pch (void **slot
, void *)
11601 tree node
= (tree
) *slot
;
11602 if (TREE_CODE (node
) == TARGET_OPTION_NODE
)
11603 TREE_TARGET_GLOBALS (node
) = NULL
;
11607 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11608 so that they aren't saved during PCH writing. */
11611 prepare_target_option_nodes_for_pch (void)
11613 htab_traverse (cl_option_hash_table
, prepare_target_option_node_for_pch
,
11617 /* Determine the "ultimate origin" of a block. The block may be an inlined
11618 instance of an inlined instance of a block which is local to an inline
11619 function, so we have to trace all of the way back through the origin chain
11620 to find out what sort of node actually served as the original seed for the
11624 block_ultimate_origin (const_tree block
)
11626 tree immediate_origin
= BLOCK_ABSTRACT_ORIGIN (block
);
11628 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11629 nodes in the function to point to themselves; ignore that if
11630 we're trying to output the abstract instance of this function. */
11631 if (BLOCK_ABSTRACT (block
) && immediate_origin
== block
)
11634 if (immediate_origin
== NULL_TREE
)
11639 tree lookahead
= immediate_origin
;
11643 ret_val
= lookahead
;
11644 lookahead
= (TREE_CODE (ret_val
) == BLOCK
11645 ? BLOCK_ABSTRACT_ORIGIN (ret_val
) : NULL
);
11647 while (lookahead
!= NULL
&& lookahead
!= ret_val
);
11649 /* The block's abstract origin chain may not be the *ultimate* origin of
11650 the block. It could lead to a DECL that has an abstract origin set.
11651 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11652 will give us if it has one). Note that DECL's abstract origins are
11653 supposed to be the most distant ancestor (or so decl_ultimate_origin
11654 claims), so we don't need to loop following the DECL origins. */
11655 if (DECL_P (ret_val
))
11656 return DECL_ORIGIN (ret_val
);
11662 /* Return true iff conversion in EXP generates no instruction. Mark
11663 it inline so that we fully inline into the stripping functions even
11664 though we have two uses of this function. */
11667 tree_nop_conversion (const_tree exp
)
11669 tree outer_type
, inner_type
;
11671 if (!CONVERT_EXPR_P (exp
)
11672 && TREE_CODE (exp
) != NON_LVALUE_EXPR
)
11674 if (TREE_OPERAND (exp
, 0) == error_mark_node
)
11677 outer_type
= TREE_TYPE (exp
);
11678 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11683 /* Use precision rather then machine mode when we can, which gives
11684 the correct answer even for submode (bit-field) types. */
11685 if ((INTEGRAL_TYPE_P (outer_type
)
11686 || POINTER_TYPE_P (outer_type
)
11687 || TREE_CODE (outer_type
) == OFFSET_TYPE
)
11688 && (INTEGRAL_TYPE_P (inner_type
)
11689 || POINTER_TYPE_P (inner_type
)
11690 || TREE_CODE (inner_type
) == OFFSET_TYPE
))
11691 return TYPE_PRECISION (outer_type
) == TYPE_PRECISION (inner_type
);
11693 /* Otherwise fall back on comparing machine modes (e.g. for
11694 aggregate types, floats). */
11695 return TYPE_MODE (outer_type
) == TYPE_MODE (inner_type
);
11698 /* Return true iff conversion in EXP generates no instruction. Don't
11699 consider conversions changing the signedness. */
11702 tree_sign_nop_conversion (const_tree exp
)
11704 tree outer_type
, inner_type
;
11706 if (!tree_nop_conversion (exp
))
11709 outer_type
= TREE_TYPE (exp
);
11710 inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
11712 return (TYPE_UNSIGNED (outer_type
) == TYPE_UNSIGNED (inner_type
)
11713 && POINTER_TYPE_P (outer_type
) == POINTER_TYPE_P (inner_type
));
11716 /* Strip conversions from EXP according to tree_nop_conversion and
11717 return the resulting expression. */
11720 tree_strip_nop_conversions (tree exp
)
11722 while (tree_nop_conversion (exp
))
11723 exp
= TREE_OPERAND (exp
, 0);
11727 /* Strip conversions from EXP according to tree_sign_nop_conversion
11728 and return the resulting expression. */
11731 tree_strip_sign_nop_conversions (tree exp
)
11733 while (tree_sign_nop_conversion (exp
))
11734 exp
= TREE_OPERAND (exp
, 0);
11738 /* Avoid any floating point extensions from EXP. */
11740 strip_float_extensions (tree exp
)
11742 tree sub
, expt
, subt
;
11744 /* For floating point constant look up the narrowest type that can hold
11745 it properly and handle it like (type)(narrowest_type)constant.
11746 This way we can optimize for instance a=a*2.0 where "a" is float
11747 but 2.0 is double constant. */
11748 if (TREE_CODE (exp
) == REAL_CST
&& !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp
)))
11750 REAL_VALUE_TYPE orig
;
11753 orig
= TREE_REAL_CST (exp
);
11754 if (TYPE_PRECISION (TREE_TYPE (exp
)) > TYPE_PRECISION (float_type_node
)
11755 && exact_real_truncate (TYPE_MODE (float_type_node
), &orig
))
11756 type
= float_type_node
;
11757 else if (TYPE_PRECISION (TREE_TYPE (exp
))
11758 > TYPE_PRECISION (double_type_node
)
11759 && exact_real_truncate (TYPE_MODE (double_type_node
), &orig
))
11760 type
= double_type_node
;
11762 return build_real (type
, real_value_truncate (TYPE_MODE (type
), orig
));
11765 if (!CONVERT_EXPR_P (exp
))
11768 sub
= TREE_OPERAND (exp
, 0);
11769 subt
= TREE_TYPE (sub
);
11770 expt
= TREE_TYPE (exp
);
11772 if (!FLOAT_TYPE_P (subt
))
11775 if (DECIMAL_FLOAT_TYPE_P (expt
) != DECIMAL_FLOAT_TYPE_P (subt
))
11778 if (TYPE_PRECISION (subt
) > TYPE_PRECISION (expt
))
11781 return strip_float_extensions (sub
);
11784 /* Strip out all handled components that produce invariant
11788 strip_invariant_refs (const_tree op
)
11790 while (handled_component_p (op
))
11792 switch (TREE_CODE (op
))
11795 case ARRAY_RANGE_REF
:
11796 if (!is_gimple_constant (TREE_OPERAND (op
, 1))
11797 || TREE_OPERAND (op
, 2) != NULL_TREE
11798 || TREE_OPERAND (op
, 3) != NULL_TREE
)
11802 case COMPONENT_REF
:
11803 if (TREE_OPERAND (op
, 2) != NULL_TREE
)
11809 op
= TREE_OPERAND (op
, 0);
11815 static GTY(()) tree gcc_eh_personality_decl
;
11817 /* Return the GCC personality function decl. */
11820 lhd_gcc_personality (void)
11822 if (!gcc_eh_personality_decl
)
11823 gcc_eh_personality_decl
= build_personality_function ("gcc");
11824 return gcc_eh_personality_decl
;
11827 /* TARGET is a call target of GIMPLE call statement
11828 (obtained by gimple_call_fn). Return true if it is
11829 OBJ_TYPE_REF representing an virtual call of C++ method.
11830 (As opposed to OBJ_TYPE_REF representing objc calls
11831 through a cast where middle-end devirtualization machinery
11835 virtual_method_call_p (tree target
)
11837 if (TREE_CODE (target
) != OBJ_TYPE_REF
)
11839 target
= TREE_TYPE (target
);
11840 gcc_checking_assert (TREE_CODE (target
) == POINTER_TYPE
);
11841 target
= TREE_TYPE (target
);
11842 if (TREE_CODE (target
) == FUNCTION_TYPE
)
11844 gcc_checking_assert (TREE_CODE (target
) == METHOD_TYPE
);
11848 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11851 obj_type_ref_class (tree ref
)
11853 gcc_checking_assert (TREE_CODE (ref
) == OBJ_TYPE_REF
);
11854 ref
= TREE_TYPE (ref
);
11855 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
11856 ref
= TREE_TYPE (ref
);
11857 /* We look for type THIS points to. ObjC also builds
11858 OBJ_TYPE_REF with non-method calls, Their first parameter
11859 ID however also corresponds to class type. */
11860 gcc_checking_assert (TREE_CODE (ref
) == METHOD_TYPE
11861 || TREE_CODE (ref
) == FUNCTION_TYPE
);
11862 ref
= TREE_VALUE (TYPE_ARG_TYPES (ref
));
11863 gcc_checking_assert (TREE_CODE (ref
) == POINTER_TYPE
);
11864 return TREE_TYPE (ref
);
11867 /* Return true if T is in anonymous namespace. */
11870 type_in_anonymous_namespace_p (const_tree t
)
11872 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11873 bulitin types; those have CONTEXT NULL. */
11874 if (!TYPE_CONTEXT (t
))
11876 return (TYPE_STUB_DECL (t
) && !TREE_PUBLIC (TYPE_STUB_DECL (t
)));
11879 /* Try to find a base info of BINFO that would have its field decl at offset
11880 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11881 found, return, otherwise return NULL_TREE. */
11884 get_binfo_at_offset (tree binfo
, HOST_WIDE_INT offset
, tree expected_type
)
11886 tree type
= BINFO_TYPE (binfo
);
11890 HOST_WIDE_INT pos
, size
;
11894 if (types_same_for_odr (type
, expected_type
))
11899 for (fld
= TYPE_FIELDS (type
); fld
; fld
= DECL_CHAIN (fld
))
11901 if (TREE_CODE (fld
) != FIELD_DECL
)
11904 pos
= int_bit_position (fld
);
11905 size
= tree_to_uhwi (DECL_SIZE (fld
));
11906 if (pos
<= offset
&& (pos
+ size
) > offset
)
11909 if (!fld
|| TREE_CODE (TREE_TYPE (fld
)) != RECORD_TYPE
)
11912 if (!DECL_ARTIFICIAL (fld
))
11914 binfo
= TYPE_BINFO (TREE_TYPE (fld
));
11918 /* Offset 0 indicates the primary base, whose vtable contents are
11919 represented in the binfo for the derived class. */
11920 else if (offset
!= 0)
11922 tree base_binfo
, binfo2
= binfo
;
11924 /* Find BINFO corresponding to FLD. This is bit harder
11925 by a fact that in virtual inheritance we may need to walk down
11926 the non-virtual inheritance chain. */
11929 tree containing_binfo
= NULL
, found_binfo
= NULL
;
11930 for (i
= 0; BINFO_BASE_ITERATE (binfo2
, i
, base_binfo
); i
++)
11931 if (types_same_for_odr (TREE_TYPE (base_binfo
), TREE_TYPE (fld
)))
11933 found_binfo
= base_binfo
;
11937 if ((tree_to_shwi (BINFO_OFFSET (base_binfo
))
11938 - tree_to_shwi (BINFO_OFFSET (binfo
)))
11939 * BITS_PER_UNIT
< pos
11940 /* Rule out types with no virtual methods or we can get confused
11941 here by zero sized bases. */
11942 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo
)))
11943 && (!containing_binfo
11944 || (tree_to_shwi (BINFO_OFFSET (containing_binfo
))
11945 < tree_to_shwi (BINFO_OFFSET (base_binfo
)))))
11946 containing_binfo
= base_binfo
;
11949 binfo
= found_binfo
;
11952 if (!containing_binfo
)
11954 binfo2
= containing_binfo
;
11958 type
= TREE_TYPE (fld
);
11963 /* Returns true if X is a typedef decl. */
11966 is_typedef_decl (tree x
)
11968 return (x
&& TREE_CODE (x
) == TYPE_DECL
11969 && DECL_ORIGINAL_TYPE (x
) != NULL_TREE
);
11972 /* Returns true iff TYPE is a type variant created for a typedef. */
11975 typedef_variant_p (tree type
)
11977 return is_typedef_decl (TYPE_NAME (type
));
11980 /* Warn about a use of an identifier which was marked deprecated. */
11982 warn_deprecated_use (tree node
, tree attr
)
11986 if (node
== 0 || !warn_deprecated_decl
)
11992 attr
= DECL_ATTRIBUTES (node
);
11993 else if (TYPE_P (node
))
11995 tree decl
= TYPE_STUB_DECL (node
);
11997 attr
= lookup_attribute ("deprecated",
11998 TYPE_ATTRIBUTES (TREE_TYPE (decl
)));
12003 attr
= lookup_attribute ("deprecated", attr
);
12006 msg
= TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
)));
12012 expanded_location xloc
= expand_location (DECL_SOURCE_LOCATION (node
));
12014 warning (OPT_Wdeprecated_declarations
,
12015 "%qD is deprecated (declared at %r%s:%d%R): %s",
12016 node
, "locus", xloc
.file
, xloc
.line
, msg
);
12018 warning (OPT_Wdeprecated_declarations
,
12019 "%qD is deprecated (declared at %r%s:%d%R)",
12020 node
, "locus", xloc
.file
, xloc
.line
);
12022 else if (TYPE_P (node
))
12024 tree what
= NULL_TREE
;
12025 tree decl
= TYPE_STUB_DECL (node
);
12027 if (TYPE_NAME (node
))
12029 if (TREE_CODE (TYPE_NAME (node
)) == IDENTIFIER_NODE
)
12030 what
= TYPE_NAME (node
);
12031 else if (TREE_CODE (TYPE_NAME (node
)) == TYPE_DECL
12032 && DECL_NAME (TYPE_NAME (node
)))
12033 what
= DECL_NAME (TYPE_NAME (node
));
12038 expanded_location xloc
12039 = expand_location (DECL_SOURCE_LOCATION (decl
));
12043 warning (OPT_Wdeprecated_declarations
,
12044 "%qE is deprecated (declared at %r%s:%d%R): %s",
12045 what
, "locus", xloc
.file
, xloc
.line
, msg
);
12047 warning (OPT_Wdeprecated_declarations
,
12048 "%qE is deprecated (declared at %r%s:%d%R)",
12049 what
, "locus", xloc
.file
, xloc
.line
);
12054 warning (OPT_Wdeprecated_declarations
,
12055 "type is deprecated (declared at %r%s:%d%R): %s",
12056 "locus", xloc
.file
, xloc
.line
, msg
);
12058 warning (OPT_Wdeprecated_declarations
,
12059 "type is deprecated (declared at %r%s:%d%R)",
12060 "locus", xloc
.file
, xloc
.line
);
12068 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated: %s",
12071 warning (OPT_Wdeprecated_declarations
, "%qE is deprecated", what
);
12076 warning (OPT_Wdeprecated_declarations
, "type is deprecated: %s",
12079 warning (OPT_Wdeprecated_declarations
, "type is deprecated");
12085 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12086 somewhere in it. */
12089 contains_bitfld_component_ref_p (const_tree ref
)
12091 while (handled_component_p (ref
))
12093 if (TREE_CODE (ref
) == COMPONENT_REF
12094 && DECL_BIT_FIELD (TREE_OPERAND (ref
, 1)))
12096 ref
= TREE_OPERAND (ref
, 0);
12102 /* Try to determine whether a TRY_CATCH expression can fall through.
12103 This is a subroutine of block_may_fallthru. */
12106 try_catch_may_fallthru (const_tree stmt
)
12108 tree_stmt_iterator i
;
12110 /* If the TRY block can fall through, the whole TRY_CATCH can
12112 if (block_may_fallthru (TREE_OPERAND (stmt
, 0)))
12115 i
= tsi_start (TREE_OPERAND (stmt
, 1));
12116 switch (TREE_CODE (tsi_stmt (i
)))
12119 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12120 catch expression and a body. The whole TRY_CATCH may fall
12121 through iff any of the catch bodies falls through. */
12122 for (; !tsi_end_p (i
); tsi_next (&i
))
12124 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i
))))
12129 case EH_FILTER_EXPR
:
12130 /* The exception filter expression only matters if there is an
12131 exception. If the exception does not match EH_FILTER_TYPES,
12132 we will execute EH_FILTER_FAILURE, and we will fall through
12133 if that falls through. If the exception does match
12134 EH_FILTER_TYPES, the stack unwinder will continue up the
12135 stack, so we will not fall through. We don't know whether we
12136 will throw an exception which matches EH_FILTER_TYPES or not,
12137 so we just ignore EH_FILTER_TYPES and assume that we might
12138 throw an exception which doesn't match. */
12139 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i
)));
12142 /* This case represents statements to be executed when an
12143 exception occurs. Those statements are implicitly followed
12144 by a RESX statement to resume execution after the exception.
12145 So in this case the TRY_CATCH never falls through. */
12150 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12151 need not be 100% accurate; simply be conservative and return true if we
12152 don't know. This is used only to avoid stupidly generating extra code.
12153 If we're wrong, we'll just delete the extra code later. */
12156 block_may_fallthru (const_tree block
)
12158 /* This CONST_CAST is okay because expr_last returns its argument
12159 unmodified and we assign it to a const_tree. */
12160 const_tree stmt
= expr_last (CONST_CAST_TREE (block
));
12162 switch (stmt
? TREE_CODE (stmt
) : ERROR_MARK
)
12166 /* Easy cases. If the last statement of the block implies
12167 control transfer, then we can't fall through. */
12171 /* If SWITCH_LABELS is set, this is lowered, and represents a
12172 branch to a selected label and hence can not fall through.
12173 Otherwise SWITCH_BODY is set, and the switch can fall
12175 return SWITCH_LABELS (stmt
) == NULL_TREE
;
12178 if (block_may_fallthru (COND_EXPR_THEN (stmt
)))
12180 return block_may_fallthru (COND_EXPR_ELSE (stmt
));
12183 return block_may_fallthru (BIND_EXPR_BODY (stmt
));
12185 case TRY_CATCH_EXPR
:
12186 return try_catch_may_fallthru (stmt
);
12188 case TRY_FINALLY_EXPR
:
12189 /* The finally clause is always executed after the try clause,
12190 so if it does not fall through, then the try-finally will not
12191 fall through. Otherwise, if the try clause does not fall
12192 through, then when the finally clause falls through it will
12193 resume execution wherever the try clause was going. So the
12194 whole try-finally will only fall through if both the try
12195 clause and the finally clause fall through. */
12196 return (block_may_fallthru (TREE_OPERAND (stmt
, 0))
12197 && block_may_fallthru (TREE_OPERAND (stmt
, 1)));
12200 if (TREE_CODE (TREE_OPERAND (stmt
, 1)) == CALL_EXPR
)
12201 stmt
= TREE_OPERAND (stmt
, 1);
12207 /* Functions that do not return do not fall through. */
12208 return (call_expr_flags (stmt
) & ECF_NORETURN
) == 0;
12210 case CLEANUP_POINT_EXPR
:
12211 return block_may_fallthru (TREE_OPERAND (stmt
, 0));
12214 return block_may_fallthru (TREE_OPERAND (stmt
, 1));
12220 return lang_hooks
.block_may_fallthru (stmt
);
12224 /* True if we are using EH to handle cleanups. */
12225 static bool using_eh_for_cleanups_flag
= false;
12227 /* This routine is called from front ends to indicate eh should be used for
12230 using_eh_for_cleanups (void)
12232 using_eh_for_cleanups_flag
= true;
12235 /* Query whether EH is used for cleanups. */
12237 using_eh_for_cleanups_p (void)
12239 return using_eh_for_cleanups_flag
;
12242 /* Wrapper for tree_code_name to ensure that tree code is valid */
12244 get_tree_code_name (enum tree_code code
)
12246 const char *invalid
= "<invalid tree code>";
12248 if (code
>= MAX_TREE_CODES
)
12251 return tree_code_name
[code
];
12254 /* Drops the TREE_OVERFLOW flag from T. */
12257 drop_tree_overflow (tree t
)
12259 gcc_checking_assert (TREE_OVERFLOW (t
));
12261 /* For tree codes with a sharing machinery re-build the result. */
12262 if (TREE_CODE (t
) == INTEGER_CST
)
12263 return wide_int_to_tree (TREE_TYPE (t
), t
);
12265 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12266 and drop the flag. */
12268 TREE_OVERFLOW (t
) = 0;
12272 /* Given a memory reference expression T, return its base address.
12273 The base address of a memory reference expression is the main
12274 object being referenced. For instance, the base address for
12275 'array[i].fld[j]' is 'array'. You can think of this as stripping
12276 away the offset part from a memory address.
12278 This function calls handled_component_p to strip away all the inner
12279 parts of the memory reference until it reaches the base object. */
12282 get_base_address (tree t
)
12284 while (handled_component_p (t
))
12285 t
= TREE_OPERAND (t
, 0);
12287 if ((TREE_CODE (t
) == MEM_REF
12288 || TREE_CODE (t
) == TARGET_MEM_REF
)
12289 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
12290 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
12292 /* ??? Either the alias oracle or all callers need to properly deal
12293 with WITH_SIZE_EXPRs before we can look through those. */
12294 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
12300 #include "gt-tree.h"