/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "langhooks.h"
/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   one-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
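/* For instance (an illustrative sketch, not part of the original file):
   a GIMPLE_ASSIGN uses struct gimple_statement_with_ops, whose last
   member is the trailing array "tree op[1]".  An assignment with three
   operands is therefore allocated

     sizeof (struct gimple_statement_with_ops) + 2 * sizeof (tree)

   bytes, and gimple_ops locates the operand vector at

     (char *) stmt + gimple_ops_offset_[GSS_WITH_OPS].  */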
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE
#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};
/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed NULL to start with an empty sequence.  */
/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}
/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}
/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
			    unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}
/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}
/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
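/* Usage sketch (hypothetical; FNDECL, X, Y, RES and GSI are assumed to
   come from the caller's context): build "res = f (x, y)" and insert it
   before the statement at GSI:

     gimple call = gimple_build_call (fndecl, 2, x, y);
     gimple_call_set_lhs (call, res);
     gsi_insert_before (&gsi, call, GSI_SAME_STMT);  */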
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}
/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}
/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}
/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
					    PASS_MEM_STAT);
}
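/* Usage sketch (hypothetical operands): "lhs = a + b" can be built
   directly from its pieces with the gimple.h convenience macro

     gimple stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, a, b);

   whereas gimple_build_assign (lhs, rhs) extracts the sub-code and
   operands from an existing tree RHS, as shown above.  */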
/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
				   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }
  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);

  return gimple_seq_last_stmt (*seq_p);
}
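/* Usage sketch (hypothetical): emitting "dst = src" while gimplifying,
   where SRC may still be an unflattened tree expression:

     gimple_seq seq = NULL;
     gimple stmt = gimplify_assign (dst, src, &seq);  */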
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
			       tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
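/* For example (illustrative): for COND == 'a' the result is the triple
   (NE_EXPR, a, 0), i.e. 'if (a != 0)'; for COND == '!a' it is
   (EQ_EXPR, a, 0), i.e. 'if (a == 0)'.  */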
/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}
/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}
/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}
/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}
/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
		    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}
/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
		      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  VEC_length (tree, inputs),
			  VEC_length (tree, outputs),
			  VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}
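/* Usage sketch (hypothetical): building 'asm volatile ("" : : : "memory")',
   where each clobber is a TREE_LIST whose value is a STRING_CST:

     VEC(tree,gc) *clobbers = NULL;
     tree c = build_tree_list (NULL_TREE, build_string (6, "memory"));
     VEC_safe_push (tree, gc, clobbers, c);
     gimple s = gimple_build_asm_vec ("", NULL, NULL, clobbers, NULL);
     gimple_asm_set_volatile (s, true);  */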
/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}
/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}
/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}
/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}
/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}
/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}
/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}
/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}
/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}
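/* Usage sketch (hypothetical): a two-case switch on INDEX, where the
   labels are CASE_LABEL_EXPRs (see build_case_label):

     gimple s = gimple_build_switch (2, index, default_case,
				     case1, case2);  */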
/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}
/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}
/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}
/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}
/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}
/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}
/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}
/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}
/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}
/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}
/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}
/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}
/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}
/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}
/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}
/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}
/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}
/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}
/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
		     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}
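/* Usage sketch (hypothetical): counting the call statements in SEQ with
   a statement callback; the operand callback may be NULL:

     static tree
     count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
	 ++*(unsigned *) wi->info;
       *handled_ops_p = true;
       return NULL_TREE;
     }

     unsigned count = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &count;
     walk_gimple_seq (seq, count_calls_cb, NULL, &wi);  */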
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
			       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really a LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
	 is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
				 wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}
/* Return the body of GIMPLE statements for function FN.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}
/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */

bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}
/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}
/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}
/* Return the "fn spec" string for call STMT.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}
/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}
/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
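/* For example (an illustrative, hypothetical fnspec): the string "1rW"
   would mean the call returns its first argument ('1'), argument 1 is
   not clobbered and does not escape ('r'), and argument 2 may be
   written through directly but does not escape ('W').  */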
/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && is_gimple_val (gimple_op (gs, 1)));
}


/* Return true if GS is a SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}
/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
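/* Usage sketch (hypothetical): folding "x = a * 1" in place into the
   copy "x = a", given an iterator GSI pointing at the statement (the
   gimple.h wrapper gimple_assign_set_rhs_with_ops passes NULL_TREE
   for OP3):

     gimple_assign_set_rhs_with_ops (&gsi, TREE_CODE (a), a, NULL_TREE);
     update_stmt (gsi_stmt (gsi));  */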
/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}
/* Set the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */

void
gimple_set_lhs (gimple stmt, tree lhs)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    gimple_assign_set_lhs (stmt, lhs);
  else if (code == GIMPLE_CALL)
    gimple_call_set_lhs (stmt, lhs);
  else
    gcc_unreachable ();
}
/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}
2222 /* Return a deep copy of statement STMT. All the operands from STMT
2223 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2224 and VUSE operand arrays are set to empty in the new copy. The new
2225 copy isn't part of any sequence. */
2228 gimple_copy (gimple stmt
)
2230 enum gimple_code code
= gimple_code (stmt
);
2231 unsigned num_ops
= gimple_num_ops (stmt
);
2232 gimple copy
= gimple_alloc (code
, num_ops
);
2235 /* Shallow copy all the fields from STMT. */
2236 memcpy (copy
, stmt
, gimple_size (code
));
2237 gimple_init_singleton (copy
);
2239 /* If STMT has sub-statements, deep-copy them as well. */
2240 if (gimple_has_substatements (stmt
))
2245 switch (gimple_code (stmt
))
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_EH_ELSE:
	  new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
	  gimple_eh_else_set_n_body (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
	  gimple_eh_else_set_e_body (copy, new_seq);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  copy->gimple_omp_for.iter
	    = ggc_alloc_vec_gimple_omp_for_iter
		(gimple_omp_for_collapse (stmt));
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU  */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
	  gimple_transaction_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_def_ops (copy, NULL);
      gimple_set_use_ops (copy, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* SSA operands need to be updated.  */
  gimple_set_modified (copy, true);

  return copy;
}
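/* Example (editor's sketch, not part of the original sources; names are
   illustrative only): a typical use of gimple_copy is to duplicate a
   statement before modifying the copy, e.g.

     gimple new_stmt = gimple_copy (stmt);
     gimple_assign_set_lhs (new_stmt, new_lhs);
     gsi_insert_after (&gsi, new_stmt, GSI_SAME_STMT);

   Because gimple_copy unshares all tree operands and clears the def/use
   operand vectors, the copy is marked modified and must go through SSA
   operand updating before the function's SSA form is valid again.  */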
/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (gimple_code (s) == GIMPLE_ASM
      && gimple_asm_volatile_p (s))
    return true;

  if (is_gimple_call (s))
    {
      int flags = gimple_call_flags (s);

      /* An infinite loop is considered a side effect.  */
      if (!(flags & (ECF_CONST | ECF_PURE))
	  || (flags & ECF_LOOPING_CONST_OR_PURE))
	return true;
    }

  return false;
}
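/* Example (editor's sketch, illustrative declarations): for

     int pure_fn (int) __attribute__((const));
     x = pure_fn (y);    -- gimple_has_side_effects: false (ECF_CONST set)
     printf ("%d", x);   -- gimple_has_side_effects: true (neither ECF_CONST
			    nor ECF_PURE, so the call may write memory)

   A const/pure function that may loop forever still counts as having a
   side effect via ECF_LOOPING_CONST_OR_PURE.  */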
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

static bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    default:
      break;
    }

  return false;
}
/* Return true if statement S can trap.  */

bool
gimple_could_trap_p (gimple s)
{
  return gimple_could_trap_p_1 (s, true, true);
}

/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, true, false);
}
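/* Example (editor's sketch): for the assignment 'x = a / b' with integer
   operands, gimple_assign_rhs_could_trap_p returns true because
   operation_could_trap_p sees TRUNC_DIV_EXPR with a possibly-zero
   divisor (rhs2), whereas 'x = a + b' with -fwrapv semantics cannot
   trap.  gimple_assign_rhs_could_trap_p deliberately skips operand 0,
   the LHS, by passing include_stores == false.  */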
/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
  int i, total_tuples = 0, total_bytes = 0;

  if (! GATHER_STATISTICS)
    {
      fprintf (stderr, "No gimple statistics\n");
      return;
    }

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
	       gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
}
/* Return the number of operands needed on the RHS of a GIMPLE
   assignment for an expression with tree code CODE.  */

unsigned
get_gimple_rhs_num_ops (enum tree_code code)
{
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);

  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
    return 1;
  else if (rhs_class == GIMPLE_BINARY_RHS)
    return 2;
  else if (rhs_class == GIMPLE_TERNARY_RHS)
    return 3;
  else
    gcc_unreachable ();
}
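/* Example (editor's sketch): get_gimple_rhs_num_ops (PLUS_EXPR) is 2,
   since tcc_binary codes map to GIMPLE_BINARY_RHS in the table below;
   get_gimple_rhs_num_ops (NEGATE_EXPR) is 1 (GIMPLE_UNARY_RHS) and
   get_gimple_rhs_num_ops (COND_EXPR) is 3 (GIMPLE_TERNARY_RHS).  */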
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)   			    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS 			    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR						    \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */

/* Validation of GIMPLE expressions.  */

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
	  || TREE_CODE (t) == WITH_SIZE_EXPR
	  /* These are complex lvalues, but don't have addresses, so they
	     go here.  */
	  || TREE_CODE (t) == BIT_FIELD_REF);
}
/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
				&& !tree_could_throw_p (t)
				&& is_gimple_val (TREE_OPERAND (t, 0))
				&& is_gimple_val (TREE_OPERAND (t, 1))));
}
/* Return true if T is something whose address can be taken.  */

bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t)
	  || TREE_CODE (t) == MEM_REF);
}
/* Return true if T is a valid gimple constant.  */

bool
is_gimple_constant (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
      return true;

    /* Vector constant constructors are gimple invariant.  */
    case CONSTRUCTOR:
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	return TREE_CONSTANT (t);
      else
	return false;

    default:
      return false;
    }
}
/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
	   || TREE_CODE (op) == ARRAY_RANGE_REF)
	  && !is_gimple_val (TREE_OPERAND (op, 1)))
	return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}
/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
	      && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
		  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}
/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
	      && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
		  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}
/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}
/* Return true if T is a variable.  */

bool
is_gimple_variable (tree t)
{
  return (TREE_CODE (t) == VAR_DECL
	  || TREE_CODE (t) == PARM_DECL
	  || TREE_CODE (t) == RESULT_DECL
	  || TREE_CODE (t) == SSA_NAME);
}

/* Return true if T is a GIMPLE identifier (something with an address).  */

bool
is_gimple_id (tree t)
{
  return (is_gimple_variable (t)
	  || TREE_CODE (t) == FUNCTION_DECL
	  || TREE_CODE (t) == LABEL_DECL
	  || TREE_CODE (t) == CONST_DECL
	  /* Allow string constants, since they are addressable.  */
	  || TREE_CODE (t) == STRING_CST);
}
/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (virtual_operand_p (t))
    return false;

  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that they might imply.  All
     around, it seems safest to not do too much optimization with these
     at the tree level at all.  We'll have to rely on the rtl optimizers
     to clean this up, as there we've got all the appropriate bits
     exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}
/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}
/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
	  || TREE_CODE (t) == INTEGER_CST
	  || (TREE_CODE (t) == ADDR_EXPR
	      && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
		  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  if (TREE_CODE (t) == SSA_NAME
      || DECL_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t)
      || TREE_CODE (t) == MEM_REF
      || TREE_CODE (t) == TARGET_MEM_REF)
    return t;
  else
    return NULL_TREE;
}
void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
    case tcc_declaration:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
	  || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
	     == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
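/* Example (editor's sketch): canonicalize_cond_expr_cond turns '!x'
   into 'x == 0' and '(a < b) ? 1 : 0' into 'a < b'.  An input that does
   not pass is_gimple_condexpr after these rewrites, such as a condition
   with a non-register operand, yields NULL_TREE.  */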
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
  gimple new_stmt;

  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));

  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
					       vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

	-2: The pair (T1, T2) has just been inserted in the table.
	 0: T1 and T2 are different types.
	 1: T1 and T2 are the same type.

   The two elements in the SAME_P array are indexed by the comparison
   mode gtc_mode.  */

struct type_pair_d
{
  unsigned int uid1;
  unsigned int uid2;
  signed char same_p[2];
};
typedef struct type_pair_d *type_pair_t;
DEF_VEC_P(type_pair_t);
DEF_VEC_ALLOC_P(type_pair_t,heap);

#define GIMPLE_TYPE_PAIR_SIZE 16381
static struct type_pair_d *type_pair_cache;
/* Lookup the pair of types T1 and T2 in *VISITED_P.  Insert a new
   entry if none existed.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  if (type_pair_cache == NULL)
    type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);

  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* iterative_hash_hashval_t would imply function calls.
     We know that UIDs are in limited range.  */
  index = ((((unsigned HOST_WIDE_INT) uid1 << HOST_BITS_PER_WIDE_INT / 2)
	    + uid2)
	   % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache[index].uid1 == uid1
      && type_pair_cache[index].uid2 == uid2)
    return &type_pair_cache[index];

  type_pair_cache[index].uid1 = uid1;
  type_pair_cache[index].uid2 = uid2;
  type_pair_cache[index].same_p[0] = -2;
  type_pair_cache[index].same_p[1] = -2;

  return &type_pair_cache[index];
}
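/* Editor's note: the cache above is direct-mapped, so two pairs that
   land on the same index simply evict each other; eviction only costs a
   recomputation, because a missing entry is re-initialized to the
   "just inserted" state (-2).  A minimal standalone sketch of the same
   indexing scheme (assumed names, 16-bit shift for illustration; the
   real code shifts by HOST_BITS_PER_WIDE_INT / 2):

     #define CACHE_SIZE 16381
     static unsigned pair_index (unsigned uid1, unsigned uid2)
     {
       return (unsigned) ((((unsigned long long) uid1 << 16) + uid2)
			  % CACHE_SIZE);
     }
*/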
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
  bool on_sccstack;
  union {
    hashval_t hash;
    signed char same_p;
  } u;
};

static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;

/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;
/* Lookup an existing leader for T and return it or NULL_TREE, if
   there is none in the cache.  */

static inline tree
gimple_lookup_type_leader (tree t)
{
  gimple_type_leader_entry *leader;

  if (!gimple_type_leader)
    return NULL_TREE;

  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type != t)
    return NULL_TREE;

  return leader->leader;
}
/* Return true if T1 and T2 have the same name.  Return false if
   exactly one of them has a name; return true if neither has one.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1),
					   DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
			     VEC(type_pair_t, heap) **,
			     struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2 while operating in type-merging mode.
   Update the merging status if it is not part of the SCC containing the
   callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */
static bool
gtc_visit (tree t1, tree t2,
	   struct sccs *state,
	   VEC(type_pair_t, heap) **sccstack,
	   struct pointer_map_t *sccstate,
	   struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
				   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
/* Worker for gimple_types_compatible_p.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
			     VEC(type_pair_t, heap) **sccstack,
			     struct pointer_map_t *sccstate,
			     struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p[GTC_MERGE] == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  VEC_safe_push (type_pair_t, heap, *sccstack, p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
		      DECL_CONTEXT (TYPE_NAME (t2)),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack)
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	goto different_types;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    goto same_types;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    goto different_types;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		goto same_types;
	      else
		goto different_types;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		      state, sccstack, sccstate, sccstate_obstack))
	goto different_types;

      if (!comp_type_attributes (t1, t2))
	goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	goto same_types;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
			      state, sccstack, sccstate, sccstate_obstack))
		goto different_types;
	    }

	  if (parms1 || parms2)
	    goto different_types;

	  goto same_types;
	}

    case OFFSET_TYPE:
      {
	if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
			state, sccstack, sccstate, sccstate_obstack)
	    || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
			   TYPE_OFFSET_BASETYPE (t2),
			   state, sccstack, sccstate, sccstate_obstack))
	  goto different_types;

	goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	/* If the two pointers have different ref-all attributes,
	   they can't be the same type.  */
	if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	  goto different_types;

	/* Otherwise, pointer and reference types are the same if the
	   pointed-to types are the same.  */
	if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
		       state, sccstack, sccstate, sccstate_obstack))
	  goto same_types;

	goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
	tree min1 = TYPE_MIN_VALUE (t1);
	tree max1 = TYPE_MAX_VALUE (t1);
	tree min2 = TYPE_MIN_VALUE (t2);
	tree max2 = TYPE_MAX_VALUE (t2);
	bool min_equal_p = false;
	bool max_equal_p = false;

	/* If either type has a minimum value, the other type must
	   have the same.  */
	if (min1 == NULL_TREE && min2 == NULL_TREE)
	  min_equal_p = true;
	else if (min1 && min2 && operand_equal_p (min1, min2, 0))
	  min_equal_p = true;

	/* Likewise, if either type has a maximum value, the other
	   type must have the same.  */
	if (max1 == NULL_TREE && max2 == NULL_TREE)
	  max_equal_p = true;
	else if (max1 && max2 && operand_equal_p (max1, max2, 0))
	  max_equal_p = true;

	if (!min_equal_p || !max_equal_p)
	  goto different_types;

	goto same_types;
      }

    case ENUMERAL_TYPE:
      {
	/* FIXME lto, we cannot check bounds on enumeral types because
	   different front ends will produce different values.
	   In C, enumeral types are integers, while in C++ each element
	   will have its own symbolic value.  We should decide how enums
	   are to be represented in GIMPLE and have each front end lower
	   to that.  */
	tree v1, v2;

	/* For enumeral types, all the values must be the same.  */
	if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
	  goto same_types;

	for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
	     v1 && v2;
	     v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
	  {
	    tree c1 = TREE_VALUE (v1);
	    tree c2 = TREE_VALUE (v2);

	    if (TREE_CODE (c1) == CONST_DECL)
	      c1 = DECL_INITIAL (c1);

	    if (TREE_CODE (c2) == CONST_DECL)
	      c2 = DECL_INITIAL (c2);

	    if (tree_int_cst_equal (c1, c2) != 1)
	      goto different_types;

	    if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
	      goto different_types;
	  }

	/* If one enumeration has more values than the other, they
	   are not the same.  */
	if (v1 || v2)
	  goto different_types;

	goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 && f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Different field kinds are not compatible.  */
	    if (TREE_CODE (f1) != TREE_CODE (f2))
	      goto different_types;
	    /* Field decls must have the same name and offset.  */
	    if (TREE_CODE (f1) == FIELD_DECL
		&& (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		    || !gimple_compare_field_offset (f1, f2)))
	      goto different_types;
	    /* All entities should have the same name and type.  */
	    if (DECL_NAME (f1) != DECL_NAME (f2)
		|| !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
			       state, sccstack, sccstate, sccstate_obstack))
	      goto different_types;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  goto different_types;

	goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
	 comparison result.  */
      do
	{
	  struct sccs *cstate;
	  x = VEC_pop (type_pair_t, *sccstack);
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  x->same_p[GTC_MERGE] = state->u.same_p;
	}
      while (x != p);
    }

  return state->u.same_p;
}
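/* Editor's note (illustrative): the optimistic SCC treatment above
   matters for self-referential types.  Comparing two copies of

     struct list { struct list *next; int data; };

   from different TUs revisits the pair (list, list) through the 'next'
   pointer; the optimistic "assume equal" state (same_p == 1 on entry)
   breaks the cycle, and the whole SCC is committed as equal or unequal
   only when the root pair is popped.  */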
/* Return true iff T1 and T2 are structurally identical.  When comparing
   for type merging an incomplete type and a complete type are considered
   different; otherwise they are considered compatible.  */

bool
gimple_types_compatible_p (tree t1, tree t2)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
	  || FIXED_POINT_TYPE_P (t1))
	return true;

      /* For other types fall through to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
	 same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
				     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
			    struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  else
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
					sccstack, sccstate, sccstate_obstack);
      if (!cstate)
	cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parents SCC mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
      if (!cstate->on_sccstack)
	return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}
/* Hash NAME with the previous hash value V and return it.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (name == NULL_TREE)
    return v;
  v = iterative_hash_hashval_t (TREE_CODE (name), v);
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  if (!name)
    return v;
  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
}
/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;
  hashval_t hash;
};

/* Compare two type, hashvalue pairs.  */

static int
type_hash_pair_compare (const void *p1_, const void *p2_)
{
  const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
  const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}
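/* Editor's note (illustrative): sorting SCC members by their local hash
   before mixing makes the final SCC hash independent of DFS visit
   order.  For mutually recursive structs A and B, visiting A first or
   B first must yield the same hashes; this holds because each member's
   final hash mixes the other members' local hashes in sorted order
   rather than in stack-pop order.  */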
/* Return a hash value for the gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
			    VEC(tree, heap) **sccstack,
			    struct pointer_map_t *sccstate,
			    struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
	       sccstack, sccstate, sccstate_obstack);
  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash the domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
	       sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = visit (TYPE_METHOD_BASETYPE (type), state, v,
		   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
		 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = visit (TREE_VALUE (p), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	{
	  v = iterative_hash_name (DECL_NAME (f), v);
	  v = visit (TREE_TYPE (f), state, v,
		     sccstack, sccstate, sccstate_obstack);
	  nf++;
	}

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = VEC_pop (tree, *sccstack);
      /* Optimize SCC size one.  */
      if (x == type)
	{
	  state->on_sccstack = false;
	  m = ggc_alloc_cleared_tree_int_map ();
	  m->base.from = x;
	  m->to = v;
	  slot = htab_find_slot (type_hash_cache, m, INSERT);
	  gcc_assert (!*slot);
	  *slot = (void *) m;
	}
      else
	{
	  struct sccs *cstate;
	  unsigned first, i, size, j;
	  struct type_hash_pair *pairs;
	  /* Pop off the SCC and build an array of type, hash pairs.  */
	  first = VEC_length (tree, *sccstack) - 1;
	  while (VEC_index (tree, *sccstack, first) != type)
	    --first;
	  size = VEC_length (tree, *sccstack) - first + 1;
	  pairs = XALLOCAVEC (struct type_hash_pair, size);
	  i = 0;
	  cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	  cstate->on_sccstack = false;
	  pairs[i].type = x;
	  pairs[i].hash = cstate->u.hash;
	  do
	    {
	      x = VEC_pop (tree, *sccstack);
	      cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
	      cstate->on_sccstack = false;
	      ++i;
	      pairs[i].type = x;
	      pairs[i].hash = cstate->u.hash;
	    }
	  while (x != type);
	  gcc_assert (i + 1 == size);
	  /* Sort the arrays of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent on
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
	  qsort (pairs, size, sizeof (struct type_hash_pair),
		 type_hash_pair_compare);
	  for (i = 0; i < size; ++i)
	    {
	      hashval_t hash;
	      m = ggc_alloc_cleared_tree_int_map ();
	      m->base.from = pairs[i].type;
	      hash = pairs[i].hash;
	      /* Skip same hashes.  */
	      for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
		;
	      for (; j < size; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
		hash = iterative_hash_hashval_t (pairs[j].hash, hash);
	      m->to = hash;
	      if (pairs[i].type == type)
		v = hash;
	      slot = htab_find_slot (type_hash_cache, m, INSERT);
	      gcc_assert (!*slot);
	      *slot = (void *) m;
	    }
	}
    }

  return iterative_hash_hashval_t (v, val);
}
/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  if (type_hash_cache == NULL)
    type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
				       tree_int_map_eq, NULL);

  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
				    &sccstack, sccstate, &sccstate_obstack);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
/* Return a hash value for the gimple type TYPE combined with VAL.

   The hash value returned is equal for types considered compatible
   by gimple_canonical_types_compatible_p.  */

static hashval_t
iterative_hash_canonical_type (tree type, hashval_t val)
{
  hashval_t v;
  void **slot;
  struct tree_int_map *mp, m;

  m.base.from = type;
  if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
  v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
  v = iterative_hash_hashval_t (TYPE_MODE (type), v);

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type)
      || TREE_CODE (type) == VECTOR_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == OFFSET_TYPE
      || POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse to the pointed-to type.  */
  if (POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
      v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
      v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
      v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
    }

  /* For integer types hash only the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);

  /* For array types hash the domain bounds and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      /* OMP lowering can introduce error_mark_node in place of
	 random local decls in types.  */
      if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
	v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
      if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
	v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = iterative_hash_canonical_type (TREE_TYPE (type), v);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
	v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);

      v = iterative_hash_canonical_type (TREE_TYPE (type), v);

      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
	{
	  v = iterative_hash_canonical_type (TREE_VALUE (p), v);
	  na++;
	}

      v = iterative_hash_hashval_t (na, v);
    }

  if (RECORD_OR_UNION_TYPE_P (type))
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
	if (TREE_CODE (f) == FIELD_DECL)
	  {
	    v = iterative_hash_canonical_type (TREE_TYPE (f), v);
	    nf++;
	  }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Cache the just computed hash value.  */
  mp = ggc_alloc_cleared_tree_int_map ();
  mp->base.from = type;
  mp->to = v;
  *slot = (void *) mp;

  return iterative_hash_hashval_t (v, val);
}
static hashval_t
gimple_canonical_type_hash (const void *p)
{
  if (canonical_type_hash_cache == NULL)
    canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
						 tree_int_map_eq, NULL);

  return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
}
/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
				    CONST_CAST_TREE (t2));
}
/* Worker for gimple_register_type.
   Register type T in the global type table gimple_types.
   When REGISTERING_MV is false first recurse for the main variant of T.  */

static tree
gimple_register_type_1 (tree t, bool registering_mv)
{
  void **slot;
  gimple_type_leader_entry *leader;

  /* If we registered this type before return the cached result.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type == t)
    return leader->leader;

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.
     It also makes sure that main variants will be merged to main variants.
     As we are operating on a possibly partially fixed up type graph
     do not bother to recurse more than once, otherwise we may end up
     walking in circles.
     If we are registering a main variant it will either remain its
     own main variant or it will be merged to something else in which
     case we do not care for the main variant leader.  */
  if (!registering_mv
      && TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);

  /* See if we already have an equivalent type registered.  */
  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);
      leader->type = t;
      leader->leader = new_type;
      return new_type;
    }

  /* If not, insert it to the cache and the hash.  */
  leader->type = t;
  leader->leader = t;
  *slot = (void *) t;
  return t;
}

/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  gcc_assert (TYPE_P (t));

  if (!gimple_type_leader)
    gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
			  (GIMPLE_TYPE_LEADER_SIZE);

  if (gimple_types == NULL)
    gimple_types = htab_create_ggc (16381, gimple_type_hash,
				    gimple_type_eq, 0);

  return gimple_register_type_1 (t, false);
}
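/* Example (editor's sketch): the LTO streamer calls gimple_register_type
   on each type it reads in.  If two TUs both define
   'struct point { int x, y; };', the second registration finds the
   first type in gimple_types via gimple_type_hash/gimple_type_eq and
   returns it as leader, so both TUs end up sharing one tree node for
   the type.  */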
/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true iff T1 and T2 are structurally identical for what
   TBAA is concerned.  */

static bool
gimple_canonical_types_compatible_p (tree t1, tree t2)
{
  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment, or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
	return false;

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  /* If the two pointers have different ref-all attributes,
	     they can't be the same type.  */
	  if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	    return false;

	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;

	  if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
	    return false;

	  if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2));

      return true;
    }

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						TREE_TYPE (t2)))
	return false;

      if (!comp_type_attributes (t1, t2))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2)))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2)))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_canonical_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
					      CONST_CAST_TREE (t2));
}

/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.

   ??? This merging does not exactly match how the tree.c middle-end
   functions will assign TYPE_CANONICAL when new types are created
   during optimization (which at least happens for pointer and array
   types).  */

tree
gimple_register_canonical_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  if (TYPE_CANONICAL (t))
    return TYPE_CANONICAL (t);

  if (gimple_canonical_types == NULL)
    gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
					      gimple_canonical_type_eq, 0);

  slot = htab_find_slot (gimple_canonical_types, t, INSERT);
  if (*slot
      && *(tree *) slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      TYPE_CANONICAL (t) = new_type;
      t = new_type;
    }
  else
    {
      TYPE_CANONICAL (t) = t;
      *slot = (void *) t;
    }

  return t;
}

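/* Illustrative sketch, not part of the original file: the canonical
   leaders computed above are compared by pointer equality by the TBAA
   machinery, so two aggregate types end up treated as the same alias
   class when their leaders coincide.  The helper name is
   hypothetical.  */
#if 0
static bool
example_same_tbaa_class_p (tree t1, tree t2)
{
  return (gimple_register_canonical_type (t1)
	  == gimple_register_canonical_type (t2));
}
#endif
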
/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_types),
	     (long) htab_elements (gimple_types),
	     (long) gimple_types->searches,
	     (long) gimple_types->collisions,
	     htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (type_hash_cache)
    fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (type_hash_cache),
	     (long) htab_elements (type_hash_cache),
	     (long) type_hash_cache->searches,
	     (long) type_hash_cache->collisions,
	     htab_collisions (type_hash_cache));
  else
    fprintf (stderr, "GIMPLE type hash table is empty\n");
  if (gimple_canonical_types)
    fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_canonical_types),
	     (long) htab_elements (gimple_canonical_types),
	     (long) gimple_canonical_types->searches,
	     (long) gimple_canonical_types->collisions,
	     htab_collisions (gimple_canonical_types));
  else
    fprintf (stderr, "GIMPLE canonical type table is empty\n");
  if (canonical_type_hash_cache)
    fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (canonical_type_hash_cache),
	     (long) htab_elements (canonical_type_hash_cache),
	     (long) canonical_type_hash_cache->searches,
	     (long) canonical_type_hash_cache->collisions,
	     htab_collisions (canonical_type_hash_cache));
  else
    fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
}

/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (gimple_canonical_types)
    {
      htab_delete (gimple_canonical_types);
      gimple_canonical_types = NULL;
    }
  if (type_hash_cache)
    {
      htab_delete (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (canonical_type_hash_cache)
    {
      htab_delete (canonical_type_hash_cache);
      canonical_type_hash_cache = NULL;
    }
  if (type_pair_cache)
    {
      free (type_pair_cache);
      type_pair_cache = NULL;
    }
  gimple_type_leader = NULL;
}

/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
	   ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
	  || type1 == int128_unsigned_type_node))
    return unsignedp
	   ? int128_unsigned_type_node
	   : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node) \
  (TYPE_MODE (type) == TYPE_MODE (node) \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}

/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}

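/* Illustrative sketch, not part of the original file: a typical use of
   the wrappers above is to force arithmetic into the unsigned variant
   of a type so that wrap-around is well defined.  The helper name is
   hypothetical.  */
#if 0
static tree
example_wrapping_variant (tree type)
{
  /* 'int' yields 'unsigned int'; an already-unsigned integral type is
     returned unchanged.  */
  return gimple_unsigned_type (type);
}
#endif
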
/* Return the typed-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  return -1;
}

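/* Illustrative sketch, not part of the original file: source-level
   accesses that the union special case above maps to alias set 0 (the
   "conflicts with everything" set).  */
#if 0
union example_u { int i; float f; };

static int
example_pun (union example_u *p)
{
  p->f = 1.0f;	/* COMPONENT_REF directly through the union ...  */
  return p->i;	/* ... so both accesses get the alias-everything set.  */
}
#endif
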
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
	count_p->num_stores++;
      else
	count_p->num_loads++;
    }

  return NULL_TREE;
}

/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}

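/* Illustrative sketch, not part of the original file: querying how a
   pointer SSA name is used by a single statement.  The helper name is
   hypothetical.  */
#if 0
static bool
example_ptr_only_loaded_p (tree ptr, gimple stmt)
{
  unsigned uses, loads, stores;
  count_uses_and_derefs (ptr, stmt, &uses, &loads, &stores);
  return loads > 0 && stores == 0;
}
#endif
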
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}

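/* Illustrative sketch, not part of the original file: the expected
   results of the helper above for two common operand shapes.  The
   function is hypothetical and exists only to document the contract.  */
#if 0
static void
example_bases (tree component_ref, tree ssa_name)
{
  /* For a store like 'a.b[3] = 1' the COMPONENT_REF/ARRAY_REF chain is
     peeled off and the VAR_DECL 'a' is returned.  */
  tree base = get_base_loadstore (component_ref);
  /* An SSA name is neither a decl nor a memory reference, so here the
     result is NULL_TREE.  */
  tree no_base = get_base_loadstore (ssa_name);
  (void) base;
  (void) no_base;
}
#endif
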
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), data);
	    }
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), data);
	    }
	}
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = PHI_ARG_DEF (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }

  return ret;
}

/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
			  bool (*visit_load)(gimple, tree, void *),
			  bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}

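/* Illustrative sketch, not part of the original file: a minimal pair of
   callbacks counting the memory-touching operands of a statement.  The
   names are hypothetical.  */
#if 0
static bool
example_note_ref (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
		  void *data)
{
  unsigned *n = (unsigned *) data;
  (*n)++;
  /* The visitors' return values are simply or'ed into the walker's
     result; returning false keeps that result false.  */
  return false;
}

static unsigned
example_count_mem_ops (gimple stmt)
{
  unsigned n = 0;
  walk_stmt_load_store_ops (stmt, &n, example_note_ref, example_note_ref);
  return n;
}
#endif
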
/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
			      tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}

/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
					gimple_ior_addresses_taken_1);
}

/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}

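/* Illustrative sketch, not part of the original file: emitting the most
   verbose (demangled, with parameters) form of a symbol's name.  The
   helper name is hypothetical.  */
#if 0
static void
example_dump_decl_name (FILE *file, tree decl)
{
  const char *name = gimple_decl_printable_name (decl, 2);
  if (name)
    fprintf (file, "%s\n", name);
}
#endif
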
/* Return true when STMT is a call to the builtin function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
	  && (fndecl = gimple_call_fndecl (stmt)) != NULL
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == code);
}

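/* Illustrative sketch, not part of the original file: recognizing a
   direct call to __builtin_memcpy before trying to fold it specially.
   The helper name is hypothetical.  */
#if 0
static bool
example_is_memcpy_call (gimple stmt)
{
  /* Only BUILT_IN_NORMAL functions match, so machine-specific builtins
     are rejected.  */
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}
#endif
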
/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
	return true;
    }

  return false;
}

#include "gt-gimple.h"