/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other file in the compiler.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}

/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
			    unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}

/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

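/* Illustrative sketch (not part of the original file): building
   `return retval;' and appending it to a sequence.  RETVAL and SEQ
   are hypothetical; a NULL_TREE retval yields a bare `return;'.  */
#if 0
  gimple_seq_add_stmt (&seq, gimple_build_return (retval));
#endif
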
/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

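/* Illustrative sketch (not part of the original file): building
   `lhs = foo (a, b)'.  FOO_DECL, A, B, LHS and SEQ are hypothetical
   trees/sequences assumed to be valid GIMPLE operands.  */
#if 0
  gimple call = gimple_build_call (foo_decl, 2, a, b);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (&seq, call);
#endif
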
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}

/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}

/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
					    PASS_MEM_STAT);
}

/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
				   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }
  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

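/* Illustrative sketch (not part of the original file): building
   `x = y + z' directly from a sub-code and operands, with no
   intermediate tree expression.  X, Y, Z and SEQ are hypothetical, and
   gimple_build_assign_with_ops is assumed to be the convenience macro
   from gimple.h wrapping the _stat function above.  */
#if 0
  gimple add = gimple_build_assign_with_ops (PLUS_EXPR, x, y, z);
  gimple_seq_add_stmt (&seq, add);
#endif
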
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  return gimple_seq_last_stmt (*seq_p);
}

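/* Illustrative sketch (not part of the original file): emitting
   `dst = src' from inside a gimplification context.  DST and SRC are
   hypothetical trees; SRC may still be an unflattened expression and
   will be gimplified on the way in.  */
#if 0
  gimple_seq seq = NULL;
  gimple assign = gimplify_assign (dst, src, &seq);
#endif
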
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}

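/* Illustrative sketch (not part of the original file): lowering
   `if (a < b)' into a conditional jump between two labels.  A, B,
   T_LABEL, F_LABEL and SEQ are hypothetical (the labels being
   LABEL_DECLs).  */
#if 0
  gimple cond = gimple_build_cond (LT_EXPR, a, b, t_label, f_label);
  gimple_seq_add_stmt (&seq, cond);
#endif
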
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
			       tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}

/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}

/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}

/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

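/* Illustrative sketch (not part of the original file): wrapping a
   statement sequence in a GIMPLE_BIND that declares the chained locals
   in VARS.  VARS, BODY and BLOCK are hypothetical.  */
#if 0
  gimple bind = gimple_build_bind (vars, body, block);
#endif
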
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUT is the number of register inputs.
   NOUTPUT is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
		    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUT is the number of register inputs.
   NOUTPUT is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
		      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  VEC_length (tree, inputs),
			  VEC_length (tree, outputs),
			  VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  gimple_wce_set_cleanup (p, cleanup);

  return p;
}

/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}

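/* Illustrative sketch (not part of the original file): building a
   switch with two cases plus a default.  INDEX, DEFAULT_LAB, CASE1
   and CASE2 are hypothetical trees; the case operands are
   CASE_LABEL_EXPRs built elsewhere.  */
#if 0
  gimple s = gimple_build_switch (2, index, default_lab, case1, case2);
#endif
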
/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is sequence of statements inside the for loop.
   CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}

/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is sequence of statements which are executed in parallel.
   CLAUSES, are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}

/* Build a GIMPLE_OMP_TASK statement.

   BODY is sequence of statements which are executed by the explicit task.
   CLAUSES, are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}

/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}

/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */

/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}

/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}

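/* Illustrative sketch (not part of the original file): growing a
   sequence statement by statement; the sequence is allocated lazily
   on the first addition.  LABEL_DECL is hypothetical.  */
#if 0
  gimple_seq seq = NULL;
  gimple_seq_add_stmt (&seq, gimple_build_label (label_decl));
  gimple_seq_add_stmt (&seq, gimple_build_nop ());
#endif
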
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}

/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}

/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}

/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}

/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;
	  return gsi_stmt (gsi);
	}
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}

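/* Illustrative sketch (not part of the original file): a statement
   callback that stops the walk at the first GIMPLE_CALL.  Returning
   any non-NULL tree halts the walk; the hypothetical caller then gets
   the call statement back from walk_gimple_seq.  */
#if 0
static tree
find_call_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
{
  if (is_gimple_call (gsi_stmt (*gsi)))
    {
      *handled_ops_p = true;
      return error_mark_node;	/* Any non-NULL tree stops the walk.  */
    }
  return NULL_TREE;
}

  /* ... in a hypothetical caller ...  */
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  gimple first_call = walk_gimple_seq (seq, find_call_stmt, NULL, &wi);
#endif
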
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
			       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really an LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}

/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
	 is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || !gimple_assign_single_p (stmt);
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS has more than 1 operand, it is not appropriate
	     for the memory.  */
	  wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
			 || !gimple_assign_single_p (stmt);
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}

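/* Illustrative sketch (not part of the original file): an operand
   callback for walk_gimple_op/walk_gimple_stmt that records every SSA
   name used by a statement.  The walker passes WI itself as the DATA
   argument of walk_tree; the bitmap in WI->INFO is hypothetical.  */
#if 0
static tree
note_ssa_names (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  if (TREE_CODE (*tp) == SSA_NAME)
    bitmap_set_bit ((bitmap) wi->info, SSA_NAME_VERSION (*tp));
  return NULL_TREE;	/* Keep walking.  */
}
#endif
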
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}

/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}

/* Return the body of GIMPLE statements for function FN.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */

bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

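/* Illustrative sketch (not part of the original file): using the flags
   to test whether a call can write to memory.  STMT is a hypothetical
   GIMPLE_CALL statement.  */
#if 0
  int flags = gimple_call_flags (stmt);
  bool writes_memory = !(flags & (ECF_CONST | ECF_PURE));
#endif
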
1913 /* Return the "fn spec" string for call STMT. */
1916 gimple_call_fnspec (const_gimple stmt
)
1920 type
= gimple_call_fntype (stmt
);
1924 attr
= lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type
));
1928 return TREE_VALUE (TREE_VALUE (attr
));
/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}

/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}

/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && is_gimple_val (gimple_op (gs, 1)));
}

/* Return true if GS is a SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
	  && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
	      || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
	  && gimple_assign_rhs1 (gs) != error_mark_node
	  && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
	      == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding STMT.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}

/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}

/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}

/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}

/* Set the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */

void
gimple_set_lhs (gimple stmt, tree lhs)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    gimple_assign_set_lhs (stmt, lhs);
  else if (code == GIMPLE_CALL)
    gimple_call_set_lhs (stmt, lhs);
  else
    gcc_unreachable ();
}

/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}

/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  copy->gimple_omp_for.iter
	    = ggc_alloc_vec_gimple_omp_for_iter
	      (gimple_omp_for_collapse (stmt));
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU  */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  if (num_ops > 0)
    {
      for (i = 0; i < num_ops; i++)
	gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

      /* Clear out SSA operand vectors on COPY.  */
      if (gimple_has_ops (stmt))
	{
	  gimple_set_def_ops (copy, NULL);
	  gimple_set_use_ops (copy, NULL);
	}

      if (gimple_has_mem_ops (stmt))
	{
	  gimple_set_vdef (copy, gimple_vdef (stmt));
	  gimple_set_vuse (copy, gimple_vuse (stmt));
	}

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}

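/* Illustrative sketch (not part of the original file): duplicating a
   statement before inserting the duplicate.  GSI and STMT are
   hypothetical; since the copy is marked modified, its operand caches
   are rebuilt when the statement is updated.  */
#if 0
  gimple dup = gimple_copy (stmt);
  /* The caller is responsible for remapping any SSA defs on DUP
     before inserting it.  */
  gsi_insert_before (gsi, dup, GSI_SAME_STMT);
#endif
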
/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
   a MODIFIED field.  */

void
gimple_set_modified (gimple s, bool modifiedp)
{
  if (gimple_has_ops (s))
    s->gsbase.modified = (unsigned) modifiedp;
}

/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (gimple_code (s) == GIMPLE_ASM
      && gimple_asm_volatile_p (s))
    return true;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);
      tree fn;

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
        return true;
      else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
        /* An infinite loop is considered a side effect.  */
        return true;

      if (gimple_call_lhs (s)
          && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
        {
          gcc_checking_assert (gimple_has_volatile_ops (s));
          return true;
        }

      fn = gimple_call_fn (s);
      if (fn && TREE_SIDE_EFFECTS (fn))
        return true;

      for (i = 0; i < nargs; i++)
        if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
          {
            gcc_checking_assert (gimple_has_volatile_ops (s));
            return true;
          }

      return false;
    }
  else
    {
      for (i = 0; i < gimple_num_ops (s); i++)
        {
          tree op = gimple_op (s, i);
          if (op && TREE_SIDE_EFFECTS (op))
            {
              gcc_checking_assert (gimple_has_volatile_ops (s));
              return true;
            }
        }
    }

  return false;
}
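/* Usage sketch (illustrative only, not part of the original sources):
   a DCE-style transformation must keep any statement for which this
   predicate returns true, e.g.

     if (!gimple_has_side_effects (stmt))
       ;  -- candidate for removal when its results are also unused

   Note that volatile operands alone make a statement "have side
   effects" even if its computed value is never used.  */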
/* Return true if the RHS of statement S has side effects.
   We may use it to determine if it is admissible to replace
   an assignment or call with a copy of a previously-computed
   value.  In such cases, side-effects due to the LHS are
   preserved.  */

bool
gimple_rhs_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);
      tree fn;

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
        return true;

      /* We cannot use gimple_has_volatile_ops here,
         because we must ignore a volatile LHS.  */
      fn = gimple_call_fn (s);
      if (fn && (TREE_SIDE_EFFECTS (fn) || TREE_THIS_VOLATILE (fn)))
        {
          gcc_assert (gimple_has_volatile_ops (s));
          return true;
        }

      for (i = 0; i < nargs; i++)
        if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
            || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
          return true;

      return false;
    }
  else if (is_gimple_assign (s))
    {
      /* Skip the first operand, the LHS. */
      for (i = 1; i < gimple_num_ops (s); i++)
        if (TREE_SIDE_EFFECTS (gimple_op (s, i))
            || TREE_THIS_VOLATILE (gimple_op (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }
    }
  else if (is_gimple_debug (s))
    return false;
  else
    {
      /* For statements without an LHS, examine all arguments.  */
      for (i = 0; i < gimple_num_ops (s); i++)
        if (TREE_SIDE_EFFECTS (gimple_op (s, i))
            || TREE_THIS_VOLATILE (gimple_op (s, i)))
          {
            gcc_assert (gimple_has_volatile_ops (s));
            return true;
          }
    }

  return false;
}
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
        if (tree_could_trap_p (gimple_op (s, i)))
          return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
        div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
                                      (INTEGRAL_TYPE_P (t)
                                       && TYPE_OVERFLOW_TRAPS (t)),
                                      div));

    default:
      break;
    }

  return false;
}

/* Return true if statement S can trap.  */

bool
gimple_could_trap_p (gimple s)
{
  return gimple_could_trap_p_1 (s, true, true);
}

/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, true, false);
}
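/* Example (illustrative): for an integer division `x = a / b' where B
   is a non-constant SSA name, operation_could_trap_p reports a
   possible division by zero, so gimple_assign_rhs_could_trap_p returns
   true for such an assignment.  */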
/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
#ifdef GATHER_STATISTICS
  int i, total_tuples = 0, total_bytes = 0;

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
               gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
#else
  fprintf (stderr, "No gimple statistics\n");
#endif
}
/* Return the number of operands needed on the RHS of a GIMPLE
   assignment for an expression with tree code CODE.  */

unsigned
get_gimple_rhs_num_ops (enum tree_code code)
{
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);

  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
    return 1;
  else if (rhs_class == GIMPLE_BINARY_RHS)
    return 2;
  else if (rhs_class == GIMPLE_TERNARY_RHS)
    return 3;
  else
    gcc_unreachable ();
}
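/* For instance, get_gimple_rhs_num_ops (PLUS_EXPR) is 2 (binary RHS),
   get_gimple_rhs_num_ops (NEGATE_EXPR) is 1 (unary RHS) and
   get_gimple_rhs_num_ops (COND_EXPR) is 3 (ternary RHS), matching the
   classification built into gimple_rhs_class_table below.  */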
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)                               \
  (unsigned char)                                                           \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS                                   \
   : ((TYPE) == tcc_binary                                                  \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS                      \
   : ((TYPE) == tcc_constant                                                \
      || (TYPE) == tcc_declaration                                          \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS                       \
   : ((SYM) == TRUTH_AND_EXPR                                               \
      || (SYM) == TRUTH_OR_EXPR                                             \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS                       \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS                             \
   : ((SYM) == COND_EXPR                                                    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR                                      \
      || (SYM) == WIDEN_MULT_MINUS_EXPR                                     \
      || (SYM) == DOT_PROD_EXPR                                             \
      || (SYM) == REALIGN_LOAD_EXPR                                         \
      || (SYM) == VEC_COND_EXPR                                             \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS                            \
   : ((SYM) == CONSTRUCTOR                                                  \
      || (SYM) == OBJ_TYPE_REF                                              \
      || (SYM) == ASSERT_EXPR                                               \
      || (SYM) == ADDR_EXPR                                                 \
      || (SYM) == WITH_SIZE_EXPR                                            \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS                             \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */

/* Validation of GIMPLE expressions.  */

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
                                && !tree_could_throw_p (t)
                                && is_gimple_val (TREE_OPERAND (t, 0))
                                && is_gimple_val (TREE_OPERAND (t, 1))));
}
/* Return true if T is something whose address can be taken.  */

bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t)
          || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid gimple constant.  */

bool
is_gimple_constant (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
      return true;

    /* Vector constant constructors are gimple invariant.  */
    case CONSTRUCTOR:
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
        return TREE_CONSTANT (t);
      else
        return false;

    default:
      return false;
    }
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}
/* Strip out all handled components that produce invariant
   offsets.  */

static const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
        {
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!is_gimple_constant (TREE_OPERAND (op, 1))
              || TREE_OPERAND (op, 2) != NULL_TREE
              || TREE_OPERAND (op, 3) != NULL_TREE)
            return NULL;
          break;

        case COMPONENT_REF:
          if (TREE_OPERAND (op, 2) != NULL_TREE)
            return NULL;
          break;

        default:;
        }
      op = TREE_OPERAND (op, 0);
    }

  return op;
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));

  return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}
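/* Examples (illustrative): `&global_var' and `&global_array[3]' are
   gimple invariant addresses; `&local_var' is invariant within a
   function but not an IP invariant, because stack addresses differ
   between frames; and `&array[i]' with a non-constant index I is not
   invariant at all.  */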
/* Return true if T looks like a valid GIMPLE statement.  */

bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}
/* Return true if T is a variable.  */

bool
is_gimple_variable (tree t)
{
  return (TREE_CODE (t) == VAR_DECL
          || TREE_CODE (t) == PARM_DECL
          || TREE_CODE (t) == RESULT_DECL
          || TREE_CODE (t) == SSA_NAME);
}

/* Return true if T is a GIMPLE identifier (something with an address).  */

bool
is_gimple_id (tree t)
{
  return (is_gimple_variable (t)
          || TREE_CODE (t) == FUNCTION_DECL
          || TREE_CODE (t) == LABEL_DECL
          || TREE_CODE (t) == CONST_DECL
          /* Allow string constants, since they are addressable.  */
          || TREE_CODE (t) == STRING_CST);
}

/* Return true if TYPE is a suitable type for a scalar register variable.  */

bool
is_gimple_reg_type (tree type)
{
  return !AGGREGATE_TYPE_P (type);
}
/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}

/* Return true if T is a GIMPLE variable whose address is not needed.  */

bool
is_gimple_non_addressable (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
}
/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
          || TREE_CODE (t) == INTEGER_CST
          || (TREE_CODE (t) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
                  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}
/* If T makes a function call, return the corresponding CALL_EXPR operand.
   Otherwise, return NULL_TREE.  */

tree
get_call_expr_in (tree t)
{
  if (TREE_CODE (t) == MODIFY_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return t;
  return NULL_TREE;
}
/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  if (TREE_CODE (t) == SSA_NAME
      || DECL_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t)
      || TREE_CODE (t) == MEM_REF
      || TREE_CODE (t) == TARGET_MEM_REF)
    return t;
  else
    return NULL_TREE;
}
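/* For instance (illustrative):

     get_base_address (array[i].fld[j])  -> array
     get_base_address (*p_1)             -> the MEM_REF itself
     get_base_address (ssa_name_1)       -> ssa_name_1

   and NULL_TREE is returned for trees that are not memory
   references.  */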
void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          return;

        default:
          break;
        }
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
        {
          tree op = TREE_OPERAND (t, i);
          if (op && TREE_SIDE_EFFECTS (op))
            TREE_SIDE_EFFECTS (t) = 1;
        }
      break;

    case tcc_constant:
    case tcc_declaration:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
          || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
             == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
           && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
           && integer_onep (TREE_OPERAND (t, 1))
           && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
                  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
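/* Examples of the canonicalizations performed here (illustrative):

     (int) (a < b)   ->  a < b
     !x              ->  x == 0
     a < b ? 1 : 0   ->  a < b

   Callers must be prepared for a NULL_TREE result when the input
   cannot be turned into a valid GIMPLE condition.  */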
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
  gimple new_stmt;

  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));

  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
                                               vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
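/* Usage sketch (illustrative; assumes STMT is the call `foo (a, b, c)'):

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     new_call = gimple_call_copy_skip_args (stmt, skip);
     -- NEW_CALL is now `foo (a, c)'

   IPA passes use this kind of copy when cloning functions whose
   unused parameters have been dropped.  */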
enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

        -2: The pair (T1, T2) has just been inserted in the table.
         0: T1 and T2 are different types.
         1: T1 and T2 are the same type.

   The two elements in the SAME_P array are indexed by the comparison
   mode gtc_mode.  */

struct type_pair_d
{
  unsigned int uid1;
  unsigned int uid2;
  signed char same_p[2];
};
typedef struct type_pair_d *type_pair_t;
DEF_VEC_P(type_pair_t);
DEF_VEC_ALLOC_P(type_pair_t,heap);

#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
/* Look up the pair of types T1 and T2 in the type-pair cache.  Insert a
   new entry if none existed.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  if (type_pair_cache == NULL)
    type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);

  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Using iterative_hash_hashval_t would imply function calls, and we
     know that UIDs are in a limited range, so hash directly.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
           % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache [index].uid1 == uid1
      && type_pair_cache [index].uid2 == uid2)
    return &type_pair_cache[index];

  type_pair_cache [index].uid1 = uid1;
  type_pair_cache [index].uid2 = uid2;
  type_pair_cache [index].same_p[0] = -2;
  type_pair_cache [index].same_p[1] = -2;

  return &type_pair_cache[index];
}
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is cheaper.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
  bool on_sccstack;
  union {
    hashval_t hash;
    signed char same_p;
  } u;
};

static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;

/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;

/* Lookup an existing leader for T and return it or NULL_TREE, if
   there is none in the cache.  */

static inline tree
gimple_lookup_type_leader (tree t)
{
  gimple_type_leader_entry *leader;

  if (!gimple_type_leader)
    return NULL_TREE;

  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type != t)
    return NULL_TREE;

  return leader->leader;
}
/* Return true if T1 and T2 have the same name.  Two unnamed types are
   considered to have the same name.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  if (name1 && TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (name2 && TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
               /* Once gimplification is done, self-referential offsets are
                  instantiated as operand #2 of the COMPONENT_REF built for
                  each access and reset.  Therefore, they are not relevant
                  anymore and fields are interchangeable provided that they
                  represent the same access.  */
               || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
                   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
                   && (DECL_SIZE (f1) == DECL_SIZE (f2)
                       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
                           && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
                       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
                   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
               || operand_equal_p (offset1, offset2, 0))
              && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
                                     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
                      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
                      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
        return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
                             VEC(type_pair_t, heap) **,
                             struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2 while operating in FOR_MERGING_P mode.
   Update the merging status if it is not part of the SCC containing the
   callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2,
           struct sccs *state,
           VEC(type_pair_t, heap) **sccstack,
           struct pointer_map_t *sccstate,
           struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
              || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall thru to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
                                   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}
/* Worker for gimple_types_compatible.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
                             VEC(type_pair_t, heap) **sccstack,
                             struct pointer_map_t *sccstate,
                             struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p[GTC_MERGE] == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  VEC_safe_push (type_pair_t, heap, *sccstack, p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
         the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack)
          || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
          || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
        goto different_types;
      else
        {
          tree i1 = TYPE_DOMAIN (t1);
          tree i2 = TYPE_DOMAIN (t2);

          /* For an incomplete external array, the type domain can be
             NULL_TREE.  Check this condition also.  */
          if (i1 == NULL_TREE && i2 == NULL_TREE)
            goto same_types;
          else if (i1 == NULL_TREE || i2 == NULL_TREE)
            goto different_types;
          /* If for a complete array type the possibly gimplified sizes
             are different the types are different.  */
          else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
                   || (TYPE_SIZE (i1)
                       && TYPE_SIZE (i2)
                       && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
            goto different_types;
          else
            {
              tree min1 = TYPE_MIN_VALUE (i1);
              tree min2 = TYPE_MIN_VALUE (i2);
              tree max1 = TYPE_MAX_VALUE (i1);
              tree max2 = TYPE_MAX_VALUE (i2);

              /* The minimum/maximum values have to be the same.  */
              if ((min1 == min2
                   || (min1 && min2
                       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
                            && TREE_CODE (min2) == PLACEHOLDER_EXPR)
                           || operand_equal_p (min1, min2, 0))))
                  && (max1 == max2
                      || (max1 && max2
                          && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
                               && TREE_CODE (max2) == PLACEHOLDER_EXPR)
                              || operand_equal_p (max1, max2, 0)))))
                goto same_types;
              else
                goto different_types;
            }
        }

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
         are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      if (!comp_type_attributes (t1, t2))
        goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        goto same_types;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
                              state, sccstack, sccstate, sccstate_obstack))
                goto different_types;
            }

          if (parms1 || parms2)
            goto different_types;

          goto same_types;
        }

    case OFFSET_TYPE:
      {
        if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                        state, sccstack, sccstate, sccstate_obstack)
            || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
                           TYPE_OFFSET_BASETYPE (t2),
                           state, sccstack, sccstate, sccstate_obstack))
          goto different_types;

        goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the two pointers have different ref-all attributes,
           they can't be the same type.  */
        if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
          goto different_types;

        /* Otherwise, pointer and reference types are the same if the
           pointed-to types are the same.  */
        if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                       state, sccstack, sccstate, sccstate_obstack))
          goto same_types;

        goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
        tree min1 = TYPE_MIN_VALUE (t1);
        tree max1 = TYPE_MAX_VALUE (t1);
        tree min2 = TYPE_MIN_VALUE (t2);
        tree max2 = TYPE_MAX_VALUE (t2);
        bool min_equal_p = false;
        bool max_equal_p = false;

        /* If either type has a minimum value, the other type must
           have the same.  */
        if (min1 == NULL_TREE && min2 == NULL_TREE)
          min_equal_p = true;
        else if (min1 && min2 && operand_equal_p (min1, min2, 0))
          min_equal_p = true;

        /* Likewise, if either type has a maximum value, the other
           type must have the same.  */
        if (max1 == NULL_TREE && max2 == NULL_TREE)
          max_equal_p = true;
        else if (max1 && max2 && operand_equal_p (max1, max2, 0))
          max_equal_p = true;

        if (!min_equal_p || !max_equal_p)
          goto different_types;

        goto same_types;
      }

    case ENUMERAL_TYPE:
      {
        /* FIXME lto, we cannot check bounds on enumeral types because
           different front ends will produce different values.
           In C, enumeral types are integers, while in C++ each element
           will have its own symbolic value.  We should decide how enums
           are to be represented in GIMPLE and have each front end lower
           to that.  */
        tree v1, v2;

        /* For enumeral types, all the values must be the same.  */
        if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
          goto same_types;

        for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
             v1 && v2;
             v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
          {
            tree c1 = TREE_VALUE (v1);
            tree c2 = TREE_VALUE (v2);

            if (TREE_CODE (c1) == CONST_DECL)
              c1 = DECL_INITIAL (c1);

            if (TREE_CODE (c2) == CONST_DECL)
              c2 = DECL_INITIAL (c2);

            if (tree_int_cst_equal (c1, c2) != 1)
              goto different_types;

            if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
              goto different_types;
          }

        /* If one enumeration has more values than the other, they
           are not the same.  */
        if (v1 || v2)
          goto different_types;

        goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
             f1 && f2;
             f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
          {
            /* Different field kinds are not compatible.  */
            if (TREE_CODE (f1) != TREE_CODE (f2))
              goto different_types;
            /* Field decls must have the same name and offset.  */
            if (TREE_CODE (f1) == FIELD_DECL
                && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
                    || !gimple_compare_field_offset (f1, f2)))
              goto different_types;
            /* All entities should have the same name and type.  */
            if (DECL_NAME (f1) != DECL_NAME (f2)
                || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
                               state, sccstack, sccstate, sccstate_obstack))
              goto different_types;
          }

        /* If one aggregate has more fields than the other, they
           are not the same.  */
        if (f1 || f2)
          goto different_types;

        goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
         comparison result.  */
      do
        {
          struct sccs *cstate;
          x = VEC_pop (type_pair_t, *sccstack);
          cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          x->same_p[GTC_MERGE] = state->u.same_p;
        }
      while (x != p);
    }

  return state->u.same_p;
}
/* Return true iff T1 and T2 are structurally identical.  This is the
   equality predicate used when merging types read from different
   translation units; an incomplete type and a complete type are
   considered different.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
              || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall thru to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
                                     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}
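/* Illustrative example: during LTO two structurally identical types
   read from different translation units compare equal here even
   though they are distinct tree nodes, e.g.

     struct S { int i; };   -- as streamed in from TU 1
     struct S { int i; };   -- as streamed in from TU 2

   which is exactly what allows gimple_register_type below to collapse
   them to one representative.  */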
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
                            struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
                                        sccstack, sccstate, sccstate_obstack);
      if (!cstate)
        cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
         of the parents SCC mix in its hash value.  Otherwise we will
         ignore the type for hashing purposes and return the unaltered
         hash value.  */
      if (!cstate->on_sccstack)
        return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}
/* Hash NAME with the previous hash value V and return it.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (name == NULL_TREE)
    return v;
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  if (name == NULL_TREE)
    return v;
  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
}

/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;
  hashval_t hash;
};

/* Compare two type, hashvalue pairs.  */

static int
type_hash_pair_compare (const void *p1_, const void *p2_)
{
  const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
  const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
                            VEC(tree, heap) **sccstack,
                            struct pointer_map_t *sccstate,
                            struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
         random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
        v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
        v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
                 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
        v = visit (TYPE_METHOD_BASETYPE (type), state, v,
                   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
                 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
        {
          v = visit (TREE_VALUE (p), state, v,
                     sccstack, sccstate, sccstate_obstack);
          na++;
        }

      v = iterative_hash_hashval_t (na, v);
    }

  if (TREE_CODE (type) == RECORD_TYPE
      || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
        {
          v = iterative_hash_name (DECL_NAME (f), v);
          v = visit (TREE_TYPE (f), state, v,
                     sccstack, sccstate, sccstate_obstack);
          nf++;
        }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = VEC_pop (tree, *sccstack);
      /* Optimize SCC size one.  */
      if (x == type)
        {
          state->on_sccstack = false;
          m = ggc_alloc_cleared_tree_int_map ();
          m->base.from = x;
          m->to = v;
          slot = htab_find_slot (type_hash_cache, m, INSERT);
          gcc_assert (!*slot);
          *slot = (void *) m;
        }
      else
        {
          struct sccs *cstate;
          unsigned first, i, size, j;
          struct type_hash_pair *pairs;
          /* Pop off the SCC and build an array of type, hash pairs.  */
          first = VEC_length (tree, *sccstack) - 1;
          while (VEC_index (tree, *sccstack, first) != type)
            --first;
          size = VEC_length (tree, *sccstack) - first + 1;
          pairs = XALLOCAVEC (struct type_hash_pair, size);
          i = 0;
          cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          pairs[i].type = x;
          pairs[i].hash = cstate->u.hash;
          do
            {
              x = VEC_pop (tree, *sccstack);
              cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
              cstate->on_sccstack = false;
              ++i;
              pairs[i].type = x;
              pairs[i].hash = cstate->u.hash;
            }
          while (x != type);
          gcc_assert (i + 1 == size);
          /* Sort the arrays of type, hash pairs so that when we mix in
             all members of the SCC the hash value becomes independent on
             the order we visited the SCC.  Disregard hashes equal to
             the hash of the type we mix into because we cannot guarantee
             a stable sort for those across different TUs.  */
          qsort (pairs, size, sizeof (struct type_hash_pair),
                 type_hash_pair_compare);
          for (i = 0; i < size; ++i)
            {
              hashval_t hash;
              m = ggc_alloc_cleared_tree_int_map ();
              m->base.from = pairs[i].type;
              hash = pairs[i].hash;
              /* Skip same hashes.  */
              for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
                ;
              for (; j < size; ++j)
                hash = iterative_hash_hashval_t (pairs[j].hash, hash);
              for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
                hash = iterative_hash_hashval_t (pairs[j].hash, hash);
              m->to = hash;
              if (pairs[i].type == type)
                v = hash;
              slot = htab_find_slot (type_hash_cache, m, INSERT);
              gcc_assert (!*slot);
              *slot = (void *) m;
            }
        }
    }

  return iterative_hash_hashval_t (v, val);
}
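/* Illustrative example of the SCC treatment above: for the mutually
   recursive pair

     struct A { struct B *b; };
     struct B { struct A *a; };

   both types end up in one SCC regardless of whether the DFS entered
   the cycle at A or at B, and each member's final hash mixes in the
   sorted hashes of the other members, so A and B get the same hash
   values in every translation unit.  */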
/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  if (type_hash_cache == NULL)
    type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
                                       tree_int_map_eq, NULL);

  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
                                    &sccstack, sccstate, &sccstate_obstack);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}
/* Returning a hash value for gimple type TYPE combined with VAL.

   The hash value returned is equal for types considered compatible
   by gimple_canonical_types_compatible_p.  */

static hashval_t
iterative_hash_canonical_type (tree type, hashval_t val)
{
  hashval_t v;
  void **slot;
  struct tree_int_map *mp, m;

  m.base.from = type;
  if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
  v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
  v = iterative_hash_hashval_t (TYPE_MODE (type), v);

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type)
      || TREE_CODE (type) == VECTOR_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == OFFSET_TYPE
      || POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse to the pointed-to type.  */
  if (POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
      v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
      v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
      v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
    }

  /* For integer types hash the string flag and whether the type is a
     sizetype.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = iterative_hash_canonical_type (TREE_TYPE (type), v);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
        v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);

      v = iterative_hash_canonical_type (TREE_TYPE (type), v);

      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
        {
          v = iterative_hash_canonical_type (TREE_VALUE (p), v);
          na++;
        }

      v = iterative_hash_hashval_t (na, v);
    }

  if (TREE_CODE (type) == RECORD_TYPE
      || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
        if (TREE_CODE (f) == FIELD_DECL)
          {
            v = iterative_hash_canonical_type (TREE_TYPE (f), v);
            nf++;
          }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Cache the just computed hash value.  */
  mp = ggc_alloc_cleared_tree_int_map ();
  mp->base.from = type;
  mp->to = v;
  *slot = (void *) mp;

  return iterative_hash_hashval_t (v, val);
}
/* Returns the canonical-type hash value for P (assumed to be a type).  */

static hashval_t
gimple_canonical_type_hash (const void *p)
{
  if (canonical_type_hash_cache == NULL)
    canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
                                                 tree_int_map_eq, NULL);

  return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
}

/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
                                    CONST_CAST_TREE (t2));
}
4467 /* Worker for gimple_register_type.
4468 Register type T in the global type table gimple_types.
4469 When REGISTERING_MV is false first recurse for the main variant of T. */
4472 gimple_register_type_1 (tree t
, bool registering_mv
)
4475 gimple_type_leader_entry
*leader
;
4477 /* If we registered this type before return the cached result. */
4478 leader
= &gimple_type_leader
[TYPE_UID (t
) % GIMPLE_TYPE_LEADER_SIZE
];
4479 if (leader
->type
== t
)
4480 return leader
->leader
;
4482 /* Always register the main variant first. This is important so we
4483 pick up the non-typedef variants as canonical, otherwise we'll end
4484 up taking typedef ids for structure tags during comparison.
4485 It also makes sure that main variants will be merged to main variants.
4486 As we are operating on a possibly partially fixed up type graph
4487 do not bother to recurse more than once, otherwise we may end up
4489 If we are registering a main variant it will either remain its
4490 own main variant or it will be merged to something else in which
4491 case we do not care for the main variant leader. */
4493 && TYPE_MAIN_VARIANT (t
) != t
)
4494 gimple_register_type_1 (TYPE_MAIN_VARIANT (t
), true);
4496 /* See if we already have an equivalent type registered. */
4497 slot
= htab_find_slot (gimple_types
, t
, INSERT
);
4499 && *(tree
*)slot
!= t
)
4501 tree new_type
= (tree
) *((tree
*) slot
);
4503 leader
->leader
= new_type
;
4507 /* If not, insert it to the cache and the hash. */
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  gcc_assert (TYPE_P (t));

  if (!gimple_type_leader)
    gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
			   (GIMPLE_TYPE_LEADER_SIZE);

  if (gimple_types == NULL)
    gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

  return gimple_register_type_1 (t, false);
}
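
/* Usage sketch (illustrative; 'stream_in_type' is a hypothetical LTO
   reader helper, not part of this file):

     tree t = stream_in_type (ib);
     t = gimple_register_type (t);

   After the call, T is either its own leader or has been replaced by
   a previously registered compatible type, so later code can rely on
   pointer equality for merged types.  */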
/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true iff T1 and T2 are structurally identical as far as
   TBAA is concerned.  */
static bool
gimple_canonical_types_compatible_p (tree t1, tree t2)
{
  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment, or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
	return false;

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  /* If the two pointers have different ref-all attributes,
	     they can't be the same type.  */
	  if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	    return false;

	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;

	  if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
	    return false;

	  if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2));

      return true;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    return false;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  /* If for a complete array type the possibly gimplified sizes
	     are different the types are different.  */
	  else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
		   || (TYPE_SIZE (i1)
		       && TYPE_SIZE (i2)
		       && !operand_equal_p (TYPE_SIZE (i1),
					    TYPE_SIZE (i2), 0)))
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gimple_canonical_types_compatible_p
	     (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
	return false;

      /* Fallthru.  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						TREE_TYPE (t2)))
	return false;

      if (!comp_type_attributes (t1, t2))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2)))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		      (TREE_TYPE (f1), TREE_TYPE (f2)))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_canonical_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
					      CONST_CAST_TREE (t2));
}
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.

   ??? This merging does not exactly match how the tree.c middle-end
   functions will assign TYPE_CANONICAL when new types are created
   during optimization (which at least happens for pointer and array
   types).  */

tree
gimple_register_canonical_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  if (TYPE_CANONICAL (t))
    return TYPE_CANONICAL (t);

  if (gimple_canonical_types == NULL)
    gimple_canonical_types = htab_create_ggc (16381,
					      gimple_canonical_type_hash,
					      gimple_canonical_type_eq, 0);

  slot = htab_find_slot (gimple_canonical_types, t, INSERT);
  if (*slot
      && *(tree *) slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      TYPE_CANONICAL (t) = new_type;
      t = new_type;
    }
  else
    {
      TYPE_CANONICAL (t) = t;
      *slot = (void *) t;
    }

  return t;
}
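
/* Usage sketch (illustrative): registration is idempotent because it
   is keyed off TYPE_CANONICAL:

     tree c = gimple_register_canonical_type (t);
     gcc_assert (TYPE_CANONICAL (t) == c);

   Two structurally identical types registered here end up with
   pointer-equal TYPE_CANONICAL, which is what the TBAA machinery
   ultimately compares.  */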
/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_types),
	     (long) htab_elements (gimple_types),
	     (long) gimple_types->searches,
	     (long) gimple_types->collisions,
	     htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (type_hash_cache)
    fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (type_hash_cache),
	     (long) htab_elements (type_hash_cache),
	     (long) type_hash_cache->searches,
	     (long) type_hash_cache->collisions,
	     htab_collisions (type_hash_cache));
  else
    fprintf (stderr, "GIMPLE type hash table is empty\n");
  if (gimple_canonical_types)
    fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_canonical_types),
	     (long) htab_elements (gimple_canonical_types),
	     (long) gimple_canonical_types->searches,
	     (long) gimple_canonical_types->collisions,
	     htab_collisions (gimple_canonical_types));
  else
    fprintf (stderr, "GIMPLE canonical type table is empty\n");
  if (canonical_type_hash_cache)
    fprintf (stderr, "GIMPLE canonical type hash table: size %ld, "
	     "%ld elements, %ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (canonical_type_hash_cache),
	     (long) htab_elements (canonical_type_hash_cache),
	     (long) canonical_type_hash_cache->searches,
	     (long) canonical_type_hash_cache->collisions,
	     htab_collisions (canonical_type_hash_cache));
  else
    fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
}
/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (gimple_canonical_types)
    {
      htab_delete (gimple_canonical_types);
      gimple_canonical_types = NULL;
    }
  if (type_hash_cache)
    {
      htab_delete (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (canonical_type_hash_cache)
    {
      htab_delete (canonical_type_hash_cache);
      canonical_type_hash_cache = NULL;
    }
  if (type_pair_cache)
    {
      free (type_pair_cache);
      type_pair_cache = NULL;
    }
  gimple_type_leader = NULL;
}
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
	   ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
	  || type1 == int128_unsigned_type_node))
    return unsignedp
	   ? int128_unsigned_type_node
	   : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
#define GIMPLE_FIXED_TYPES(NAME) \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;
  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);
  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node) \
  (TYPE_MODE (type) == TYPE_MODE (node) \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
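
/* Illustrative examples of the mapping performed above:

     gimple_unsigned_type (integer_type_node)      == unsigned_type_node
     gimple_signed_type (unsigned_char_type_node)  == signed_char_type_node

   Types whose precision matches no standard node (e.g. bit-field
   types) fall through to build_nonstandard_integer_type.  */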
/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  return -1;
}
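
/* Illustrative example of the union rule above: for

     union U { int i; float f; } u;
     u.i = 42;
     ... = u.f;

   the read of 'u.f' goes directly through the union and gets alias
   set 0, whereas storing through 'float *p = &u.f' is not handled
   specially because the access is no longer through the union.  */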
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};
/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
	count_p->num_stores++;
      else
	count_p->num_loads++;
    }

  return NULL_TREE;
}
/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
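
/* Usage sketch (illustrative):

     unsigned uses, loads, stores;
     count_uses_and_derefs (x_1, stmt, &uses, &loads, &stores);

   For 'stmt' being '*x_1 = foo (x_1, *x_1)' this is expected to
   yield uses == 3, loads == 1 and stores == 1.  */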
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}
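
/* Illustrative examples: for the operand 'a.b[i].c' the loop above
   strips the handled components and returns the decl 'a'; for a
   MEM_REF the reference itself is returned; for a bare SSA name or
   constant NULL_TREE is returned since no memory is accessed.  */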
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, store and address-taken operands
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), data);
	    }
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), data);
	    }
	}
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = PHI_ARG_DEF (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }

  return ret;
}
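
/* Usage sketch (illustrative; 'mark_decl' and the pass-local bitmap
   are hypothetical, not part of this file):

     static bool
     mark_decl (gimple stmt, tree base, void *data)
     {
       if (DECL_P (base))
	 bitmap_set_bit ((bitmap) data, DECL_UID (base));
       return false;
     }

     ...
     walk_stmt_load_store_addr_ops (stmt, my_bitmap,
				    mark_decl, mark_decl, mark_decl);

   gimple_ior_addresses_taken below is an in-tree example of the same
   pattern using only the address callback.  */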
/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
			  bool (*visit_load)(gimple, tree, void *),
			  bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}
/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
			      tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}
/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
					gimple_ior_addresses_taken_1);
}
/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
/* Return true when STMT is a call to the builtin function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
	  && (fndecl = gimple_call_fndecl (stmt)) != NULL
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == code);
}
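
/* Usage sketch (illustrative):

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       ...

   matches only direct calls to the normal (BUILT_IN_NORMAL) memcpy
   builtin; indirect calls and machine-specific builtins are
   rejected.  */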
/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM statement.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
	return true;
    }

  return false;
}
#include "gt-gimple.h"