/* Gimple walk support.

   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "stmt.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
33 /* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
34 on each one. WI is as in walk_gimple_stmt.
36 If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
37 value is stored in WI->CALLBACK_RESULT. Also, the statement that
38 produced the value is returned if this statement has not been
39 removed by a callback (wi->removed_stmt). If the statement has
40 been removed, NULL is returned.
42 Otherwise, all the statements are walked and NULL returned. */
45 walk_gimple_seq_mod (gimple_seq
*pseq
, walk_stmt_fn callback_stmt
,
46 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
48 gimple_stmt_iterator gsi
;
50 for (gsi
= gsi_start (*pseq
); !gsi_end_p (gsi
); )
52 tree ret
= walk_gimple_stmt (&gsi
, callback_stmt
, callback_op
, wi
);
55 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
58 wi
->callback_result
= ret
;
60 return wi
->removed_stmt
? NULL
: gsi_stmt (gsi
);
63 if (!wi
->removed_stmt
)
68 wi
->callback_result
= NULL_TREE
;
74 /* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
75 changed by the callbacks. */
78 walk_gimple_seq (gimple_seq seq
, walk_stmt_fn callback_stmt
,
79 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
81 gimple_seq seq2
= seq
;
82 gimple ret
= walk_gimple_seq_mod (&seq2
, callback_stmt
, callback_op
, wi
);
83 gcc_assert (seq2
== seq
);
88 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
91 walk_gimple_asm (gimple stmt
, walk_tree_fn callback_op
,
92 struct walk_stmt_info
*wi
)
96 const char **oconstraints
;
98 const char *constraint
;
99 bool allows_mem
, allows_reg
, is_inout
;
101 noutputs
= gimple_asm_noutputs (stmt
);
102 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
107 for (i
= 0; i
< noutputs
; i
++)
109 op
= gimple_asm_output_op (stmt
, i
);
110 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op
)));
111 oconstraints
[i
] = constraint
;
112 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
, &allows_reg
,
115 wi
->val_only
= (allows_reg
|| !allows_mem
);
116 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
121 n
= gimple_asm_ninputs (stmt
);
122 for (i
= 0; i
< n
; i
++)
124 op
= gimple_asm_input_op (stmt
, i
);
125 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op
)));
126 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
127 oconstraints
, &allows_mem
, &allows_reg
);
130 wi
->val_only
= (allows_reg
|| !allows_mem
);
131 /* Although input "m" is not really a LHS, we need a lvalue. */
132 wi
->is_lhs
= !wi
->val_only
;
134 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
145 n
= gimple_asm_nlabels (stmt
);
146 for (i
= 0; i
< n
; i
++)
148 op
= gimple_asm_label_op (stmt
, i
);
149 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
158 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
159 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
161 CALLBACK_OP is called on each operand of STMT via walk_tree.
162 Additional parameters to walk_tree must be stored in WI. For each operand
163 OP, walk_tree is called as:
165 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
167 If CALLBACK_OP returns non-NULL for an operand, the remaining
168 operands are not scanned.
170 The return value is that returned by the last call to walk_tree, or
171 NULL_TREE if no CALLBACK_OP is specified. */
174 walk_gimple_op (gimple stmt
, walk_tree_fn callback_op
,
175 struct walk_stmt_info
*wi
)
177 struct pointer_set_t
*pset
= (wi
) ? wi
->pset
: NULL
;
179 tree ret
= NULL_TREE
;
181 switch (gimple_code (stmt
))
184 /* Walk the RHS operands. If the LHS is of a non-renamable type or
185 is a register variable, we may use a COMPONENT_REF on the RHS. */
188 tree lhs
= gimple_assign_lhs (stmt
);
190 = (is_gimple_reg_type (TREE_TYPE (lhs
)) && !is_gimple_reg (lhs
))
191 || gimple_assign_rhs_class (stmt
) != GIMPLE_SINGLE_RHS
;
194 for (i
= 1; i
< gimple_num_ops (stmt
); i
++)
196 ret
= walk_tree (gimple_op_ptr (stmt
, i
), callback_op
, wi
,
202 /* Walk the LHS. If the RHS is appropriate for a memory, we
203 may use a COMPONENT_REF on the LHS. */
206 /* If the RHS is of a non-renamable type or is a register variable,
207 we may use a COMPONENT_REF on the LHS. */
208 tree rhs1
= gimple_assign_rhs1 (stmt
);
210 = (is_gimple_reg_type (TREE_TYPE (rhs1
)) && !is_gimple_reg (rhs1
))
211 || gimple_assign_rhs_class (stmt
) != GIMPLE_SINGLE_RHS
;
215 ret
= walk_tree (gimple_op_ptr (stmt
, 0), callback_op
, wi
, pset
);
233 ret
= walk_tree (gimple_call_chain_ptr (stmt
), callback_op
, wi
, pset
);
237 ret
= walk_tree (gimple_call_fn_ptr (stmt
), callback_op
, wi
, pset
);
241 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
245 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt
, i
)));
246 ret
= walk_tree (gimple_call_arg_ptr (stmt
, i
), callback_op
, wi
,
252 if (gimple_call_lhs (stmt
))
258 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt
)));
261 ret
= walk_tree (gimple_call_lhs_ptr (stmt
), callback_op
, wi
, pset
);
274 ret
= walk_tree (gimple_catch_types_ptr (stmt
), callback_op
, wi
,
280 case GIMPLE_EH_FILTER
:
281 ret
= walk_tree (gimple_eh_filter_types_ptr (stmt
), callback_op
, wi
,
288 ret
= walk_gimple_asm (stmt
, callback_op
, wi
);
293 case GIMPLE_OMP_CONTINUE
:
294 ret
= walk_tree (gimple_omp_continue_control_def_ptr (stmt
),
295 callback_op
, wi
, pset
);
299 ret
= walk_tree (gimple_omp_continue_control_use_ptr (stmt
),
300 callback_op
, wi
, pset
);
305 case GIMPLE_OMP_CRITICAL
:
306 ret
= walk_tree (gimple_omp_critical_name_ptr (stmt
), callback_op
, wi
,
313 ret
= walk_tree (gimple_omp_for_clauses_ptr (stmt
), callback_op
, wi
,
317 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
319 ret
= walk_tree (gimple_omp_for_index_ptr (stmt
, i
), callback_op
,
323 ret
= walk_tree (gimple_omp_for_initial_ptr (stmt
, i
), callback_op
,
327 ret
= walk_tree (gimple_omp_for_final_ptr (stmt
, i
), callback_op
,
331 ret
= walk_tree (gimple_omp_for_incr_ptr (stmt
, i
), callback_op
,
338 case GIMPLE_OMP_PARALLEL
:
339 ret
= walk_tree (gimple_omp_parallel_clauses_ptr (stmt
), callback_op
,
343 ret
= walk_tree (gimple_omp_parallel_child_fn_ptr (stmt
), callback_op
,
347 ret
= walk_tree (gimple_omp_parallel_data_arg_ptr (stmt
), callback_op
,
353 case GIMPLE_OMP_TASK
:
354 ret
= walk_tree (gimple_omp_task_clauses_ptr (stmt
), callback_op
,
358 ret
= walk_tree (gimple_omp_task_child_fn_ptr (stmt
), callback_op
,
362 ret
= walk_tree (gimple_omp_task_data_arg_ptr (stmt
), callback_op
,
366 ret
= walk_tree (gimple_omp_task_copy_fn_ptr (stmt
), callback_op
,
370 ret
= walk_tree (gimple_omp_task_arg_size_ptr (stmt
), callback_op
,
374 ret
= walk_tree (gimple_omp_task_arg_align_ptr (stmt
), callback_op
,
380 case GIMPLE_OMP_SECTIONS
:
381 ret
= walk_tree (gimple_omp_sections_clauses_ptr (stmt
), callback_op
,
386 ret
= walk_tree (gimple_omp_sections_control_ptr (stmt
), callback_op
,
393 case GIMPLE_OMP_SINGLE
:
394 ret
= walk_tree (gimple_omp_single_clauses_ptr (stmt
), callback_op
, wi
,
400 case GIMPLE_OMP_TARGET
:
401 ret
= walk_tree (gimple_omp_target_clauses_ptr (stmt
), callback_op
, wi
,
407 case GIMPLE_OMP_TEAMS
:
408 ret
= walk_tree (gimple_omp_teams_clauses_ptr (stmt
), callback_op
, wi
,
414 case GIMPLE_OMP_ATOMIC_LOAD
:
415 ret
= walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt
), callback_op
, wi
,
420 ret
= walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt
), callback_op
, wi
,
426 case GIMPLE_OMP_ATOMIC_STORE
:
427 ret
= walk_tree (gimple_omp_atomic_store_val_ptr (stmt
), callback_op
,
433 case GIMPLE_TRANSACTION
:
434 ret
= walk_tree (gimple_transaction_label_ptr (stmt
), callback_op
,
440 case GIMPLE_OMP_RETURN
:
441 ret
= walk_tree (gimple_omp_return_lhs_ptr (stmt
), callback_op
, wi
,
447 /* Tuples that do not have operands. */
455 enum gimple_statement_structure_enum gss
;
456 gss
= gimple_statement_structure (stmt
);
457 if (gss
== GSS_WITH_OPS
|| gss
== GSS_WITH_MEM_OPS
)
458 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
460 ret
= walk_tree (gimple_op_ptr (stmt
, i
), callback_op
, wi
, pset
);
472 /* Walk the current statement in GSI (optionally using traversal state
473 stored in WI). If WI is NULL, no state is kept during traversal.
474 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
475 that it has handled all the operands of the statement, its return
476 value is returned. Otherwise, the return value from CALLBACK_STMT
477 is discarded and its operands are scanned.
479 If CALLBACK_STMT is NULL or it didn't handle the operands,
480 CALLBACK_OP is called on each operand of the statement via
481 walk_gimple_op. If walk_gimple_op returns non-NULL for any
482 operand, the remaining operands are not scanned. In this case, the
483 return value from CALLBACK_OP is returned.
485 In any other case, NULL_TREE is returned. */
488 walk_gimple_stmt (gimple_stmt_iterator
*gsi
, walk_stmt_fn callback_stmt
,
489 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
493 gimple stmt
= gsi_stmt (*gsi
);
498 wi
->removed_stmt
= false;
500 if (wi
->want_locations
&& gimple_has_location (stmt
))
501 input_location
= gimple_location (stmt
);
506 /* Invoke the statement callback. Return if the callback handled
507 all of STMT operands by itself. */
510 bool handled_ops
= false;
511 tree_ret
= callback_stmt (gsi
, &handled_ops
, wi
);
515 /* If CALLBACK_STMT did not handle operands, it should not have
516 a value to return. */
517 gcc_assert (tree_ret
== NULL
);
519 if (wi
&& wi
->removed_stmt
)
522 /* Re-read stmt in case the callback changed it. */
523 stmt
= gsi_stmt (*gsi
);
526 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
529 tree_ret
= walk_gimple_op (stmt
, callback_op
, wi
);
534 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
535 switch (gimple_code (stmt
))
538 ret
= walk_gimple_seq_mod (gimple_bind_body_ptr (stmt
), callback_stmt
,
541 return wi
->callback_result
;
545 ret
= walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt
), callback_stmt
,
548 return wi
->callback_result
;
551 case GIMPLE_EH_FILTER
:
552 ret
= walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt
), callback_stmt
,
555 return wi
->callback_result
;
559 ret
= walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt
),
560 callback_stmt
, callback_op
, wi
);
562 return wi
->callback_result
;
563 ret
= walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt
),
564 callback_stmt
, callback_op
, wi
);
566 return wi
->callback_result
;
570 ret
= walk_gimple_seq_mod (gimple_try_eval_ptr (stmt
), callback_stmt
, callback_op
,
573 return wi
->callback_result
;
575 ret
= walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt
), callback_stmt
,
578 return wi
->callback_result
;
582 ret
= walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
), callback_stmt
,
585 return wi
->callback_result
;
588 case GIMPLE_OMP_CRITICAL
:
589 case GIMPLE_OMP_MASTER
:
590 case GIMPLE_OMP_TASKGROUP
:
591 case GIMPLE_OMP_ORDERED
:
592 case GIMPLE_OMP_SECTION
:
593 case GIMPLE_OMP_PARALLEL
:
594 case GIMPLE_OMP_TASK
:
595 case GIMPLE_OMP_SECTIONS
:
596 case GIMPLE_OMP_SINGLE
:
597 case GIMPLE_OMP_TARGET
:
598 case GIMPLE_OMP_TEAMS
:
599 ret
= walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), callback_stmt
,
602 return wi
->callback_result
;
605 case GIMPLE_WITH_CLEANUP_EXPR
:
606 ret
= walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt
), callback_stmt
,
609 return wi
->callback_result
;
612 case GIMPLE_TRANSACTION
:
613 ret
= walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt
),
614 callback_stmt
, callback_op
, wi
);
616 return wi
->callback_result
;
620 gcc_assert (!gimple_has_substatements (stmt
));
627 /* From a tree operand OP return the base of a load or store operation
628 or NULL_TREE if OP is not a load or a store. */
631 get_base_loadstore (tree op
)
633 while (handled_component_p (op
))
634 op
= TREE_OPERAND (op
, 0);
636 || INDIRECT_REF_P (op
)
637 || TREE_CODE (op
) == MEM_REF
638 || TREE_CODE (op
) == TARGET_MEM_REF
)
644 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
645 VISIT_ADDR if non-NULL on loads, store and address-taken operands
646 passing the STMT, the base of the operand and DATA to it. The base
647 will be either a decl, an indirect reference (including TARGET_MEM_REF)
648 or the argument of an address expression.
649 Returns the results of these callbacks or'ed. */
652 walk_stmt_load_store_addr_ops (gimple stmt
, void *data
,
653 bool (*visit_load
)(gimple
, tree
, void *),
654 bool (*visit_store
)(gimple
, tree
, void *),
655 bool (*visit_addr
)(gimple
, tree
, void *))
659 if (gimple_assign_single_p (stmt
))
664 lhs
= get_base_loadstore (gimple_assign_lhs (stmt
));
666 ret
|= visit_store (stmt
, lhs
, data
);
668 rhs
= gimple_assign_rhs1 (stmt
);
669 while (handled_component_p (rhs
))
670 rhs
= TREE_OPERAND (rhs
, 0);
673 if (TREE_CODE (rhs
) == ADDR_EXPR
)
674 ret
|= visit_addr (stmt
, TREE_OPERAND (rhs
, 0), data
);
675 else if (TREE_CODE (rhs
) == TARGET_MEM_REF
676 && TREE_CODE (TMR_BASE (rhs
)) == ADDR_EXPR
)
677 ret
|= visit_addr (stmt
, TREE_OPERAND (TMR_BASE (rhs
), 0), data
);
678 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
679 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs
)) == ADDR_EXPR
)
680 ret
|= visit_addr (stmt
, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs
),
682 else if (TREE_CODE (rhs
) == CONSTRUCTOR
)
687 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), ix
, val
)
688 if (TREE_CODE (val
) == ADDR_EXPR
)
689 ret
|= visit_addr (stmt
, TREE_OPERAND (val
, 0), data
);
690 else if (TREE_CODE (val
) == OBJ_TYPE_REF
691 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val
)) == ADDR_EXPR
)
692 ret
|= visit_addr (stmt
,
693 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val
),
696 lhs
= gimple_assign_lhs (stmt
);
697 if (TREE_CODE (lhs
) == TARGET_MEM_REF
698 && TREE_CODE (TMR_BASE (lhs
)) == ADDR_EXPR
)
699 ret
|= visit_addr (stmt
, TREE_OPERAND (TMR_BASE (lhs
), 0), data
);
703 rhs
= get_base_loadstore (rhs
);
705 ret
|= visit_load (stmt
, rhs
, data
);
709 && (is_gimple_assign (stmt
)
710 || gimple_code (stmt
) == GIMPLE_COND
))
712 for (i
= 0; i
< gimple_num_ops (stmt
); ++i
)
714 tree op
= gimple_op (stmt
, i
);
717 else if (TREE_CODE (op
) == ADDR_EXPR
)
718 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
719 /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
720 tree with two operands. */
721 else if (i
== 1 && COMPARISON_CLASS_P (op
))
723 if (TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
724 ret
|= visit_addr (stmt
, TREE_OPERAND (TREE_OPERAND (op
, 0),
726 if (TREE_CODE (TREE_OPERAND (op
, 1)) == ADDR_EXPR
)
727 ret
|= visit_addr (stmt
, TREE_OPERAND (TREE_OPERAND (op
, 1),
732 else if (is_gimple_call (stmt
))
736 tree lhs
= gimple_call_lhs (stmt
);
739 lhs
= get_base_loadstore (lhs
);
741 ret
|= visit_store (stmt
, lhs
, data
);
744 if (visit_load
|| visit_addr
)
745 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
747 tree rhs
= gimple_call_arg (stmt
, i
);
749 && TREE_CODE (rhs
) == ADDR_EXPR
)
750 ret
|= visit_addr (stmt
, TREE_OPERAND (rhs
, 0), data
);
753 rhs
= get_base_loadstore (rhs
);
755 ret
|= visit_load (stmt
, rhs
, data
);
759 && gimple_call_chain (stmt
)
760 && TREE_CODE (gimple_call_chain (stmt
)) == ADDR_EXPR
)
761 ret
|= visit_addr (stmt
, TREE_OPERAND (gimple_call_chain (stmt
), 0),
764 && gimple_call_return_slot_opt_p (stmt
)
765 && gimple_call_lhs (stmt
) != NULL_TREE
766 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
767 ret
|= visit_addr (stmt
, gimple_call_lhs (stmt
), data
);
769 else if (gimple_code (stmt
) == GIMPLE_ASM
)
772 const char *constraint
;
773 const char **oconstraints
;
774 bool allows_mem
, allows_reg
, is_inout
;
775 noutputs
= gimple_asm_noutputs (stmt
);
776 oconstraints
= XALLOCAVEC (const char *, noutputs
);
777 if (visit_store
|| visit_addr
)
778 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
780 tree link
= gimple_asm_output_op (stmt
, i
);
781 tree op
= get_base_loadstore (TREE_VALUE (link
));
782 if (op
&& visit_store
)
783 ret
|= visit_store (stmt
, op
, data
);
786 constraint
= TREE_STRING_POINTER
787 (TREE_VALUE (TREE_PURPOSE (link
)));
788 oconstraints
[i
] = constraint
;
789 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
790 &allows_reg
, &is_inout
);
791 if (op
&& !allows_reg
&& allows_mem
)
792 ret
|= visit_addr (stmt
, op
, data
);
795 if (visit_load
|| visit_addr
)
796 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
798 tree link
= gimple_asm_input_op (stmt
, i
);
799 tree op
= TREE_VALUE (link
);
801 && TREE_CODE (op
) == ADDR_EXPR
)
802 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
803 else if (visit_load
|| visit_addr
)
805 op
= get_base_loadstore (op
);
809 ret
|= visit_load (stmt
, op
, data
);
812 constraint
= TREE_STRING_POINTER
813 (TREE_VALUE (TREE_PURPOSE (link
)));
814 parse_input_constraint (&constraint
, 0, 0, noutputs
,
816 &allows_mem
, &allows_reg
);
817 if (!allows_reg
&& allows_mem
)
818 ret
|= visit_addr (stmt
, op
, data
);
824 else if (gimple_code (stmt
) == GIMPLE_RETURN
)
826 tree op
= gimple_return_retval (stmt
);
830 && TREE_CODE (op
) == ADDR_EXPR
)
831 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
834 op
= get_base_loadstore (op
);
836 ret
|= visit_load (stmt
, op
, data
);
841 && gimple_code (stmt
) == GIMPLE_PHI
)
843 for (i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
845 tree op
= gimple_phi_arg_def (stmt
, i
);
846 if (TREE_CODE (op
) == ADDR_EXPR
)
847 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
851 && gimple_code (stmt
) == GIMPLE_GOTO
)
853 tree op
= gimple_goto_dest (stmt
);
854 if (TREE_CODE (op
) == ADDR_EXPR
)
855 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
861 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
862 should make a faster clone for this case. */
865 walk_stmt_load_store_ops (gimple stmt
, void *data
,
866 bool (*visit_load
)(gimple
, tree
, void *),
867 bool (*visit_store
)(gimple
, tree
, void *))
869 return walk_stmt_load_store_addr_ops (stmt
, data
,
870 visit_load
, visit_store
, NULL
);