/* Gimple walk support.

   Copyright (C) 2007-2023 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "stmt.h"
32 /* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
33 on each one. WI is as in walk_gimple_stmt.
35 If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
36 value is stored in WI->CALLBACK_RESULT. Also, the statement that
37 produced the value is returned if this statement has not been
38 removed by a callback (wi->removed_stmt). If the statement has
39 been removed, NULL is returned.
41 Otherwise, all the statements are walked and NULL returned. */
44 walk_gimple_seq_mod (gimple_seq
*pseq
, walk_stmt_fn callback_stmt
,
45 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
47 gimple_stmt_iterator gsi
;
49 for (gsi
= gsi_start (*pseq
); !gsi_end_p (gsi
); )
51 tree ret
= walk_gimple_stmt (&gsi
, callback_stmt
, callback_op
, wi
);
54 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
57 wi
->callback_result
= ret
;
60 if (!wi
->removed_stmt
)
65 wi
->removed_stmt
= false;
70 if (!wi
->removed_stmt
)
73 wi
->removed_stmt
= false;
77 wi
->callback_result
= NULL_TREE
;
83 /* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
84 changed by the callbacks. */
87 walk_gimple_seq (gimple_seq seq
, walk_stmt_fn callback_stmt
,
88 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
90 gimple_seq seq2
= seq
;
91 gimple
*ret
= walk_gimple_seq_mod (&seq2
, callback_stmt
, callback_op
, wi
);
92 gcc_assert (seq2
== seq
);
97 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
100 walk_gimple_asm (gasm
*stmt
, walk_tree_fn callback_op
,
101 struct walk_stmt_info
*wi
)
105 const char **oconstraints
;
107 const char *constraint
;
108 bool allows_mem
, allows_reg
, is_inout
;
110 noutputs
= gimple_asm_noutputs (stmt
);
111 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
113 for (i
= 0; i
< noutputs
; i
++)
115 op
= gimple_asm_output_op (stmt
, i
);
116 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op
)));
117 oconstraints
[i
] = constraint
;
120 if (parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
121 &allows_reg
, &is_inout
))
122 wi
->val_only
= (allows_reg
|| !allows_mem
);
126 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
131 n
= gimple_asm_ninputs (stmt
);
132 for (i
= 0; i
< n
; i
++)
134 op
= gimple_asm_input_op (stmt
, i
);
135 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op
)));
139 if (parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
140 oconstraints
, &allows_mem
, &allows_reg
))
142 wi
->val_only
= (allows_reg
|| !allows_mem
);
143 /* Although input "m" is not really a LHS, we need a lvalue. */
144 wi
->is_lhs
= !wi
->val_only
;
147 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
158 n
= gimple_asm_nlabels (stmt
);
159 for (i
= 0; i
< n
; i
++)
161 op
= gimple_asm_label_op (stmt
, i
);
162 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
171 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
172 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
174 CALLBACK_OP is called on each operand of STMT via walk_tree.
175 Additional parameters to walk_tree must be stored in WI. For each operand
176 OP, walk_tree is called as:
178 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
180 If CALLBACK_OP returns non-NULL for an operand, the remaining
181 operands are not scanned.
183 The return value is that returned by the last call to walk_tree, or
184 NULL_TREE if no CALLBACK_OP is specified. */
187 walk_gimple_op (gimple
*stmt
, walk_tree_fn callback_op
,
188 struct walk_stmt_info
*wi
)
190 hash_set
<tree
> *pset
= (wi
) ? wi
->pset
: NULL
;
192 tree ret
= NULL_TREE
;
197 switch (gimple_code (stmt
))
200 /* Walk the RHS operands. If the LHS is of a non-renamable type or
201 is a register variable, we may use a COMPONENT_REF on the RHS. */
204 tree lhs
= gimple_assign_lhs (stmt
);
206 = (is_gimple_reg_type (TREE_TYPE (lhs
)) && !is_gimple_reg (lhs
))
207 || gimple_assign_rhs_class (stmt
) != GIMPLE_SINGLE_RHS
;
210 for (i
= 1; i
< gimple_num_ops (stmt
); i
++)
212 ret
= walk_tree (gimple_op_ptr (stmt
, i
), callback_op
, wi
,
218 /* Walk the LHS. If the RHS is appropriate for a memory, we
219 may use a COMPONENT_REF on the LHS. */
222 /* If the RHS is of a non-renamable type or is a register variable,
223 we may use a COMPONENT_REF on the LHS. */
224 tree rhs1
= gimple_assign_rhs1 (stmt
);
226 = (is_gimple_reg_type (TREE_TYPE (rhs1
)) && !is_gimple_reg (rhs1
))
227 || gimple_assign_rhs_class (stmt
) != GIMPLE_SINGLE_RHS
;
231 ret
= walk_tree (gimple_op_ptr (stmt
, 0), callback_op
, wi
, pset
);
249 ret
= walk_tree (gimple_call_chain_ptr (as_a
<gcall
*> (stmt
)),
250 callback_op
, wi
, pset
);
254 ret
= walk_tree (gimple_call_fn_ptr (stmt
), callback_op
, wi
, pset
);
258 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
262 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt
, i
)));
263 ret
= walk_tree (gimple_call_arg_ptr (stmt
, i
), callback_op
, wi
,
269 if (gimple_call_lhs (stmt
))
275 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt
)));
278 ret
= walk_tree (gimple_call_lhs_ptr (stmt
), callback_op
, wi
, pset
);
291 ret
= walk_tree (gimple_catch_types_ptr (as_a
<gcatch
*> (stmt
)),
292 callback_op
, wi
, pset
);
297 case GIMPLE_EH_FILTER
:
298 ret
= walk_tree (gimple_eh_filter_types_ptr (stmt
), callback_op
, wi
,
305 ret
= walk_gimple_asm (as_a
<gasm
*> (stmt
), callback_op
, wi
);
310 case GIMPLE_OMP_CONTINUE
:
312 gomp_continue
*cont_stmt
= as_a
<gomp_continue
*> (stmt
);
313 ret
= walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt
),
314 callback_op
, wi
, pset
);
318 ret
= walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt
),
319 callback_op
, wi
, pset
);
325 case GIMPLE_OMP_CRITICAL
:
327 gomp_critical
*omp_stmt
= as_a
<gomp_critical
*> (stmt
);
328 ret
= walk_tree (gimple_omp_critical_name_ptr (omp_stmt
),
329 callback_op
, wi
, pset
);
332 ret
= walk_tree (gimple_omp_critical_clauses_ptr (omp_stmt
),
333 callback_op
, wi
, pset
);
339 case GIMPLE_OMP_ORDERED
:
341 gomp_ordered
*omp_stmt
= as_a
<gomp_ordered
*> (stmt
);
342 ret
= walk_tree (gimple_omp_ordered_clauses_ptr (omp_stmt
),
343 callback_op
, wi
, pset
);
349 case GIMPLE_OMP_SCAN
:
351 gomp_scan
*scan_stmt
= as_a
<gomp_scan
*> (stmt
);
352 ret
= walk_tree (gimple_omp_scan_clauses_ptr (scan_stmt
),
353 callback_op
, wi
, pset
);
360 ret
= walk_tree (gimple_omp_for_clauses_ptr (stmt
), callback_op
, wi
,
364 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
366 ret
= walk_tree (gimple_omp_for_index_ptr (stmt
, i
), callback_op
,
370 ret
= walk_tree (gimple_omp_for_initial_ptr (stmt
, i
), callback_op
,
374 ret
= walk_tree (gimple_omp_for_final_ptr (stmt
, i
), callback_op
,
378 ret
= walk_tree (gimple_omp_for_incr_ptr (stmt
, i
), callback_op
,
385 case GIMPLE_OMP_PARALLEL
:
387 gomp_parallel
*omp_par_stmt
= as_a
<gomp_parallel
*> (stmt
);
388 ret
= walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt
),
389 callback_op
, wi
, pset
);
392 ret
= walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt
),
393 callback_op
, wi
, pset
);
396 ret
= walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt
),
397 callback_op
, wi
, pset
);
403 case GIMPLE_OMP_TASK
:
404 ret
= walk_tree (gimple_omp_task_clauses_ptr (stmt
), callback_op
,
408 ret
= walk_tree (gimple_omp_task_child_fn_ptr (stmt
), callback_op
,
412 ret
= walk_tree (gimple_omp_task_data_arg_ptr (stmt
), callback_op
,
416 ret
= walk_tree (gimple_omp_task_copy_fn_ptr (stmt
), callback_op
,
420 ret
= walk_tree (gimple_omp_task_arg_size_ptr (stmt
), callback_op
,
424 ret
= walk_tree (gimple_omp_task_arg_align_ptr (stmt
), callback_op
,
430 case GIMPLE_OMP_SECTIONS
:
431 ret
= walk_tree (gimple_omp_sections_clauses_ptr (stmt
), callback_op
,
435 ret
= walk_tree (gimple_omp_sections_control_ptr (stmt
), callback_op
,
442 case GIMPLE_OMP_SINGLE
:
443 ret
= walk_tree (gimple_omp_single_clauses_ptr (stmt
), callback_op
, wi
,
449 case GIMPLE_OMP_TARGET
:
451 gomp_target
*omp_stmt
= as_a
<gomp_target
*> (stmt
);
452 ret
= walk_tree (gimple_omp_target_clauses_ptr (omp_stmt
),
453 callback_op
, wi
, pset
);
456 ret
= walk_tree (gimple_omp_target_child_fn_ptr (omp_stmt
),
457 callback_op
, wi
, pset
);
460 ret
= walk_tree (gimple_omp_target_data_arg_ptr (omp_stmt
),
461 callback_op
, wi
, pset
);
467 case GIMPLE_OMP_TEAMS
:
468 ret
= walk_tree (gimple_omp_teams_clauses_ptr (stmt
), callback_op
, wi
,
474 case GIMPLE_OMP_ATOMIC_LOAD
:
476 gomp_atomic_load
*omp_stmt
= as_a
<gomp_atomic_load
*> (stmt
);
477 ret
= walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt
),
478 callback_op
, wi
, pset
);
481 ret
= walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt
),
482 callback_op
, wi
, pset
);
488 case GIMPLE_OMP_ATOMIC_STORE
:
490 gomp_atomic_store
*omp_stmt
= as_a
<gomp_atomic_store
*> (stmt
);
491 ret
= walk_tree (gimple_omp_atomic_store_val_ptr (omp_stmt
),
492 callback_op
, wi
, pset
);
499 ret
= walk_tree (gimple_assume_guard_ptr (stmt
), callback_op
, wi
, pset
);
504 case GIMPLE_TRANSACTION
:
506 gtransaction
*txn
= as_a
<gtransaction
*> (stmt
);
508 ret
= walk_tree (gimple_transaction_label_norm_ptr (txn
),
509 callback_op
, wi
, pset
);
512 ret
= walk_tree (gimple_transaction_label_uninst_ptr (txn
),
513 callback_op
, wi
, pset
);
516 ret
= walk_tree (gimple_transaction_label_over_ptr (txn
),
517 callback_op
, wi
, pset
);
523 case GIMPLE_OMP_RETURN
:
524 ret
= walk_tree (gimple_omp_return_lhs_ptr (stmt
), callback_op
, wi
,
530 /* Tuples that do not have operands. */
537 /* PHIs are not GSS_WITH_OPS so we need to handle them explicitely. */
539 gphi
*phi
= as_a
<gphi
*> (stmt
);
545 ret
= walk_tree (gimple_phi_result_ptr (phi
), callback_op
, wi
, pset
);
550 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
552 ret
= walk_tree (gimple_phi_arg_def_ptr (phi
, i
),
553 callback_op
, wi
, pset
);
562 enum gimple_statement_structure_enum gss
;
563 gss
= gimple_statement_structure (stmt
);
564 if (gss
== GSS_WITH_OPS
|| gss
== GSS_WITH_MEM_OPS
)
565 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
567 ret
= walk_tree (gimple_op_ptr (stmt
, i
), callback_op
, wi
, pset
);
579 /* Walk the current statement in GSI (optionally using traversal state
580 stored in WI). If WI is NULL, no state is kept during traversal.
581 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
582 that it has handled all the operands of the statement, its return
583 value is returned. Otherwise, the return value from CALLBACK_STMT
584 is discarded and its operands are scanned.
586 If CALLBACK_STMT is NULL or it didn't handle the operands,
587 CALLBACK_OP is called on each operand of the statement via
588 walk_gimple_op. If walk_gimple_op returns non-NULL for any
589 operand, the remaining operands are not scanned. In this case, the
590 return value from CALLBACK_OP is returned.
592 In any other case, NULL_TREE is returned. */
595 walk_gimple_stmt (gimple_stmt_iterator
*gsi
, walk_stmt_fn callback_stmt
,
596 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
600 gimple
*stmt
= gsi_stmt (*gsi
);
605 wi
->removed_stmt
= false;
607 if (wi
->want_locations
&& gimple_has_location (stmt
))
608 input_location
= gimple_location (stmt
);
613 /* Invoke the statement callback. Return if the callback handled
614 all of STMT operands by itself. */
617 bool handled_ops
= false;
618 tree_ret
= callback_stmt (gsi
, &handled_ops
, wi
);
622 /* If CALLBACK_STMT did not handle operands, it should not have
623 a value to return. */
624 gcc_assert (tree_ret
== NULL
);
626 if (wi
&& wi
->removed_stmt
)
629 /* Re-read stmt in case the callback changed it. */
630 stmt
= gsi_stmt (*gsi
);
633 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
636 tree_ret
= walk_gimple_op (stmt
, callback_op
, wi
);
641 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
642 switch (gimple_code (stmt
))
645 ret
= walk_gimple_seq_mod (gimple_bind_body_ptr (as_a
<gbind
*> (stmt
)),
646 callback_stmt
, callback_op
, wi
);
648 return wi
->callback_result
;
652 ret
= walk_gimple_seq_mod (gimple_catch_handler_ptr (
653 as_a
<gcatch
*> (stmt
)),
654 callback_stmt
, callback_op
, wi
);
656 return wi
->callback_result
;
659 case GIMPLE_EH_FILTER
:
660 ret
= walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt
), callback_stmt
,
663 return wi
->callback_result
;
668 geh_else
*eh_else_stmt
= as_a
<geh_else
*> (stmt
);
669 ret
= walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt
),
670 callback_stmt
, callback_op
, wi
);
672 return wi
->callback_result
;
673 ret
= walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt
),
674 callback_stmt
, callback_op
, wi
);
676 return wi
->callback_result
;
681 ret
= walk_gimple_seq_mod (gimple_try_eval_ptr (stmt
), callback_stmt
, callback_op
,
684 return wi
->callback_result
;
686 ret
= walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt
), callback_stmt
,
689 return wi
->callback_result
;
693 ret
= walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
), callback_stmt
,
696 return wi
->callback_result
;
699 case GIMPLE_OMP_CRITICAL
:
700 case GIMPLE_OMP_MASTER
:
701 case GIMPLE_OMP_MASKED
:
702 case GIMPLE_OMP_TASKGROUP
:
703 case GIMPLE_OMP_ORDERED
:
704 case GIMPLE_OMP_SCAN
:
705 case GIMPLE_OMP_SECTION
:
706 case GIMPLE_OMP_STRUCTURED_BLOCK
:
707 case GIMPLE_OMP_PARALLEL
:
708 case GIMPLE_OMP_TASK
:
709 case GIMPLE_OMP_SCOPE
:
710 case GIMPLE_OMP_SECTIONS
:
711 case GIMPLE_OMP_SINGLE
:
712 case GIMPLE_OMP_TARGET
:
713 case GIMPLE_OMP_TEAMS
:
714 ret
= walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), callback_stmt
,
717 return wi
->callback_result
;
720 case GIMPLE_WITH_CLEANUP_EXPR
:
721 ret
= walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt
), callback_stmt
,
724 return wi
->callback_result
;
728 ret
= walk_gimple_seq_mod (gimple_assume_body_ptr (stmt
),
729 callback_stmt
, callback_op
, wi
);
731 return wi
->callback_result
;
734 case GIMPLE_TRANSACTION
:
735 ret
= walk_gimple_seq_mod (gimple_transaction_body_ptr (
736 as_a
<gtransaction
*> (stmt
)),
737 callback_stmt
, callback_op
, wi
);
739 return wi
->callback_result
;
743 gcc_assert (!gimple_has_substatements (stmt
));
750 /* From a tree operand OP return the base of a load or store operation
751 or NULL_TREE if OP is not a load or a store. */
754 get_base_loadstore (tree op
)
756 while (handled_component_p (op
))
757 op
= TREE_OPERAND (op
, 0);
759 || INDIRECT_REF_P (op
)
760 || TREE_CODE (op
) == MEM_REF
761 || TREE_CODE (op
) == TARGET_MEM_REF
)
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, store and address-taken operands
   passing the STMT, the base of the operand, the operand itself containing
   the base and DATA to it.  The base will be either a decl, an indirect
   reference (including TARGET_MEM_REF) or the argument of an address
   expression.
   Returns the results of these callbacks or'ed.  */
776 walk_stmt_load_store_addr_ops (gimple
*stmt
, void *data
,
777 walk_stmt_load_store_addr_fn visit_load
,
778 walk_stmt_load_store_addr_fn visit_store
,
779 walk_stmt_load_store_addr_fn visit_addr
)
783 if (gimple_assign_single_p (stmt
))
788 arg
= gimple_assign_lhs (stmt
);
789 lhs
= get_base_loadstore (arg
);
791 ret
|= visit_store (stmt
, lhs
, arg
, data
);
793 arg
= gimple_assign_rhs1 (stmt
);
795 while (handled_component_p (rhs
))
796 rhs
= TREE_OPERAND (rhs
, 0);
799 if (TREE_CODE (rhs
) == ADDR_EXPR
)
800 ret
|= visit_addr (stmt
, TREE_OPERAND (rhs
, 0), arg
, data
);
801 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
802 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs
)) == ADDR_EXPR
)
803 ret
|= visit_addr (stmt
, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs
),
805 else if (TREE_CODE (rhs
) == CONSTRUCTOR
)
810 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), ix
, val
)
811 if (TREE_CODE (val
) == ADDR_EXPR
)
812 ret
|= visit_addr (stmt
, TREE_OPERAND (val
, 0), arg
, data
);
813 else if (TREE_CODE (val
) == OBJ_TYPE_REF
814 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val
)) == ADDR_EXPR
)
815 ret
|= visit_addr (stmt
,
816 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val
),
822 rhs
= get_base_loadstore (rhs
);
824 ret
|= visit_load (stmt
, rhs
, arg
, data
);
828 && (is_gimple_assign (stmt
)
829 || gimple_code (stmt
) == GIMPLE_COND
))
831 for (i
= 0; i
< gimple_num_ops (stmt
); ++i
)
833 tree op
= gimple_op (stmt
, i
);
836 else if (TREE_CODE (op
) == ADDR_EXPR
)
837 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), op
, data
);
838 /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
839 tree with two operands. */
840 else if (i
== 1 && COMPARISON_CLASS_P (op
))
842 if (TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
843 ret
|= visit_addr (stmt
, TREE_OPERAND (TREE_OPERAND (op
, 0),
845 if (TREE_CODE (TREE_OPERAND (op
, 1)) == ADDR_EXPR
)
846 ret
|= visit_addr (stmt
, TREE_OPERAND (TREE_OPERAND (op
, 1),
851 else if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
855 tree arg
= gimple_call_lhs (call_stmt
);
858 tree lhs
= get_base_loadstore (arg
);
860 ret
|= visit_store (stmt
, lhs
, arg
, data
);
863 if (visit_load
|| visit_addr
)
864 for (i
= 0; i
< gimple_call_num_args (call_stmt
); ++i
)
866 tree arg
= gimple_call_arg (call_stmt
, i
);
868 && TREE_CODE (arg
) == ADDR_EXPR
)
869 ret
|= visit_addr (stmt
, TREE_OPERAND (arg
, 0), arg
, data
);
872 tree rhs
= get_base_loadstore (arg
);
874 ret
|= visit_load (stmt
, rhs
, arg
, data
);
878 && gimple_call_chain (call_stmt
)
879 && TREE_CODE (gimple_call_chain (call_stmt
)) == ADDR_EXPR
)
880 ret
|= visit_addr (stmt
, TREE_OPERAND (gimple_call_chain (call_stmt
), 0),
881 gimple_call_chain (call_stmt
), data
);
883 && gimple_call_return_slot_opt_p (call_stmt
)
884 && gimple_call_lhs (call_stmt
) != NULL_TREE
885 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt
))))
886 ret
|= visit_addr (stmt
, gimple_call_lhs (call_stmt
),
887 gimple_call_lhs (call_stmt
), data
);
889 else if (gasm
*asm_stmt
= dyn_cast
<gasm
*> (stmt
))
892 const char *constraint
;
893 const char **oconstraints
;
894 bool allows_mem
, allows_reg
, is_inout
;
895 noutputs
= gimple_asm_noutputs (asm_stmt
);
896 oconstraints
= XALLOCAVEC (const char *, noutputs
);
897 if (visit_store
|| visit_addr
)
898 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
900 tree link
= gimple_asm_output_op (asm_stmt
, i
);
901 tree op
= get_base_loadstore (TREE_VALUE (link
));
902 if (op
&& visit_store
)
903 ret
|= visit_store (stmt
, op
, TREE_VALUE (link
), data
);
906 constraint
= TREE_STRING_POINTER
907 (TREE_VALUE (TREE_PURPOSE (link
)));
908 oconstraints
[i
] = constraint
;
909 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
910 &allows_reg
, &is_inout
);
911 if (op
&& !allows_reg
&& allows_mem
)
912 ret
|= visit_addr (stmt
, op
, TREE_VALUE (link
), data
);
915 if (visit_load
|| visit_addr
)
916 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
918 tree link
= gimple_asm_input_op (asm_stmt
, i
);
919 tree op
= TREE_VALUE (link
);
921 && TREE_CODE (op
) == ADDR_EXPR
)
922 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), op
, data
);
923 else if (visit_load
|| visit_addr
)
925 op
= get_base_loadstore (op
);
929 ret
|= visit_load (stmt
, op
, TREE_VALUE (link
), data
);
932 constraint
= TREE_STRING_POINTER
933 (TREE_VALUE (TREE_PURPOSE (link
)));
934 parse_input_constraint (&constraint
, 0, 0, noutputs
,
936 &allows_mem
, &allows_reg
);
937 if (!allows_reg
&& allows_mem
)
938 ret
|= visit_addr (stmt
, op
, TREE_VALUE (link
),
945 else if (greturn
*return_stmt
= dyn_cast
<greturn
*> (stmt
))
947 tree op
= gimple_return_retval (return_stmt
);
951 && TREE_CODE (op
) == ADDR_EXPR
)
952 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), op
, data
);
955 tree base
= get_base_loadstore (op
);
957 ret
|= visit_load (stmt
, base
, op
, data
);
962 && gimple_code (stmt
) == GIMPLE_PHI
)
964 for (i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
966 tree op
= gimple_phi_arg_def (stmt
, i
);
967 if (TREE_CODE (op
) == ADDR_EXPR
)
968 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), op
, data
);
972 && gimple_code (stmt
) == GIMPLE_GOTO
)
974 tree op
= gimple_goto_dest (stmt
);
975 if (TREE_CODE (op
) == ADDR_EXPR
)
976 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), op
, data
);
982 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
983 should make a faster clone for this case. */
986 walk_stmt_load_store_ops (gimple
*stmt
, void *data
,
987 walk_stmt_load_store_addr_fn visit_load
,
988 walk_stmt_load_store_addr_fn visit_store
)
990 return walk_stmt_load_store_addr_ops (stmt
, data
,
991 visit_load
, visit_store
, NULL
);