testsuite, i386: fix -fhardened test
[official-gcc.git] / gcc / gimple-walk.cc
blob20df7e2d7e40249d3db5aeb7dc1477e7ffbd5049
1 /* Gimple walk support.
3 Copyright (C) 2007-2023 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "gimple-iterator.h"
29 #include "gimple-walk.h"
30 #include "stmt.h"
32 /* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
33 on each one. WI is as in walk_gimple_stmt.
35 If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
36 value is stored in WI->CALLBACK_RESULT. Also, the statement that
37 produced the value is returned if this statement has not been
38 removed by a callback (wi->removed_stmt). If the statement has
39 been removed, NULL is returned.
41 Otherwise, all the statements are walked and NULL returned. */
43 gimple *
44 walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
45 walk_tree_fn callback_op, struct walk_stmt_info *wi)
47 gimple_stmt_iterator gsi;
49 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
51 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
52 if (ret)
54 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
55 to hold it. */
56 gcc_assert (wi);
57 wi->callback_result = ret;
59 gimple *g;
60 if (!wi->removed_stmt)
61 g = gsi_stmt (gsi);
62 else
64 g = NULL;
65 wi->removed_stmt = false;
67 return g;
70 if (!wi->removed_stmt)
71 gsi_next (&gsi);
72 else
73 wi->removed_stmt = false;
76 if (wi)
77 wi->callback_result = NULL_TREE;
79 return NULL;
83 /* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
84 changed by the callbacks. */
86 gimple *
87 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
88 walk_tree_fn callback_op, struct walk_stmt_info *wi)
90 gimple_seq seq2 = seq;
91 gimple *ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
92 gcc_assert (seq2 == seq);
93 return ret;
97 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
99 static tree
100 walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op,
101 struct walk_stmt_info *wi)
103 tree ret, op;
104 unsigned noutputs;
105 const char **oconstraints;
106 unsigned i, n;
107 const char *constraint;
108 bool allows_mem, allows_reg, is_inout;
110 noutputs = gimple_asm_noutputs (stmt);
111 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
113 for (i = 0; i < noutputs; i++)
115 op = gimple_asm_output_op (stmt, i);
116 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
117 oconstraints[i] = constraint;
118 if (wi)
120 if (parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
121 &allows_reg, &is_inout))
122 wi->val_only = (allows_reg || !allows_mem);
124 if (wi)
125 wi->is_lhs = true;
126 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
127 if (ret)
128 return ret;
131 n = gimple_asm_ninputs (stmt);
132 for (i = 0; i < n; i++)
134 op = gimple_asm_input_op (stmt, i);
135 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
137 if (wi)
139 if (parse_input_constraint (&constraint, 0, 0, noutputs, 0,
140 oconstraints, &allows_mem, &allows_reg))
142 wi->val_only = (allows_reg || !allows_mem);
143 /* Although input "m" is not really a LHS, we need a lvalue. */
144 wi->is_lhs = !wi->val_only;
147 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
148 if (ret)
149 return ret;
152 if (wi)
154 wi->is_lhs = false;
155 wi->val_only = true;
158 n = gimple_asm_nlabels (stmt);
159 for (i = 0; i < n; i++)
161 op = gimple_asm_label_op (stmt, i);
162 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
163 if (ret)
164 return ret;
167 return NULL_TREE;
/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple *stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  hash_set<tree> *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  if (wi)
    wi->stmt = stmt;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      /* Operand 0 is the LHS; walk operands 1..N-1 (the RHS) first.  */
      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      /* Reset the walker state for the next statement.  */
      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      /* Walk the static chain, the callee, the arguments, and last the
	 LHS (with IS_LHS set).  */
      ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      /* Asm operands need constraint-aware handling; see helper.  */
      ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      {
	gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
	ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;

	ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_CRITICAL:
      {
	gomp_critical *omp_stmt = as_a <gomp_critical *> (stmt);
	ret = walk_tree (gimple_omp_critical_name_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_critical_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ORDERED:
      {
	gomp_ordered *omp_stmt = as_a <gomp_ordered *> (stmt);
	ret = walk_tree (gimple_omp_ordered_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_SCAN:
      {
	gomp_scan *scan_stmt = as_a <gomp_scan *> (stmt);
	ret = walk_tree (gimple_omp_scan_clauses_ptr (scan_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      /* Walk index, initial value, final value and increment for each
	 collapsed loop dimension.  */
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      {
	gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
	ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TARGET:
      {
	gomp_target *omp_stmt = as_a <gomp_target *> (stmt);
	ret = walk_tree (gimple_omp_target_clauses_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_child_fn_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_target_data_arg_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      {
	gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
	ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      {
	gomp_atomic_store *omp_stmt = as_a <gomp_atomic_store *> (stmt);
	ret = walk_tree (gimple_omp_atomic_store_val_ptr (omp_stmt),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_ASSUME:
      ret = walk_tree (gimple_assume_guard_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (stmt);

	ret = walk_tree (gimple_transaction_label_norm_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_transaction_label_uninst_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
	ret = walk_tree (gimple_transaction_label_over_ptr (txn),
			 callback_op, wi, pset);
	if (ret)
	  return ret;
      }
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    case GIMPLE_PHI:
      /* PHIs are not GSS_WITH_OPS so we need to handle them explicitly.  */
      {
	gphi *phi = as_a <gphi *> (stmt);
	if (wi)
	  {
	    wi->val_only = true;
	    wi->is_lhs = true;
	  }
	ret = walk_tree (gimple_phi_result_ptr (phi), callback_op, wi, pset);
	if (wi)
	  wi->is_lhs = false;
	if (ret)
	  return ret;
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    ret = walk_tree (gimple_phi_arg_def_ptr (phi, i),
			     callback_op, wi, pset);
	    if (ret)
	      return ret;
	  }
      }
      break;

    default:
      {
	/* For any other statement kind with regular operands, just walk
	   all of them in order.  */
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple *ret;
  tree tree_ret;
  gimple *stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* A removed statement has no operands left to walk.  */
      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.
     Note: when a sub-walk stops early, the value is propagated through
     WI->CALLBACK_RESULT, which walk_gimple_seq_mod has asserted exists.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
				   as_a <gcatch *> (stmt)),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
	ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
				   callback_stmt, callback_op, wi);
	if (ret)
	  return wi->callback_result;
      }
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_STRUCTURED_BLOCK:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      /* All these OMP constructs share the generic body accessor.  */
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_ASSUME:
      ret = walk_gimple_seq_mod (gimple_assume_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
				   as_a <gtransaction *> (stmt)),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
750 /* From a tree operand OP return the base of a load or store operation
751 or NULL_TREE if OP is not a load or a store. */
753 static tree
754 get_base_loadstore (tree op)
756 while (handled_component_p (op))
757 op = TREE_OPERAND (op, 0);
758 if (DECL_P (op)
759 || INDIRECT_REF_P (op)
760 || TREE_CODE (op) == MEM_REF
761 || TREE_CODE (op) == TARGET_MEM_REF)
762 return op;
763 return NULL_TREE;
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, store and address-taken operands
   passing the STMT, the base of the operand, the operand itself containing
   the base and DATA to it.  The base will be either a decl, an indirect
   reference (including TARGET_MEM_REF) or the argument of an address
   expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple *stmt, void *data,
			       walk_stmt_load_store_addr_fn visit_load,
			       walk_stmt_load_store_addr_fn visit_store,
			       walk_stmt_load_store_addr_fn visit_addr)
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      /* Plain copy assignment: the LHS is a potential store, the RHS a
	 potential load or address-taking.  */
      tree lhs, rhs, arg;
      if (visit_store)
	{
	  arg = gimple_assign_lhs (stmt);
	  lhs = get_base_loadstore (arg);
	  if (lhs)
	    ret |= visit_store (stmt, lhs, arg, data);
	}
      arg = gimple_assign_rhs1 (stmt);
      rhs = arg;
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), arg, data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), arg, data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      /* Address-taking may also be buried in CONSTRUCTOR
		 elements.  */
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), arg, data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), arg, data);
	    }
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, arg, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      /* Non-single assignments and conditions can only take addresses,
	 not load or store.  */
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), op, data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), op, data);
	    }
	}
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      /* Call: LHS may store, arguments may load or take addresses, the
	 static chain may take an address.  */
      if (visit_store)
	{
	  tree arg = gimple_call_lhs (call_stmt);
	  if (arg)
	    {
	      tree lhs = get_base_loadstore (arg);
	      if (lhs)
		ret |= visit_store (stmt, lhs, arg, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (call_stmt); ++i)
	  {
	    tree arg = gimple_call_arg (call_stmt, i);
	    if (visit_addr
		&& TREE_CODE (arg) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (arg, 0), arg, data);
	    else if (visit_load)
	      {
		tree rhs = get_base_loadstore (arg);
		if (rhs)
		  ret |= visit_load (stmt, rhs, arg, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (call_stmt)
	  && TREE_CODE (gimple_call_chain (call_stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (call_stmt), 0),
			   gimple_call_chain (call_stmt), data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (call_stmt)
	  && gimple_call_lhs (call_stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (call_stmt),
			   gimple_call_lhs (call_stmt), data);
    }
  else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
    {
      /* Asm: constraints decide whether an operand is treated as a
	 memory (address-taken) or register operand.  */
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, TREE_VALUE (link), data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, TREE_VALUE (link), data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, TREE_VALUE (link), data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, TREE_VALUE (link),
					     data);
		      }
		  }
	      }
	  }
    }
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree op = gimple_return_retval (return_stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	  else if (visit_load)
	    {
	      tree base = get_base_loadstore (op);
	      if (base)
		ret |= visit_load (stmt, base, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      /* PHI arguments can take addresses, never load or store.  */
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = gimple_phi_arg_def (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_GOTO)
    {
      /* A computed goto takes the address of its destination label.  */
      tree op = gimple_goto_dest (stmt);
      if (TREE_CODE (op) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (op, 0), op, data);
    }

  return ret;
}
982 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
983 should make a faster clone for this case. */
985 bool
986 walk_stmt_load_store_ops (gimple *stmt, void *data,
987 walk_stmt_load_store_addr_fn visit_load,
988 walk_stmt_load_store_addr_fn visit_store)
990 return walk_stmt_load_store_addr_ops (stmt, data,
991 visit_load, visit_store, NULL);