gcc/gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tree.h"
40 #include "hash-map.h"
41 #include "hash-table.h"
42 #include "predict.h"
43 #include "vec.h"
44 #include "hashtab.h"
45 #include "hash-set.h"
46 #include "machmode.h"
47 #include "tm.h"
48 #include "hard-reg-set.h"
49 #include "input.h"
50 #include "function.h"
51 #include "dominance.h"
52 #include "cfg.h"
53 #include "basic-block.h"
54 #include "tree-ssa-alias.h"
55 #include "internal-fn.h"
56 #include "gimple-expr.h"
57 #include "is-a.h"
58 #include "gimple.h"
59 #include "gimple-iterator.h"
60 #include "gimplify-me.h"
61 #include "stor-layout.h"
62 #include "expr.h"
63 #include "tree-pass.h"
64 #include "cfgloop.h"
65 #include "gimple-pretty-print.h"
66 #include "gimple-ssa.h"
67 #include "tree-cfg.h"
68 #include "tree-phinodes.h"
69 #include "ssa-iterators.h"
70 #include "stringpool.h"
71 #include "tree-ssanames.h"
72 #include "domwalk.h"
73 #include "expmed.h"
74 #include "params.h"
75 #include "tree-ssa-address.h"
76 #include "tree-affine.h"
77 #include "wide-int-print.h"
78 #include "builtins.h"
80 /* Information about a strength reduction candidate. Each statement
81 in the candidate table represents an expression of one of the
82 following forms (the special case of CAND_REF will be described
83 later):
85 (CAND_MULT) S1: X = (B + i) * S
86 (CAND_ADD) S1: X = B + (i * S)
88 Here X and B are SSA names, i is an integer constant, and S is
89 either an SSA name or a constant. We call B the "base," i the
90 "index", and S the "stride."
92 Any statement S0 that dominates S1 and is of the form:
94 (CAND_MULT) S0: Y = (B + i') * S
95 (CAND_ADD) S0: Y = B + (i' * S)
97 is called a "basis" for S1. In both cases, S1 may be replaced by
99 S1': X = Y + (i - i') * S,
101 where (i - i') * S is folded to the extent possible.
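   As a hypothetical example, suppose S0 and S1 are

     S0: Y = (B + 2) * S
     S1: X = (B + 7) * S

   with S0 dominating S1.  Then S1 may be replaced by

     S1': X = Y + 5 * S

   trading the original multiply for an add plus a multiply by the
   small constant 5, which is often cheaper or foldable.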
103 All gimple statements are visited in dominator order, and each
104 statement that may contribute to one of the forms of S1 above is
105 given at least one entry in the candidate table. Such statements
106 include addition, pointer addition, subtraction, multiplication,
107 negation, copies, and nontrivial type casts. If a statement may
108 represent more than one expression of the forms of S1 above,
109 multiple "interpretations" are stored in the table and chained
110 together. Examples:
112 * An add of two SSA names may treat either operand as the base.
113 * A multiply of two SSA names, likewise.
114 * A copy or cast may be thought of as either a CAND_MULT with
115 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
117 Candidate records are allocated from an obstack. They are addressed
118 both from a hash table keyed on S1, and from a vector of candidate
119 pointers arranged in predominator order.
121 Opportunity note
122 ----------------
123 Currently we don't recognize:
125 S0: Y = (S * i') - B
126 S1: X = (S * i) - B
128 as a strength reduction opportunity, even though this S1 would
129 also be replaceable by the S1' above. This can be added if it
130 comes up in practice.
132 Strength reduction in addressing
133 --------------------------------
134 There is another kind of candidate known as CAND_REF. A CAND_REF
135 describes a statement containing a memory reference having
136 complex addressing that might benefit from strength reduction.
137 Specifically, we are interested in references for which
138 get_inner_reference returns a base address, offset, and bitpos as
139 follows:
141 base: MEM_REF (T1, C1)
142 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
143 bitpos: C4 * BITS_PER_UNIT
145 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
146 arbitrary integer constants. Note that C2 may be zero, in which
147 case the offset will be MULT_EXPR (T2, C3).
149 When this pattern is recognized, the original memory reference
150 can be replaced with:
152 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
153 C1 + (C2 * C3) + C4)
155 which distributes the multiply to allow constant folding. When
156 two or more addressing expressions can be represented by MEM_REFs
157 of this form, differing only in the constants C1, C2, and C4,
158 making this substitution produces more efficient addressing during
159 the RTL phases. When there are not at least two expressions with
160 the same values of T1, T2, and C3, there is nothing to be gained
161 by the replacement.
163 Strength reduction of CAND_REFs uses the same infrastructure as
164 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
165 field, MULT_EXPR (T2, C3) in the stride (S) field, and
166 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
167 is thus another CAND_REF with the same B and S values. When at
168 least two CAND_REFs are chained together using the basis relation,
169 each of them is replaced as above, resulting in improved code
170 generation for addressing.
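   As a hypothetical illustration, suppose two such references present

     base:   MEM_REF (T1, 4)
     offset: MULT_EXPR (PLUS_EXPR (T2, 2), 8)
     bitpos: 0

   and

     base:   MEM_REF (T1, 4)
     offset: MULT_EXPR (PLUS_EXPR (T2, 5), 8)
     bitpos: 32

   Both are recorded with B: T1 and S: MULT_EXPR (T2, 8), and with
   indices i of 4 + (2 * 8) + 0 = 20 and 4 + (5 * 8) + 32/8 = 48,
   respectively.  Assuming the first dominates the second, it serves
   as the second's basis, and both are rewritten as

     MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 8)), 20)
     MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 8)), 48)

   sharing the multiply and differing only in the folded constant.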
172 Conditional candidates
173 ======================
175 Conditional candidates are best illustrated with an example.
176 Consider the code sequence:
178 (1) x_0 = ...;
179 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
180 if (...)
181 (3) x_1 = x_0 + 1; ADD (B: x_0; i: 1; S: 1)
182 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0; i: 0; S: 1)
183 (5) x_3 = x_2 + 1; ADD (B: x_2; i: 1; S: 1)
184 (6) a_1 = x_3 * 5; MULT (B: x_2; i: 1; S: 5)
186 Here strength reduction is complicated by the uncertain value of x_2.
187 A legitimate transformation is:
189 (1) x_0 = ...;
190 (2) a_0 = x_0 * 5;
191 if (...)
193 (3) [x_1 = x_0 + 1;]
194 (3a) t_1 = a_0 + 5;
196 (4) [x_2 = PHI <x_0, x_1>;]
197 (4a) t_2 = PHI <a_0, t_1>;
198 (5) [x_3 = x_2 + 1;]
199 (6r) a_1 = t_2 + 5;
201 where the bracketed instructions may go dead.
203 To recognize this opportunity, we have to observe that statement (6)
204 has a "hidden basis" (2). The hidden basis is unlike a normal basis
205 in that the statement and the hidden basis have different base SSA
206 names (x_2 and x_0, respectively). The relationship is established
207 when a statement's base name (x_2) is defined by a phi statement (4),
208 each argument of which (x_0, x_1) has an identical "derived base name."
209 If the argument is defined by a candidate (as x_1 is by (3)) that is a
210 CAND_ADD having a stride of 1, the derived base name of the argument is
211 the base name of the candidate (x_0). Otherwise, the argument itself
212 is its derived base name (as is the case with argument x_0).
214 The hidden basis for statement (6) is the nearest dominating candidate
215 whose base name is the derived base name (x_0) of the feeding phi (4),
216 and whose stride is identical to that of the statement. We can then
217 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
218 allowing the final replacement of (6) by the strength-reduced (6r).
220 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
221 A CAND_PHI is not a candidate for replacement, but is maintained in the
222 candidate table to ease discovery of hidden bases. Any phi statement
223 whose arguments share a common derived base name is entered into the
224 table with the derived base name, an (arbitrary) index of zero, and a
225 stride of 1. A statement with a hidden basis can then be detected by
226 simply looking up its feeding phi definition in the candidate table,
227 extracting the derived base name, and searching for a basis in the
228 usual manner after substituting the derived base name.
230 Note that the transformation is only valid when the original phi and
231 the statements that define the phi's arguments are all at the same
232 position in the loop hierarchy. */
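/* As a hypothetical source-level sketch (not an actual test case) of
   the kind of straight-line code this pass targets, consider:

     int
     f (int x, int a[], int v)
     {
       a[x] = v;
       a[x + 1] = v;
       a[x + 2] = v;
       return x * 9 + (x + 1) * 9;
     }

   The explicit multiplies x * 9 and (x + 1) * 9 form a basis and a
   dependent: the second can be rewritten as the first plus 9.  The
   three addressing expressions &a[x], &a[x + 1], &a[x + 2] differ
   only by constant byte offsets once the implicit multiply by the
   element size (4, assuming a 4-byte int) is shared, so the latter
   two become the first plus 4 and plus 8 bytes, respectively.  */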
235 /* Index into the candidate vector, offset by 1. VECs are zero-based,
236 while cand_idx's are one-based, with zero indicating null. */
237 typedef unsigned cand_idx;
239 /* The kind of candidate. */
240 enum cand_kind
242 CAND_MULT,
243 CAND_ADD,
244 CAND_REF,
245 CAND_PHI
248 struct slsr_cand_d
250 /* The candidate statement S1. */
251 gimple cand_stmt;
253 /* The base expression B: often an SSA name, but not always. */
254 tree base_expr;
256 /* The stride S. */
257 tree stride;
259 /* The index constant i. */
260 widest_int index;
262 /* The type of the candidate. This is normally the type of base_expr,
263 but casts may have occurred when combining feeding instructions.
264 A candidate can only be a basis for candidates of the same final type.
265 (For CAND_REFs, this is the type to be used for operand 1 of the
266 replacement MEM_REF.) */
267 tree cand_type;
269 /* The kind of candidate (CAND_MULT, etc.). */
270 enum cand_kind kind;
272 /* Index of this candidate in the candidate vector. */
273 cand_idx cand_num;
275 /* Index of the next candidate record for the same statement.
276 A statement may be useful in more than one way (e.g., due to
277 commutativity). So we can have multiple "interpretations"
278 of a statement. */
279 cand_idx next_interp;
281 /* Index of the basis statement S0, if any, in the candidate vector. */
282 cand_idx basis;
284 /* First candidate for which this candidate is a basis, if one exists. */
285 cand_idx dependent;
287 /* Next candidate having the same basis as this one. */
288 cand_idx sibling;
290 /* If this is a conditional candidate, the CAND_PHI candidate
291 that defines the base SSA name B. */
292 cand_idx def_phi;
294 /* Savings that can be expected from eliminating dead code if this
295 candidate is replaced. */
296 int dead_savings;
299 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
300 typedef const struct slsr_cand_d *const_slsr_cand_t;
302 /* Pointers to candidates are chained together as part of a mapping
303 from base expressions to the candidates that use them. */
305 struct cand_chain_d
307 /* Base expression for the chain of candidates: often, but not
308 always, an SSA name. */
309 tree base_expr;
311 /* Pointer to a candidate. */
312 slsr_cand_t cand;
314 /* Chain pointer. */
315 struct cand_chain_d *next;
319 typedef struct cand_chain_d cand_chain, *cand_chain_t;
320 typedef const struct cand_chain_d *const_cand_chain_t;
322 /* Information about a unique "increment" associated with candidates
323 having an SSA name for a stride. An increment is the difference
324 between the index of the candidate and the index of its basis,
325 i.e., (i - i') as discussed in the module commentary.
327 When we are not going to generate address arithmetic we treat
328 increments that differ only in sign as the same, allowing sharing
329 of the cost of initializers. The absolute value of the increment
330 is stored in the incr_info. */
332 struct incr_info_d
334 /* The increment that relates a candidate to its basis. */
335 widest_int incr;
337 /* How many times the increment occurs in the candidate tree. */
338 unsigned count;
340 /* Cost of replacing candidates using this increment. Negative and
341 zero costs indicate replacement should be performed. */
342 int cost;
344 /* If this increment is profitable but is not -1, 0, or 1, it requires
345 an initializer T_0 = stride * incr to be found or introduced in the
346 nearest common dominator of all candidates. This field holds T_0
347 for subsequent use. */
348 tree initializer;
350 /* If the initializer was found to already exist, this is the block
351 where it was found. */
352 basic_block init_bb;
355 typedef struct incr_info_d incr_info, *incr_info_t;
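/* For example (illustrative only), if two candidates have increments
   +3 and -3 relative to their bases, the stride S is an SSA name, and
   no address arithmetic is being generated, both can share a single
   initializer T_0 = S * 3: one replacement adds T_0 and the other
   subtracts it.  Only the absolute value 3 is recorded in the
   corresponding incr_info.  */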
357 /* Candidates are maintained in a vector. If candidate X dominates
358 candidate Y, then X appears before Y in the vector; but the
359 converse does not necessarily hold. */
360 static vec<slsr_cand_t> cand_vec;
362 enum cost_consts
364 COST_NEUTRAL = 0,
365 COST_INFINITE = 1000
368 enum stride_status
370 UNKNOWN_STRIDE = 0,
371 KNOWN_STRIDE = 1
374 enum phi_adjust_status
376 NOT_PHI_ADJUST = 0,
377 PHI_ADJUST = 1
380 enum count_phis_status
382 DONT_COUNT_PHIS = 0,
383 COUNT_PHIS = 1
386 /* Pointer map embodying a mapping from statements to candidates. */
387 static hash_map<gimple, slsr_cand_t> *stmt_cand_map;
389 /* Obstack for candidates. */
390 static struct obstack cand_obstack;
392 /* Obstack for candidate chains. */
393 static struct obstack chain_obstack;
395 /* An array INCR_VEC of incr_infos is used during analysis of related
396 candidates having an SSA name for a stride. INCR_VEC_LEN describes
397 its current length. MAX_INCR_VEC_LEN is used to avoid costly
398 pathological cases. */
399 static incr_info_t incr_vec;
400 static unsigned incr_vec_len;
401 const int MAX_INCR_VEC_LEN = 16;
403 /* For a chain of candidates with unknown stride, indicates whether or not
404 we must generate pointer arithmetic when replacing statements. */
405 static bool address_arithmetic_p;
407 /* Forward function declarations. */
408 static slsr_cand_t base_cand_from_table (tree);
409 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
410 static bool legal_cast_p_1 (tree, tree);
412 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
414 static slsr_cand_t
415 lookup_cand (cand_idx idx)
417 return cand_vec[idx - 1];
420 /* Helper for hashing a candidate chain header. */
422 struct cand_chain_hasher : typed_noop_remove <cand_chain>
424 typedef cand_chain value_type;
425 typedef cand_chain compare_type;
426 static inline hashval_t hash (const value_type *);
427 static inline bool equal (const value_type *, const compare_type *);
430 inline hashval_t
431 cand_chain_hasher::hash (const value_type *p)
433 tree base_expr = p->base_expr;
434 return iterative_hash_expr (base_expr, 0);
437 inline bool
438 cand_chain_hasher::equal (const value_type *chain1, const compare_type *chain2)
440 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
443 /* Hash table embodying a mapping from base exprs to chains of candidates. */
444 static hash_table<cand_chain_hasher> *base_cand_map;
446 /* Pointer map used by tree_to_aff_combination_expand. */
447 static hash_map<tree, name_expansion *> *name_expansions;
448 /* Pointer map embodying a mapping from bases to alternative bases. */
449 static hash_map<tree, tree> *alt_base_map;
451 /* Given BASE, use the tree affine combination facilities to
452 find the underlying tree expression for BASE, with any
453 immediate offset excluded.
455 N.B. we should eliminate this backtracking with better forward
456 analysis in a future release. */
458 static tree
459 get_alternative_base (tree base)
461 tree *result = alt_base_map->get (base);
463 if (result == NULL)
465 tree expr;
466 aff_tree aff;
468 tree_to_aff_combination_expand (base, TREE_TYPE (base),
469 &aff, &name_expansions);
470 aff.offset = 0;
471 expr = aff_combination_to_tree (&aff);
473 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
475 return expr == base ? NULL : expr;
478 return *result;
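/* Illustrative example for get_alternative_base (hypothetical): for a
   base whose expanded affine form is, say, a + i * 80 + 1600 (as can
   arise for an access like a2[i + 20][j] with 20-element rows of
   4-byte elements), the alternative base is the same combination with
   the immediate offset 1600 dropped, i.e. a + i * 80.  Two CAND_REFs
   whose bases differ only in such an immediate offset therefore map
   to the same alternative base and can be related by a basis.  */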
481 /* Look in the candidate table for a CAND_PHI that defines BASE and
482 return it if found; otherwise return NULL. */
484 static cand_idx
485 find_phi_def (tree base)
487 slsr_cand_t c;
489 if (TREE_CODE (base) != SSA_NAME)
490 return 0;
492 c = base_cand_from_table (base);
494 if (!c || c->kind != CAND_PHI)
495 return 0;
497 return c->cand_num;
500 /* Helper routine for find_basis_for_candidate. May be called twice:
501 once for the candidate's base expr, and optionally again either for
502 the candidate's phi definition or for a CAND_REF's alternative base
503 expression. */
505 static slsr_cand_t
506 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
508 cand_chain mapping_key;
509 cand_chain_t chain;
510 slsr_cand_t basis = NULL;
512 // Limit potential of N^2 behavior for long candidate chains.
513 int iters = 0;
514 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
516 mapping_key.base_expr = base_expr;
517 chain = base_cand_map->find (&mapping_key);
519 for (; chain && iters < max_iters; chain = chain->next, ++iters)
521 slsr_cand_t one_basis = chain->cand;
523 if (one_basis->kind != c->kind
524 || one_basis->cand_stmt == c->cand_stmt
525 || !operand_equal_p (one_basis->stride, c->stride, 0)
526 || !types_compatible_p (one_basis->cand_type, c->cand_type)
527 || !dominated_by_p (CDI_DOMINATORS,
528 gimple_bb (c->cand_stmt),
529 gimple_bb (one_basis->cand_stmt)))
530 continue;
532 if (!basis || basis->cand_num < one_basis->cand_num)
533 basis = one_basis;
536 return basis;
539 /* Use the base expr from candidate C to look for possible candidates
540 that can serve as a basis for C. Each potential basis must also
541 appear in a block that dominates the candidate statement and have
542 the same stride and type. If more than one possible basis exists,
543 the one with highest index in the vector is chosen; this will be
544 the most immediately dominating basis. */
546 static int
547 find_basis_for_candidate (slsr_cand_t c)
549 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
551 /* If a candidate doesn't have a basis using its base expression,
552 it may have a basis hidden by one or more intervening phis. */
553 if (!basis && c->def_phi)
555 basic_block basis_bb, phi_bb;
556 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
557 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
559 if (basis)
561 /* A hidden basis must dominate the phi-definition of the
562 candidate's base name. */
563 phi_bb = gimple_bb (phi_cand->cand_stmt);
564 basis_bb = gimple_bb (basis->cand_stmt);
566 if (phi_bb == basis_bb
567 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
569 basis = NULL;
570 c->basis = 0;
573 /* If we found a hidden basis, estimate additional dead-code
574 savings if the phi and its feeding statements can be removed. */
575 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
576 c->dead_savings += phi_cand->dead_savings;
580 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
582 tree alt_base_expr = get_alternative_base (c->base_expr);
583 if (alt_base_expr)
584 basis = find_basis_for_base_expr (c, alt_base_expr);
587 if (basis)
589 c->sibling = basis->dependent;
590 basis->dependent = c->cand_num;
591 return basis->cand_num;
594 return 0;
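/* Illustrative example for find_basis_for_candidate (hypothetical):
   if candidates 4 and 9 are both CAND_MULTs with base x_1, stride 4,
   and a type compatible with candidate 12, and both appear in blocks
   dominating candidate 12's block, then candidate 9 is chosen as the
   basis for 12: the higher candidate number corresponds to the more
   immediately dominating statement.  Candidate 12 is then linked onto
   9's chain of dependents.  */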
597 /* Record a mapping from BASE to C, indicating that C may potentially serve
598 as a basis using that base expression. BASE may be the same as
599 C->BASE_EXPR; alternatively, BASE can be a different tree that shares the
600 underlying expression of C->BASE_EXPR. */
602 static void
603 record_potential_basis (slsr_cand_t c, tree base)
605 cand_chain_t node;
606 cand_chain **slot;
608 gcc_assert (base);
610 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
611 node->base_expr = base;
612 node->cand = c;
613 node->next = NULL;
614 slot = base_cand_map->find_slot (node, INSERT);
616 if (*slot)
618 cand_chain_t head = (cand_chain_t) (*slot);
619 node->next = head->next;
620 head->next = node;
622 else
623 *slot = node;
626 /* Allocate storage for a new candidate and initialize its fields.
627 Attempt to find a basis for the candidate.
629 For CAND_REF, an alternative base may also be recorded and used
630 to find a basis. This helps cases where the expression hidden
631 behind BASE (which is usually an SSA_NAME) has an immediate offset,
632 e.g.
634 a2[i][j] = 1;
635 a2[i + 20][j] = 2; */
637 static slsr_cand_t
638 alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
639 const widest_int &index, tree stride, tree ctype,
640 unsigned savings)
642 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
643 sizeof (slsr_cand));
644 c->cand_stmt = gs;
645 c->base_expr = base;
646 c->stride = stride;
647 c->index = index;
648 c->cand_type = ctype;
649 c->kind = kind;
650 c->cand_num = cand_vec.length () + 1;
651 c->next_interp = 0;
652 c->dependent = 0;
653 c->sibling = 0;
654 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
655 c->dead_savings = savings;
657 cand_vec.safe_push (c);
659 if (kind == CAND_PHI)
660 c->basis = 0;
661 else
662 c->basis = find_basis_for_candidate (c);
664 record_potential_basis (c, base);
665 if (flag_expensive_optimizations && kind == CAND_REF)
667 tree alt_base = get_alternative_base (base);
668 if (alt_base)
669 record_potential_basis (c, alt_base);
672 return c;
675 /* Determine the target cost of statement GS when compiling according
676 to SPEED. */
678 static int
679 stmt_cost (gimple gs, bool speed)
681 tree lhs, rhs1, rhs2;
682 machine_mode lhs_mode;
684 gcc_assert (is_gimple_assign (gs));
685 lhs = gimple_assign_lhs (gs);
686 rhs1 = gimple_assign_rhs1 (gs);
687 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
689 switch (gimple_assign_rhs_code (gs))
691 case MULT_EXPR:
692 rhs2 = gimple_assign_rhs2 (gs);
694 if (tree_fits_shwi_p (rhs2))
695 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
697 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
698 return mul_cost (speed, lhs_mode);
700 case PLUS_EXPR:
701 case POINTER_PLUS_EXPR:
702 case MINUS_EXPR:
703 return add_cost (speed, lhs_mode);
705 case NEGATE_EXPR:
706 return neg_cost (speed, lhs_mode);
708 CASE_CONVERT:
709 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
711 /* Note that we don't assign costs to copies that in most cases
712 will go away. */
713 default:
717 gcc_unreachable ();
718 return 0;
721 /* Look up the defining statement for BASE_IN and return a pointer
722 to its candidate in the candidate table, if any; otherwise NULL.
723 Only CAND_ADD and CAND_MULT candidates are returned. */
725 static slsr_cand_t
726 base_cand_from_table (tree base_in)
728 slsr_cand_t *result;
730 gimple def = SSA_NAME_DEF_STMT (base_in);
731 if (!def)
732 return (slsr_cand_t) NULL;
734 result = stmt_cand_map->get (def);
736 if (result && (*result)->kind != CAND_REF)
737 return *result;
739 return (slsr_cand_t) NULL;
742 /* Add an entry to the statement-to-candidate mapping. */
744 static void
745 add_cand_for_stmt (gimple gs, slsr_cand_t c)
747 gcc_assert (!stmt_cand_map->put (gs, c));
750 /* Given PHI which contains a phi statement, determine whether it
751 satisfies all the requirements of a phi candidate. If so, create
752 a candidate. Note that a CAND_PHI never has a basis itself, but
753 is used to help find a basis for subsequent candidates. */
755 static void
756 slsr_process_phi (gimple phi, bool speed)
758 unsigned i;
759 tree arg0_base = NULL_TREE, base_type;
760 slsr_cand_t c;
761 struct loop *cand_loop = gimple_bb (phi)->loop_father;
762 unsigned savings = 0;
764 /* A CAND_PHI requires each of its arguments to have the same
765 derived base name. (See the module header commentary for a
766 definition of derived base names.) Furthermore, all feeding
767 definitions must be in the same position in the loop hierarchy
768 as PHI. */
770 for (i = 0; i < gimple_phi_num_args (phi); i++)
772 slsr_cand_t arg_cand;
773 tree arg = gimple_phi_arg_def (phi, i);
774 tree derived_base_name = NULL_TREE;
775 gimple arg_stmt = NULL;
776 basic_block arg_bb = NULL;
778 if (TREE_CODE (arg) != SSA_NAME)
779 return;
781 arg_cand = base_cand_from_table (arg);
783 if (arg_cand)
785 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
787 if (!arg_cand->next_interp)
788 return;
790 arg_cand = lookup_cand (arg_cand->next_interp);
793 if (!integer_onep (arg_cand->stride))
794 return;
796 derived_base_name = arg_cand->base_expr;
797 arg_stmt = arg_cand->cand_stmt;
798 arg_bb = gimple_bb (arg_stmt);
800 /* Gather potential dead code savings if the phi statement
801 can be removed later on. */
802 if (has_single_use (arg))
804 if (gimple_code (arg_stmt) == GIMPLE_PHI)
805 savings += arg_cand->dead_savings;
806 else
807 savings += stmt_cost (arg_stmt, speed);
810 else
812 derived_base_name = arg;
814 if (SSA_NAME_IS_DEFAULT_DEF (arg))
815 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
816 else
817 arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
820 if (!arg_bb || arg_bb->loop_father != cand_loop)
821 return;
823 if (i == 0)
824 arg0_base = derived_base_name;
825 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
826 return;
829 /* Create the candidate. "alloc_cand_and_find_basis" is named
830 misleadingly for this case, as no basis will be sought for a
831 CAND_PHI. */
832 base_type = TREE_TYPE (arg0_base);
834 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
835 0, integer_one_node, base_type, savings);
837 /* Add the candidate to the statement-candidate mapping. */
838 add_cand_for_stmt (phi, c);
841 /* Given PBASE which is a pointer to tree, look up the defining
842 statement for it and check whether the candidate is in the
843 form of:
845 X = B + (1 * S), S is integer constant
846 X = B + (i * S), S is integer one
848 If so, set PBASE to the candidate's base_expr and return the
849 widest_int value (i * S).
850 Otherwise, just return a widest_int of zero. */
852 static widest_int
853 backtrace_base_for_ref (tree *pbase)
855 tree base_in = *pbase;
856 slsr_cand_t base_cand;
858 STRIP_NOPS (base_in);
860 /* Strip off widening conversion(s) to handle cases where
861 e.g. 'B' is widened from an 'int' in order to calculate
862 a 64-bit address. */
863 if (CONVERT_EXPR_P (base_in)
864 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
865 base_in = get_unwidened (base_in, NULL_TREE);
867 if (TREE_CODE (base_in) != SSA_NAME)
868 return 0;
870 base_cand = base_cand_from_table (base_in);
872 while (base_cand && base_cand->kind != CAND_PHI)
874 if (base_cand->kind == CAND_ADD
875 && base_cand->index == 1
876 && TREE_CODE (base_cand->stride) == INTEGER_CST)
878 /* X = B + (1 * S), S is integer constant. */
879 *pbase = base_cand->base_expr;
880 return wi::to_widest (base_cand->stride);
882 else if (base_cand->kind == CAND_ADD
883 && TREE_CODE (base_cand->stride) == INTEGER_CST
884 && integer_onep (base_cand->stride))
886 /* X = B + (i * S), S is integer one. */
887 *pbase = base_cand->base_expr;
888 return base_cand->index;
891 if (base_cand->next_interp)
892 base_cand = lookup_cand (base_cand->next_interp);
893 else
894 base_cand = NULL;
897 return 0;
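/* Illustrative example for backtrace_base_for_ref (hypothetical): if
   *PBASE is an SSA name T2 defined by a candidate recorded as
   T2 = B + (1 * 12), i.e. a CAND_ADD with index 1 and constant stride
   12, then *PBASE is replaced by B and 12 is returned, allowing
   restructure_reference to fold 12 * C3 into the constant index of
   the resulting CAND_REF.  */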
900 /* Look for the following pattern:
902 *PBASE: MEM_REF (T1, C1)
904 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
906 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
908 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
910 *PINDEX: C4 * BITS_PER_UNIT
912 If not present, leave the input values unchanged and return FALSE.
913 Otherwise, modify the input values as follows and return TRUE:
915 *PBASE: T1
916 *POFFSET: MULT_EXPR (T2, C3)
917 *PINDEX: C1 + (C2 * C3) + C4
919 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
920 will be further restructured to:
922 *PBASE: T1
923 *POFFSET: MULT_EXPR (T2', C3)
924 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
926 static bool
927 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
928 tree *ptype)
930 tree base = *pbase, offset = *poffset;
931 widest_int index = *pindex;
932 tree mult_op0, t1, t2, type;
933 widest_int c1, c2, c3, c4, c5;
935 if (!base
936 || !offset
937 || TREE_CODE (base) != MEM_REF
938 || TREE_CODE (offset) != MULT_EXPR
939 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
940 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
941 return false;
943 t1 = TREE_OPERAND (base, 0);
944 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
945 type = TREE_TYPE (TREE_OPERAND (base, 1));
947 mult_op0 = TREE_OPERAND (offset, 0);
948 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
950 if (TREE_CODE (mult_op0) == PLUS_EXPR)
952 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
954 t2 = TREE_OPERAND (mult_op0, 0);
955 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
957 else
958 return false;
960 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
962 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
964 t2 = TREE_OPERAND (mult_op0, 0);
965 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
967 else
968 return false;
970 else
972 t2 = mult_op0;
973 c2 = 0;
976 c4 = wi::lrshift (index, LOG2_BITS_PER_UNIT);
977 c5 = backtrace_base_for_ref (&t2);
979 *pbase = t1;
980 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
981 wide_int_to_tree (sizetype, c3));
982 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
983 *ptype = type;
985 return true;
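/* Worked example for restructure_reference (hypothetical values):
   given *PBASE = MEM_REF (T1, 40), *POFFSET = MULT_EXPR
   (MINUS_EXPR (T2, 2), 8), and *PINDEX = 0, we have C1 = 40, C2 = -2,
   C3 = 8, and C4 = 0.  Assuming backtrace_base_for_ref finds nothing
   (C5 = 0), the outputs are *PBASE = T1, *POFFSET = MULT_EXPR (T2, 8)
   with T2 converted to sizetype, and *PINDEX = 40 + (-2 * 8) + 0 + 0
   = 24.  */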
988 /* Given GS which contains a data reference, create a CAND_REF entry in
989 the candidate table and attempt to find a basis. */
991 static void
992 slsr_process_ref (gimple gs)
994 tree ref_expr, base, offset, type;
995 HOST_WIDE_INT bitsize, bitpos;
996 machine_mode mode;
997 int unsignedp, volatilep;
998 slsr_cand_t c;
1000 if (gimple_vdef (gs))
1001 ref_expr = gimple_assign_lhs (gs);
1002 else
1003 ref_expr = gimple_assign_rhs1 (gs);
1005 if (!handled_component_p (ref_expr)
1006 || TREE_CODE (ref_expr) == BIT_FIELD_REF
1007 || (TREE_CODE (ref_expr) == COMPONENT_REF
1008 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1009 return;
1011 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1012 &unsignedp, &volatilep, false);
1013 widest_int index = bitpos;
1015 if (!restructure_reference (&base, &offset, &index, &type))
1016 return;
1018 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1019 type, 0);
1021 /* Add the candidate to the statement-candidate mapping. */
1022 add_cand_for_stmt (gs, c);
1025 /* Create a candidate entry for a statement GS, where GS multiplies
1026 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1027 about the two SSA names into the new candidate. Return the new
1028 candidate. */
1030 static slsr_cand_t
1031 create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1033 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1034 widest_int index;
1035 unsigned savings = 0;
1036 slsr_cand_t c;
1037 slsr_cand_t base_cand = base_cand_from_table (base_in);
1039 /* Look at all interpretations of the base candidate, if necessary,
1040 to find information to propagate into this candidate. */
1041 while (base_cand && !base && base_cand->kind != CAND_PHI)
1044 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1046 /* Y = (B + i') * 1
1047 X = Y * Z
1048 ================
1049 X = (B + i') * Z */
1050 base = base_cand->base_expr;
1051 index = base_cand->index;
1052 stride = stride_in;
1053 ctype = base_cand->cand_type;
1054 if (has_single_use (base_in))
1055 savings = (base_cand->dead_savings
1056 + stmt_cost (base_cand->cand_stmt, speed));
1058 else if (base_cand->kind == CAND_ADD
1059 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1061 /* Y = B + (i' * S), S constant
1062 X = Y * Z
1063 ============================
1064 X = B + ((i' * S) * Z) */
1065 base = base_cand->base_expr;
1066 index = base_cand->index * wi::to_widest (base_cand->stride);
1067 stride = stride_in;
1068 ctype = base_cand->cand_type;
1069 if (has_single_use (base_in))
1070 savings = (base_cand->dead_savings
1071 + stmt_cost (base_cand->cand_stmt, speed));
1074 if (base_cand->next_interp)
1075 base_cand = lookup_cand (base_cand->next_interp);
1076 else
1077 base_cand = NULL;
1080 if (!base)
1082 /* No interpretations had anything useful to propagate, so
1083 produce X = (Y + 0) * Z. */
1084 base = base_in;
1085 index = 0;
1086 stride = stride_in;
1087 ctype = TREE_TYPE (base_in);
1090 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1091 ctype, savings);
1092 return c;
1095 /* Create a candidate entry for a statement GS, where GS multiplies
1096 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1097 information about BASE_IN into the new candidate. Return the new
1098 candidate. */
1100 static slsr_cand_t
1101 create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1103 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1104 widest_int index, temp;
1105 unsigned savings = 0;
1106 slsr_cand_t c;
1107 slsr_cand_t base_cand = base_cand_from_table (base_in);
1109 /* Look at all interpretations of the base candidate, if necessary,
1110 to find information to propagate into this candidate. */
1111 while (base_cand && !base && base_cand->kind != CAND_PHI)
1113 if (base_cand->kind == CAND_MULT
1114 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1116 /* Y = (B + i') * S, S constant
1117 X = Y * c
1118 ============================
1119 X = (B + i') * (S * c) */
1120 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1121 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1123 base = base_cand->base_expr;
1124 index = base_cand->index;
1125 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1126 ctype = base_cand->cand_type;
1127 if (has_single_use (base_in))
1128 savings = (base_cand->dead_savings
1129 + stmt_cost (base_cand->cand_stmt, speed));
1132 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1134 /* Y = B + (i' * 1)
1135 X = Y * c
1136 ===========================
1137 X = (B + i') * c */
1138 base = base_cand->base_expr;
1139 index = base_cand->index;
1140 stride = stride_in;
1141 ctype = base_cand->cand_type;
1142 if (has_single_use (base_in))
1143 savings = (base_cand->dead_savings
1144 + stmt_cost (base_cand->cand_stmt, speed));
1146 else if (base_cand->kind == CAND_ADD
1147 && base_cand->index == 1
1148 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1150 /* Y = B + (1 * S), S constant
1151 X = Y * c
1152 ===========================
1153 X = (B + S) * c */
1154 base = base_cand->base_expr;
1155 index = wi::to_widest (base_cand->stride);
1156 stride = stride_in;
1157 ctype = base_cand->cand_type;
1158 if (has_single_use (base_in))
1159 savings = (base_cand->dead_savings
1160 + stmt_cost (base_cand->cand_stmt, speed));
1163 if (base_cand->next_interp)
1164 base_cand = lookup_cand (base_cand->next_interp);
1165 else
1166 base_cand = NULL;
1169 if (!base)
1171 /* No interpretations had anything useful to propagate, so
1172 produce X = (Y + 0) * c. */
1173 base = base_in;
1174 index = 0;
1175 stride = stride_in;
1176 ctype = TREE_TYPE (base_in);
1179 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1180 ctype, savings);
1181 return c;
1184 /* Given GS which is a multiply of scalar integers, make an appropriate
1185 entry in the candidate table. If this is a multiply of two SSA names,
1186 create two CAND_MULT interpretations and attempt to find a basis for
1187 each of them. Otherwise, create a single CAND_MULT and attempt to
1188 find a basis. */
1190 static void
1191 slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
1193 slsr_cand_t c, c2;
1195 /* If this is a multiply of an SSA name with itself, it is highly
1196 unlikely that we will get a strength reduction opportunity, so
1197 don't record it as a candidate. This simplifies the logic for
1198 finding a basis, so if this is removed that must be considered. */
1199 if (rhs1 == rhs2)
1200 return;
1202 if (TREE_CODE (rhs2) == SSA_NAME)
1204 /* Record an interpretation of this statement in the candidate table
1205 assuming RHS1 is the base expression and RHS2 is the stride. */
1206 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1208 /* Add the first interpretation to the statement-candidate mapping. */
1209 add_cand_for_stmt (gs, c);
1211 /* Record another interpretation of this statement assuming RHS1
1212 is the stride and RHS2 is the base expression. */
1213 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1214 c->next_interp = c2->cand_num;
1216 else
1218 /* Record an interpretation for the multiply-immediate. */
1219 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1221 /* Add the interpretation to the statement-candidate mapping. */
1222 add_cand_for_stmt (gs, c);
1226 /* Create a candidate entry for a statement GS, where GS adds two
1227 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1228 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1229 information about the two SSA names into the new candidate.
1230 Return the new candidate. */
1232 static slsr_cand_t
1233 create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
1234 bool subtract_p, bool speed)
1236 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1237 widest_int index;
1238 unsigned savings = 0;
1239 slsr_cand_t c;
1240 slsr_cand_t base_cand = base_cand_from_table (base_in);
1241 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1243 /* The most useful transformation is a multiply-immediate feeding
1244 an add or subtract. Look for that first. */
1245 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1247 if (addend_cand->kind == CAND_MULT
1248 && addend_cand->index == 0
1249 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1251 /* Z = (B + 0) * S, S constant
1252 X = Y +/- Z
1253 ===========================
1254 X = Y + ((+/-1 * S) * B) */
1255 base = base_in;
1256 index = wi::to_widest (addend_cand->stride);
1257 if (subtract_p)
1258 index = -index;
1259 stride = addend_cand->base_expr;
1260 ctype = TREE_TYPE (base_in);
1261 if (has_single_use (addend_in))
1262 savings = (addend_cand->dead_savings
1263 + stmt_cost (addend_cand->cand_stmt, speed));
1266 if (addend_cand->next_interp)
1267 addend_cand = lookup_cand (addend_cand->next_interp);
1268 else
1269 addend_cand = NULL;
1272 while (base_cand && !base && base_cand->kind != CAND_PHI)
1274 if (base_cand->kind == CAND_ADD
1275 && (base_cand->index == 0
1276 || operand_equal_p (base_cand->stride,
1277 integer_zero_node, 0)))
1279 /* Y = B + (i' * S), i' * S = 0
1280 X = Y +/- Z
1281 ============================
1282 X = B + (+/-1 * Z) */
1283 base = base_cand->base_expr;
1284 index = subtract_p ? -1 : 1;
1285 stride = addend_in;
1286 ctype = base_cand->cand_type;
1287 if (has_single_use (base_in))
1288 savings = (base_cand->dead_savings
1289 + stmt_cost (base_cand->cand_stmt, speed));
1291 else if (subtract_p)
1293 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1295 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1297 if (subtrahend_cand->kind == CAND_MULT
1298 && subtrahend_cand->index == 0
1299 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1301 /* Z = (B + 0) * S, S constant
1302 X = Y - Z
1303 ===========================
1304 Value: X = Y + ((-1 * S) * B) */
1305 base = base_in;
1306 index = wi::to_widest (subtrahend_cand->stride);
1307 index = -index;
1308 stride = subtrahend_cand->base_expr;
1309 ctype = TREE_TYPE (base_in);
1310 if (has_single_use (addend_in))
1311 savings = (subtrahend_cand->dead_savings
1312 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1315 if (subtrahend_cand->next_interp)
1316 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1317 else
1318 subtrahend_cand = NULL;
1322 if (base_cand->next_interp)
1323 base_cand = lookup_cand (base_cand->next_interp);
1324 else
1325 base_cand = NULL;
1328 if (!base)
1330 /* No interpretations had anything useful to propagate, so
1331 produce X = Y + (1 * Z). */
1332 base = base_in;
1333 index = subtract_p ? -1 : 1;
1334 stride = addend_in;
1335 ctype = TREE_TYPE (base_in);
1338 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1339 ctype, savings);
1340 return c;
1343 /* Create a candidate entry for a statement GS, where GS adds SSA
1344 name BASE_IN to constant INDEX_IN. Propagate any known information
1345 about BASE_IN into the new candidate. Return the new candidate. */
1347 static slsr_cand_t
1348 create_add_imm_cand (gimple gs, tree base_in, const widest_int &index_in,
1349 bool speed)
1351 enum cand_kind kind = CAND_ADD;
1352 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1353 widest_int index, multiple;
1354 unsigned savings = 0;
1355 slsr_cand_t c;
1356 slsr_cand_t base_cand = base_cand_from_table (base_in);
1358 while (base_cand && !base && base_cand->kind != CAND_PHI)
1360 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1362 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1363 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1364 sign, &multiple))
1366 /* Y = (B + i') * S, S constant, c = kS for some integer k
1367 X = Y + c
1368 ============================
1369 X = (B + (i'+ k)) * S
1371 Y = B + (i' * S), S constant, c = kS for some integer k
1372 X = Y + c
1373 ============================
1374 X = (B + (i'+ k)) * S */
1375 kind = base_cand->kind;
1376 base = base_cand->base_expr;
1377 index = base_cand->index + multiple;
1378 stride = base_cand->stride;
1379 ctype = base_cand->cand_type;
1380 if (has_single_use (base_in))
1381 savings = (base_cand->dead_savings
1382 + stmt_cost (base_cand->cand_stmt, speed));
1385 if (base_cand->next_interp)
1386 base_cand = lookup_cand (base_cand->next_interp);
1387 else
1388 base_cand = NULL;
1391 if (!base)
1393 /* No interpretations had anything useful to propagate, so
1394 produce X = Y + (c * 1). */
1395 kind = CAND_ADD;
1396 base = base_in;
1397 index = index_in;
1398 stride = integer_one_node;
1399 ctype = TREE_TYPE (base_in);
1402 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1403 ctype, savings);
1404 return c;
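/* Worked example for create_add_imm_cand (hypothetical): if BASE_IN
   is defined by a CAND_MULT recorded as Y = (B + 3) * 4 and GS is
   X = Y + 8, then 8 is a multiple of the stride 4 (k = 2), so X is
   recorded as the CAND_MULT (B + 5) * 4; the constant addend is
   folded into the index.  */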
1407 /* Given GS which is an add or subtract of scalar integers or pointers,
1408 make at least one appropriate entry in the candidate table. */
1410 static void
1411 slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
1413 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1414 slsr_cand_t c = NULL, c2;
1416 if (TREE_CODE (rhs2) == SSA_NAME)
1418 /* First record an interpretation assuming RHS1 is the base expression
1419 and RHS2 is the stride. But it doesn't make sense for the
1420 stride to be a pointer, so don't record a candidate in that case. */
1421 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1423 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1425 /* Add the first interpretation to the statement-candidate
1426 mapping. */
1427 add_cand_for_stmt (gs, c);
1430 /* If the two RHS operands are identical, or this is a subtract,
1431 we're done. */
1432 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1433 return;
1435 /* Otherwise, record another interpretation assuming RHS2 is the
1436 base expression and RHS1 is the stride, again provided that the
1437 stride is not a pointer. */
1438 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1440 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1441 if (c)
1442 c->next_interp = c2->cand_num;
1443 else
1444 add_cand_for_stmt (gs, c2);
1447 else
1449 /* Record an interpretation for the add-immediate. */
1450 widest_int index = wi::to_widest (rhs2);
1451 if (subtract_p)
1452 index = -index;
1454 c = create_add_imm_cand (gs, rhs1, index, speed);
1456 /* Add the interpretation to the statement-candidate mapping. */
1457 add_cand_for_stmt (gs, c);
1461 /* Given GS which is a negate of a scalar integer, make an appropriate
1462 entry in the candidate table. A negate is equivalent to a multiply
1463 by -1. */
1465 static void
1466 slsr_process_neg (gimple gs, tree rhs1, bool speed)
1468 /* Record a CAND_MULT interpretation for the multiply by -1. */
1469 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1471 /* Add the interpretation to the statement-candidate mapping. */
1472 add_cand_for_stmt (gs, c);
1475 /* Helper function for legal_cast_p, operating on two trees. Checks
1476 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1477 for more details. */
1479 static bool
1480 legal_cast_p_1 (tree lhs, tree rhs)
1482 tree lhs_type, rhs_type;
1483 unsigned lhs_size, rhs_size;
1484 bool lhs_wraps, rhs_wraps;
1486 lhs_type = TREE_TYPE (lhs);
1487 rhs_type = TREE_TYPE (rhs);
1488 lhs_size = TYPE_PRECISION (lhs_type);
1489 rhs_size = TYPE_PRECISION (rhs_type);
1490 lhs_wraps = TYPE_OVERFLOW_WRAPS (lhs_type);
1491 rhs_wraps = TYPE_OVERFLOW_WRAPS (rhs_type);
1493 if (lhs_size < rhs_size
1494 || (rhs_wraps && !lhs_wraps)
1495 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1496 return false;
1498 return true;
1501 /* Return TRUE if GS is a statement that defines an SSA name from
1502 a conversion and is legal for us to combine with an add and multiply
1503 in the candidate table. For example, suppose we have:
1505 A = B + i;
1506 C = (type) A;
1507 D = C * S;
1509 Without the type-cast, we would create a CAND_MULT for D with base B,
1510 index i, and stride S. We want to record this candidate only if it
1511 is equivalent to applying the type cast following the multiply:
1513 A = B + i;
1514 E = A * S;
1515 D = (type) E;
1517 We will record the type with the candidate for D. This allows us
1518 to use a similar previous candidate as a basis. If we have earlier seen
1520 A' = B + i';
1521 C' = (type) A';
1522 D' = C' * S;
1524 we can replace D with
1526 D = D' + (i - i') * S;
1528 But if moving the type-cast would change semantics, we mustn't do this.
1530 This is legitimate for casts from a non-wrapping integral type to
1531 any integral type of the same or larger size. It is not legitimate
1532 to convert a wrapping type to a non-wrapping type, or to a wrapping
1533 type of a different size. I.e., with a wrapping type, we must
1534 assume that the addition B + i could wrap, in which case performing
1535 the multiply before or after one of the "illegal" type casts will
1536 have different semantics. */
1538 static bool
1539 legal_cast_p (gimple gs, tree rhs)
1541 if (!is_gimple_assign (gs)
1542 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1543 return false;
1545 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
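/* Illustrative example for legal_cast_p (hypothetical, assuming a
   32-bit unsigned int and a 64-bit unsigned long):

     A = B + i;               <-- computed in unsigned int
     C = (unsigned long) A;
     D = C * S;

   The cast is rejected because unsigned int wraps and differs in size
   from unsigned long: if B + i wraps (say B is 0xffffffff and i is 1,
   so A is 0), then (unsigned long) A * S generally differs from
   (B + i) * S computed in the wider type.  By contrast, a cast from
   signed int (non-wrapping by default) to a wider integer type is
   accepted.  */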
1548 /* Given GS which is a cast to a scalar integer type, determine whether
1549 the cast is legal for strength reduction. If so, make at least one
1550 appropriate entry in the candidate table. */
1552 static void
1553 slsr_process_cast (gimple gs, tree rhs1, bool speed)
1555 tree lhs, ctype;
1556 slsr_cand_t base_cand, c, c2;
1557 unsigned savings = 0;
1559 if (!legal_cast_p (gs, rhs1))
1560 return;
1562 lhs = gimple_assign_lhs (gs);
1563 base_cand = base_cand_from_table (rhs1);
1564 ctype = TREE_TYPE (lhs);
1566 if (base_cand && base_cand->kind != CAND_PHI)
1568 while (base_cand)
1570 /* Propagate all data from the base candidate except the type,
1571 which comes from the cast, and the base candidate's cast,
1572 which is no longer applicable. */
1573 if (has_single_use (rhs1))
1574 savings = (base_cand->dead_savings
1575 + stmt_cost (base_cand->cand_stmt, speed));
1577 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1578 base_cand->base_expr,
1579 base_cand->index, base_cand->stride,
1580 ctype, savings);
1581 if (base_cand->next_interp)
1582 base_cand = lookup_cand (base_cand->next_interp);
1583 else
1584 base_cand = NULL;
1587 else
1589 /* If nothing is known about the RHS, create fresh CAND_ADD and
1590 CAND_MULT interpretations:
1592 X = Y + (0 * 1)
1593 X = (Y + 0) * 1
1595 The first of these is somewhat arbitrary, but the choice of
1596 1 for the stride simplifies the logic for propagating casts
1597 into their uses. */
1598 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1599 0, integer_one_node, ctype, 0);
1600 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1601 0, integer_one_node, ctype, 0);
1602 c->next_interp = c2->cand_num;
1605 /* Add the first (or only) interpretation to the statement-candidate
1606 mapping. */
1607 add_cand_for_stmt (gs, c);
1610 /* Given GS which is a copy of a scalar integer type, make at least one
1611 appropriate entry in the candidate table.
1613 This interface is included for completeness, but is unnecessary
1614 if this pass immediately follows a pass that performs copy
1615 propagation, such as DOM. */
1617 static void
1618 slsr_process_copy (gimple gs, tree rhs1, bool speed)
1620 slsr_cand_t base_cand, c, c2;
1621 unsigned savings = 0;
1623 base_cand = base_cand_from_table (rhs1);
1625 if (base_cand && base_cand->kind != CAND_PHI)
1627 while (base_cand)
1629 /* Propagate all data from the base candidate. */
1630 if (has_single_use (rhs1))
1631 savings = (base_cand->dead_savings
1632 + stmt_cost (base_cand->cand_stmt, speed));
1634 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1635 base_cand->base_expr,
1636 base_cand->index, base_cand->stride,
1637 base_cand->cand_type, savings);
1638 if (base_cand->next_interp)
1639 base_cand = lookup_cand (base_cand->next_interp);
1640 else
1641 base_cand = NULL;
1644 else
1646 /* If nothing is known about the RHS, create fresh CAND_ADD and
1647 CAND_MULT interpretations:
1649 X = Y + (0 * 1)
1650 X = (Y + 0) * 1
1652 The first of these is somewhat arbitrary, but the choice of
1653 1 for the stride simplifies the logic for propagating casts
1654 into their uses. */
1655 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1656 0, integer_one_node, TREE_TYPE (rhs1), 0);
1657 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1658 0, integer_one_node, TREE_TYPE (rhs1), 0);
1659 c->next_interp = c2->cand_num;
1662 /* Add the first (or only) interpretation to the statement-candidate
1663 mapping. */
1664 add_cand_for_stmt (gs, c);
1667 class find_candidates_dom_walker : public dom_walker
1669 public:
1670 find_candidates_dom_walker (cdi_direction direction)
1671 : dom_walker (direction) {}
1672 virtual void before_dom_children (basic_block);
1675 /* Find strength-reduction candidates in block BB. */
1677 void
1678 find_candidates_dom_walker::before_dom_children (basic_block bb)
1680 bool speed = optimize_bb_for_speed_p (bb);
1681 gimple_stmt_iterator gsi;
1683 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1684 slsr_process_phi (gsi_stmt (gsi), speed);
1686 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1688 gimple gs = gsi_stmt (gsi);
1690 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1691 slsr_process_ref (gs);
1693 else if (is_gimple_assign (gs)
1694 && SCALAR_INT_MODE_P
1695 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1697 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1699 switch (gimple_assign_rhs_code (gs))
1701 case MULT_EXPR:
1702 case PLUS_EXPR:
1703 rhs1 = gimple_assign_rhs1 (gs);
1704 rhs2 = gimple_assign_rhs2 (gs);
1705 /* Should never happen, but currently some buggy situations
1706 in earlier phases put constants in rhs1. */
1707 if (TREE_CODE (rhs1) != SSA_NAME)
1708 continue;
1709 break;
1711 /* Possible future opportunity: rhs1 of a ptr+ can be
1712 an ADDR_EXPR. */
1713 case POINTER_PLUS_EXPR:
1714 case MINUS_EXPR:
1715 rhs2 = gimple_assign_rhs2 (gs);
1716 /* Fall-through. */
1718 CASE_CONVERT:
1719 case MODIFY_EXPR:
1720 case NEGATE_EXPR:
1721 rhs1 = gimple_assign_rhs1 (gs);
1722 if (TREE_CODE (rhs1) != SSA_NAME)
1723 continue;
1724 break;
1726 default:
1730 switch (gimple_assign_rhs_code (gs))
1732 case MULT_EXPR:
1733 slsr_process_mul (gs, rhs1, rhs2, speed);
1734 break;
1736 case PLUS_EXPR:
1737 case POINTER_PLUS_EXPR:
1738 case MINUS_EXPR:
1739 slsr_process_add (gs, rhs1, rhs2, speed);
1740 break;
1742 case NEGATE_EXPR:
1743 slsr_process_neg (gs, rhs1, speed);
1744 break;
1746 CASE_CONVERT:
1747 slsr_process_cast (gs, rhs1, speed);
1748 break;
1750 case MODIFY_EXPR:
1751 slsr_process_copy (gs, rhs1, speed);
1752 break;
1754 default:
1761 /* Dump a candidate for debug. */
1763 static void
1764 dump_candidate (slsr_cand_t c)
1766 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1767 gimple_bb (c->cand_stmt)->index);
1768 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1769 switch (c->kind)
1771 case CAND_MULT:
1772 fputs (" MULT : (", dump_file);
1773 print_generic_expr (dump_file, c->base_expr, 0);
1774 fputs (" + ", dump_file);
1775 print_decs (c->index, dump_file);
1776 fputs (") * ", dump_file);
1777 print_generic_expr (dump_file, c->stride, 0);
1778 fputs (" : ", dump_file);
1779 break;
1780 case CAND_ADD:
1781 fputs (" ADD : ", dump_file);
1782 print_generic_expr (dump_file, c->base_expr, 0);
1783 fputs (" + (", dump_file);
1784 print_decs (c->index, dump_file);
1785 fputs (" * ", dump_file);
1786 print_generic_expr (dump_file, c->stride, 0);
1787 fputs (") : ", dump_file);
1788 break;
1789 case CAND_REF:
1790 fputs (" REF : ", dump_file);
1791 print_generic_expr (dump_file, c->base_expr, 0);
1792 fputs (" + (", dump_file);
1793 print_generic_expr (dump_file, c->stride, 0);
1794 fputs (") + ", dump_file);
1795 print_decs (c->index, dump_file);
1796 fputs (" : ", dump_file);
1797 break;
1798 case CAND_PHI:
1799 fputs (" PHI : ", dump_file);
1800 print_generic_expr (dump_file, c->base_expr, 0);
1801 fputs (" + (unknown * ", dump_file);
1802 print_generic_expr (dump_file, c->stride, 0);
1803 fputs (") : ", dump_file);
1804 break;
1805 default:
1806 gcc_unreachable ();
1808 print_generic_expr (dump_file, c->cand_type, 0);
1809 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1810 c->basis, c->dependent, c->sibling);
1811 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1812 c->next_interp, c->dead_savings);
1813 if (c->def_phi)
1814 fprintf (dump_file, " phi: %d\n", c->def_phi);
1815 fputs ("\n", dump_file);
1818 /* Dump the candidate vector for debug. */
1820 static void
1821 dump_cand_vec (void)
1823 unsigned i;
1824 slsr_cand_t c;
1826 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1828 FOR_EACH_VEC_ELT (cand_vec, i, c)
1829 dump_candidate (c);
1832 /* Callback used to dump the candidate chains hash table. */
1835 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1837 const_cand_chain_t chain = *slot;
1838 cand_chain_t p;
1840 print_generic_expr (dump_file, chain->base_expr, 0);
1841 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1843 for (p = chain->next; p; p = p->next)
1844 fprintf (dump_file, " -> %d", p->cand->cand_num);
1846 fputs ("\n", dump_file);
1847 return 1;
1850 /* Dump the candidate chains. */
1852 static void
1853 dump_cand_chains (void)
1855 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1856 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1857 (NULL);
1858 fputs ("\n", dump_file);
1861 /* Dump the increment vector for debug. */
1863 static void
1864 dump_incr_vec (void)
1866 if (dump_file && (dump_flags & TDF_DETAILS))
1868 unsigned i;
1870 fprintf (dump_file, "\nIncrement vector:\n\n");
1872 for (i = 0; i < incr_vec_len; i++)
1874 fprintf (dump_file, "%3d increment: ", i);
1875 print_decs (incr_vec[i].incr, dump_file);
1876 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1877 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1878 fputs ("\n initializer: ", dump_file);
1879 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1880 fputs ("\n\n", dump_file);
1885 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1886 data reference. */
1888 static void
1889 replace_ref (tree *expr, slsr_cand_t c)
1891 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1892 unsigned HOST_WIDE_INT misalign;
1893 unsigned align;
1895 /* Ensure the memory reference carries the minimum alignment
1896 requirement for the data type. See PR58041. */
1897 get_object_alignment_1 (*expr, &align, &misalign);
1898 if (misalign != 0)
1899 align = (misalign & -misalign);
1900 if (align < TYPE_ALIGN (acc_type))
1901 acc_type = build_aligned_type (acc_type, align);
1903 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1904 c->base_expr, c->stride);
1905 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1906 wide_int_to_tree (c->cand_type, c->index));
1908 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1909 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1910 TREE_OPERAND (mem_ref, 0)
1911 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1912 /*simple_p=*/true, NULL,
1913 /*before=*/true, GSI_SAME_STMT);
1914 copy_ref_info (mem_ref, *expr);
1915 *expr = mem_ref;
1916 update_stmt (c->cand_stmt);
1919 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1920 dependent of candidate C with an equivalent strength-reduced data
1921 reference. */
1923 static void
1924 replace_refs (slsr_cand_t c)
1926 if (dump_file && (dump_flags & TDF_DETAILS))
1928 fputs ("Replacing reference: ", dump_file);
1929 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1932 if (gimple_vdef (c->cand_stmt))
1934 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1935 replace_ref (lhs, c);
1937 else
1939 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1940 replace_ref (rhs, c);
1943 if (dump_file && (dump_flags & TDF_DETAILS))
1945 fputs ("With: ", dump_file);
1946 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1947 fputs ("\n", dump_file);
1950 if (c->sibling)
1951 replace_refs (lookup_cand (c->sibling));
1953 if (c->dependent)
1954 replace_refs (lookup_cand (c->dependent));
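/* For illustration only: a sketch of the effect of replace_refs on a
   hypothetical source fragment (the struct, names, and sizes below are
   invented and not used by the pass).  Given

     struct elem { int a; int b; };
     int
     sum (struct elem *p, long n)
     {
       return p[n].a + p[n].b;
     }

   the two field loads are CAND_REFs sharing base P and a stride of
   N * sizeof (struct elem).  Each is rewritten as a MEM_REF whose
   address is the single pointer add P + stride, with the constant
   field offset folded into the MEM_REF's offset operand, so the
   variable part of the addressing arithmetic can be shared between
   the accesses.  */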
1957 /* Return TRUE if candidate C is dependent upon a PHI. */
1959 static bool
1960 phi_dependent_cand_p (slsr_cand_t c)
1962 /* A candidate is not necessarily dependent upon a PHI just because
1963 it has a phi definition for its base name. It may have a basis
1964 that relies upon the same phi definition, in which case the PHI
1965 is irrelevant to this candidate. */
1966 return (c->def_phi
1967 && c->basis
1968 && lookup_cand (c->basis)->def_phi != c->def_phi);
1971 /* Calculate the increment required for candidate C relative to
1972 its basis. */
1974 static widest_int
1975 cand_increment (slsr_cand_t c)
1977 slsr_cand_t basis;
1979 /* If the candidate doesn't have a basis, just return its own
1980 index. This is useful in record_increments to help us find
1981 an existing initializer. Also, if the candidate's basis is
1982 hidden by a phi, then its own index will be the increment
1983 from the newly introduced phi basis. */
1984 if (!c->basis || phi_dependent_cand_p (c))
1985 return c->index;
1987 basis = lookup_cand (c->basis);
1988 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1989 return c->index - basis->index;
1992 /* Calculate the increment required for candidate C relative to
1993 its basis. If we aren't going to generate pointer arithmetic
1994 for this candidate, return the absolute value of that increment
1995 instead. */
1997 static inline widest_int
1998 cand_abs_increment (slsr_cand_t c)
2000 widest_int increment = cand_increment (c);
2002 if (!address_arithmetic_p && wi::neg_p (increment))
2003 increment = -increment;
2005 return increment;
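/* A small worked example (illustrative only): if a candidate represents
   X = (B + 7) * S and its basis represents Y = (B + 2) * S, then
   cand_increment returns 7 - 2 = 5 and X can later be rewritten as
   Y + 5 * S.  When we are not generating pointer arithmetic,
   cand_abs_increment folds an increment of -5 into 5 so that both
   directions can share a single initializer T_0 = 5 * S.  */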
2008 /* Return TRUE iff candidate C has already been replaced under
2009 another interpretation. */
2011 static inline bool
2012 cand_already_replaced (slsr_cand_t c)
2014 return (gimple_bb (c->cand_stmt) == 0);
2017 /* Common logic used by replace_unconditional_candidate and
2018 replace_conditional_candidate. */
2020 static void
2021 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2023 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2024 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2026 /* It is highly unlikely, but possible, that the resulting
2027 bump doesn't fit in a HWI. Abandon the replacement
2028 in this case. This does not affect siblings or dependents
2029 of C. Restriction to signed HWI is conservative for unsigned
2030 types but allows for safe negation without twisted logic. */
2031 if (wi::fits_shwi_p (bump)
2032 && bump.to_shwi () != HOST_WIDE_INT_MIN
2033 /* It is not useful to replace casts, copies, or adds of
2034 an SSA name and a constant. */
2035 && cand_code != MODIFY_EXPR
2036 && !CONVERT_EXPR_CODE_P (cand_code)
2037 && cand_code != PLUS_EXPR
2038 && cand_code != POINTER_PLUS_EXPR
2039 && cand_code != MINUS_EXPR)
2041 enum tree_code code = PLUS_EXPR;
2042 tree bump_tree;
2043 gimple stmt_to_print = NULL;
2045 /* If the basis name and the candidate's LHS have incompatible
2046 types, introduce a cast. */
2047 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2048 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2049 if (wi::neg_p (bump))
2051 code = MINUS_EXPR;
2052 bump = -bump;
2055 bump_tree = wide_int_to_tree (target_type, bump);
2057 if (dump_file && (dump_flags & TDF_DETAILS))
2059 fputs ("Replacing: ", dump_file);
2060 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2063 if (bump == 0)
2065 tree lhs = gimple_assign_lhs (c->cand_stmt);
2066 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
2067 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2068 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2069 gsi_replace (&gsi, copy_stmt, false);
2070 c->cand_stmt = copy_stmt;
2071 if (dump_file && (dump_flags & TDF_DETAILS))
2072 stmt_to_print = copy_stmt;
2074 else
2076 tree rhs1, rhs2;
2077 if (cand_code != NEGATE_EXPR) {
2078 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2079 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2081 if (cand_code != NEGATE_EXPR
2082 && ((operand_equal_p (rhs1, basis_name, 0)
2083 && operand_equal_p (rhs2, bump_tree, 0))
2084 || (operand_equal_p (rhs1, bump_tree, 0)
2085 && operand_equal_p (rhs2, basis_name, 0))))
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2089 fputs ("(duplicate, not actually replacing)", dump_file);
2090 stmt_to_print = c->cand_stmt;
2093 else
2095 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2096 gimple_assign_set_rhs_with_ops (&gsi, code,
2097 basis_name, bump_tree);
2098 update_stmt (gsi_stmt (gsi));
2099 c->cand_stmt = gsi_stmt (gsi);
2100 if (dump_file && (dump_flags & TDF_DETAILS))
2101 stmt_to_print = gsi_stmt (gsi);
2105 if (dump_file && (dump_flags & TDF_DETAILS))
2107 fputs ("With: ", dump_file);
2108 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2109 fputs ("\n", dump_file);
2114 /* Replace candidate C with an add or subtract. Note that we only
2115 operate on CAND_MULTs with known strides, so we will never generate
2116 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2117 X = Y + ((i - i') * S), as described in the module commentary. The
2118 folded value ((i - i') * S) is referred to here as the "bump." */
2120 static void
2121 replace_unconditional_candidate (slsr_cand_t c)
2123 slsr_cand_t basis;
2125 if (cand_already_replaced (c))
2126 return;
2128 basis = lookup_cand (c->basis);
2129 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2131 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
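/* Illustrative sketch (invented names): with a known constant stride,
   a dominating chain of multiplies such as

     x1 = (b + 1) * 4;
     x2 = (b + 2) * 4;
     x3 = (b + 3) * 4;

   is rewritten, once x1 is chosen as the basis of x2 and x2 as the
   basis of x3, into

     x1 = (b + 1) * 4;
     x2 = x1 + 4;             bump = (2 - 1) * 4
     x3 = x2 + 4;             bump = (3 - 2) * 4

   leaving any feeding statements that become dead for later passes to
   remove.  */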
2134 /* Return the index in the increment vector of the given INCREMENT,
2135 or -1 if not found. The latter can occur if more than
2136 MAX_INCR_VEC_LEN increments have been found. */
2138 static inline int
2139 incr_vec_index (const widest_int &increment)
2141 unsigned i;
2143 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2146 if (i < incr_vec_len)
2147 return i;
2148 else
2149 return -1;
2152 /* Create a new statement along edge E to add BASIS_NAME to the product
2153 of INCREMENT and the stride of candidate C. Create and return a new
2154 SSA name to be used as the LHS of the new statement.
2155 KNOWN_STRIDE is true iff C's stride is a constant. */
2157 static tree
2158 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2159 widest_int increment, edge e, location_t loc,
2160 bool known_stride)
2162 basic_block insert_bb;
2163 gimple_stmt_iterator gsi;
2164 tree lhs, basis_type;
2165 gimple new_stmt;
2167 /* If the add candidate along this incoming edge has the same
2168 index as C's hidden basis, the hidden basis represents this
2169 edge correctly. */
2170 if (increment == 0)
2171 return basis_name;
2173 basis_type = TREE_TYPE (basis_name);
2174 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2176 if (known_stride)
2178 tree bump_tree;
2179 enum tree_code code = PLUS_EXPR;
2180 widest_int bump = increment * wi::to_widest (c->stride);
2181 if (wi::neg_p (bump))
2183 code = MINUS_EXPR;
2184 bump = -bump;
2187 bump_tree = wide_int_to_tree (basis_type, bump);
2188 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2189 bump_tree);
2191 else
2193 int i;
2194 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2195 i = incr_vec_index (negate_incr ? -increment : increment);
2196 gcc_assert (i >= 0);
2198 if (incr_vec[i].initializer)
2200 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2201 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2202 incr_vec[i].initializer);
2204 else if (increment == 1)
2205 new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, basis_name,
2206 c->stride);
2207 else if (increment == -1)
2208 new_stmt = gimple_build_assign_with_ops (MINUS_EXPR, lhs, basis_name,
2209 c->stride);
2210 else
2211 gcc_unreachable ();
2214 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2215 gsi = gsi_last_bb (insert_bb);
2217 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2218 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2219 else
2220 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2222 gimple_set_location (new_stmt, loc);
2224 if (dump_file && (dump_flags & TDF_DETAILS))
2226 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2227 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2230 return lhs;
2233 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2234 is hidden by the phi node FROM_PHI, create a new phi node in the same
2235 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2236 with its phi arguments representing conditional adjustments to the
2237 hidden basis along conditional incoming paths. Those adjustments are
2238 made by creating add statements (and sometimes recursively creating
2239 phis) along those incoming paths. LOC is the location to attach to
2240 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2241 constant. */
2243 static tree
2244 create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
2245 location_t loc, bool known_stride)
2247 int i;
2248 tree name, phi_arg;
2249 gimple phi;
2250 vec<tree> phi_args;
2251 slsr_cand_t basis = lookup_cand (c->basis);
2252 int nargs = gimple_phi_num_args (from_phi);
2253 basic_block phi_bb = gimple_bb (from_phi);
2254 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2255 phi_args.create (nargs);
2257 /* Process each argument of the existing phi that represents
2258 conditionally-executed add candidates. */
2259 for (i = 0; i < nargs; i++)
2261 edge e = (*phi_bb->preds)[i];
2262 tree arg = gimple_phi_arg_def (from_phi, i);
2263 tree feeding_def;
2265 /* If the phi argument is the base name of the CAND_PHI, then
2266 this incoming arc should use the hidden basis. */
2267 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2268 if (basis->index == 0)
2269 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2270 else
2272 widest_int incr = -basis->index;
2273 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2274 e, loc, known_stride);
2276 else
2278 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2280 /* If there is another phi along this incoming edge, we must
2281 process it in the same fashion to ensure that all basis
2282 adjustments are made along its incoming edges. */
2283 if (gimple_code (arg_def) == GIMPLE_PHI)
2284 feeding_def = create_phi_basis (c, arg_def, basis_name,
2285 loc, known_stride);
2286 else
2288 slsr_cand_t arg_cand = base_cand_from_table (arg);
2289 widest_int diff = arg_cand->index - basis->index;
2290 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2291 e, loc, known_stride);
2295 /* Because of recursion, we need to save the arguments in a vector
2296 so we can create the PHI statement all at once. Otherwise the
2297 storage for the half-created PHI can be reclaimed. */
2298 phi_args.safe_push (feeding_def);
2301 /* Create the new phi basis. */
2302 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2303 phi = create_phi_node (name, phi_bb);
2304 SSA_NAME_DEF_STMT (name) = phi;
2306 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2308 edge e = (*phi_bb->preds)[i];
2309 add_phi_arg (phi, phi_arg, e, loc);
2312 update_stmt (phi);
2314 if (dump_file && (dump_flags & TDF_DETAILS))
2316 fputs ("Introducing new phi basis: ", dump_file);
2317 print_gimple_stmt (dump_file, phi, 0, 0);
2320 return name;
2323 /* Given a candidate C whose basis is hidden by at least one intervening
2324 phi, introduce a matching number of new phis to represent its basis
2325 adjusted by conditional increments along possible incoming paths. Then
2326 replace C as though it were an unconditional candidate, using the new
2327 basis. */
2329 static void
2330 replace_conditional_candidate (slsr_cand_t c)
2332 tree basis_name, name;
2333 slsr_cand_t basis;
2334 location_t loc;
2336 /* Look up the LHS SSA name from C's basis. This will be the
2337 RHS1 of the adds we will introduce to create new phi arguments. */
2338 basis = lookup_cand (c->basis);
2339 basis_name = gimple_assign_lhs (basis->cand_stmt);
2341 /* Create a new phi statement which will represent C's true basis
2342 after the transformation is complete. */
2343 loc = gimple_location (c->cand_stmt);
2344 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2345 basis_name, loc, KNOWN_STRIDE);
2346 /* Replace C with an add of the new basis phi and a constant. */
2347 widest_int bump = c->index * wi::to_widest (c->stride);
2349 replace_mult_candidate (c, name, bump);
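/* Illustrative sketch of a conditional replacement (invented names):

     a_0 = b_0 * 4;                 basis: base b_0, index 0, stride 4
     if (...)
       b_1 = b_0 + 1;
     b_2 = PHI <b_0, b_1>;          hides the basis from a_1
     a_1 = (b_2 + 2) * 4;           conditional candidate

   create_phi_basis adds an adjusting add on the conditional arc and a
   new phi to act as the true basis, after which the multiply becomes
   a simple add:

     a_0 = b_0 * 4;
     if (...)
       {
         b_1 = b_0 + 1;
         t_1 = a_0 + 4;             adjustment for the b_1 arc
       }
     b_2 = PHI <b_0, b_1>;
     t_2 = PHI <a_0, t_1>;          new basis phi
     a_1 = t_2 + 8;                 bump = 2 * 4

   The original add and phi feeding a_1 become dead if they have no
   other uses.  */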
2352 /* Compute the expected costs of inserting basis adjustments for
2353 candidate C with phi-definition PHI. The cost of inserting
2354 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2355 which are themselves phi results, recursively calculate costs
2356 for those phis as well. */
2358 static int
2359 phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
2361 unsigned i;
2362 int cost = 0;
2363 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2365 /* If we work our way back to a phi that isn't dominated by the hidden
2366 basis, this isn't a candidate for replacement. Indicate this by
2367 returning an unreasonably high cost. It's not easy to detect
2368 these situations when determining the basis, so we defer the
2369 decision until now. */
2370 basic_block phi_bb = gimple_bb (phi);
2371 slsr_cand_t basis = lookup_cand (c->basis);
2372 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2374 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2375 return COST_INFINITE;
2377 for (i = 0; i < gimple_phi_num_args (phi); i++)
2379 tree arg = gimple_phi_arg_def (phi, i);
2381 if (arg != phi_cand->base_expr)
2383 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2385 if (gimple_code (arg_def) == GIMPLE_PHI)
2386 cost += phi_add_costs (arg_def, c, one_add_cost);
2387 else
2389 slsr_cand_t arg_cand = base_cand_from_table (arg);
2391 if (arg_cand->index != c->index)
2392 cost += one_add_cost;
2397 return cost;
2400 /* For candidate C, each sibling of candidate C, and each dependent of
2401 candidate C, determine whether the candidate is dependent upon a
2402 phi that hides its basis. If not, replace the candidate unconditionally.
2403 Otherwise, determine whether the cost of introducing compensation code
2404 for the candidate is offset by the gains from strength reduction. If
2405 so, replace the candidate and introduce the compensation code. */
2407 static void
2408 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2410 if (phi_dependent_cand_p (c))
2412 if (c->kind == CAND_MULT)
2414 /* A candidate dependent upon a phi will replace a multiply by
2415 a constant with an add, and will insert at most one add for
2416 each phi argument. Add these costs with the potential dead-code
2417 savings to determine profitability. */
2418 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2419 int mult_savings = stmt_cost (c->cand_stmt, speed);
2420 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2421 tree phi_result = gimple_phi_result (phi);
2422 int one_add_cost = add_cost (speed,
2423 TYPE_MODE (TREE_TYPE (phi_result)));
2424 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2425 int cost = add_costs - mult_savings - c->dead_savings;
2427 if (dump_file && (dump_flags & TDF_DETAILS))
2429 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2430 fprintf (dump_file, " add_costs = %d\n", add_costs);
2431 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2432 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2433 fprintf (dump_file, " cost = %d\n", cost);
2434 if (cost <= COST_NEUTRAL)
2435 fputs (" Replacing...\n", dump_file);
2436 else
2437 fputs (" Not replaced.\n", dump_file);
2440 if (cost <= COST_NEUTRAL)
2441 replace_conditional_candidate (c);
2444 else
2445 replace_unconditional_candidate (c);
2447 if (c->sibling)
2448 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2450 if (c->dependent)
2451 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
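/* A worked example of the profitability test above, tied to the
   conditional sketch following replace_conditional_candidate and
   using made-up target costs: if the multiply being replaced costs 16
   (mult_savings), an add costs 4 (one_add_cost), and the phi has one
   conditional arm needing an adjustment, then add_costs = 4 + 4 = 8
   and cost = 8 - 16 - dead_savings, which is at most -8, i.e. at or
   below COST_NEUTRAL, so the candidate is replaced.  */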
2454 /* Count the number of candidates in the tree rooted at C that have
2455 not already been replaced under other interpretations. */
2457 static int
2458 count_candidates (slsr_cand_t c)
2460 unsigned count = cand_already_replaced (c) ? 0 : 1;
2462 if (c->sibling)
2463 count += count_candidates (lookup_cand (c->sibling));
2465 if (c->dependent)
2466 count += count_candidates (lookup_cand (c->dependent));
2468 return count;
2471 /* Increase the count of INCREMENT by one in the increment vector.
2472 INCREMENT is associated with candidate C. If INCREMENT is to be
2473 conditionally executed as part of a conditional candidate replacement,
2474 IS_PHI_ADJUST is true, otherwise false. If an initializer
2475 T_0 = stride * I is provided by a candidate that dominates all
2476 candidates with the same increment, also record T_0 for subsequent use. */
2478 static void
2479 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2481 bool found = false;
2482 unsigned i;
2484 /* Treat increments that differ only in sign as identical so as to
2485 share initializers, unless we are generating pointer arithmetic. */
2486 if (!address_arithmetic_p && wi::neg_p (increment))
2487 increment = -increment;
2489 for (i = 0; i < incr_vec_len; i++)
2491 if (incr_vec[i].incr == increment)
2493 incr_vec[i].count++;
2494 found = true;
2496 /* If we previously recorded an initializer that doesn't
2497 dominate this candidate, it's not going to be useful to
2498 us after all. */
2499 if (incr_vec[i].initializer
2500 && !dominated_by_p (CDI_DOMINATORS,
2501 gimple_bb (c->cand_stmt),
2502 incr_vec[i].init_bb))
2504 incr_vec[i].initializer = NULL_TREE;
2505 incr_vec[i].init_bb = NULL;
2508 break;
2512 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2514 /* The first time we see an increment, create the entry for it.
2515 If this is the root candidate which doesn't have a basis, set
2516 the count to zero. We're only processing it so it can possibly
2517 provide an initializer for other candidates. */
2518 incr_vec[incr_vec_len].incr = increment;
2519 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2520 incr_vec[incr_vec_len].cost = COST_INFINITE;
2522 /* Optimistically record the first occurrence of this increment
2523 as providing an initializer (if it does); we will revise this
2524 opinion later if it doesn't dominate all other occurrences.
2525 Exception: increments of -1, 0, 1 never need initializers;
2526 and phi adjustments don't ever provide initializers. */
2527 if (c->kind == CAND_ADD
2528 && !is_phi_adjust
2529 && c->index == increment
2530 && (wi::gts_p (increment, 1)
2531 || wi::lts_p (increment, -1))
2532 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2533 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2535 tree t0 = NULL_TREE;
2536 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2537 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2538 if (operand_equal_p (rhs1, c->base_expr, 0))
2539 t0 = rhs2;
2540 else if (operand_equal_p (rhs2, c->base_expr, 0))
2541 t0 = rhs1;
2542 if (t0
2543 && SSA_NAME_DEF_STMT (t0)
2544 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2546 incr_vec[incr_vec_len].initializer = t0;
2547 incr_vec[incr_vec_len++].init_bb
2548 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2550 else
2552 incr_vec[incr_vec_len].initializer = NULL_TREE;
2553 incr_vec[incr_vec_len++].init_bb = NULL;
2556 else
2558 incr_vec[incr_vec_len].initializer = NULL_TREE;
2559 incr_vec[incr_vec_len++].init_bb = NULL;
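/* Illustrative example of initializer recording (invented names):

     t_5 = s_1 * 4;
     x_7 = b_2 + t_5;      ADD candidate: base b_2, index 4, stride s_1

   Because x_7 is an add candidate whose own index equals the increment
   being recorded, and its rhs already computes stride * 4, t_5 is
   remembered as an existing initializer T_0 for increment 4.  Later
   candidates needing the same increment can reuse t_5 rather than
   having a fresh multiply inserted, provided t_5's definition
   dominates them; otherwise the recorded initializer is discarded
   above.  */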
2564 /* Given phi statement PHI that hides a candidate from its BASIS, find
2565 the increments along each incoming arc (recursively handling additional
2566 phis that may be present) and record them. These increments are the
2567 difference in index between the index-adjusting statements and the
2568 index of the basis. */
2570 static void
2571 record_phi_increments (slsr_cand_t basis, gimple phi)
2573 unsigned i;
2574 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2576 for (i = 0; i < gimple_phi_num_args (phi); i++)
2578 tree arg = gimple_phi_arg_def (phi, i);
2580 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2582 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2584 if (gimple_code (arg_def) == GIMPLE_PHI)
2585 record_phi_increments (basis, arg_def);
2586 else
2588 slsr_cand_t arg_cand = base_cand_from_table (arg);
2589 widest_int diff = arg_cand->index - basis->index;
2590 record_increment (arg_cand, diff, PHI_ADJUST);
2596 /* Determine how many times each unique increment occurs in the set
2597 of candidates rooted at C's parent, recording the data in the
2598 increment vector. For each unique increment I, if an initializer
2599 T_0 = stride * I is provided by a candidate that dominates all
2600 candidates with the same increment, also record T_0 for subsequent
2601 use. */
2603 static void
2604 record_increments (slsr_cand_t c)
2606 if (!cand_already_replaced (c))
2608 if (!phi_dependent_cand_p (c))
2609 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2610 else
2612 /* A candidate with a basis hidden by a phi will have one
2613 increment for its relationship to the index represented by
2614 the phi, and potentially additional increments along each
2615 incoming edge. For the root of the dependency tree (which
2616 has no basis), process just the initial index in case it has
2617 an initializer that can be used by subsequent candidates. */
2618 record_increment (c, c->index, NOT_PHI_ADJUST);
2620 if (c->basis)
2621 record_phi_increments (lookup_cand (c->basis),
2622 lookup_cand (c->def_phi)->cand_stmt);
2626 if (c->sibling)
2627 record_increments (lookup_cand (c->sibling));
2629 if (c->dependent)
2630 record_increments (lookup_cand (c->dependent));
2633 /* Add up and return the costs of introducing add statements that
2634 require the increment INCR on behalf of candidate C and phi
2635 statement PHI. Accumulate into *SAVINGS the potential savings
2636 from removing existing statements that feed PHI and have no other
2637 uses. */
2639 static int
2640 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple phi, int *savings)
2642 unsigned i;
2643 int cost = 0;
2644 slsr_cand_t basis = lookup_cand (c->basis);
2645 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2647 for (i = 0; i < gimple_phi_num_args (phi); i++)
2649 tree arg = gimple_phi_arg_def (phi, i);
2651 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2653 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2655 if (gimple_code (arg_def) == GIMPLE_PHI)
2657 int feeding_savings = 0;
2658 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2659 if (has_single_use (gimple_phi_result (arg_def)))
2660 *savings += feeding_savings;
2662 else
2664 slsr_cand_t arg_cand = base_cand_from_table (arg);
2665 widest_int diff = arg_cand->index - basis->index;
2667 if (incr == diff)
2669 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2670 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2671 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2672 if (has_single_use (lhs))
2673 *savings += stmt_cost (arg_cand->cand_stmt, true);
2679 return cost;
2682 /* Return the first candidate in the tree rooted at C that has not
2683 already been replaced, favoring siblings over dependents. */
2685 static slsr_cand_t
2686 unreplaced_cand_in_tree (slsr_cand_t c)
2688 if (!cand_already_replaced (c))
2689 return c;
2691 if (c->sibling)
2693 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2694 if (sib)
2695 return sib;
2698 if (c->dependent)
2700 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2701 if (dep)
2702 return dep;
2705 return NULL;
2708 /* Return TRUE if the candidates in the tree rooted at C should be
2709 optimized for speed, else FALSE. We estimate this based on the block
2710 containing the most dominant candidate in the tree that has not yet
2711 been replaced. */
2713 static bool
2714 optimize_cands_for_speed_p (slsr_cand_t c)
2716 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2717 gcc_assert (c2);
2718 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2721 /* Add COST_IN to the lowest cost of any dependent path starting at
2722 candidate C or any of its siblings, counting only candidates along
2723 such paths with increment INCR. Assume that replacing a candidate
2724 reduces cost by REPL_SAVINGS. Also account for savings from any
2725 statements that would go dead. If COUNT_PHIS is true, include
2726 costs of introducing feeding statements for conditional candidates. */
2728 static int
2729 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2730 const widest_int &incr, bool count_phis)
2732 int local_cost, sib_cost, savings = 0;
2733 widest_int cand_incr = cand_abs_increment (c);
2735 if (cand_already_replaced (c))
2736 local_cost = cost_in;
2737 else if (incr == cand_incr)
2738 local_cost = cost_in - repl_savings - c->dead_savings;
2739 else
2740 local_cost = cost_in - c->dead_savings;
2742 if (count_phis
2743 && phi_dependent_cand_p (c)
2744 && !cand_already_replaced (c))
2746 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2747 local_cost += phi_incr_cost (c, incr, phi, &savings);
2749 if (has_single_use (gimple_phi_result (phi)))
2750 local_cost -= savings;
2753 if (c->dependent)
2754 local_cost = lowest_cost_path (local_cost, repl_savings,
2755 lookup_cand (c->dependent), incr,
2756 count_phis);
2758 if (c->sibling)
2760 sib_cost = lowest_cost_path (cost_in, repl_savings,
2761 lookup_cand (c->sibling), incr,
2762 count_phis);
2763 local_cost = MIN (local_cost, sib_cost);
2766 return local_cost;
2769 /* Compute the total savings that would accrue from all replacements
2770 in the candidate tree rooted at C, counting only candidates with
2771 increment INCR. Assume that replacing a candidate reduces cost
2772 by REPL_SAVINGS. Also account for savings from statements that
2773 would go dead. */
2775 static int
2776 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2777 bool count_phis)
2779 int savings = 0;
2780 widest_int cand_incr = cand_abs_increment (c);
2782 if (incr == cand_incr && !cand_already_replaced (c))
2783 savings += repl_savings + c->dead_savings;
2785 if (count_phis
2786 && phi_dependent_cand_p (c)
2787 && !cand_already_replaced (c))
2789 int phi_savings = 0;
2790 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2791 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2793 if (has_single_use (gimple_phi_result (phi)))
2794 savings += phi_savings;
2797 if (c->dependent)
2798 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2799 count_phis);
2801 if (c->sibling)
2802 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2803 count_phis);
2805 return savings;
2808 /* Use target-specific costs to determine and record which increments
2809 in the current candidate tree are profitable to replace, assuming
2810 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2811 the candidate tree.
2813 One slight limitation here is that we don't account for the possible
2814 introduction of casts in some cases. See replace_one_candidate for
2815 the cases where these are introduced. This should probably be cleaned
2816 up sometime. */
2818 static void
2819 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2821 unsigned i;
2823 for (i = 0; i < incr_vec_len; i++)
2825 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2827 /* If somehow this increment is bigger than a HWI, we won't
2828 be optimizing candidates that use it. And if the increment
2829 has a count of zero, nothing will be done with it. */
2830 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2831 incr_vec[i].cost = COST_INFINITE;
2833 /* Increments of 0, 1, and -1 are always profitable to replace,
2834 because they always replace a multiply or add with an add or
2835 copy, and may cause one or more existing instructions to go
2836 dead. Exception: -1 can't be assumed to be profitable for
2837 pointer addition. */
2838 else if (incr == 0
2839 || incr == 1
2840 || (incr == -1
2841 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2842 != POINTER_PLUS_EXPR)))
2843 incr_vec[i].cost = COST_NEUTRAL;
2845 /* FORNOW: If we need to add an initializer, give up if a cast from
2846 the candidate's type to its stride's type can lose precision.
2847 This could eventually be handled better by expressly retaining the
2848 result of a cast to a wider type in the stride. Example:
2850 short int _1;
2851 _2 = (int) _1;
2852 _3 = _2 * 10;
2853 _4 = x + _3; ADD: x + (10 * _1) : int
2854 _5 = _2 * 15;
2855 _6 = x + _5; ADD: x + (15 * _1) : int
2857 Right now replacing _6 would cause insertion of an initializer
2858 of the form "short int T = _1 * 5;" followed by a cast to
2859 int, which could overflow incorrectly. Had we recorded _2 or
2860 (int)_1 as the stride, this wouldn't happen. However, doing
2861 this breaks other opportunities, so this will require some
2862 care. */
2863 else if (!incr_vec[i].initializer
2864 && TREE_CODE (first_dep->stride) != INTEGER_CST
2865 && !legal_cast_p_1 (first_dep->stride,
2866 gimple_assign_lhs (first_dep->cand_stmt)))
2868 incr_vec[i].cost = COST_INFINITE;
2870 /* If we need to add an initializer, make sure we don't introduce
2871 a multiply by a pointer type, which can happen in certain cast
2872 scenarios. FIXME: When cleaning up these cast issues, we can
2873 afford to introduce the multiply provided we cast out to an
2874 unsigned int of appropriate size. */
2875 else if (!incr_vec[i].initializer
2876 && TREE_CODE (first_dep->stride) != INTEGER_CST
2877 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2879 incr_vec[i].cost = COST_INFINITE;
2881 /* For any other increment, if this is a multiply candidate, we
2882 must introduce a temporary T and initialize it with
2883 T_0 = stride * increment. When optimizing for speed, walk the
2884 candidate tree to calculate the best cost reduction along any
2885 path; if it offsets the fixed cost of inserting the initializer,
2886 replacing the increment is profitable. When optimizing for
2887 size, instead calculate the total cost reduction from replacing
2888 all candidates with this increment. */
2889 else if (first_dep->kind == CAND_MULT)
2891 int cost = mult_by_coeff_cost (incr, mode, speed);
2892 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2893 if (speed)
2894 cost = lowest_cost_path (cost, repl_savings, first_dep,
2895 incr_vec[i].incr, COUNT_PHIS);
2896 else
2897 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2898 COUNT_PHIS);
2900 incr_vec[i].cost = cost;
2903 /* If this is an add candidate, the initializer may already
2904 exist, so only calculate the cost of the initializer if it
2905 doesn't. We are replacing one add with another here, so the
2906 known replacement savings is zero. We will account for removal
2907 of dead instructions in lowest_cost_path or total_savings. */
2908 else
2910 int cost = 0;
2911 if (!incr_vec[i].initializer)
2912 cost = mult_by_coeff_cost (incr, mode, speed);
2914 if (speed)
2915 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2916 DONT_COUNT_PHIS);
2917 else
2918 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2919 DONT_COUNT_PHIS);
2921 incr_vec[i].cost = cost;
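/* A worked example of the cost model above, with made-up target costs:
   suppose mul_cost = 16 and add_cost = 4, so repl_savings = 12 for a
   multiply candidate, and suppose mult_by_coeff_cost for an increment
   of 5 is 8.  When optimizing for size, three replaceable candidates
   using increment 5 save 3 * 12 = 36, so the recorded cost is
   8 - 36 = -28, well under COST_NEUTRAL, and the increment is marked
   profitable.  When optimizing for speed, lowest_cost_path instead
   asks whether some single chain of dependent candidates recovers the
   fixed cost of 8 on its own.  */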
2926 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2927 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2928 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2929 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2930 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2932 static basic_block
2933 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2934 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2936 basic_block ncd;
2938 if (!bb1)
2940 *where = c2;
2941 return bb2;
2944 if (!bb2)
2946 *where = c1;
2947 return bb1;
2950 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2952 /* If both candidates are in the same block, the earlier
2953 candidate wins. */
2954 if (bb1 == ncd && bb2 == ncd)
2956 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2957 *where = c2;
2958 else
2959 *where = c1;
2962 /* Otherwise, if one of them produced a candidate in the
2963 dominator, that one wins. */
2964 else if (bb1 == ncd)
2965 *where = c1;
2967 else if (bb2 == ncd)
2968 *where = c2;
2970 /* If neither matches the dominator, neither wins. */
2971 else
2972 *where = NULL;
2974 return ncd;
2977 /* Consider all candidates that feed PHI. Find the nearest common
2978 dominator of those candidates requiring the given increment INCR.
2979 Further find and return the nearest common dominator of this result
2980 with block NCD. If the returned block contains one or more of the
2981 candidates, return the earliest candidate in the block in *WHERE. */
2983 static basic_block
2984 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gimple phi,
2985 basic_block ncd, slsr_cand_t *where)
2987 unsigned i;
2988 slsr_cand_t basis = lookup_cand (c->basis);
2989 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2991 for (i = 0; i < gimple_phi_num_args (phi); i++)
2993 tree arg = gimple_phi_arg_def (phi, i);
2995 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2997 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2999 if (gimple_code (arg_def) == GIMPLE_PHI)
3000 ncd = ncd_with_phi (c, incr, arg_def, ncd, where);
3001 else
3003 slsr_cand_t arg_cand = base_cand_from_table (arg);
3004 widest_int diff = arg_cand->index - basis->index;
3005 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3007 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3008 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3013 return ncd;
3016 /* Consider the candidate C together with any candidates that feed
3017 C's phi dependence (if any). Find and return the nearest common
3018 dominator of those candidates requiring the given increment INCR.
3019 If the returned block contains one or more of the candidates,
3020 return the earliest candidate in the block in *WHERE. */
3022 static basic_block
3023 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3025 basic_block ncd = NULL;
3027 if (cand_abs_increment (c) == incr)
3029 ncd = gimple_bb (c->cand_stmt);
3030 *where = c;
3033 if (phi_dependent_cand_p (c))
3034 ncd = ncd_with_phi (c, incr, lookup_cand (c->def_phi)->cand_stmt,
3035 ncd, where);
3037 return ncd;
3040 /* Consider all candidates in the tree rooted at C for which INCR
3041 represents the required increment of C relative to its basis.
3042 Find and return the basic block that most nearly dominates all
3043 such candidates. If the returned block contains one or more of
3044 the candidates, return the earliest candidate in the block in
3045 *WHERE. */
3047 static basic_block
3048 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3049 slsr_cand_t *where)
3051 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3052 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3054 /* First find the NCD of all siblings and dependents. */
3055 if (c->sibling)
3056 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3057 incr, &sib_where);
3058 if (c->dependent)
3059 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3060 incr, &dep_where);
3061 if (!sib_ncd && !dep_ncd)
3063 new_where = NULL;
3064 ncd = NULL;
3066 else if (sib_ncd && !dep_ncd)
3068 new_where = sib_where;
3069 ncd = sib_ncd;
3071 else if (dep_ncd && !sib_ncd)
3073 new_where = dep_where;
3074 ncd = dep_ncd;
3076 else
3077 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3078 dep_where, &new_where);
3080 /* If the candidate's increment doesn't match the one we're interested
3081 in (and neither do any increments for feeding defs of a phi-dependence),
3082 then the result depends only on siblings and dependents. */
3083 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3085 if (!this_ncd || cand_already_replaced (c))
3087 *where = new_where;
3088 return ncd;
3091 /* Otherwise, compare this candidate with the result from all siblings
3092 and dependents. */
3093 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3095 return ncd;
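/* Illustrative example (invented CFG): if the only two candidates
   needing increment I sit in the two arms of a diamond,

              BB1
             /   \
           BB2   BB3        one candidate in each arm
             \   /
              BB4

   the NCD computed here is BB1, so insert_initializers will place
   T_0 = stride * I in BB1 (just before its closing branch), where it
   dominates both uses, and *WHERE is left NULL because neither
   candidate lives in BB1.  */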
3098 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3100 static inline bool
3101 profitable_increment_p (unsigned index)
3103 return (incr_vec[index].cost <= COST_NEUTRAL);
3106 /* For each profitable increment in the increment vector not equal to
3107 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3108 dominator of all statements in the candidate chain rooted at C
3109 that require that increment, and insert an initializer
3110 T_0 = stride * increment at that location. Record T_0 with the
3111 increment record. */
3113 static void
3114 insert_initializers (slsr_cand_t c)
3116 unsigned i;
3118 for (i = 0; i < incr_vec_len; i++)
3120 basic_block bb;
3121 slsr_cand_t where = NULL;
3122 gimple init_stmt;
3123 tree stride_type, new_name, incr_tree;
3124 widest_int incr = incr_vec[i].incr;
3126 if (!profitable_increment_p (i)
3127 || incr == 1
3128 || (incr == -1
3129 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3130 || incr == 0)
3131 continue;
3133 /* We may have already identified an existing initializer that
3134 will suffice. */
3135 if (incr_vec[i].initializer)
3137 if (dump_file && (dump_flags & TDF_DETAILS))
3139 fputs ("Using existing initializer: ", dump_file);
3140 print_gimple_stmt (dump_file,
3141 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3142 0, 0);
3144 continue;
3147 /* Find the block that most closely dominates all candidates
3148 with this increment. If there is at least one candidate in
3149 that block, the earliest one will be returned in WHERE. */
3150 bb = nearest_common_dominator_for_cands (c, incr, &where);
3152 /* Create a new SSA name to hold the initializer's value. */
3153 stride_type = TREE_TYPE (c->stride);
3154 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3155 incr_vec[i].initializer = new_name;
3157 /* Create the initializer and insert it in the latest possible
3158 dominating position. */
3159 incr_tree = wide_int_to_tree (stride_type, incr);
3160 init_stmt = gimple_build_assign_with_ops (MULT_EXPR, new_name,
3161 c->stride, incr_tree);
3162 if (where)
3164 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3165 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3166 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3168 else
3170 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3171 gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
3173 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3174 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3175 else
3176 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3178 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3181 if (dump_file && (dump_flags & TDF_DETAILS))
3183 fputs ("Inserting initializer: ", dump_file);
3184 print_gimple_stmt (dump_file, init_stmt, 0, 0);
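/* Illustrative end-to-end example (invented names) for a stride that
   is an SSA name rather than a constant:

     x_1 = b_2 + 5 * s_3;
     y_4 = b_2 + 8 * s_3;       basis x_1, increment 3
     z_5 = b_2 + 11 * s_3;      basis y_4, increment 3

   If increment 3 is judged profitable, a single initializer

     t_0 = s_3 * 3;

   is inserted at the nearest common dominator of y_4 and z_5, and the
   dependent candidates are then rewritten by replace_one_candidate as

     y_4 = x_1 + t_0;
     z_5 = y_4 + t_0;  */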
3189 /* Return TRUE iff all required increments for candidates feeding PHI
3190 are profitable to replace on behalf of candidate C. */
3192 static bool
3193 all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
3195 unsigned i;
3196 slsr_cand_t basis = lookup_cand (c->basis);
3197 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3199 for (i = 0; i < gimple_phi_num_args (phi); i++)
3201 tree arg = gimple_phi_arg_def (phi, i);
3203 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3205 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3207 if (gimple_code (arg_def) == GIMPLE_PHI)
3209 if (!all_phi_incrs_profitable (c, arg_def))
3210 return false;
3212 else
3214 int j;
3215 slsr_cand_t arg_cand = base_cand_from_table (arg);
3216 widest_int increment = arg_cand->index - basis->index;
3218 if (!address_arithmetic_p && wi::neg_p (increment))
3219 increment = -increment;
3221 j = incr_vec_index (increment);
3223 if (dump_file && (dump_flags & TDF_DETAILS))
3225 fprintf (dump_file, " Conditional candidate %d, phi: ",
3226 c->cand_num);
3227 print_gimple_stmt (dump_file, phi, 0, 0);
3228 fputs (" increment: ", dump_file);
3229 print_decs (increment, dump_file);
3230 if (j < 0)
3231 fprintf (dump_file,
3232 "\n Not replaced; incr_vec overflow.\n");
3233 else {
3234 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3235 if (profitable_increment_p (j))
3236 fputs (" Replacing...\n", dump_file);
3237 else
3238 fputs (" Not replaced.\n", dump_file);
3242 if (j < 0 || !profitable_increment_p (j))
3243 return false;
3248 return true;
3251 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3252 type TO_TYPE, and insert it in front of the statement represented
3253 by candidate C.  Return the new SSA name. */
3256 static tree
3257 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3259 tree cast_lhs;
3260 gimple cast_stmt;
3261 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3263 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3264 cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, cast_lhs,
3265 from_expr, NULL_TREE);
3266 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3267 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3269 if (dump_file && (dump_flags & TDF_DETAILS))
3271 fputs (" Inserting: ", dump_file);
3272 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3275 return cast_lhs;
3278 /* Replace the RHS of the statement represented by candidate C with
3279 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3280 leave C unchanged or just interchange its operands. The original
3281 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3282 If the replacement was made and we are doing a details dump,
3283 return the revised statement, else NULL. */
3285 static gimple
3286 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3287 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3288 slsr_cand_t c)
3290 if (new_code != old_code
3291 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3292 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3293 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3294 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3296 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3297 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3298 update_stmt (gsi_stmt (gsi));
3299 c->cand_stmt = gsi_stmt (gsi);
3301 if (dump_file && (dump_flags & TDF_DETAILS))
3302 return gsi_stmt (gsi);
3305 else if (dump_file && (dump_flags & TDF_DETAILS))
3306 fputs (" (duplicate, not actually replacing)\n", dump_file);
3308 return NULL;
3311 /* Strength-reduce the statement represented by candidate C by replacing
3312 it with an equivalent addition or subtraction. I is the index into
3313 the increment vector identifying C's increment.  A new SSA name is
3314 created if a cast needs to be introduced.  BASIS_NAME
3315 is the rhs1 to use in creating the add/subtract. */
3317 static void
3318 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3320 gimple stmt_to_print = NULL;
3321 tree orig_rhs1, orig_rhs2;
3322 tree rhs2;
3323 enum tree_code orig_code, repl_code;
3324 widest_int cand_incr;
3326 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3327 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3328 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3329 cand_incr = cand_increment (c);
3331 if (dump_file && (dump_flags & TDF_DETAILS))
3333 fputs ("Replacing: ", dump_file);
3334 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3335 stmt_to_print = c->cand_stmt;
3338 if (address_arithmetic_p)
3339 repl_code = POINTER_PLUS_EXPR;
3340 else
3341 repl_code = PLUS_EXPR;
3343 /* If the increment has an initializer T_0, replace the candidate
3344 statement with an add of the basis name and the initializer. */
3345 if (incr_vec[i].initializer)
3347 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3348 tree orig_type = TREE_TYPE (orig_rhs2);
3350 if (types_compatible_p (orig_type, init_type))
3351 rhs2 = incr_vec[i].initializer;
3352 else
3353 rhs2 = introduce_cast_before_cand (c, orig_type,
3354 incr_vec[i].initializer);
3356 if (incr_vec[i].incr != cand_incr)
3358 gcc_assert (repl_code == PLUS_EXPR);
3359 repl_code = MINUS_EXPR;
3362 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3363 orig_code, orig_rhs1, orig_rhs2,
3364 c);
3367 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3368 with a subtract of the stride from the basis name, a copy
3369 from the basis name, or an add of the stride to the basis
3370 name, respectively. It may be necessary to introduce a
3371 cast (or reuse an existing cast). */
3372 else if (cand_incr == 1)
3374 tree stride_type = TREE_TYPE (c->stride);
3375 tree orig_type = TREE_TYPE (orig_rhs2);
3377 if (types_compatible_p (orig_type, stride_type))
3378 rhs2 = c->stride;
3379 else
3380 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3382 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3383 orig_code, orig_rhs1, orig_rhs2,
3384 c);
3387 else if (cand_incr == -1)
3389 tree stride_type = TREE_TYPE (c->stride);
3390 tree orig_type = TREE_TYPE (orig_rhs2);
3391 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3393 if (types_compatible_p (orig_type, stride_type))
3394 rhs2 = c->stride;
3395 else
3396 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3398 if (orig_code != MINUS_EXPR
3399 || !operand_equal_p (basis_name, orig_rhs1, 0)
3400 || !operand_equal_p (rhs2, orig_rhs2, 0))
3402 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3403 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3404 update_stmt (gsi_stmt (gsi));
3405 c->cand_stmt = gsi_stmt (gsi);
3407 if (dump_file && (dump_flags & TDF_DETAILS))
3408 stmt_to_print = gsi_stmt (gsi);
3410 else if (dump_file && (dump_flags & TDF_DETAILS))
3411 fputs (" (duplicate, not actually replacing)\n", dump_file);
3414 else if (cand_incr == 0)
3416 tree lhs = gimple_assign_lhs (c->cand_stmt);
3417 tree lhs_type = TREE_TYPE (lhs);
3418 tree basis_type = TREE_TYPE (basis_name);
3420 if (types_compatible_p (lhs_type, basis_type))
3422 gimple copy_stmt = gimple_build_assign (lhs, basis_name);
3423 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3424 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3425 gsi_replace (&gsi, copy_stmt, false);
3426 c->cand_stmt = copy_stmt;
3428 if (dump_file && (dump_flags & TDF_DETAILS))
3429 stmt_to_print = copy_stmt;
3431 else
3433 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3434 gimple cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
3435 basis_name,
3436 NULL_TREE);
3437 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3438 gsi_replace (&gsi, cast_stmt, false);
3439 c->cand_stmt = cast_stmt;
3441 if (dump_file && (dump_flags & TDF_DETAILS))
3442 stmt_to_print = cast_stmt;
3445 else
3446 gcc_unreachable ();
3448 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3450 fputs ("With: ", dump_file);
3451 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3452 fputs ("\n", dump_file);
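/* Quick illustration of the -1, 0, and 1 cases handled above (invented
   names; basis y_3, stride s_2):

     increment  1:   x_4 = y_3 + s_2;
     increment -1:   x_4 = y_3 - s_2;
     increment  0:   x_4 = y_3;        (or a cast of y_3 when the types
                                        differ)

   None of these requires an initializer, which is why
   analyze_increments treats them as unconditionally profitable,
   except for -1 under pointer arithmetic.  */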
3456 /* For each candidate in the tree rooted at C, replace it with
3457 an increment if such has been shown to be profitable. */
3459 static void
3460 replace_profitable_candidates (slsr_cand_t c)
3462 if (!cand_already_replaced (c))
3464 widest_int increment = cand_abs_increment (c);
3465 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3466 int i;
3468 i = incr_vec_index (increment);
3470 /* Only process profitable increments. Nothing useful can be done
3471 to a cast or copy. */
3472 if (i >= 0
3473 && profitable_increment_p (i)
3474 && orig_code != MODIFY_EXPR
3475 && !CONVERT_EXPR_CODE_P (orig_code))
3477 if (phi_dependent_cand_p (c))
3479 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
3481 if (all_phi_incrs_profitable (c, phi))
3483 /* Look up the LHS SSA name from C's basis. This will be
3484 the RHS1 of the adds we will introduce to create new
3485 phi arguments. */
3486 slsr_cand_t basis = lookup_cand (c->basis);
3487 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3489 /* Create a new phi statement that will represent C's true
3490 basis after the transformation is complete. */
3491 location_t loc = gimple_location (c->cand_stmt);
3492 tree name = create_phi_basis (c, phi, basis_name,
3493 loc, UNKNOWN_STRIDE);
3495 /* Replace C with an add of the new basis phi and the
3496 increment. */
3497 replace_one_candidate (c, i, name);
3500 else
3502 slsr_cand_t basis = lookup_cand (c->basis);
3503 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3504 replace_one_candidate (c, i, basis_name);
3509 if (c->sibling)
3510 replace_profitable_candidates (lookup_cand (c->sibling));
3512 if (c->dependent)
3513 replace_profitable_candidates (lookup_cand (c->dependent));
3516 /* Analyze costs of related candidates in the candidate vector,
3517 and make beneficial replacements. */
3519 static void
3520 analyze_candidates_and_replace (void)
3522 unsigned i;
3523 slsr_cand_t c;
3525 /* Each candidate that has a null basis and a non-null
3526 dependent is the root of a tree of related statements.
3527 Analyze each tree to determine a subset of those
3528 statements that can be replaced with maximum benefit. */
3529 FOR_EACH_VEC_ELT (cand_vec, i, c)
3531 slsr_cand_t first_dep;
3533 if (c->basis != 0 || c->dependent == 0)
3534 continue;
3536 if (dump_file && (dump_flags & TDF_DETAILS))
3537 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3538 c->cand_num);
3540 first_dep = lookup_cand (c->dependent);
3542 /* If this is a chain of CAND_REFs, unconditionally replace
3543 each of them with a strength-reduced data reference. */
3544 if (c->kind == CAND_REF)
3545 replace_refs (c);
3547 /* If the common stride of all related candidates is a known
3548 constant, each candidate without a phi-dependence can be
3549 profitably replaced. Each replaces a multiply by a single
3550 add, with the possibility that a feeding add also goes dead.
3551 A candidate with a phi-dependence is replaced only if the
3552 compensation code it requires is offset by the strength
3553 reduction savings. */
3554 else if (TREE_CODE (c->stride) == INTEGER_CST)
3555 replace_uncond_cands_and_profitable_phis (first_dep);
3557 /* When the stride is an SSA name, it may still be profitable
3558 to replace some or all of the dependent candidates, depending
3559 on whether the introduced increments can be reused, or are
3560 less expensive to calculate than the replaced statements. */
3561 else
3563 machine_mode mode;
3564 bool speed;
3566 /* Determine whether we'll be generating pointer arithmetic
3567 when replacing candidates. */
3568 address_arithmetic_p = (c->kind == CAND_ADD
3569 && POINTER_TYPE_P (c->cand_type));
3571 /* If all candidates have already been replaced under other
3572 interpretations, nothing remains to be done. */
3573 if (!count_candidates (c))
3574 continue;
3576 /* Construct an array of increments for this candidate chain. */
3577 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3578 incr_vec_len = 0;
3579 record_increments (c);
3581 /* Determine which increments are profitable to replace. */
3582 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3583 speed = optimize_cands_for_speed_p (c);
3584 analyze_increments (first_dep, mode, speed);
3586 /* Insert initializers of the form T_0 = stride * increment
3587 for use in profitable replacements. */
3588 insert_initializers (first_dep);
3589 dump_incr_vec ();
3591 /* Perform the replacements. */
3592 replace_profitable_candidates (first_dep);
3593 free (incr_vec);
3598 namespace {
3600 const pass_data pass_data_strength_reduction =
3602 GIMPLE_PASS, /* type */
3603 "slsr", /* name */
3604 OPTGROUP_NONE, /* optinfo_flags */
3605 TV_GIMPLE_SLSR, /* tv_id */
3606 ( PROP_cfg | PROP_ssa ), /* properties_required */
3607 0, /* properties_provided */
3608 0, /* properties_destroyed */
3609 0, /* todo_flags_start */
3610 0, /* todo_flags_finish */
3613 class pass_strength_reduction : public gimple_opt_pass
3615 public:
3616 pass_strength_reduction (gcc::context *ctxt)
3617 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3620 /* opt_pass methods: */
3621 virtual bool gate (function *) { return flag_tree_slsr; }
3622 virtual unsigned int execute (function *);
3624 }; // class pass_strength_reduction
3626 unsigned
3627 pass_strength_reduction::execute (function *fun)
3629 /* Create the obstack where candidates will reside. */
3630 gcc_obstack_init (&cand_obstack);
3632 /* Allocate the candidate vector. */
3633 cand_vec.create (128);
3635 /* Allocate the mapping from statements to candidate indices. */
3636 stmt_cand_map = new hash_map<gimple, slsr_cand_t>;
3638 /* Create the obstack where candidate chains will reside. */
3639 gcc_obstack_init (&chain_obstack);
3641 /* Allocate the mapping from base expressions to candidate chains. */
3642 base_cand_map = new hash_table<cand_chain_hasher> (500);
3644 /* Allocate the mapping from bases to alternative bases. */
3645 alt_base_map = new hash_map<tree, tree>;
3647 /* Initialize the loop optimizer. We need to detect flow across
3648 back edges, and this gives us dominator information as well. */
3649 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3651 /* Walk the CFG in predominator order looking for strength reduction
3652 candidates. */
3653 find_candidates_dom_walker (CDI_DOMINATORS)
3654 .walk (fun->cfg->x_entry_block_ptr);
3656 if (dump_file && (dump_flags & TDF_DETAILS))
3658 dump_cand_vec ();
3659 dump_cand_chains ();
3662 delete alt_base_map;
3663 free_affine_expand_cache (&name_expansions);
3665 /* Analyze costs and make appropriate replacements. */
3666 analyze_candidates_and_replace ();
3668 loop_optimizer_finalize ();
3669 delete base_cand_map;
3670 base_cand_map = NULL;
3671 obstack_free (&chain_obstack, NULL);
3672 delete stmt_cand_map;
3673 cand_vec.release ();
3674 obstack_free (&cand_obstack, NULL);
3676 return 0;
3679 } // anon namespace
3681 gimple_opt_pass *
3682 make_pass_strength_reduction (gcc::context *ctxt)
3684 return new pass_strength_reduction (ctxt);