gcc/gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2014 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tree.h"
40 #include "hash-map.h"
41 #include "hash-table.h"
42 #include "predict.h"
43 #include "vec.h"
44 #include "hashtab.h"
45 #include "hash-set.h"
46 #include "machmode.h"
47 #include "tm.h"
48 #include "hard-reg-set.h"
49 #include "input.h"
50 #include "function.h"
51 #include "dominance.h"
52 #include "cfg.h"
53 #include "basic-block.h"
54 #include "tree-ssa-alias.h"
55 #include "internal-fn.h"
56 #include "gimple-expr.h"
57 #include "is-a.h"
58 #include "gimple.h"
59 #include "gimple-iterator.h"
60 #include "gimplify-me.h"
61 #include "stor-layout.h"
62 #include "expr.h"
63 #include "tree-pass.h"
64 #include "cfgloop.h"
65 #include "gimple-pretty-print.h"
66 #include "gimple-ssa.h"
67 #include "tree-cfg.h"
68 #include "tree-phinodes.h"
69 #include "ssa-iterators.h"
70 #include "stringpool.h"
71 #include "tree-ssanames.h"
72 #include "domwalk.h"
73 #include "expmed.h"
74 #include "params.h"
75 #include "tree-ssa-address.h"
76 #include "tree-affine.h"
77 #include "wide-int-print.h"
78 #include "builtins.h"
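
/* Exposition only (hypothetical source, compiled out; not part of this
   pass): straight-line code containing multiplies implicit in addressing,
   of the kind described in the opening comment.  IVOPTS will not touch
   these, but this pass can express the second address in terms of the
   first.  Assumes a 4-byte int.  */
#if 0
static void
slsr_motivation_example (int *p, int i, int n)
{
  p[4 * i] = n;       /* address is p + i*16 */
  p[4 * i + 1] = n;   /* address is p + i*16 + 4; can reuse the multiply */
}
#endif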
80 /* Information about a strength reduction candidate. Each statement
81 in the candidate table represents an expression of one of the
82 following forms (the special case of CAND_REF will be described
83 later):
85 (CAND_MULT) S1: X = (B + i) * S
86 (CAND_ADD) S1: X = B + (i * S)
88 Here X and B are SSA names, i is an integer constant, and S is
89 either an SSA name or a constant. We call B the "base," i the
90 "index", and S the "stride."
92 Any statement S0 that dominates S1 and is of the form:
94 (CAND_MULT) S0: Y = (B + i') * S
95 (CAND_ADD) S0: Y = B + (i' * S)
97 is called a "basis" for S1. In both cases, S1 may be replaced by
99 S1': X = Y + (i - i') * S,
101 where (i - i') * S is folded to the extent possible.
103 All gimple statements are visited in dominator order, and each
104 statement that may contribute to one of the forms of S1 above is
105 given at least one entry in the candidate table. Such statements
106 include addition, pointer addition, subtraction, multiplication,
107 negation, copies, and nontrivial type casts. If a statement may
108 represent more than one expression of the forms of S1 above,
109 multiple "interpretations" are stored in the table and chained
110 together. Examples:
112 * An add of two SSA names may treat either operand as the base.
113 * A multiply of two SSA names, likewise.
114 * A copy or cast may be thought of as either a CAND_MULT with
115 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
117 Candidate records are allocated from an obstack. They are addressed
118 both from a hash table keyed on S1, and from a vector of candidate
119 pointers arranged in predominator order.
121 Opportunity note
122 ----------------
123 Currently we don't recognize:
125 S0: Y = (S * i') - B
126 S1: X = (S * i) - B
128 as a strength reduction opportunity, even though this S1 would
129 also be replaceable by the S1' above. This can be added if it
130 comes up in practice.
132 Strength reduction in addressing
133 --------------------------------
134 There is another kind of candidate known as CAND_REF. A CAND_REF
135 describes a statement containing a memory reference having
136 complex addressing that might benefit from strength reduction.
137 Specifically, we are interested in references for which
138 get_inner_reference returns a base address, offset, and bitpos as
139 follows:
141 base: MEM_REF (T1, C1)
142 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
143 bitpos: C4 * BITS_PER_UNIT
145 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
146 arbitrary integer constants. Note that C2 may be zero, in which
147 case the offset will be MULT_EXPR (T2, C3).
149 When this pattern is recognized, the original memory reference
150 can be replaced with:
152 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
153 C1 + (C2 * C3) + C4)
155 which distributes the multiply to allow constant folding. When
156 two or more addressing expressions can be represented by MEM_REFs
157 of this form, differing only in the constants C1, C2, and C4,
158 making this substitution produces more efficient addressing during
159 the RTL phases. When there are not at least two expressions with
160 the same values of T1, T2, and C3, there is nothing to be gained
161 by the replacement.
163 Strength reduction of CAND_REFs uses the same infrastructure as
164 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
165 field, MULT_EXPR (T2, C3) in the stride (S) field, and
166 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
167 is thus another CAND_REF with the same B and S values. When at
168 least two CAND_REFs are chained together using the basis relation,
169 each of them is replaced as above, resulting in improved code
170 generation for addressing.
172 Conditional candidates
173 ======================
175 Conditional candidates are best illustrated with an example.
176 Consider the code sequence:
178 (1) x_0 = ...;
179 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
180 if (...)
181 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
182 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
183 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
184 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
186 Here strength reduction is complicated by the uncertain value of x_2.
187 A legitimate transformation is:
189 (1) x_0 = ...;
190 (2) a_0 = x_0 * 5;
191 if (...)
193 (3) [x_1 = x_0 + 1;]
194 (3a) t_1 = a_0 + 5;
196 (4) [x_2 = PHI <x_0, x_1>;]
197 (4a) t_2 = PHI <a_0, t_1>;
198 (5) [x_3 = x_2 + 1;]
199 (6r) a_1 = t_2 + 5;
201 where the bracketed instructions may go dead.
203 To recognize this opportunity, we have to observe that statement (6)
204 has a "hidden basis" (2). The hidden basis is unlike a normal basis
205 in that the statement and the hidden basis have different base SSA
206 names (x_2 and x_0, respectively). The relationship is established
207 when a statement's base name (x_2) is defined by a phi statement (4),
208 each argument of which (x_0, x_1) has an identical "derived base name."
209 If the argument is defined by a candidate (as x_1 is by (3)) that is a
210 CAND_ADD having a stride of 1, the derived base name of the argument is
211 the base name of the candidate (x_0). Otherwise, the argument itself
212 is its derived base name (as is the case with argument x_0).
214 The hidden basis for statement (6) is the nearest dominating candidate
215 whose base name is the derived base name (x_0) of the feeding phi (4),
216 and whose stride is identical to that of the statement. We can then
217 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
218 allowing the final replacement of (6) by the strength-reduced (6r).
220 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
221 A CAND_PHI is not a candidate for replacement, but is maintained in the
222 candidate table to ease discovery of hidden bases. Any phi statement
223 whose arguments share a common derived base name is entered into the
224 table with the derived base name, an (arbitrary) index of zero, and a
225 stride of 1. A statement with a hidden basis can then be detected by
226 simply looking up its feeding phi definition in the candidate table,
227 extracting the derived base name, and searching for a basis in the
228 usual manner after substituting the derived base name.
230 Note that the transformation is only valid when the original phi and
231 the statements that define the phi's arguments are all at the same
232 position in the loop hierarchy. */
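
/* Exposition only (hypothetical source, compiled out): the basic
   replacement described in the commentary above.  The statement defining
   x2 is a CAND_MULT with base b, index 6, and stride s; its basis is the
   statement defining x1 (index 2), so (i - i') = 4 and x2 can be
   rewritten as x1 + 4 * s.  */
#if 0
static int
slsr_basis_example (int b, int s)
{
  int x1 = (b + 2) * s;   /* S0: the basis */
  int x2 = (b + 6) * s;   /* S1: replaced by S1': x2 = x1 + 4 * s */
  return x1 + x2;
}
#endif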
235 /* Index into the candidate vector, offset by 1. VECs are zero-based,
236 while cand_idx's are one-based, with zero indicating null. */
237 typedef unsigned cand_idx;
239 /* The kind of candidate. */
240 enum cand_kind
242 CAND_MULT,
243 CAND_ADD,
244 CAND_REF,
245 CAND_PHI
248 struct slsr_cand_d
250 /* The candidate statement S1. */
251 gimple cand_stmt;
253 /* The base expression B: often an SSA name, but not always. */
254 tree base_expr;
256 /* The stride S. */
257 tree stride;
259 /* The index constant i. */
260 widest_int index;
262 /* The type of the candidate. This is normally the type of base_expr,
263 but casts may have occurred when combining feeding instructions.
264 A candidate can only be a basis for candidates of the same final type.
265 (For CAND_REFs, this is the type to be used for operand 1 of the
266 replacement MEM_REF.) */
267 tree cand_type;
269 /* The kind of candidate (CAND_MULT, etc.). */
270 enum cand_kind kind;
272 /* Index of this candidate in the candidate vector. */
273 cand_idx cand_num;
275 /* Index of the next candidate record for the same statement.
276 A statement may be useful in more than one way (e.g., due to
277 commutativity). So we can have multiple "interpretations"
278 of a statement. */
279 cand_idx next_interp;
281 /* Index of the basis statement S0, if any, in the candidate vector. */
282 cand_idx basis;
284 /* First candidate for which this candidate is a basis, if one exists. */
285 cand_idx dependent;
287 /* Next candidate having the same basis as this one. */
288 cand_idx sibling;
290 /* If this is a conditional candidate, the CAND_PHI candidate
291 that defines the base SSA name B. */
292 cand_idx def_phi;
294 /* Savings that can be expected from eliminating dead code if this
295 candidate is replaced. */
296 int dead_savings;
299 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
300 typedef const struct slsr_cand_d *const_slsr_cand_t;
302 /* Pointers to candidates are chained together as part of a mapping
303 from base expressions to the candidates that use them. */
305 struct cand_chain_d
307 /* Base expression for the chain of candidates: often, but not
308 always, an SSA name. */
309 tree base_expr;
311 /* Pointer to a candidate. */
312 slsr_cand_t cand;
314 /* Chain pointer. */
315 struct cand_chain_d *next;
319 typedef struct cand_chain_d cand_chain, *cand_chain_t;
320 typedef const struct cand_chain_d *const_cand_chain_t;
322 /* Information about a unique "increment" associated with candidates
323 having an SSA name for a stride. An increment is the difference
324 between the index of the candidate and the index of its basis,
325 i.e., (i - i') as discussed in the module commentary.
327 When we are not going to generate address arithmetic we treat
328 increments that differ only in sign as the same, allowing sharing
329 of the cost of initializers. The absolute value of the increment
330 is stored in the incr_info. */
332 struct incr_info_d
334 /* The increment that relates a candidate to its basis. */
335 widest_int incr;
337 /* How many times the increment occurs in the candidate tree. */
338 unsigned count;
340 /* Cost of replacing candidates using this increment. Negative and
341 zero costs indicate replacement should be performed. */
342 int cost;
344 /* If this increment is profitable but is not -1, 0, or 1, it requires
345 an initializer T_0 = stride * incr to be found or introduced in the
346 nearest common dominator of all candidates. This field holds T_0
347 for subsequent use. */
348 tree initializer;
350 /* If the initializer was found to already exist, this is the block
351 where it was found. */
352 basic_block init_bb;
355 typedef struct incr_info_d incr_info, *incr_info_t;
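
/* Exposition only (hypothetical source, compiled out): two replacements
   that share the same increment 5 with an SSA stride s.  A single
   initializer t0 = 5 * s can be introduced in a dominating block and used
   for both replacements; this sharing is what the incr_info records
   track.  */
#if 0
static int
slsr_increment_example (int b, int s)
{
  int x0 = b * s;          /* basis for x1 */
  int x1 = (b + 5) * s;    /* becomes x0 + t0; increment 5 */
  int x2 = (b + 10) * s;   /* becomes x1 + t0; increment 5 */
  return x0 + x1 + x2;
}
#endif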
357 /* Candidates are maintained in a vector. If candidate X dominates
358 candidate Y, then X appears before Y in the vector; but the
359 converse does not necessarily hold. */
360 static vec<slsr_cand_t> cand_vec;
362 enum cost_consts
364 COST_NEUTRAL = 0,
365 COST_INFINITE = 1000
368 enum stride_status
370 UNKNOWN_STRIDE = 0,
371 KNOWN_STRIDE = 1
374 enum phi_adjust_status
376 NOT_PHI_ADJUST = 0,
377 PHI_ADJUST = 1
380 enum count_phis_status
382 DONT_COUNT_PHIS = 0,
383 COUNT_PHIS = 1
386 /* Pointer map embodying a mapping from statements to candidates. */
387 static hash_map<gimple, slsr_cand_t> *stmt_cand_map;
389 /* Obstack for candidates. */
390 static struct obstack cand_obstack;
392 /* Obstack for candidate chains. */
393 static struct obstack chain_obstack;
395 /* An array INCR_VEC of incr_infos is used during analysis of related
396 candidates having an SSA name for a stride. INCR_VEC_LEN describes
397 its current length. MAX_INCR_VEC_LEN is used to avoid costly
398 pathological cases. */
399 static incr_info_t incr_vec;
400 static unsigned incr_vec_len;
401 const int MAX_INCR_VEC_LEN = 16;
403 /* For a chain of candidates with unknown stride, indicates whether or not
404 we must generate pointer arithmetic when replacing statements. */
405 static bool address_arithmetic_p;
407 /* Forward function declarations. */
408 static slsr_cand_t base_cand_from_table (tree);
409 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
410 static bool legal_cast_p_1 (tree, tree);
412 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
414 static slsr_cand_t
415 lookup_cand (cand_idx idx)
417 return cand_vec[idx - 1];
420 /* Helper for hashing a candidate chain header. */
422 struct cand_chain_hasher : typed_noop_remove <cand_chain>
424 typedef cand_chain value_type;
425 typedef cand_chain compare_type;
426 static inline hashval_t hash (const value_type *);
427 static inline bool equal (const value_type *, const compare_type *);
430 inline hashval_t
431 cand_chain_hasher::hash (const value_type *p)
433 tree base_expr = p->base_expr;
434 return iterative_hash_expr (base_expr, 0);
437 inline bool
438 cand_chain_hasher::equal (const value_type *chain1, const compare_type *chain2)
440 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
443 /* Hash table embodying a mapping from base exprs to chains of candidates. */
444 static hash_table<cand_chain_hasher> *base_cand_map;
446 /* Pointer map used by tree_to_aff_combination_expand. */
447 static hash_map<tree, name_expansion *> *name_expansions;
448 /* Pointer map embodying a mapping from bases to alternative bases. */
449 static hash_map<tree, tree> *alt_base_map;
451 /* Given BASE, use the tree affine combination facilities to
452 find the underlying tree expression for BASE, with any
453 immediate offset excluded.
455 N.B. we should eliminate this backtracking with better forward
456 analysis in a future release. */
458 static tree
459 get_alternative_base (tree base)
461 tree *result = alt_base_map->get (base);
463 if (result == NULL)
465 tree expr;
466 aff_tree aff;
468 tree_to_aff_combination_expand (base, TREE_TYPE (base),
469 &aff, &name_expansions);
470 aff.offset = 0;
471 expr = aff_combination_to_tree (&aff);
473 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
475 return expr == base ? NULL : expr;
478 return *result;
481 /* Look in the candidate table for a CAND_PHI that defines BASE and
482 return it if found; otherwise return NULL. */
484 static cand_idx
485 find_phi_def (tree base)
487 slsr_cand_t c;
489 if (TREE_CODE (base) != SSA_NAME)
490 return 0;
492 c = base_cand_from_table (base);
494 if (!c || c->kind != CAND_PHI)
495 return 0;
497 return c->cand_num;
500 /* Helper routine for find_basis_for_candidate. May be called twice:
501 once for the candidate's base expr, and optionally again either for
502 the candidate's phi definition or for a CAND_REF's alternative base
503 expression. */
505 static slsr_cand_t
506 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
508 cand_chain mapping_key;
509 cand_chain_t chain;
510 slsr_cand_t basis = NULL;
512 // Limit potential of N^2 behavior for long candidate chains.
513 int iters = 0;
514 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
516 mapping_key.base_expr = base_expr;
517 chain = base_cand_map->find (&mapping_key);
519 for (; chain && iters < max_iters; chain = chain->next, ++iters)
521 slsr_cand_t one_basis = chain->cand;
523 if (one_basis->kind != c->kind
524 || one_basis->cand_stmt == c->cand_stmt
525 || !operand_equal_p (one_basis->stride, c->stride, 0)
526 || !types_compatible_p (one_basis->cand_type, c->cand_type)
527 || !dominated_by_p (CDI_DOMINATORS,
528 gimple_bb (c->cand_stmt),
529 gimple_bb (one_basis->cand_stmt)))
530 continue;
532 if (!basis || basis->cand_num < one_basis->cand_num)
533 basis = one_basis;
536 return basis;
539 /* Use the base expr from candidate C to look for possible candidates
540 that can serve as a basis for C. Each potential basis must also
541 appear in a block that dominates the candidate statement and have
542 the same stride and type. If more than one possible basis exists,
543 the one with highest index in the vector is chosen; this will be
544 the most immediately dominating basis. */
546 static int
547 find_basis_for_candidate (slsr_cand_t c)
549 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
551 /* If a candidate doesn't have a basis using its base expression,
552 it may have a basis hidden by one or more intervening phis. */
553 if (!basis && c->def_phi)
555 basic_block basis_bb, phi_bb;
556 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
557 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
559 if (basis)
561 /* A hidden basis must dominate the phi-definition of the
562 candidate's base name. */
563 phi_bb = gimple_bb (phi_cand->cand_stmt);
564 basis_bb = gimple_bb (basis->cand_stmt);
566 if (phi_bb == basis_bb
567 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
569 basis = NULL;
570 c->basis = 0;
573 /* If we found a hidden basis, estimate additional dead-code
574 savings if the phi and its feeding statements can be removed. */
575 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
576 c->dead_savings += phi_cand->dead_savings;
580 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
582 tree alt_base_expr = get_alternative_base (c->base_expr);
583 if (alt_base_expr)
584 basis = find_basis_for_base_expr (c, alt_base_expr);
587 if (basis)
589 c->sibling = basis->dependent;
590 basis->dependent = c->cand_num;
591 return basis->cand_num;
594 return 0;
597 /* Record a mapping from BASE to C, indicating that C may potentially serve
598 as a basis using that base expression. BASE may be the same as
599 C->BASE_EXPR; alternatively BASE can be a different tree that shares the
600 underlying expression of C->BASE_EXPR. */
602 static void
603 record_potential_basis (slsr_cand_t c, tree base)
605 cand_chain_t node;
606 cand_chain **slot;
608 gcc_assert (base);
610 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
611 node->base_expr = base;
612 node->cand = c;
613 node->next = NULL;
614 slot = base_cand_map->find_slot (node, INSERT);
616 if (*slot)
618 cand_chain_t head = (cand_chain_t) (*slot);
619 node->next = head->next;
620 head->next = node;
622 else
623 *slot = node;
626 /* Allocate storage for a new candidate and initialize its fields.
627 Attempt to find a basis for the candidate.
629 For CAND_REF, an alternative base may also be recorded and used
630 to find a basis. This helps cases where the expression hidden
631 behind BASE (which is usually an SSA_NAME) has immediate offset,
632 e.g.
634 a2[i][j] = 1;
635 a2[i + 20][j] = 2; */
637 static slsr_cand_t
638 alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
639 const widest_int &index, tree stride, tree ctype,
640 unsigned savings)
642 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
643 sizeof (slsr_cand));
644 c->cand_stmt = gs;
645 c->base_expr = base;
646 c->stride = stride;
647 c->index = index;
648 c->cand_type = ctype;
649 c->kind = kind;
650 c->cand_num = cand_vec.length () + 1;
651 c->next_interp = 0;
652 c->dependent = 0;
653 c->sibling = 0;
654 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
655 c->dead_savings = savings;
657 cand_vec.safe_push (c);
659 if (kind == CAND_PHI)
660 c->basis = 0;
661 else
662 c->basis = find_basis_for_candidate (c);
664 record_potential_basis (c, base);
665 if (flag_expensive_optimizations && kind == CAND_REF)
667 tree alt_base = get_alternative_base (base);
668 if (alt_base)
669 record_potential_basis (c, alt_base);
672 return c;
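
/* Exposition only (hypothetical source, compiled out): the
   alternative-base case mentioned in the comment above.  The base of the
   second store differs from the first only by an immediate offset, which
   get_alternative_base strips so that the two CAND_REFs can share a
   basis.  */
#if 0
static void
slsr_alt_base_example (int a2[][10], int i, int j)
{
  a2[i][j] = 1;
  a2[i + 20][j] = 2;
}
#endif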
675 /* Determine the target cost of statement GS when compiling according
676 to SPEED. */
678 static int
679 stmt_cost (gimple gs, bool speed)
681 tree lhs, rhs1, rhs2;
682 machine_mode lhs_mode;
684 gcc_assert (is_gimple_assign (gs));
685 lhs = gimple_assign_lhs (gs);
686 rhs1 = gimple_assign_rhs1 (gs);
687 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
689 switch (gimple_assign_rhs_code (gs))
691 case MULT_EXPR:
692 rhs2 = gimple_assign_rhs2 (gs);
694 if (tree_fits_shwi_p (rhs2))
695 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
697 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
698 return mul_cost (speed, lhs_mode);
700 case PLUS_EXPR:
701 case POINTER_PLUS_EXPR:
702 case MINUS_EXPR:
703 return add_cost (speed, lhs_mode);
705 case NEGATE_EXPR:
706 return neg_cost (speed, lhs_mode);
708 CASE_CONVERT:
709 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
711 /* Note that we don't assign costs to copies that in most cases
712 will go away. */
713 default:
717 gcc_unreachable ();
718 return 0;
721 /* Look up the defining statement for BASE_IN and return a pointer
722 to its candidate in the candidate table, if any; otherwise NULL.
723 Only CAND_ADD and CAND_MULT candidates are returned. */
725 static slsr_cand_t
726 base_cand_from_table (tree base_in)
728 slsr_cand_t *result;
730 gimple def = SSA_NAME_DEF_STMT (base_in);
731 if (!def)
732 return (slsr_cand_t) NULL;
734 result = stmt_cand_map->get (def);
736 if (result && (*result)->kind != CAND_REF)
737 return *result;
739 return (slsr_cand_t) NULL;
742 /* Add an entry to the statement-to-candidate mapping. */
744 static void
745 add_cand_for_stmt (gimple gs, slsr_cand_t c)
747 gcc_assert (!stmt_cand_map->put (gs, c));
750 /* Given PHI which contains a phi statement, determine whether it
751 satisfies all the requirements of a phi candidate. If so, create
752 a candidate. Note that a CAND_PHI never has a basis itself, but
753 is used to help find a basis for subsequent candidates. */
755 static void
756 slsr_process_phi (gphi *phi, bool speed)
758 unsigned i;
759 tree arg0_base = NULL_TREE, base_type;
760 slsr_cand_t c;
761 struct loop *cand_loop = gimple_bb (phi)->loop_father;
762 unsigned savings = 0;
764 /* A CAND_PHI requires each of its arguments to have the same
765 derived base name. (See the module header commentary for a
766 definition of derived base names.) Furthermore, all feeding
767 definitions must be in the same position in the loop hierarchy
768 as PHI. */
770 for (i = 0; i < gimple_phi_num_args (phi); i++)
772 slsr_cand_t arg_cand;
773 tree arg = gimple_phi_arg_def (phi, i);
774 tree derived_base_name = NULL_TREE;
775 gimple arg_stmt = NULL;
776 basic_block arg_bb = NULL;
778 if (TREE_CODE (arg) != SSA_NAME)
779 return;
781 arg_cand = base_cand_from_table (arg);
783 if (arg_cand)
785 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
787 if (!arg_cand->next_interp)
788 return;
790 arg_cand = lookup_cand (arg_cand->next_interp);
793 if (!integer_onep (arg_cand->stride))
794 return;
796 derived_base_name = arg_cand->base_expr;
797 arg_stmt = arg_cand->cand_stmt;
798 arg_bb = gimple_bb (arg_stmt);
800 /* Gather potential dead code savings if the phi statement
801 can be removed later on. */
802 if (has_single_use (arg))
804 if (gimple_code (arg_stmt) == GIMPLE_PHI)
805 savings += arg_cand->dead_savings;
806 else
807 savings += stmt_cost (arg_stmt, speed);
810 else
812 derived_base_name = arg;
814 if (SSA_NAME_IS_DEFAULT_DEF (arg))
815 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
816 else
817 arg_bb = gimple_bb (SSA_NAME_DEF_STMT (arg));
820 if (!arg_bb || arg_bb->loop_father != cand_loop)
821 return;
823 if (i == 0)
824 arg0_base = derived_base_name;
825 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
826 return;
829 /* Create the candidate. "alloc_cand_and_find_basis" is named
830 misleadingly for this case, as no basis will be sought for a
831 CAND_PHI. */
832 base_type = TREE_TYPE (arg0_base);
834 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
835 0, integer_one_node, base_type, savings);
837 /* Add the candidate to the statement-candidate mapping. */
838 add_cand_for_stmt (phi, c);
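
/* Exposition only (hypothetical source, compiled out): code giving rise
   to a conditional candidate of the kind worked through in the module
   commentary.  The phi joining the two values of x is recorded as a
   CAND_PHI, and the final multiply can be rewritten in terms of a0 by
   introducing a parallel phi of a0 and a0 + 5.  */
#if 0
static int
slsr_cond_cand_example (int x, int cond)
{
  int a0 = x * 5;
  if (cond)
    x = x + 1;
  return a0 + x * 5;   /* x * 5 has the hidden basis a0 */
}
#endif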
841 /* Given PBASE which is a pointer to tree, look up the defining
842 statement for it and check whether the candidate is in the
843 form of:
845 X = B + (1 * S), S is integer constant
846 X = B + (i * S), S is integer one
848 If so, set PBASE to the candidate's base_expr and return the
849 widest_int (i * S).
850 Otherwise, just return a zero widest_int. */
852 static widest_int
853 backtrace_base_for_ref (tree *pbase)
855 tree base_in = *pbase;
856 slsr_cand_t base_cand;
858 STRIP_NOPS (base_in);
860 /* Strip off widening conversion(s) to handle cases where
861 e.g. 'B' is widened from an 'int' in order to calculate
862 a 64-bit address. */
863 if (CONVERT_EXPR_P (base_in)
864 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
865 base_in = get_unwidened (base_in, NULL_TREE);
867 if (TREE_CODE (base_in) != SSA_NAME)
868 return 0;
870 base_cand = base_cand_from_table (base_in);
872 while (base_cand && base_cand->kind != CAND_PHI)
874 if (base_cand->kind == CAND_ADD
875 && base_cand->index == 1
876 && TREE_CODE (base_cand->stride) == INTEGER_CST)
878 /* X = B + (1 * S), S is integer constant. */
879 *pbase = base_cand->base_expr;
880 return wi::to_widest (base_cand->stride);
882 else if (base_cand->kind == CAND_ADD
883 && TREE_CODE (base_cand->stride) == INTEGER_CST
884 && integer_onep (base_cand->stride))
886 /* X = B + (i * S), S is integer one. */
887 *pbase = base_cand->base_expr;
888 return base_cand->index;
891 if (base_cand->next_interp)
892 base_cand = lookup_cand (base_cand->next_interp);
893 else
894 base_cand = NULL;
897 return 0;
900 /* Look for the following pattern:
902 *PBASE: MEM_REF (T1, C1)
904 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
906 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
908 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
910 *PINDEX: C4 * BITS_PER_UNIT
912 If not present, leave the input values unchanged and return FALSE.
913 Otherwise, modify the input values as follows and return TRUE:
915 *PBASE: T1
916 *POFFSET: MULT_EXPR (T2, C3)
917 *PINDEX: C1 + (C2 * C3) + C4
919 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
920 will be further restructured to:
922 *PBASE: T1
923 *POFFSET: MULT_EXPR (T2', C3)
924 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
926 static bool
927 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
928 tree *ptype)
930 tree base = *pbase, offset = *poffset;
931 widest_int index = *pindex;
932 tree mult_op0, t1, t2, type;
933 widest_int c1, c2, c3, c4, c5;
935 if (!base
936 || !offset
937 || TREE_CODE (base) != MEM_REF
938 || TREE_CODE (offset) != MULT_EXPR
939 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
940 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
941 return false;
943 t1 = TREE_OPERAND (base, 0);
944 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
945 type = TREE_TYPE (TREE_OPERAND (base, 1));
947 mult_op0 = TREE_OPERAND (offset, 0);
948 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
950 if (TREE_CODE (mult_op0) == PLUS_EXPR)
952 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
954 t2 = TREE_OPERAND (mult_op0, 0);
955 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
957 else
958 return false;
960 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
962 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
964 t2 = TREE_OPERAND (mult_op0, 0);
965 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
967 else
968 return false;
970 else
972 t2 = mult_op0;
973 c2 = 0;
976 c4 = wi::lrshift (index, LOG2_BITS_PER_UNIT);
977 c5 = backtrace_base_for_ref (&t2);
979 *pbase = t1;
980 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
981 wide_int_to_tree (sizetype, c3));
982 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
983 *ptype = type;
985 return true;
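
/* Exposition only (hypothetical constants, compiled out): the constant
   folding performed on the index above.  For example, C1 = 16, C2 = 3,
   C3 = 4, C4 = 4 and no CAND_ADD behind T2 (so C5 = 0) gives
   16 + 3*4 + 4 + 0*4 = 32.  */
#if 0
static long
slsr_restructured_index (long c1, long c2, long c3, long c4, long c5)
{
  return c1 + c2 * c3 + c4 + c5 * c3;
}
#endif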
988 /* Given GS which contains a data reference, create a CAND_REF entry in
989 the candidate table and attempt to find a basis. */
991 static void
992 slsr_process_ref (gimple gs)
994 tree ref_expr, base, offset, type;
995 HOST_WIDE_INT bitsize, bitpos;
996 machine_mode mode;
997 int unsignedp, volatilep;
998 slsr_cand_t c;
1000 if (gimple_vdef (gs))
1001 ref_expr = gimple_assign_lhs (gs);
1002 else
1003 ref_expr = gimple_assign_rhs1 (gs);
1005 if (!handled_component_p (ref_expr)
1006 || TREE_CODE (ref_expr) == BIT_FIELD_REF
1007 || (TREE_CODE (ref_expr) == COMPONENT_REF
1008 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1009 return;
1011 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1012 &unsignedp, &volatilep, false);
1013 widest_int index = bitpos;
1015 if (!restructure_reference (&base, &offset, &index, &type))
1016 return;
1018 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1019 type, 0);
1021 /* Add the candidate to the statement-candidate mapping. */
1022 add_cand_for_stmt (gs, c);
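
/* Exposition only (hypothetical source, compiled out): two references
   that produce CAND_REFs differing only in their constant index.
   Assuming a 4-byte int and no padding, both stores record the same base
   T1 (the pointer a) and the same stride i * 8; their indices are 0 and
   4, so the second reference finds the first as its basis.  */
#if 0
struct slsr_pair { int x; int y; };

static void
slsr_ref_example (struct slsr_pair *a, int i)
{
  a[i].x = 1;
  a[i].y = 2;
}
#endif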
1025 /* Create a candidate entry for a statement GS, where GS multiplies
1026 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1027 about the two SSA names into the new candidate. Return the new
1028 candidate. */
1030 static slsr_cand_t
1031 create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1033 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1034 widest_int index;
1035 unsigned savings = 0;
1036 slsr_cand_t c;
1037 slsr_cand_t base_cand = base_cand_from_table (base_in);
1039 /* Look at all interpretations of the base candidate, if necessary,
1040 to find information to propagate into this candidate. */
1041 while (base_cand && !base && base_cand->kind != CAND_PHI)
1044 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1046 /* Y = (B + i') * 1
1047 X = Y * Z
1048 ================
1049 X = (B + i') * Z */
1050 base = base_cand->base_expr;
1051 index = base_cand->index;
1052 stride = stride_in;
1053 ctype = base_cand->cand_type;
1054 if (has_single_use (base_in))
1055 savings = (base_cand->dead_savings
1056 + stmt_cost (base_cand->cand_stmt, speed));
1058 else if (base_cand->kind == CAND_ADD
1059 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1061 /* Y = B + (i' * S), S constant
1062 X = Y * Z
1063 ============================
1064 X = B + ((i' * S) * Z) */
1065 base = base_cand->base_expr;
1066 index = base_cand->index * wi::to_widest (base_cand->stride);
1067 stride = stride_in;
1068 ctype = base_cand->cand_type;
1069 if (has_single_use (base_in))
1070 savings = (base_cand->dead_savings
1071 + stmt_cost (base_cand->cand_stmt, speed));
1074 if (base_cand->next_interp)
1075 base_cand = lookup_cand (base_cand->next_interp);
1076 else
1077 base_cand = NULL;
1080 if (!base)
1082 /* No interpretations had anything useful to propagate, so
1083 produce X = (Y + 0) * Z. */
1084 base = base_in;
1085 index = 0;
1086 stride = stride_in;
1087 ctype = TREE_TYPE (base_in);
1090 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1091 ctype, savings);
1092 return c;
1095 /* Create a candidate entry for a statement GS, where GS multiplies
1096 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1097 information about BASE_IN into the new candidate. Return the new
1098 candidate. */
1100 static slsr_cand_t
1101 create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
1103 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1104 widest_int index, temp;
1105 unsigned savings = 0;
1106 slsr_cand_t c;
1107 slsr_cand_t base_cand = base_cand_from_table (base_in);
1109 /* Look at all interpretations of the base candidate, if necessary,
1110 to find information to propagate into this candidate. */
1111 while (base_cand && !base && base_cand->kind != CAND_PHI)
1113 if (base_cand->kind == CAND_MULT
1114 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1116 /* Y = (B + i') * S, S constant
1117 X = Y * c
1118 ============================
1119 X = (B + i') * (S * c) */
1120 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1121 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1123 base = base_cand->base_expr;
1124 index = base_cand->index;
1125 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1126 ctype = base_cand->cand_type;
1127 if (has_single_use (base_in))
1128 savings = (base_cand->dead_savings
1129 + stmt_cost (base_cand->cand_stmt, speed));
1132 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1134 /* Y = B + (i' * 1)
1135 X = Y * c
1136 ===========================
1137 X = (B + i') * c */
1138 base = base_cand->base_expr;
1139 index = base_cand->index;
1140 stride = stride_in;
1141 ctype = base_cand->cand_type;
1142 if (has_single_use (base_in))
1143 savings = (base_cand->dead_savings
1144 + stmt_cost (base_cand->cand_stmt, speed));
1146 else if (base_cand->kind == CAND_ADD
1147 && base_cand->index == 1
1148 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1150 /* Y = B + (1 * S), S constant
1151 X = Y * c
1152 ===========================
1153 X = (B + S) * c */
1154 base = base_cand->base_expr;
1155 index = wi::to_widest (base_cand->stride);
1156 stride = stride_in;
1157 ctype = base_cand->cand_type;
1158 if (has_single_use (base_in))
1159 savings = (base_cand->dead_savings
1160 + stmt_cost (base_cand->cand_stmt, speed));
1163 if (base_cand->next_interp)
1164 base_cand = lookup_cand (base_cand->next_interp);
1165 else
1166 base_cand = NULL;
1169 if (!base)
1171 /* No interpretations had anything useful to propagate, so
1172 produce X = (Y + 0) * c. */
1173 base = base_in;
1174 index = 0;
1175 stride = stride_in;
1176 ctype = TREE_TYPE (base_in);
1179 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1180 ctype, savings);
1181 return c;
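
/* Exposition only (hypothetical source, compiled out): the first case
   above, where a constant-stride CAND_MULT feeds a multiply-immediate.
   The interpretation recorded for x folds the two constants, giving
   x = (b + 2) * 12, so a dominating multiply by 12 of the same base
   could serve as a basis.  */
#if 0
static int
slsr_mul_imm_example (int b)
{
  int y = (b + 2) * 3;
  int x = y * 4;        /* recorded as (b + 2) * 12 */
  return x + y;
}
#endif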
1184 /* Given GS which is a multiply of scalar integers, make an appropriate
1185 entry in the candidate table. If this is a multiply of two SSA names,
1186 create two CAND_MULT interpretations and attempt to find a basis for
1187 each of them. Otherwise, create a single CAND_MULT and attempt to
1188 find a basis. */
1190 static void
1191 slsr_process_mul (gimple gs, tree rhs1, tree rhs2, bool speed)
1193 slsr_cand_t c, c2;
1195 /* If this is a multiply of an SSA name with itself, it is highly
1196 unlikely that we will get a strength reduction opportunity, so
1197 don't record it as a candidate. This simplifies the logic for
1198 finding a basis, so if this is removed that must be considered. */
1199 if (rhs1 == rhs2)
1200 return;
1202 if (TREE_CODE (rhs2) == SSA_NAME)
1204 /* Record an interpretation of this statement in the candidate table
1205 assuming RHS1 is the base expression and RHS2 is the stride. */
1206 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1208 /* Add the first interpretation to the statement-candidate mapping. */
1209 add_cand_for_stmt (gs, c);
1211 /* Record another interpretation of this statement assuming RHS1
1212 is the stride and RHS2 is the base expression. */
1213 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1214 c->next_interp = c2->cand_num;
1216 else
1218 /* Record an interpretation for the multiply-immediate. */
1219 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1221 /* Add the interpretation to the statement-candidate mapping. */
1222 add_cand_for_stmt (gs, c);
1226 /* Create a candidate entry for a statement GS, where GS adds two
1227 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1228 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1229 information about the two SSA names into the new candidate.
1230 Return the new candidate. */
1232 static slsr_cand_t
1233 create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
1234 bool subtract_p, bool speed)
1236 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1237 widest_int index;
1238 unsigned savings = 0;
1239 slsr_cand_t c;
1240 slsr_cand_t base_cand = base_cand_from_table (base_in);
1241 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1243 /* The most useful transformation is a multiply-immediate feeding
1244 an add or subtract. Look for that first. */
1245 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1247 if (addend_cand->kind == CAND_MULT
1248 && addend_cand->index == 0
1249 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1251 /* Z = (B + 0) * S, S constant
1252 X = Y +/- Z
1253 ===========================
1254 X = Y + ((+/-1 * S) * B) */
1255 base = base_in;
1256 index = wi::to_widest (addend_cand->stride);
1257 if (subtract_p)
1258 index = -index;
1259 stride = addend_cand->base_expr;
1260 ctype = TREE_TYPE (base_in);
1261 if (has_single_use (addend_in))
1262 savings = (addend_cand->dead_savings
1263 + stmt_cost (addend_cand->cand_stmt, speed));
1266 if (addend_cand->next_interp)
1267 addend_cand = lookup_cand (addend_cand->next_interp);
1268 else
1269 addend_cand = NULL;
1272 while (base_cand && !base && base_cand->kind != CAND_PHI)
1274 if (base_cand->kind == CAND_ADD
1275 && (base_cand->index == 0
1276 || operand_equal_p (base_cand->stride,
1277 integer_zero_node, 0)))
1279 /* Y = B + (i' * S), i' * S = 0
1280 X = Y +/- Z
1281 ============================
1282 X = B + (+/-1 * Z) */
1283 base = base_cand->base_expr;
1284 index = subtract_p ? -1 : 1;
1285 stride = addend_in;
1286 ctype = base_cand->cand_type;
1287 if (has_single_use (base_in))
1288 savings = (base_cand->dead_savings
1289 + stmt_cost (base_cand->cand_stmt, speed));
1291 else if (subtract_p)
1293 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1295 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1297 if (subtrahend_cand->kind == CAND_MULT
1298 && subtrahend_cand->index == 0
1299 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1301 /* Z = (B + 0) * S, S constant
1302 X = Y - Z
1303 ===========================
1304 Value: X = Y + ((-1 * S) * B) */
1305 base = base_in;
1306 index = wi::to_widest (subtrahend_cand->stride);
1307 index = -index;
1308 stride = subtrahend_cand->base_expr;
1309 ctype = TREE_TYPE (base_in);
1310 if (has_single_use (addend_in))
1311 savings = (subtrahend_cand->dead_savings
1312 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1315 if (subtrahend_cand->next_interp)
1316 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1317 else
1318 subtrahend_cand = NULL;
1322 if (base_cand->next_interp)
1323 base_cand = lookup_cand (base_cand->next_interp);
1324 else
1325 base_cand = NULL;
1328 if (!base)
1330 /* No interpretations had anything useful to propagate, so
1331 produce X = Y + (1 * Z). */
1332 base = base_in;
1333 index = subtract_p ? -1 : 1;
1334 stride = addend_in;
1335 ctype = TREE_TYPE (base_in);
1338 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1339 ctype, savings);
1340 return c;
1343 /* Create a candidate entry for a statement GS, where GS adds SSA
1344 name BASE_IN to constant INDEX_IN. Propagate any known information
1345 about BASE_IN into the new candidate. Return the new candidate. */
1347 static slsr_cand_t
1348 create_add_imm_cand (gimple gs, tree base_in, const widest_int &index_in,
1349 bool speed)
1351 enum cand_kind kind = CAND_ADD;
1352 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1353 widest_int index, multiple;
1354 unsigned savings = 0;
1355 slsr_cand_t c;
1356 slsr_cand_t base_cand = base_cand_from_table (base_in);
1358 while (base_cand && !base && base_cand->kind != CAND_PHI)
1360 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1362 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1363 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1364 sign, &multiple))
1366 /* Y = (B + i') * S, S constant, c = kS for some integer k
1367 X = Y + c
1368 ============================
1369 X = (B + (i'+ k)) * S
1371 Y = B + (i' * S), S constant, c = kS for some integer k
1372 X = Y + c
1373 ============================
1374 X = (B + (i'+ k)) * S */
1375 kind = base_cand->kind;
1376 base = base_cand->base_expr;
1377 index = base_cand->index + multiple;
1378 stride = base_cand->stride;
1379 ctype = base_cand->cand_type;
1380 if (has_single_use (base_in))
1381 savings = (base_cand->dead_savings
1382 + stmt_cost (base_cand->cand_stmt, speed));
1385 if (base_cand->next_interp)
1386 base_cand = lookup_cand (base_cand->next_interp);
1387 else
1388 base_cand = NULL;
1391 if (!base)
1393 /* No interpretations had anything useful to propagate, so
1394 produce X = Y + (c * 1). */
1395 kind = CAND_ADD;
1396 base = base_in;
1397 index = index_in;
1398 stride = integer_one_node;
1399 ctype = TREE_TYPE (base_in);
1402 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1403 ctype, savings);
1404 return c;
1407 /* Given GS which is an add or subtract of scalar integers or pointers,
1408 make at least one appropriate entry in the candidate table. */
1410 static void
1411 slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
1413 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1414 slsr_cand_t c = NULL, c2;
1416 if (TREE_CODE (rhs2) == SSA_NAME)
1418 /* First record an interpretation assuming RHS1 is the base expression
1419 and RHS2 is the stride. But it doesn't make sense for the
1420 stride to be a pointer, so don't record a candidate in that case. */
1421 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1423 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1425 /* Add the first interpretation to the statement-candidate
1426 mapping. */
1427 add_cand_for_stmt (gs, c);
1430 /* If the two RHS operands are identical, or this is a subtract,
1431 we're done. */
1432 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1433 return;
1435 /* Otherwise, record another interpretation assuming RHS2 is the
1436 base expression and RHS1 is the stride, again provided that the
1437 stride is not a pointer. */
1438 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1440 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1441 if (c)
1442 c->next_interp = c2->cand_num;
1443 else
1444 add_cand_for_stmt (gs, c2);
1447 else
1449 /* Record an interpretation for the add-immediate. */
1450 widest_int index = wi::to_widest (rhs2);
1451 if (subtract_p)
1452 index = -index;
1454 c = create_add_imm_cand (gs, rhs1, index, speed);
1456 /* Add the interpretation to the statement-candidate mapping. */
1457 add_cand_for_stmt (gs, c);
1461 /* Given GS which is a negate of a scalar integer, make an appropriate
1462 entry in the candidate table. A negate is equivalent to a multiply
1463 by -1. */
1465 static void
1466 slsr_process_neg (gimple gs, tree rhs1, bool speed)
1468 /* Record a CAND_MULT interpretation for the multiply by -1. */
1469 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1471 /* Add the interpretation to the statement-candidate mapping. */
1472 add_cand_for_stmt (gs, c);
1475 /* Help function for legal_cast_p, operating on two trees. Checks
1476 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1477 for more details. */
1479 static bool
1480 legal_cast_p_1 (tree lhs, tree rhs)
1482 tree lhs_type, rhs_type;
1483 unsigned lhs_size, rhs_size;
1484 bool lhs_wraps, rhs_wraps;
1486 lhs_type = TREE_TYPE (lhs);
1487 rhs_type = TREE_TYPE (rhs);
1488 lhs_size = TYPE_PRECISION (lhs_type);
1489 rhs_size = TYPE_PRECISION (rhs_type);
1490 lhs_wraps = TYPE_OVERFLOW_WRAPS (lhs_type);
1491 rhs_wraps = TYPE_OVERFLOW_WRAPS (rhs_type);
1493 if (lhs_size < rhs_size
1494 || (rhs_wraps && !lhs_wraps)
1495 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1496 return false;
1498 return true;
1501 /* Return TRUE if GS is a statement that defines an SSA name from
1502 a conversion and is legal for us to combine with an add and multiply
1503 in the candidate table. For example, suppose we have:
1505 A = B + i;
1506 C = (type) A;
1507 D = C * S;
1509 Without the type-cast, we would create a CAND_MULT for D with base B,
1510 index i, and stride S. We want to record this candidate only if it
1511 is equivalent to applying the type cast following the multiply:
1513 A = B + i;
1514 E = A * S;
1515 D = (type) E;
1517 We will record the type with the candidate for D. This allows us
1518 to use a similar previous candidate as a basis. If we have earlier seen
1520 A' = B + i';
1521 C' = (type) A';
1522 D' = C' * S;
1524 we can replace D with
1526 D = D' + (i - i') * S;
1528 But if moving the type-cast would change semantics, we mustn't do this.
1530 This is legitimate for casts from a non-wrapping integral type to
1531 any integral type of the same or larger size. It is not legitimate
1532 to convert a wrapping type to a non-wrapping type, or to a wrapping
1533 type of a different size. I.e., with a wrapping type, we must
1534 assume that the addition B + i could wrap, in which case performing
1535 the multiply before or after one of the "illegal" type casts will
1536 have different semantics. */
1538 static bool
1539 legal_cast_p (gimple gs, tree rhs)
1541 if (!is_gimple_assign (gs)
1542 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1543 return false;
1545 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
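
/* Exposition only (hypothetical source, compiled out): a legal cast in
   the sense described above, assuming 32-bit int and 64-bit long.  The
   widening cast below goes from a non-wrapping type to a larger type, so
   d can be recorded as a CAND_MULT with base b, index 1, stride s, and
   candidate type long.  If "a" were unsigned (a wrapping type), the cast
   could not be combined, because b + 1 might wrap before the widening.  */
#if 0
static long
slsr_cast_example (int b, long s)
{
  int a = b + 1;
  long c = (long) a;
  long d = c * s;
  return d;
}
#endif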
1548 /* Given GS which is a cast to a scalar integer type, determine whether
1549 the cast is legal for strength reduction. If so, make at least one
1550 appropriate entry in the candidate table. */
1552 static void
1553 slsr_process_cast (gimple gs, tree rhs1, bool speed)
1555 tree lhs, ctype;
1556 slsr_cand_t base_cand, c, c2;
1557 unsigned savings = 0;
1559 if (!legal_cast_p (gs, rhs1))
1560 return;
1562 lhs = gimple_assign_lhs (gs);
1563 base_cand = base_cand_from_table (rhs1);
1564 ctype = TREE_TYPE (lhs);
1566 if (base_cand && base_cand->kind != CAND_PHI)
1568 while (base_cand)
1570 /* Propagate all data from the base candidate except the type,
1571 which comes from the cast, and the base candidate's cast,
1572 which is no longer applicable. */
1573 if (has_single_use (rhs1))
1574 savings = (base_cand->dead_savings
1575 + stmt_cost (base_cand->cand_stmt, speed));
1577 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1578 base_cand->base_expr,
1579 base_cand->index, base_cand->stride,
1580 ctype, savings);
1581 if (base_cand->next_interp)
1582 base_cand = lookup_cand (base_cand->next_interp);
1583 else
1584 base_cand = NULL;
1587 else
1589 /* If nothing is known about the RHS, create fresh CAND_ADD and
1590 CAND_MULT interpretations:
1592 X = Y + (0 * 1)
1593 X = (Y + 0) * 1
1595 The first of these is somewhat arbitrary, but the choice of
1596 1 for the stride simplifies the logic for propagating casts
1597 into their uses. */
1598 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1599 0, integer_one_node, ctype, 0);
1600 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1601 0, integer_one_node, ctype, 0);
1602 c->next_interp = c2->cand_num;
1605 /* Add the first (or only) interpretation to the statement-candidate
1606 mapping. */
1607 add_cand_for_stmt (gs, c);
1610 /* Given GS which is a copy of a scalar integer type, make at least one
1611 appropriate entry in the candidate table.
1613 This interface is included for completeness, but is unnecessary
1614 if this pass immediately follows a pass that performs copy
1615 propagation, such as DOM. */
1617 static void
1618 slsr_process_copy (gimple gs, tree rhs1, bool speed)
1620 slsr_cand_t base_cand, c, c2;
1621 unsigned savings = 0;
1623 base_cand = base_cand_from_table (rhs1);
1625 if (base_cand && base_cand->kind != CAND_PHI)
1627 while (base_cand)
1629 /* Propagate all data from the base candidate. */
1630 if (has_single_use (rhs1))
1631 savings = (base_cand->dead_savings
1632 + stmt_cost (base_cand->cand_stmt, speed));
1634 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1635 base_cand->base_expr,
1636 base_cand->index, base_cand->stride,
1637 base_cand->cand_type, savings);
1638 if (base_cand->next_interp)
1639 base_cand = lookup_cand (base_cand->next_interp);
1640 else
1641 base_cand = NULL;
1644 else
1646 /* If nothing is known about the RHS, create fresh CAND_ADD and
1647 CAND_MULT interpretations:
1649 X = Y + (0 * 1)
1650 X = (Y + 0) * 1
1652 The first of these is somewhat arbitrary, but the choice of
1653 1 for the stride simplifies the logic for propagating casts
1654 into their uses. */
1655 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1656 0, integer_one_node, TREE_TYPE (rhs1), 0);
1657 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1658 0, integer_one_node, TREE_TYPE (rhs1), 0);
1659 c->next_interp = c2->cand_num;
1662 /* Add the first (or only) interpretation to the statement-candidate
1663 mapping. */
1664 add_cand_for_stmt (gs, c);
1667 class find_candidates_dom_walker : public dom_walker
1669 public:
1670 find_candidates_dom_walker (cdi_direction direction)
1671 : dom_walker (direction) {}
1672 virtual void before_dom_children (basic_block);
1675 /* Find strength-reduction candidates in block BB. */
1677 void
1678 find_candidates_dom_walker::before_dom_children (basic_block bb)
1680 bool speed = optimize_bb_for_speed_p (bb);
1682 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1683 gsi_next (&gsi))
1684 slsr_process_phi (gsi.phi (), speed);
1686 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1687 gsi_next (&gsi))
1689 gimple gs = gsi_stmt (gsi);
1691 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1692 slsr_process_ref (gs);
1694 else if (is_gimple_assign (gs)
1695 && SCALAR_INT_MODE_P
1696 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1698 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1700 switch (gimple_assign_rhs_code (gs))
1702 case MULT_EXPR:
1703 case PLUS_EXPR:
1704 rhs1 = gimple_assign_rhs1 (gs);
1705 rhs2 = gimple_assign_rhs2 (gs);
1706 /* Should never happen, but currently some buggy situations
1707 in earlier phases put constants in rhs1. */
1708 if (TREE_CODE (rhs1) != SSA_NAME)
1709 continue;
1710 break;
1712 /* Possible future opportunity: rhs1 of a ptr+ can be
1713 an ADDR_EXPR. */
1714 case POINTER_PLUS_EXPR:
1715 case MINUS_EXPR:
1716 rhs2 = gimple_assign_rhs2 (gs);
1717 /* Fall-through. */
1719 CASE_CONVERT:
1720 case MODIFY_EXPR:
1721 case NEGATE_EXPR:
1722 rhs1 = gimple_assign_rhs1 (gs);
1723 if (TREE_CODE (rhs1) != SSA_NAME)
1724 continue;
1725 break;
1727 default:
1731 switch (gimple_assign_rhs_code (gs))
1733 case MULT_EXPR:
1734 slsr_process_mul (gs, rhs1, rhs2, speed);
1735 break;
1737 case PLUS_EXPR:
1738 case POINTER_PLUS_EXPR:
1739 case MINUS_EXPR:
1740 slsr_process_add (gs, rhs1, rhs2, speed);
1741 break;
1743 case NEGATE_EXPR:
1744 slsr_process_neg (gs, rhs1, speed);
1745 break;
1747 CASE_CONVERT:
1748 slsr_process_cast (gs, rhs1, speed);
1749 break;
1751 case MODIFY_EXPR:
1752 slsr_process_copy (gs, rhs1, speed);
1753 break;
1755 default:
1762 /* Dump a candidate for debug. */
1764 static void
1765 dump_candidate (slsr_cand_t c)
1767 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1768 gimple_bb (c->cand_stmt)->index);
1769 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1770 switch (c->kind)
1772 case CAND_MULT:
1773 fputs (" MULT : (", dump_file);
1774 print_generic_expr (dump_file, c->base_expr, 0);
1775 fputs (" + ", dump_file);
1776 print_decs (c->index, dump_file);
1777 fputs (") * ", dump_file);
1778 print_generic_expr (dump_file, c->stride, 0);
1779 fputs (" : ", dump_file);
1780 break;
1781 case CAND_ADD:
1782 fputs (" ADD : ", dump_file);
1783 print_generic_expr (dump_file, c->base_expr, 0);
1784 fputs (" + (", dump_file);
1785 print_decs (c->index, dump_file);
1786 fputs (" * ", dump_file);
1787 print_generic_expr (dump_file, c->stride, 0);
1788 fputs (") : ", dump_file);
1789 break;
1790 case CAND_REF:
1791 fputs (" REF : ", dump_file);
1792 print_generic_expr (dump_file, c->base_expr, 0);
1793 fputs (" + (", dump_file);
1794 print_generic_expr (dump_file, c->stride, 0);
1795 fputs (") + ", dump_file);
1796 print_decs (c->index, dump_file);
1797 fputs (" : ", dump_file);
1798 break;
1799 case CAND_PHI:
1800 fputs (" PHI : ", dump_file);
1801 print_generic_expr (dump_file, c->base_expr, 0);
1802 fputs (" + (unknown * ", dump_file);
1803 print_generic_expr (dump_file, c->stride, 0);
1804 fputs (") : ", dump_file);
1805 break;
1806 default:
1807 gcc_unreachable ();
1809 print_generic_expr (dump_file, c->cand_type, 0);
1810 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1811 c->basis, c->dependent, c->sibling);
1812 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1813 c->next_interp, c->dead_savings);
1814 if (c->def_phi)
1815 fprintf (dump_file, " phi: %d\n", c->def_phi);
1816 fputs ("\n", dump_file);
1819 /* Dump the candidate vector for debug. */
1821 static void
1822 dump_cand_vec (void)
1824 unsigned i;
1825 slsr_cand_t c;
1827 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1829 FOR_EACH_VEC_ELT (cand_vec, i, c)
1830 dump_candidate (c);
1833 /* Callback used to dump the candidate chains hash table. */
1835 static int
1836 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1838 const_cand_chain_t chain = *slot;
1839 cand_chain_t p;
1841 print_generic_expr (dump_file, chain->base_expr, 0);
1842 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1844 for (p = chain->next; p; p = p->next)
1845 fprintf (dump_file, " -> %d", p->cand->cand_num);
1847 fputs ("\n", dump_file);
1848 return 1;
1851 /* Dump the candidate chains. */
1853 static void
1854 dump_cand_chains (void)
1856 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1857 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1858 (NULL);
1859 fputs ("\n", dump_file);
1862 /* Dump the increment vector for debug. */
1864 static void
1865 dump_incr_vec (void)
1867 if (dump_file && (dump_flags & TDF_DETAILS))
1869 unsigned i;
1871 fprintf (dump_file, "\nIncrement vector:\n\n");
1873 for (i = 0; i < incr_vec_len; i++)
1875 fprintf (dump_file, "%3d increment: ", i);
1876 print_decs (incr_vec[i].incr, dump_file);
1877 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1878 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1879 fputs ("\n initializer: ", dump_file);
1880 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1881 fputs ("\n\n", dump_file);
1886 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1887 data reference. */
1889 static void
1890 replace_ref (tree *expr, slsr_cand_t c)
1892 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1893 unsigned HOST_WIDE_INT misalign;
1894 unsigned align;
1896 /* Ensure the memory reference carries the minimum alignment
1897 requirement for the data type. See PR58041. */
1898 get_object_alignment_1 (*expr, &align, &misalign);
1899 if (misalign != 0)
1900 align = (misalign & -misalign);
1901 if (align < TYPE_ALIGN (acc_type))
1902 acc_type = build_aligned_type (acc_type, align);
1904 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1905 c->base_expr, c->stride);
1906 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1907 wide_int_to_tree (c->cand_type, c->index));
1909 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1910 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1911 TREE_OPERAND (mem_ref, 0)
1912 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1913 /*simple_p=*/true, NULL,
1914 /*before=*/true, GSI_SAME_STMT);
1915 copy_ref_info (mem_ref, *expr);
1916 *expr = mem_ref;
1917 update_stmt (c->cand_stmt);
1920 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1921 dependent of candidate C with an equivalent strength-reduced data
1922 reference. */
1924 static void
1925 replace_refs (slsr_cand_t c)
1927 if (dump_file && (dump_flags & TDF_DETAILS))
1929 fputs ("Replacing reference: ", dump_file);
1930 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1933 if (gimple_vdef (c->cand_stmt))
1935 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1936 replace_ref (lhs, c);
1938 else
1940 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1941 replace_ref (rhs, c);
1944 if (dump_file && (dump_flags & TDF_DETAILS))
1946 fputs ("With: ", dump_file);
1947 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1948 fputs ("\n", dump_file);
1951 if (c->sibling)
1952 replace_refs (lookup_cand (c->sibling));
1954 if (c->dependent)
1955 replace_refs (lookup_cand (c->dependent));
1958 /* Return TRUE if candidate C is dependent upon a PHI. */
1960 static bool
1961 phi_dependent_cand_p (slsr_cand_t c)
1963 /* A candidate is not necessarily dependent upon a PHI just because
1964 it has a phi definition for its base name. It may have a basis
1965 that relies upon the same phi definition, in which case the PHI
1966 is irrelevant to this candidate. */
1967 return (c->def_phi
1968 && c->basis
1969 && lookup_cand (c->basis)->def_phi != c->def_phi);
1972 /* Calculate the increment required for candidate C relative to
1973 its basis. */
1975 static widest_int
1976 cand_increment (slsr_cand_t c)
1978 slsr_cand_t basis;
1980 /* If the candidate doesn't have a basis, just return its own
1981 index. This is useful in record_increments to help us find
1982 an existing initializer. Also, if the candidate's basis is
1983 hidden by a phi, then its own index will be the increment
1984 from the newly introduced phi basis. */
1985 if (!c->basis || phi_dependent_cand_p (c))
1986 return c->index;
1988 basis = lookup_cand (c->basis);
1989 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1990 return c->index - basis->index;
1993 /* Calculate the increment required for candidate C relative to
1994 its basis. If we aren't going to generate pointer arithmetic
1995 for this candidate, return the absolute value of that increment
1996 instead. */
1998 static inline widest_int
1999 cand_abs_increment (slsr_cand_t c)
2001 widest_int increment = cand_increment (c);
2003 if (!address_arithmetic_p && wi::neg_p (increment))
2004 increment = -increment;
2006 return increment;
2009 /* Return TRUE iff candidate C has already been replaced under
2010 another interpretation. */
2012 static inline bool
2013 cand_already_replaced (slsr_cand_t c)
2015 return (gimple_bb (c->cand_stmt) == 0);
2018 /* Common logic used by replace_unconditional_candidate and
2019 replace_conditional_candidate. */
2021 static void
2022 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2024 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2025 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2027 /* It is highly unlikely, but possible, that the resulting
2028 bump doesn't fit in a HWI. Abandon the replacement
2029 in this case. This does not affect siblings or dependents
2030 of C. Restriction to signed HWI is conservative for unsigned
2031 types but allows for safe negation without twisted logic. */
2032 if (wi::fits_shwi_p (bump)
2033 && bump.to_shwi () != HOST_WIDE_INT_MIN
2034 /* It is not useful to replace casts, copies, or adds of
2035 an SSA name and a constant. */
2036 && cand_code != MODIFY_EXPR
2037 && !CONVERT_EXPR_CODE_P (cand_code)
2038 && cand_code != PLUS_EXPR
2039 && cand_code != POINTER_PLUS_EXPR
2040 && cand_code != MINUS_EXPR)
2042 enum tree_code code = PLUS_EXPR;
2043 tree bump_tree;
2044 gimple stmt_to_print = NULL;
2046 /* If the basis name and the candidate's LHS have incompatible
2047 types, introduce a cast. */
2048 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2049 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2050 if (wi::neg_p (bump))
2052 code = MINUS_EXPR;
2053 bump = -bump;
2056 bump_tree = wide_int_to_tree (target_type, bump);
2058 if (dump_file && (dump_flags & TDF_DETAILS))
2060 fputs ("Replacing: ", dump_file);
2061 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2064 if (bump == 0)
2066 tree lhs = gimple_assign_lhs (c->cand_stmt);
2067 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2068 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2069 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2070 gsi_replace (&gsi, copy_stmt, false);
2071 c->cand_stmt = copy_stmt;
2072 if (dump_file && (dump_flags & TDF_DETAILS))
2073 stmt_to_print = copy_stmt;
2075 else
2077 tree rhs1, rhs2;
2078 if (cand_code != NEGATE_EXPR) {
2079 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2080 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2082 if (cand_code != NEGATE_EXPR
2083 && ((operand_equal_p (rhs1, basis_name, 0)
2084 && operand_equal_p (rhs2, bump_tree, 0))
2085 || (operand_equal_p (rhs1, bump_tree, 0)
2086 && operand_equal_p (rhs2, basis_name, 0))))
2088 if (dump_file && (dump_flags & TDF_DETAILS))
2090 fputs ("(duplicate, not actually replacing)", dump_file);
2091 stmt_to_print = c->cand_stmt;
2094 else
2096 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2097 gimple_assign_set_rhs_with_ops (&gsi, code,
2098 basis_name, bump_tree);
2099 update_stmt (gsi_stmt (gsi));
2100 c->cand_stmt = gsi_stmt (gsi);
2101 if (dump_file && (dump_flags & TDF_DETAILS))
2102 stmt_to_print = gsi_stmt (gsi);
2106 if (dump_file && (dump_flags & TDF_DETAILS))
2108 fputs ("With: ", dump_file);
2109 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2110 fputs ("\n", dump_file);
2115 /* Replace candidate C with an add or subtract. Note that we only
2116 operate on CAND_MULTs with known strides, so we will never generate
2117 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2118 X = Y + ((i - i') * S), as described in the module commentary. The
2119 folded value ((i - i') * S) is referred to here as the "bump." */
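/* A worked example with hypothetical names and a constant stride of 4:

     basis      S0:  y_1 = (b_2 + 2) * 4
     candidate  S1:  x_3 = (b_2 + 7) * 4

   The bump is (7 - 2) * 4 = 20, so S1 is rewritten in place as

     S1':  x_3 = y_1 + 20;

   (or as y_1 - 20 had the bump been negative).  */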
2121 static void
2122 replace_unconditional_candidate (slsr_cand_t c)
2124 slsr_cand_t basis;
2126 if (cand_already_replaced (c))
2127 return;
2129 basis = lookup_cand (c->basis);
2130 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2132 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2135 /* Return the index in the increment vector of the given INCREMENT,
2136 or -1 if not found. The latter can occur if more than
2137 MAX_INCR_VEC_LEN increments have been found. */
2139 static inline int
2140 incr_vec_index (const widest_int &increment)
2142 unsigned i;
2144 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2147 if (i < incr_vec_len)
2148 return i;
2149 else
2150 return -1;
2153 /* Create a new statement along edge E to add BASIS_NAME to the product
2154 of INCREMENT and the stride of candidate C. Create and return a new
2155 SSA name to be used as the LHS of the new statement.
2156 KNOWN_STRIDE is true iff C's stride is a constant. */
2158 static tree
2159 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2160 widest_int increment, edge e, location_t loc,
2161 bool known_stride)
2163 basic_block insert_bb;
2164 gimple_stmt_iterator gsi;
2165 tree lhs, basis_type;
2166 gassign *new_stmt;
2168 /* If the add candidate along this incoming edge has the same
2169 index as C's hidden basis, the hidden basis represents this
2170 edge correctly. */
2171 if (increment == 0)
2172 return basis_name;
2174 basis_type = TREE_TYPE (basis_name);
2175 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2177 if (known_stride)
2179 tree bump_tree;
2180 enum tree_code code = PLUS_EXPR;
2181 widest_int bump = increment * wi::to_widest (c->stride);
2182 if (wi::neg_p (bump))
2184 code = MINUS_EXPR;
2185 bump = -bump;
2188 bump_tree = wide_int_to_tree (basis_type, bump);
2189 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2190 bump_tree);
2192 else
2194 int i;
2195 bool negate_incr = (!address_arithmetic_p && wi::neg_p (increment));
2196 i = incr_vec_index (negate_incr ? -increment : increment);
2197 gcc_assert (i >= 0);
2199 if (incr_vec[i].initializer)
2201 enum tree_code code = negate_incr ? MINUS_EXPR : PLUS_EXPR;
2202 new_stmt = gimple_build_assign_with_ops (code, lhs, basis_name,
2203 incr_vec[i].initializer);
2205 else if (increment == 1)
2206 new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, lhs, basis_name,
2207 c->stride);
2208 else if (increment == -1)
2209 new_stmt = gimple_build_assign_with_ops (MINUS_EXPR, lhs, basis_name,
2210 c->stride);
2211 else
2212 gcc_unreachable ();
2215 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2216 gsi = gsi_last_bb (insert_bb);
2218 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2219 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2220 else
2221 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2223 gimple_set_location (new_stmt, loc);
2225 if (dump_file && (dump_flags & TDF_DETAILS))
2227 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2228 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2231 return lhs;
2234 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2235 is hidden by the phi node FROM_PHI, create a new phi node in the same
2236 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2237 with its phi arguments representing conditional adjustments to the
2238 hidden basis along conditional incoming paths. Those adjustments are
2239 made by creating add statements (and sometimes recursively creating
2240 phis) along those incoming paths. LOC is the location to attach to
2241 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2242 constant. */
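/* A minimal sketch with hypothetical names, assuming a known constant
   stride S and a hidden basis y_1 = (b_2 + 0) * S with index 0.  If
   the phi hiding the basis is

     b_4 = PHI <b_2 (E1), b_3 (E2)>      where  b_3 = b_2 + 2,

   then the argument b_2 is represented by y_1 itself, the argument b_3
   by an add  t_5 = y_1 + 2 * S  inserted along E2, and the new basis
   phi becomes

     slsr_6 = PHI <y_1 (E1), t_5 (E2)>.  */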
2244 static tree
2245 create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
2246 location_t loc, bool known_stride)
2248 int i;
2249 tree name, phi_arg;
2250 gphi *phi;
2251 vec<tree> phi_args;
2252 slsr_cand_t basis = lookup_cand (c->basis);
2253 int nargs = gimple_phi_num_args (from_phi);
2254 basic_block phi_bb = gimple_bb (from_phi);
2255 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (from_phi));
2256 phi_args.create (nargs);
2258 /* Process each argument of the existing phi that represents
2259 conditionally-executed add candidates. */
2260 for (i = 0; i < nargs; i++)
2262 edge e = (*phi_bb->preds)[i];
2263 tree arg = gimple_phi_arg_def (from_phi, i);
2264 tree feeding_def;
2266 /* If the phi argument is the base name of the CAND_PHI, then
2267 this incoming arc should use the hidden basis. */
2268 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2269 if (basis->index == 0)
2270 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2271 else
2273 widest_int incr = -basis->index;
2274 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2275 e, loc, known_stride);
2277 else
2279 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2281 /* If there is another phi along this incoming edge, we must
2282 process it in the same fashion to ensure that all basis
2283 adjustments are made along its incoming edges. */
2284 if (gimple_code (arg_def) == GIMPLE_PHI)
2285 feeding_def = create_phi_basis (c, arg_def, basis_name,
2286 loc, known_stride);
2287 else
2289 slsr_cand_t arg_cand = base_cand_from_table (arg);
2290 widest_int diff = arg_cand->index - basis->index;
2291 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2292 e, loc, known_stride);
2296 /* Because of recursion, we need to save the arguments in a vector
2297 so we can create the PHI statement all at once. Otherwise the
2298 storage for the half-created PHI can be reclaimed. */
2299 phi_args.safe_push (feeding_def);
2302 /* Create the new phi basis. */
2303 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2304 phi = create_phi_node (name, phi_bb);
2305 SSA_NAME_DEF_STMT (name) = phi;
2307 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2309 edge e = (*phi_bb->preds)[i];
2310 add_phi_arg (phi, phi_arg, e, loc);
2313 update_stmt (phi);
2315 if (dump_file && (dump_flags & TDF_DETAILS))
2317 fputs ("Introducing new phi basis: ", dump_file);
2318 print_gimple_stmt (dump_file, phi, 0, 0);
2321 return name;
2324 /* Given a candidate C whose basis is hidden by at least one intervening
2325 phi, introduce a matching number of new phis to represent its basis
2326 adjusted by conditional increments along possible incoming paths. Then
2327 replace C as though it were an unconditional candidate, using the new
2328 basis. */
2330 static void
2331 replace_conditional_candidate (slsr_cand_t c)
2333 tree basis_name, name;
2334 slsr_cand_t basis;
2335 location_t loc;
2337 /* Look up the LHS SSA name from C's basis. This will be the
2338 RHS1 of the adds we will introduce to create new phi arguments. */
2339 basis = lookup_cand (c->basis);
2340 basis_name = gimple_assign_lhs (basis->cand_stmt);
2342 /* Create a new phi statement which will represent C's true basis
2343 after the transformation is complete. */
2344 loc = gimple_location (c->cand_stmt);
2345 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2346 basis_name, loc, KNOWN_STRIDE);
2347 /* Replace C with an add of the new basis phi and a constant. */
2348 widest_int bump = c->index * wi::to_widest (c->stride);
2350 replace_mult_candidate (c, name, bump);
2353 /* Compute the expected costs of inserting basis adjustments for
2354 candidate C with phi-definition PHI. The cost of inserting
2355 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2356 which are themselves phi results, recursively calculate costs
2357 for those phis as well. */
2359 static int
2360 phi_add_costs (gimple phi, slsr_cand_t c, int one_add_cost)
2362 unsigned i;
2363 int cost = 0;
2364 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2366 /* If we work our way back to a phi that isn't dominated by the hidden
2367 basis, this isn't a candidate for replacement. Indicate this by
2368 returning an unreasonably high cost. It's not easy to detect
2369 these situations when determining the basis, so we defer the
2370 decision until now. */
2371 basic_block phi_bb = gimple_bb (phi);
2372 slsr_cand_t basis = lookup_cand (c->basis);
2373 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2375 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2376 return COST_INFINITE;
2378 for (i = 0; i < gimple_phi_num_args (phi); i++)
2380 tree arg = gimple_phi_arg_def (phi, i);
2382 if (arg != phi_cand->base_expr)
2384 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2386 if (gimple_code (arg_def) == GIMPLE_PHI)
2387 cost += phi_add_costs (arg_def, c, one_add_cost);
2388 else
2390 slsr_cand_t arg_cand = base_cand_from_table (arg);
2392 if (arg_cand->index != c->index)
2393 cost += one_add_cost;
2398 return cost;
2401 /* For candidate C, each sibling of candidate C, and each dependent of
2402 candidate C, determine whether the candidate is dependent upon a
2403 phi that hides its basis. If not, replace the candidate unconditionally.
2404 Otherwise, determine whether the cost of introducing compensation code
2405 for the candidate is offset by the gains from strength reduction. If
2406 so, replace the candidate and introduce the compensation code. */
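/* As a made-up numeric illustration of the profitability test below:
   with one_add_cost = 4, two compensating adds required along the
   phi's incoming edges (phi_add_costs = 8), a multiply whose stmt_cost
   is 16, and no dead-code savings, the cost is (4 + 8) - 16 - 0 = -4,
   which is <= COST_NEUTRAL, so the conditional candidate is
   replaced.  */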
2408 static void
2409 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2411 if (phi_dependent_cand_p (c))
2413 if (c->kind == CAND_MULT)
2415 /* A candidate dependent upon a phi will replace a multiply by
2416 a constant with an add, and will insert at most one add for
2417 each phi argument. Add these costs with the potential dead-code
2418 savings to determine profitability. */
2419 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2420 int mult_savings = stmt_cost (c->cand_stmt, speed);
2421 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2422 tree phi_result = gimple_phi_result (phi);
2423 int one_add_cost = add_cost (speed,
2424 TYPE_MODE (TREE_TYPE (phi_result)));
2425 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2426 int cost = add_costs - mult_savings - c->dead_savings;
2428 if (dump_file && (dump_flags & TDF_DETAILS))
2430 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2431 fprintf (dump_file, " add_costs = %d\n", add_costs);
2432 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2433 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2434 fprintf (dump_file, " cost = %d\n", cost);
2435 if (cost <= COST_NEUTRAL)
2436 fputs (" Replacing...\n", dump_file);
2437 else
2438 fputs (" Not replaced.\n", dump_file);
2441 if (cost <= COST_NEUTRAL)
2442 replace_conditional_candidate (c);
2445 else
2446 replace_unconditional_candidate (c);
2448 if (c->sibling)
2449 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2451 if (c->dependent)
2452 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2455 /* Count the number of candidates in the tree rooted at C that have
2456 not already been replaced under other interpretations. */
2458 static int
2459 count_candidates (slsr_cand_t c)
2461 unsigned count = cand_already_replaced (c) ? 0 : 1;
2463 if (c->sibling)
2464 count += count_candidates (lookup_cand (c->sibling));
2466 if (c->dependent)
2467 count += count_candidates (lookup_cand (c->dependent));
2469 return count;
2472 /* Increase the count of INCREMENT by one in the increment vector.
2473 INCREMENT is associated with candidate C. If INCREMENT is to be
2474 conditionally executed as part of a conditional candidate replacement,
2475 IS_PHI_ADJUST is true, otherwise false. If an initializer
2476 T_0 = stride * I is provided by a candidate that dominates all
2477 candidates with the same increment, also record T_0 for subsequent use. */
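/* For example (hypothetical names): if a CAND_ADD candidate

     x_2 = b_3 + _4;        where  _4 = s_1 * 5,  index 5, stride s_1

   dominates all other candidates whose increment is 5, then _4 is
   recorded as the initializer for increment 5 and can be reused by
   those later replacements instead of emitting a fresh
   T_0 = s_1 * 5.  */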
2479 static void
2480 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2482 bool found = false;
2483 unsigned i;
2485 /* Treat increments that differ only in sign as identical so as to
2486 share initializers, unless we are generating pointer arithmetic. */
2487 if (!address_arithmetic_p && wi::neg_p (increment))
2488 increment = -increment;
2490 for (i = 0; i < incr_vec_len; i++)
2492 if (incr_vec[i].incr == increment)
2494 incr_vec[i].count++;
2495 found = true;
2497 /* If we previously recorded an initializer that doesn't
2498 dominate this candidate, it's not going to be useful to
2499 us after all. */
2500 if (incr_vec[i].initializer
2501 && !dominated_by_p (CDI_DOMINATORS,
2502 gimple_bb (c->cand_stmt),
2503 incr_vec[i].init_bb))
2505 incr_vec[i].initializer = NULL_TREE;
2506 incr_vec[i].init_bb = NULL;
2509 break;
2513 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2515 /* The first time we see an increment, create the entry for it.
2516 If this is the root candidate which doesn't have a basis, set
2517 the count to zero. We're only processing it so it can possibly
2518 provide an initializer for other candidates. */
2519 incr_vec[incr_vec_len].incr = increment;
2520 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2521 incr_vec[incr_vec_len].cost = COST_INFINITE;
2523 /* Optimistically record the first occurrence of this increment
2524 as providing an initializer (if it does); we will revise this
2525 opinion later if it doesn't dominate all other occurrences.
2526 Exception: increments of -1, 0, 1 never need initializers;
2527 and phi adjustments don't ever provide initializers. */
2528 if (c->kind == CAND_ADD
2529 && !is_phi_adjust
2530 && c->index == increment
2531 && (wi::gts_p (increment, 1)
2532 || wi::lts_p (increment, -1))
2533 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2534 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2536 tree t0 = NULL_TREE;
2537 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2538 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2539 if (operand_equal_p (rhs1, c->base_expr, 0))
2540 t0 = rhs2;
2541 else if (operand_equal_p (rhs2, c->base_expr, 0))
2542 t0 = rhs1;
2543 if (t0
2544 && SSA_NAME_DEF_STMT (t0)
2545 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2547 incr_vec[incr_vec_len].initializer = t0;
2548 incr_vec[incr_vec_len++].init_bb
2549 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2551 else
2553 incr_vec[incr_vec_len].initializer = NULL_TREE;
2554 incr_vec[incr_vec_len++].init_bb = NULL;
2557 else
2559 incr_vec[incr_vec_len].initializer = NULL_TREE;
2560 incr_vec[incr_vec_len++].init_bb = NULL;
2565 /* Given phi statement PHI that hides a candidate from its BASIS, find
2566 the increments along each incoming arc (recursively handling additional
2567 phis that may be present) and record them. These increments are the
2568 difference in index between the index-adjusting statements and the
2569 index of the basis. */
2571 static void
2572 record_phi_increments (slsr_cand_t basis, gimple phi)
2574 unsigned i;
2575 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2577 for (i = 0; i < gimple_phi_num_args (phi); i++)
2579 tree arg = gimple_phi_arg_def (phi, i);
2581 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2583 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2585 if (gimple_code (arg_def) == GIMPLE_PHI)
2586 record_phi_increments (basis, arg_def);
2587 else
2589 slsr_cand_t arg_cand = base_cand_from_table (arg);
2590 widest_int diff = arg_cand->index - basis->index;
2591 record_increment (arg_cand, diff, PHI_ADJUST);
2597 /* Determine how many times each unique increment occurs in the set
2598 of candidates rooted at C's parent, recording the data in the
2599 increment vector. For each unique increment I, if an initializer
2600 T_0 = stride * I is provided by a candidate that dominates all
2601 candidates with the same increment, also record T_0 for subsequent
2602 use. */
2604 static void
2605 record_increments (slsr_cand_t c)
2607 if (!cand_already_replaced (c))
2609 if (!phi_dependent_cand_p (c))
2610 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2611 else
2613 /* A candidate with a basis hidden by a phi will have one
2614 increment for its relationship to the index represented by
2615 the phi, and potentially additional increments along each
2616 incoming edge. For the root of the dependency tree (which
2617 has no basis), process just the initial index in case it has
2618 an initializer that can be used by subsequent candidates. */
2619 record_increment (c, c->index, NOT_PHI_ADJUST);
2621 if (c->basis)
2622 record_phi_increments (lookup_cand (c->basis),
2623 lookup_cand (c->def_phi)->cand_stmt);
2627 if (c->sibling)
2628 record_increments (lookup_cand (c->sibling));
2630 if (c->dependent)
2631 record_increments (lookup_cand (c->dependent));
2634 /* Add up and return the costs of introducing add statements that
2635 require the increment INCR on behalf of candidate C and phi
2636 statement PHI. Accumulate into *SAVINGS the potential savings
2637 from removing existing statements that feed PHI and have no other
2638 uses. */
2640 static int
2641 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple phi, int *savings)
2643 unsigned i;
2644 int cost = 0;
2645 slsr_cand_t basis = lookup_cand (c->basis);
2646 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2648 for (i = 0; i < gimple_phi_num_args (phi); i++)
2650 tree arg = gimple_phi_arg_def (phi, i);
2652 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2654 gimple arg_def = SSA_NAME_DEF_STMT (arg);
2656 if (gimple_code (arg_def) == GIMPLE_PHI)
2658 int feeding_savings = 0;
2659 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2660 if (has_single_use (gimple_phi_result (arg_def)))
2661 *savings += feeding_savings;
2663 else
2665 slsr_cand_t arg_cand = base_cand_from_table (arg);
2666 widest_int diff = arg_cand->index - basis->index;
2668 if (incr == diff)
2670 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2671 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2672 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2673 if (has_single_use (lhs))
2674 *savings += stmt_cost (arg_cand->cand_stmt, true);
2680 return cost;
2683 /* Return the first candidate in the tree rooted at C that has not
2684 already been replaced, favoring siblings over dependents. */
2686 static slsr_cand_t
2687 unreplaced_cand_in_tree (slsr_cand_t c)
2689 if (!cand_already_replaced (c))
2690 return c;
2692 if (c->sibling)
2694 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2695 if (sib)
2696 return sib;
2699 if (c->dependent)
2701 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2702 if (dep)
2703 return dep;
2706 return NULL;
2709 /* Return TRUE if the candidates in the tree rooted at C should be
2710 optimized for speed, else FALSE. We estimate this based on the block
2711 containing the most dominant candidate in the tree that has not yet
2712 been replaced. */
2714 static bool
2715 optimize_cands_for_speed_p (slsr_cand_t c)
2717 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2718 gcc_assert (c2);
2719 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2722 /* Add COST_IN to the lowest cost of any dependent path starting at
2723 candidate C or any of its siblings, counting only candidates along
2724 such paths with increment INCR. Assume that replacing a candidate
2725 reduces cost by REPL_SAVINGS. Also account for savings from any
2726 statements that would go dead. If COUNT_PHIS is true, include
2727 costs of introducing feeding statements for conditional candidates. */
2729 static int
2730 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2731 const widest_int &incr, bool count_phis)
2733 int local_cost, sib_cost, savings = 0;
2734 widest_int cand_incr = cand_abs_increment (c);
2736 if (cand_already_replaced (c))
2737 local_cost = cost_in;
2738 else if (incr == cand_incr)
2739 local_cost = cost_in - repl_savings - c->dead_savings;
2740 else
2741 local_cost = cost_in - c->dead_savings;
2743 if (count_phis
2744 && phi_dependent_cand_p (c)
2745 && !cand_already_replaced (c))
2747 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2748 local_cost += phi_incr_cost (c, incr, phi, &savings);
2750 if (has_single_use (gimple_phi_result (phi)))
2751 local_cost -= savings;
2754 if (c->dependent)
2755 local_cost = lowest_cost_path (local_cost, repl_savings,
2756 lookup_cand (c->dependent), incr,
2757 count_phis);
2759 if (c->sibling)
2761 sib_cost = lowest_cost_path (cost_in, repl_savings,
2762 lookup_cand (c->sibling), incr,
2763 count_phis);
2764 local_cost = MIN (local_cost, sib_cost);
2767 return local_cost;
2770 /* Compute the total savings that would accrue from all replacements
2771 in the candidate tree rooted at C, counting only candidates with
2772 increment INCR. Assume that replacing a candidate reduces cost
2773 by REPL_SAVINGS. Also account for savings from statements that
2774 would go dead. */
2776 static int
2777 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2778 bool count_phis)
2780 int savings = 0;
2781 widest_int cand_incr = cand_abs_increment (c);
2783 if (incr == cand_incr && !cand_already_replaced (c))
2784 savings += repl_savings + c->dead_savings;
2786 if (count_phis
2787 && phi_dependent_cand_p (c)
2788 && !cand_already_replaced (c))
2790 int phi_savings = 0;
2791 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
2792 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2794 if (has_single_use (gimple_phi_result (phi)))
2795 savings += phi_savings;
2798 if (c->dependent)
2799 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2800 count_phis);
2802 if (c->sibling)
2803 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2804 count_phis);
2806 return savings;
2809 /* Use target-specific costs to determine and record which increments
2810 in the current candidate tree are profitable to replace, assuming
2811 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2812 the candidate tree.
2814 One slight limitation here is that we don't account for the possible
2815 introduction of casts in some cases. See replace_one_candidate for
2816 the cases where these are introduced. This should probably be cleaned
2817 up sometime. */
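/* A rough illustration with made-up costs for the CAND_MULT case
   below: suppose increment 12 has mult_by_coeff_cost 8, and
   mul_cost - add_cost gives repl_savings of 12 per replacement.  When
   optimizing for speed, if the cheapest dependent path contains two
   candidates using increment 12, lowest_cost_path yields
   8 - 12 - 12 = -16, so the increment is recorded as profitable.  */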
2819 static void
2820 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2822 unsigned i;
2824 for (i = 0; i < incr_vec_len; i++)
2826 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2828 /* If somehow this increment is bigger than a HWI, we won't
2829 be optimizing candidates that use it. And if the increment
2830 has a count of zero, nothing will be done with it. */
2831 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2832 incr_vec[i].cost = COST_INFINITE;
2834 /* Increments of 0, 1, and -1 are always profitable to replace,
2835 because they always replace a multiply or add with an add or
2836 copy, and may cause one or more existing instructions to go
2837 dead. Exception: -1 can't be assumed to be profitable for
2838 pointer addition. */
2839 else if (incr == 0
2840 || incr == 1
2841 || (incr == -1
2842 && (gimple_assign_rhs_code (first_dep->cand_stmt)
2843 != POINTER_PLUS_EXPR)))
2844 incr_vec[i].cost = COST_NEUTRAL;
2846 /* FORNOW: If we need to add an initializer, give up if a cast from
2847 the candidate's type to its stride's type can lose precision.
2848 This could eventually be handled better by expressly retaining the
2849 result of a cast to a wider type in the stride. Example:
2851 short int _1;
2852 _2 = (int) _1;
2853 _3 = _2 * 10;
2854 _4 = x + _3; ADD: x + (10 * _1) : int
2855 _5 = _2 * 15;
2856 _6 = x + _5; ADD: x + (15 * _1) : int
2858 Right now replacing _6 would cause insertion of an initializer
2859 of the form "short int T = _1 * 5;" followed by a cast to
2860 int, which could overflow incorrectly. Had we recorded _2 or
2861 (int)_1 as the stride, this wouldn't happen. However, doing
2862 this breaks other opportunities, so this will require some
2863 care. */
2864 else if (!incr_vec[i].initializer
2865 && TREE_CODE (first_dep->stride) != INTEGER_CST
2866 && !legal_cast_p_1 (first_dep->stride,
2867 gimple_assign_lhs (first_dep->cand_stmt)))
2869 incr_vec[i].cost = COST_INFINITE;
2871 /* If we need to add an initializer, make sure we don't introduce
2872 a multiply by a pointer type, which can happen in certain cast
2873 scenarios. FIXME: When cleaning up these cast issues, we can
2874 afford to introduce the multiply provided we cast out to an
2875 unsigned int of appropriate size. */
2876 else if (!incr_vec[i].initializer
2877 && TREE_CODE (first_dep->stride) != INTEGER_CST
2878 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2880 incr_vec[i].cost = COST_INFINITE;
2882 /* For any other increment, if this is a multiply candidate, we
2883 must introduce a temporary T and initialize it with
2884 T_0 = stride * increment. When optimizing for speed, walk the
2885 candidate tree to calculate the best cost reduction along any
2886 path; if it offsets the fixed cost of inserting the initializer,
2887 replacing the increment is profitable. When optimizing for
2888 size, instead calculate the total cost reduction from replacing
2889 all candidates with this increment. */
2890 else if (first_dep->kind == CAND_MULT)
2892 int cost = mult_by_coeff_cost (incr, mode, speed);
2893 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2894 if (speed)
2895 cost = lowest_cost_path (cost, repl_savings, first_dep,
2896 incr_vec[i].incr, COUNT_PHIS);
2897 else
2898 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2899 COUNT_PHIS);
2901 incr_vec[i].cost = cost;
2904 /* If this is an add candidate, the initializer may already
2905 exist, so only calculate the cost of the initializer if it
2906 doesn't. We are replacing one add with another here, so the
2907 known replacement savings is zero. We will account for removal
2908 of dead instructions in lowest_cost_path or total_savings. */
2909 else
2911 int cost = 0;
2912 if (!incr_vec[i].initializer)
2913 cost = mult_by_coeff_cost (incr, mode, speed);
2915 if (speed)
2916 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2917 DONT_COUNT_PHIS);
2918 else
2919 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2920 DONT_COUNT_PHIS);
2922 incr_vec[i].cost = cost;
2927 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2928 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2929 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2930 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2931 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2933 static basic_block
2934 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2935 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2937 basic_block ncd;
2939 if (!bb1)
2941 *where = c2;
2942 return bb2;
2945 if (!bb2)
2947 *where = c1;
2948 return bb1;
2951 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2953 /* If both candidates are in the same block, the earlier
2954 candidate wins. */
2955 if (bb1 == ncd && bb2 == ncd)
2957 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2958 *where = c2;
2959 else
2960 *where = c1;
2963 /* Otherwise, if one of them produced a candidate in the
2964 dominator, that one wins. */
2965 else if (bb1 == ncd)
2966 *where = c1;
2968 else if (bb2 == ncd)
2969 *where = c2;
2971 /* If neither matches the dominator, neither wins. */
2972 else
2973 *where = NULL;
2975 return ncd;
2978 /* Consider all candidates that feed PHI. Find the nearest common
2979 dominator of those candidates requiring the given increment INCR.
2980 Further find and return the nearest common dominator of this result
2981 with block NCD. If the returned block contains one or more of the
2982 candidates, return the earliest candidate in the block in *WHERE. */
2984 static basic_block
2985 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
2986 basic_block ncd, slsr_cand_t *where)
2988 unsigned i;
2989 slsr_cand_t basis = lookup_cand (c->basis);
2990 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
2992 for (i = 0; i < gimple_phi_num_args (phi); i++)
2994 tree arg = gimple_phi_arg_def (phi, i);
2996 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2998 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3000 if (gimple_code (arg_def) == GIMPLE_PHI)
3001 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
3002 where);
3003 else
3005 slsr_cand_t arg_cand = base_cand_from_table (arg);
3006 widest_int diff = arg_cand->index - basis->index;
3007 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3009 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3010 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3015 return ncd;
3018 /* Consider the candidate C together with any candidates that feed
3019 C's phi dependence (if any). Find and return the nearest common
3020 dominator of those candidates requiring the given increment INCR.
3021 If the returned block contains one or more of the candidates,
3022 return the earliest candidate in the block in *WHERE. */
3024 static basic_block
3025 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3027 basic_block ncd = NULL;
3029 if (cand_abs_increment (c) == incr)
3031 ncd = gimple_bb (c->cand_stmt);
3032 *where = c;
3035 if (phi_dependent_cand_p (c))
3036 ncd = ncd_with_phi (c, incr,
3037 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3038 ncd, where);
3040 return ncd;
3043 /* Consider all candidates in the tree rooted at C for which INCR
3044 represents the required increment of C relative to its basis.
3045 Find and return the basic block that most nearly dominates all
3046 such candidates. If the returned block contains one or more of
3047 the candidates, return the earliest candidate in the block in
3048 *WHERE. */
3050 static basic_block
3051 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3052 slsr_cand_t *where)
3054 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3055 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3057 /* First find the NCD of all siblings and dependents. */
3058 if (c->sibling)
3059 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3060 incr, &sib_where);
3061 if (c->dependent)
3062 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3063 incr, &dep_where);
3064 if (!sib_ncd && !dep_ncd)
3066 new_where = NULL;
3067 ncd = NULL;
3069 else if (sib_ncd && !dep_ncd)
3071 new_where = sib_where;
3072 ncd = sib_ncd;
3074 else if (dep_ncd && !sib_ncd)
3076 new_where = dep_where;
3077 ncd = dep_ncd;
3079 else
3080 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3081 dep_where, &new_where);
3083 /* If the candidate's increment doesn't match the one we're interested
3084 in (nor do any increments for feeding defs of a phi-dependence),
3085 then the result depends only on siblings and dependents. */
3086 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3088 if (!this_ncd || cand_already_replaced (c))
3090 *where = new_where;
3091 return ncd;
3094 /* Otherwise, compare this candidate with the result from all siblings
3095 and dependents. */
3096 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3098 return ncd;
3101 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3103 static inline bool
3104 profitable_increment_p (unsigned index)
3106 return (incr_vec[index].cost <= COST_NEUTRAL);
3109 /* For each profitable increment in the increment vector not equal to
3110 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3111 dominator of all statements in the candidate chain rooted at C
3112 that require that increment, and insert an initializer
3113 T_0 = stride * increment at that location. Record T_0 with the
3114 increment record. */
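/* A sketch with hypothetical names: for an unknown stride s_1 and a
   profitable increment of 5 with no existing initializer, a statement

     slsr_8 = s_1 * 5;

   is inserted at the nearest common dominator computed below, and the
   affected candidates are later rewritten as basis + slsr_8 (or
   basis - slsr_8 when the increment's sign was flipped).  */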
3116 static void
3117 insert_initializers (slsr_cand_t c)
3119 unsigned i;
3121 for (i = 0; i < incr_vec_len; i++)
3123 basic_block bb;
3124 slsr_cand_t where = NULL;
3125 gassign *init_stmt;
3126 tree stride_type, new_name, incr_tree;
3127 widest_int incr = incr_vec[i].incr;
3129 if (!profitable_increment_p (i)
3130 || incr == 1
3131 || (incr == -1
3132 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3133 || incr == 0)
3134 continue;
3136 /* We may have already identified an existing initializer that
3137 will suffice. */
3138 if (incr_vec[i].initializer)
3140 if (dump_file && (dump_flags & TDF_DETAILS))
3142 fputs ("Using existing initializer: ", dump_file);
3143 print_gimple_stmt (dump_file,
3144 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3145 0, 0);
3147 continue;
3150 /* Find the block that most closely dominates all candidates
3151 with this increment. If there is at least one candidate in
3152 that block, the earliest one will be returned in WHERE. */
3153 bb = nearest_common_dominator_for_cands (c, incr, &where);
3155 /* Create a new SSA name to hold the initializer's value. */
3156 stride_type = TREE_TYPE (c->stride);
3157 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3158 incr_vec[i].initializer = new_name;
3160 /* Create the initializer and insert it in the latest possible
3161 dominating position. */
3162 incr_tree = wide_int_to_tree (stride_type, incr);
3163 init_stmt = gimple_build_assign_with_ops (MULT_EXPR, new_name,
3164 c->stride, incr_tree);
3165 if (where)
3167 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3168 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3169 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3171 else
3173 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3174 gimple basis_stmt = lookup_cand (c->basis)->cand_stmt;
3176 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3177 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3178 else
3179 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3181 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3184 if (dump_file && (dump_flags & TDF_DETAILS))
3186 fputs ("Inserting initializer: ", dump_file);
3187 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3192 /* Return TRUE iff all required increments for candidates feeding PHI
3193 are profitable to replace on behalf of candidate C. */
3195 static bool
3196 all_phi_incrs_profitable (slsr_cand_t c, gimple phi)
3198 unsigned i;
3199 slsr_cand_t basis = lookup_cand (c->basis);
3200 slsr_cand_t phi_cand = base_cand_from_table (gimple_phi_result (phi));
3202 for (i = 0; i < gimple_phi_num_args (phi); i++)
3204 tree arg = gimple_phi_arg_def (phi, i);
3206 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3208 gimple arg_def = SSA_NAME_DEF_STMT (arg);
3210 if (gimple_code (arg_def) == GIMPLE_PHI)
3212 if (!all_phi_incrs_profitable (c, arg_def))
3213 return false;
3215 else
3217 int j;
3218 slsr_cand_t arg_cand = base_cand_from_table (arg);
3219 widest_int increment = arg_cand->index - basis->index;
3221 if (!address_arithmetic_p && wi::neg_p (increment))
3222 increment = -increment;
3224 j = incr_vec_index (increment);
3226 if (dump_file && (dump_flags & TDF_DETAILS))
3228 fprintf (dump_file, " Conditional candidate %d, phi: ",
3229 c->cand_num);
3230 print_gimple_stmt (dump_file, phi, 0, 0);
3231 fputs (" increment: ", dump_file);
3232 print_decs (increment, dump_file);
3233 if (j < 0)
3234 fprintf (dump_file,
3235 "\n Not replaced; incr_vec overflow.\n");
3236 else {
3237 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3238 if (profitable_increment_p (j))
3239 fputs (" Replacing...\n", dump_file);
3240 else
3241 fputs (" Not replaced.\n", dump_file);
3245 if (j < 0 || !profitable_increment_p (j))
3246 return false;
3251 return true;
3254 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3255 type TO_TYPE, and insert it in front of the statement represented
3256 by candidate C. Return
3257 the new SSA name. */
3259 static tree
3260 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3262 tree cast_lhs;
3263 gassign *cast_stmt;
3264 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3266 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3267 cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, cast_lhs, from_expr);
3268 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3269 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3271 if (dump_file && (dump_flags & TDF_DETAILS))
3273 fputs (" Inserting: ", dump_file);
3274 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3277 return cast_lhs;
3280 /* Replace the RHS of the statement represented by candidate C with
3281 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3282 leave C unchanged or just interchange its operands. The original
3283 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3284 If the replacement was made and we are doing a details dump,
3285 return the revised statement, else NULL. */
3287 static gimple
3288 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3289 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3290 slsr_cand_t c)
3292 if (new_code != old_code
3293 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3294 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3295 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3296 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3298 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3299 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3300 update_stmt (gsi_stmt (gsi));
3301 c->cand_stmt = gsi_stmt (gsi);
3303 if (dump_file && (dump_flags & TDF_DETAILS))
3304 return gsi_stmt (gsi);
3307 else if (dump_file && (dump_flags & TDF_DETAILS))
3308 fputs (" (duplicate, not actually replacing)\n", dump_file);
3310 return NULL;
3313 /* Strength-reduce the statement represented by candidate C by replacing
3314 it with an equivalent addition or subtraction. I is the index into
3315 the increment vector identifying C's increment. BASIS_NAME is the
3316 rhs1 to use in creating the add/subtract. A cast is introduced
3317 before C when the operand types require it. */
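/* Sketches with hypothetical names: with basis y_1, stride s_2, and
   increment 1, the candidate is rewritten in place as

     x_3 = y_1 + s_2;

   with increment -1 (non-pointer arithmetic) as

     x_3 = y_1 - s_2;

   and with an increment whose recorded initializer is slsr_4 = s_2 * 5,
   as

     x_3 = y_1 + slsr_4;    (or y_1 - slsr_4 if the signs differ)  */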
3319 static void
3320 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3322 gimple stmt_to_print = NULL;
3323 tree orig_rhs1, orig_rhs2;
3324 tree rhs2;
3325 enum tree_code orig_code, repl_code;
3326 widest_int cand_incr;
3328 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3329 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3330 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3331 cand_incr = cand_increment (c);
3333 if (dump_file && (dump_flags & TDF_DETAILS))
3335 fputs ("Replacing: ", dump_file);
3336 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3337 stmt_to_print = c->cand_stmt;
3340 if (address_arithmetic_p)
3341 repl_code = POINTER_PLUS_EXPR;
3342 else
3343 repl_code = PLUS_EXPR;
3345 /* If the increment has an initializer T_0, replace the candidate
3346 statement with an add of the basis name and the initializer. */
3347 if (incr_vec[i].initializer)
3349 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3350 tree orig_type = TREE_TYPE (orig_rhs2);
3352 if (types_compatible_p (orig_type, init_type))
3353 rhs2 = incr_vec[i].initializer;
3354 else
3355 rhs2 = introduce_cast_before_cand (c, orig_type,
3356 incr_vec[i].initializer);
3358 if (incr_vec[i].incr != cand_incr)
3360 gcc_assert (repl_code == PLUS_EXPR);
3361 repl_code = MINUS_EXPR;
3364 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3365 orig_code, orig_rhs1, orig_rhs2,
3369 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3370 with a subtract of the stride from the basis name, a copy
3371 from the basis name, or an add of the stride to the basis
3372 name, respectively. It may be necessary to introduce a
3373 cast (or reuse an existing cast). */
3374 else if (cand_incr == 1)
3376 tree stride_type = TREE_TYPE (c->stride);
3377 tree orig_type = TREE_TYPE (orig_rhs2);
3379 if (types_compatible_p (orig_type, stride_type))
3380 rhs2 = c->stride;
3381 else
3382 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3384 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3385 orig_code, orig_rhs1, orig_rhs2,
3389 else if (cand_incr == -1)
3391 tree stride_type = TREE_TYPE (c->stride);
3392 tree orig_type = TREE_TYPE (orig_rhs2);
3393 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3395 if (types_compatible_p (orig_type, stride_type))
3396 rhs2 = c->stride;
3397 else
3398 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3400 if (orig_code != MINUS_EXPR
3401 || !operand_equal_p (basis_name, orig_rhs1, 0)
3402 || !operand_equal_p (rhs2, orig_rhs2, 0))
3404 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3405 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3406 update_stmt (gsi_stmt (gsi));
3407 c->cand_stmt = gsi_stmt (gsi);
3409 if (dump_file && (dump_flags & TDF_DETAILS))
3410 stmt_to_print = gsi_stmt (gsi);
3412 else if (dump_file && (dump_flags & TDF_DETAILS))
3413 fputs (" (duplicate, not actually replacing)\n", dump_file);
3416 else if (cand_incr == 0)
3418 tree lhs = gimple_assign_lhs (c->cand_stmt);
3419 tree lhs_type = TREE_TYPE (lhs);
3420 tree basis_type = TREE_TYPE (basis_name);
3422 if (types_compatible_p (lhs_type, basis_type))
3424 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3425 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3426 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3427 gsi_replace (&gsi, copy_stmt, false);
3428 c->cand_stmt = copy_stmt;
3430 if (dump_file && (dump_flags & TDF_DETAILS))
3431 stmt_to_print = copy_stmt;
3433 else
3435 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3436 gassign *cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
3437 basis_name);
3438 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3439 gsi_replace (&gsi, cast_stmt, false);
3440 c->cand_stmt = cast_stmt;
3442 if (dump_file && (dump_flags & TDF_DETAILS))
3443 stmt_to_print = cast_stmt;
3446 else
3447 gcc_unreachable ();
3449 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3451 fputs ("With: ", dump_file);
3452 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3453 fputs ("\n", dump_file);
3457 /* For each candidate in the tree rooted at C, replace it with
3458 an increment if such has been shown to be profitable. */
3460 static void
3461 replace_profitable_candidates (slsr_cand_t c)
3463 if (!cand_already_replaced (c))
3465 widest_int increment = cand_abs_increment (c);
3466 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3467 int i;
3469 i = incr_vec_index (increment);
3471 /* Only process profitable increments. Nothing useful can be done
3472 to a cast or copy. */
3473 if (i >= 0
3474 && profitable_increment_p (i)
3475 && orig_code != MODIFY_EXPR
3476 && !CONVERT_EXPR_CODE_P (orig_code))
3478 if (phi_dependent_cand_p (c))
3480 gimple phi = lookup_cand (c->def_phi)->cand_stmt;
3482 if (all_phi_incrs_profitable (c, phi))
3484 /* Look up the LHS SSA name from C's basis. This will be
3485 the RHS1 of the adds we will introduce to create new
3486 phi arguments. */
3487 slsr_cand_t basis = lookup_cand (c->basis);
3488 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3490 /* Create a new phi statement that will represent C's true
3491 basis after the transformation is complete. */
3492 location_t loc = gimple_location (c->cand_stmt);
3493 tree name = create_phi_basis (c, phi, basis_name,
3494 loc, UNKNOWN_STRIDE);
3496 /* Replace C with an add of the new basis phi and the
3497 increment. */
3498 replace_one_candidate (c, i, name);
3501 else
3503 slsr_cand_t basis = lookup_cand (c->basis);
3504 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3505 replace_one_candidate (c, i, basis_name);
3510 if (c->sibling)
3511 replace_profitable_candidates (lookup_cand (c->sibling));
3513 if (c->dependent)
3514 replace_profitable_candidates (lookup_cand (c->dependent));
3517 /* Analyze costs of related candidates in the candidate vector,
3518 and make beneficial replacements. */
3520 static void
3521 analyze_candidates_and_replace (void)
3523 unsigned i;
3524 slsr_cand_t c;
3526 /* Each candidate that has a null basis and a non-null
3527 dependent is the root of a tree of related statements.
3528 Analyze each tree to determine a subset of those
3529 statements that can be replaced with maximum benefit. */
3530 FOR_EACH_VEC_ELT (cand_vec, i, c)
3532 slsr_cand_t first_dep;
3534 if (c->basis != 0 || c->dependent == 0)
3535 continue;
3537 if (dump_file && (dump_flags & TDF_DETAILS))
3538 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3539 c->cand_num);
3541 first_dep = lookup_cand (c->dependent);
3543 /* If this is a chain of CAND_REFs, unconditionally replace
3544 each of them with a strength-reduced data reference. */
3545 if (c->kind == CAND_REF)
3546 replace_refs (c);
3548 /* If the common stride of all related candidates is a known
3549 constant, each candidate without a phi-dependence can be
3550 profitably replaced. Each replaces a multiply by a single
3551 add, with the possibility that a feeding add also goes dead.
3552 A candidate with a phi-dependence is replaced only if the
3553 compensation code it requires is offset by the strength
3554 reduction savings. */
3555 else if (TREE_CODE (c->stride) == INTEGER_CST)
3556 replace_uncond_cands_and_profitable_phis (first_dep);
3558 /* When the stride is an SSA name, it may still be profitable
3559 to replace some or all of the dependent candidates, depending
3560 on whether the introduced increments can be reused, or are
3561 less expensive to calculate than the replaced statements. */
3562 else
3564 machine_mode mode;
3565 bool speed;
3567 /* Determine whether we'll be generating pointer arithmetic
3568 when replacing candidates. */
3569 address_arithmetic_p = (c->kind == CAND_ADD
3570 && POINTER_TYPE_P (c->cand_type));
3572 /* If all candidates have already been replaced under other
3573 interpretations, nothing remains to be done. */
3574 if (!count_candidates (c))
3575 continue;
3577 /* Construct an array of increments for this candidate chain. */
3578 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3579 incr_vec_len = 0;
3580 record_increments (c);
3582 /* Determine which increments are profitable to replace. */
3583 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3584 speed = optimize_cands_for_speed_p (c);
3585 analyze_increments (first_dep, mode, speed);
3587 /* Insert initializers of the form T_0 = stride * increment
3588 for use in profitable replacements. */
3589 insert_initializers (first_dep);
3590 dump_incr_vec ();
3592 /* Perform the replacements. */
3593 replace_profitable_candidates (first_dep);
3594 free (incr_vec);
3599 namespace {
3601 const pass_data pass_data_strength_reduction =
3603 GIMPLE_PASS, /* type */
3604 "slsr", /* name */
3605 OPTGROUP_NONE, /* optinfo_flags */
3606 TV_GIMPLE_SLSR, /* tv_id */
3607 ( PROP_cfg | PROP_ssa ), /* properties_required */
3608 0, /* properties_provided */
3609 0, /* properties_destroyed */
3610 0, /* todo_flags_start */
3611 0, /* todo_flags_finish */
3614 class pass_strength_reduction : public gimple_opt_pass
3616 public:
3617 pass_strength_reduction (gcc::context *ctxt)
3618 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3621 /* opt_pass methods: */
3622 virtual bool gate (function *) { return flag_tree_slsr; }
3623 virtual unsigned int execute (function *);
3625 }; // class pass_strength_reduction
3627 unsigned
3628 pass_strength_reduction::execute (function *fun)
3630 /* Create the obstack where candidates will reside. */
3631 gcc_obstack_init (&cand_obstack);
3633 /* Allocate the candidate vector. */
3634 cand_vec.create (128);
3636 /* Allocate the mapping from statements to candidate indices. */
3637 stmt_cand_map = new hash_map<gimple, slsr_cand_t>;
3639 /* Create the obstack where candidate chains will reside. */
3640 gcc_obstack_init (&chain_obstack);
3642 /* Allocate the mapping from base expressions to candidate chains. */
3643 base_cand_map = new hash_table<cand_chain_hasher> (500);
3645 /* Allocate the mapping from bases to alternative bases. */
3646 alt_base_map = new hash_map<tree, tree>;
3648 /* Initialize the loop optimizer. We need to detect flow across
3649 back edges, and this gives us dominator information as well. */
3650 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3652 /* Walk the CFG in predominator order looking for strength reduction
3653 candidates. */
3654 find_candidates_dom_walker (CDI_DOMINATORS)
3655 .walk (fun->cfg->x_entry_block_ptr);
3657 if (dump_file && (dump_flags & TDF_DETAILS))
3659 dump_cand_vec ();
3660 dump_cand_chains ();
3663 delete alt_base_map;
3664 free_affine_expand_cache (&name_expansions);
3666 /* Analyze costs and make appropriate replacements. */
3667 analyze_candidates_and_replace ();
3669 loop_optimizer_finalize ();
3670 delete base_cand_map;
3671 base_cand_map = NULL;
3672 obstack_free (&chain_obstack, NULL);
3673 delete stmt_cand_map;
3674 cand_vec.release ();
3675 obstack_free (&cand_obstack, NULL);
3677 return 0;
3680 } // anon namespace
3682 gimple_opt_pass *
3683 make_pass_strength_reduction (gcc::context *ctxt)
3685 return new pass_strength_reduction (ctxt);