gcc/gimple-ssa-strength-reduction.c
1 /* Straight-line strength reduction.
2 Copyright (C) 2012-2016 Free Software Foundation, Inc.
3 Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* There are many algorithms for performing strength reduction on
22 loops. This is not one of them. IVOPTS handles strength reduction
23 of induction variables just fine. This pass is intended to pick
24 up the crumbs it leaves behind, by considering opportunities for
25 strength reduction along dominator paths.
27 Strength reduction addresses explicit multiplies, and certain
28 multiplies implicit in addressing expressions. It would also be
29 possible to apply strength reduction to divisions and modulos,
30 but such opportunities are relatively uncommon.
32 Strength reduction is also currently restricted to integer operations.
33 If desired, it could be extended to floating-point operations under
34 control of something like -funsafe-math-optimizations. */
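/* A hedged source-level sketch of the kind of straight-line code this
   pass targets (illustrative only, not taken from the commentary above):

     void
     scale_three (int *out, int x)
     {
       out[0] = x * 5;
       out[1] = (x + 1) * 5;
       out[2] = (x + 2) * 5;
     }

   There is no loop here for IVOPTS to handle, yet once the first
   multiply is chosen as a basis, the later multiplies can be replaced
   by additions of small multiples of 5 to an earlier product.  */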
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "backend.h"
40 #include "rtl.h"
41 #include "tree.h"
42 #include "gimple.h"
43 #include "cfghooks.h"
44 #include "tree-pass.h"
45 #include "ssa.h"
46 #include "expmed.h"
47 #include "gimple-pretty-print.h"
48 #include "fold-const.h"
49 #include "gimple-iterator.h"
50 #include "gimplify-me.h"
51 #include "stor-layout.h"
52 #include "cfgloop.h"
53 #include "tree-cfg.h"
54 #include "domwalk.h"
55 #include "params.h"
56 #include "tree-ssa-address.h"
57 #include "tree-affine.h"
58 #include "builtins.h"
60 /* Information about a strength reduction candidate. Each statement
61 in the candidate table represents an expression of one of the
62 following forms (the special case of CAND_REF will be described
63 later):
65 (CAND_MULT) S1: X = (B + i) * S
66 (CAND_ADD) S1: X = B + (i * S)
68 Here X and B are SSA names, i is an integer constant, and S is
69 either an SSA name or a constant. We call B the "base," i the
70 "index", and S the "stride."
72 Any statement S0 that dominates S1 and is of the form:
74 (CAND_MULT) S0: Y = (B + i') * S
75 (CAND_ADD) S0: Y = B + (i' * S)
77 is called a "basis" for S1. In both cases, S1 may be replaced by
79 S1': X = Y + (i - i') * S,
81 where (i - i') * S is folded to the extent possible.
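
   As a concrete instance of the above (illustrative numbers only),
   with i' = 2 and i = 7:

     S0: Y = (B + 2) * S
     S1: X = (B + 7) * S

   S1 may be replaced by

     S1': X = Y + 5 * S.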
83 All gimple statements are visited in dominator order, and each
84 statement that may contribute to one of the forms of S1 above is
85 given at least one entry in the candidate table. Such statements
86 include addition, pointer addition, subtraction, multiplication,
87 negation, copies, and nontrivial type casts. If a statement may
88 represent more than one expression of the forms of S1 above,
89 multiple "interpretations" are stored in the table and chained
90 together. Examples:
92 * An add of two SSA names may treat either operand as the base.
93 * A multiply of two SSA names, likewise.
94 * A copy or cast may be thought of as either a CAND_MULT with
95 i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.
97 Candidate records are allocated from an obstack. They are addressed
98 both from a hash table keyed on S1, and from a vector of candidate
99 pointers arranged in predominator order.
101 Opportunity note
102 ----------------
103 Currently we don't recognize:
105 S0: Y = (S * i') - B
106 S1: X = (S * i) - B
108 as a strength reduction opportunity, even though this S1 would
109 also be replaceable by the S1' above. This can be added if it
110 comes up in practice.
112 Strength reduction in addressing
113 --------------------------------
114 There is another kind of candidate known as CAND_REF. A CAND_REF
115 describes a statement containing a memory reference having
116 complex addressing that might benefit from strength reduction.
117 Specifically, we are interested in references for which
118 get_inner_reference returns a base address, offset, and bitpos as
119 follows:
121 base: MEM_REF (T1, C1)
122 offset: MULT_EXPR (PLUS_EXPR (T2, C2), C3)
123 bitpos: C4 * BITS_PER_UNIT
125 Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
126 arbitrary integer constants. Note that C2 may be zero, in which
127 case the offset will be MULT_EXPR (T2, C3).
129 When this pattern is recognized, the original memory reference
130 can be replaced with:
132 MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
133 C1 + (C2 * C3) + C4)
135 which distributes the multiply to allow constant folding. When
136 two or more addressing expressions can be represented by MEM_REFs
137 of this form, differing only in the constants C1, C2, and C4,
138 making this substitution produces more efficient addressing during
139 the RTL phases. When there are not at least two expressions with
140 the same values of T1, T2, and C3, there is nothing to be gained
141 by the replacement.
143 Strength reduction of CAND_REFs uses the same infrastructure as
144 that used by CAND_MULTs and CAND_ADDs. We record T1 in the base (B)
145 field, MULT_EXPR (T2, C3) in the stride (S) field, and
146 C1 + (C2 * C3) + C4 in the index (i) field. A basis for a CAND_REF
147 is thus another CAND_REF with the same B and S values. When at
148 least two CAND_REFs are chained together using the basis relation,
149 each of them is replaced as above, resulting in improved code
150 generation for addressing.
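
   As a hedged source-level illustration (the same example appears
   later in the commentary for alloc_cand_and_find_basis), given a
   file-scope array and two nearby stores

     int a2[100][100];
     ...
     a2[i][j] = 1;
     a2[i + 20][j] = 2;

   both references share the same T1, T2, and C3 and differ only in
   their constant components, so rewriting them as above lets the RTL
   phases reuse the common addressing arithmetic.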
152 Conditional candidates
153 ======================
155 Conditional candidates are best illustrated with an example.
156 Consider the code sequence:
158 (1) x_0 = ...;
159 (2) a_0 = x_0 * 5; MULT (B: x_0; i: 0; S: 5)
160 if (...)
161 (3) x_1 = x_0 + 1; ADD (B: x_0, i: 1; S: 1)
162 (4) x_2 = PHI <x_0, x_1>; PHI (B: x_0, i: 0, S: 1)
163 (5) x_3 = x_2 + 1; ADD (B: x_2, i: 1, S: 1)
164 (6) a_1 = x_3 * 5; MULT (B: x_2, i: 1; S: 5)
166 Here strength reduction is complicated by the uncertain value of x_2.
167 A legitimate transformation is:
169 (1) x_0 = ...;
170 (2) a_0 = x_0 * 5;
171 if (...)
173 (3) [x_1 = x_0 + 1;]
174 (3a) t_1 = a_0 + 5;
176 (4) [x_2 = PHI <x_0, x_1>;]
177 (4a) t_2 = PHI <a_0, t_1>;
178 (5) [x_3 = x_2 + 1;]
179 (6r) a_1 = t_2 + 5;
181 where the bracketed instructions may go dead.
183 To recognize this opportunity, we have to observe that statement (6)
184 has a "hidden basis" (2). The hidden basis is unlike a normal basis
185 in that the statement and the hidden basis have different base SSA
186 names (x_2 and x_0, respectively). The relationship is established
187 when a statement's base name (x_2) is defined by a phi statement (4),
188 each argument of which (x_0, x_1) has an identical "derived base name."
189 If the argument is defined by a candidate (as x_1 is by (3)) that is a
190 CAND_ADD having a stride of 1, the derived base name of the argument is
191 the base name of the candidate (x_0). Otherwise, the argument itself
192 is its derived base name (as is the case with argument x_0).
194 The hidden basis for statement (6) is the nearest dominating candidate
195 whose base name is the derived base name (x_0) of the feeding phi (4),
196 and whose stride is identical to that of the statement. We can then
197 create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
198 allowing the final replacement of (6) by the strength-reduced (6r).
200 To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
201 A CAND_PHI is not a candidate for replacement, but is maintained in the
202 candidate table to ease discovery of hidden bases. Any phi statement
203 whose arguments share a common derived base name is entered into the
204 table with the derived base name, an (arbitrary) index of zero, and a
205 stride of 1. A statement with a hidden basis can then be detected by
206 simply looking up its feeding phi definition in the candidate table,
207 extracting the derived base name, and searching for a basis in the
208 usual manner after substituting the derived base name.
210 Note that the transformation is only valid when the original phi and
211 the statements that define the phi's arguments are all at the same
212 position in the loop hierarchy. */
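/* A hedged source-level version of the conditional-candidate example
   above (variable and function names are illustrative only):

     int
     cond_example (int x, int cond)
     {
       int a0 = x * 5;
       if (cond)
         x = x + 1;
       x = x + 1;
       int a1 = x * 5;
       return a0 + a1;
     }

   Under the transformation sketched above, a1 is computed as t_2 + 5,
   where t_2 is a new phi merging a0 and a0 + 5; the original
   increments of x and their phi may then go dead.  */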
215 /* Index into the candidate vector, offset by 1. VECs are zero-based,
216 while cand_idx's are one-based, with zero indicating null. */
217 typedef unsigned cand_idx;
219 /* The kind of candidate. */
220 enum cand_kind
222 CAND_MULT,
223 CAND_ADD,
224 CAND_REF,
225 CAND_PHI
228 struct slsr_cand_d
230 /* The candidate statement S1. */
231 gimple *cand_stmt;
233 /* The base expression B: often an SSA name, but not always. */
234 tree base_expr;
236 /* The stride S. */
237 tree stride;
239 /* The index constant i. */
240 widest_int index;
242 /* The type of the candidate. This is normally the type of base_expr,
243 but casts may have occurred when combining feeding instructions.
244 A candidate can only be a basis for candidates of the same final type.
245 (For CAND_REFs, this is the type to be used for operand 1 of the
246 replacement MEM_REF.) */
247 tree cand_type;
249 /* The kind of candidate (CAND_MULT, etc.). */
250 enum cand_kind kind;
252 /* Index of this candidate in the candidate vector. */
253 cand_idx cand_num;
255 /* Index of the next candidate record for the same statement.
256 A statement may be useful in more than one way (e.g., due to
257 commutativity). So we can have multiple "interpretations"
258 of a statement. */
259 cand_idx next_interp;
261 /* Index of the basis statement S0, if any, in the candidate vector. */
262 cand_idx basis;
264 /* First candidate for which this candidate is a basis, if one exists. */
265 cand_idx dependent;
267 /* Next candidate having the same basis as this one. */
268 cand_idx sibling;
270 /* If this is a conditional candidate, the CAND_PHI candidate
271 that defines the base SSA name B. */
272 cand_idx def_phi;
274 /* Savings that can be expected from eliminating dead code if this
275 candidate is replaced. */
276 int dead_savings;
279 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
280 typedef const struct slsr_cand_d *const_slsr_cand_t;
282 /* Pointers to candidates are chained together as part of a mapping
283 from base expressions to the candidates that use them. */
285 struct cand_chain_d
287 /* Base expression for the chain of candidates: often, but not
288 always, an SSA name. */
289 tree base_expr;
291 /* Pointer to a candidate. */
292 slsr_cand_t cand;
294 /* Chain pointer. */
295 struct cand_chain_d *next;
299 typedef struct cand_chain_d cand_chain, *cand_chain_t;
300 typedef const struct cand_chain_d *const_cand_chain_t;
302 /* Information about a unique "increment" associated with candidates
303 having an SSA name for a stride. An increment is the difference
304 between the index of the candidate and the index of its basis,
305 i.e., (i - i') as discussed in the module commentary.
307 When we are not going to generate address arithmetic we treat
308 increments that differ only in sign as the same, allowing sharing
309 of the cost of initializers. The absolute value of the increment
310 is stored in the incr_info. */
312 struct incr_info_d
314 /* The increment that relates a candidate to its basis. */
315 widest_int incr;
317 /* How many times the increment occurs in the candidate tree. */
318 unsigned count;
320 /* Cost of replacing candidates using this increment. Negative and
321 zero costs indicate replacement should be performed. */
322 int cost;
324 /* If this increment is profitable but is not -1, 0, or 1, it requires
325 an initializer T_0 = stride * incr to be found or introduced in the
326 nearest common dominator of all candidates. This field holds T_0
327 for subsequent use. */
328 tree initializer;
330 /* If the initializer was found to already exist, this is the block
331 where it was found. */
332 basic_block init_bb;
335 typedef struct incr_info_d incr_info, *incr_info_t;
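/* A hedged illustration of the sign-sharing rule described above
   incr_info_d: when the stride S is an SSA name and we are not
   generating address arithmetic, candidates needing increments of +3
   and -3 can share a single initializer

     T_0 = S * 3;

   with one replacement adding T_0 and the other subtracting it, so
   only the absolute value 3 is recorded in the increment vector.  */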
337 /* Candidates are maintained in a vector. If candidate X dominates
338 candidate Y, then X appears before Y in the vector; but the
339 converse does not necessarily hold. */
340 static vec<slsr_cand_t> cand_vec;
342 enum cost_consts
344 COST_NEUTRAL = 0,
345 COST_INFINITE = 1000
348 enum stride_status
350 UNKNOWN_STRIDE = 0,
351 KNOWN_STRIDE = 1
354 enum phi_adjust_status
356 NOT_PHI_ADJUST = 0,
357 PHI_ADJUST = 1
360 enum count_phis_status
362 DONT_COUNT_PHIS = 0,
363 COUNT_PHIS = 1
366 /* Pointer map embodying a mapping from statements to candidates. */
367 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
369 /* Obstack for candidates. */
370 static struct obstack cand_obstack;
372 /* Obstack for candidate chains. */
373 static struct obstack chain_obstack;
375 /* An array INCR_VEC of incr_infos is used during analysis of related
376 candidates having an SSA name for a stride. INCR_VEC_LEN describes
377 its current length. MAX_INCR_VEC_LEN is used to avoid costly
378 pathological cases. */
379 static incr_info_t incr_vec;
380 static unsigned incr_vec_len;
381 const int MAX_INCR_VEC_LEN = 16;
383 /* For a chain of candidates with unknown stride, indicates whether or not
384 we must generate pointer arithmetic when replacing statements. */
385 static bool address_arithmetic_p;
387 /* Forward function declarations. */
388 static slsr_cand_t base_cand_from_table (tree);
389 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
390 static bool legal_cast_p_1 (tree, tree);
392 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
394 static slsr_cand_t
395 lookup_cand (cand_idx idx)
397 return cand_vec[idx - 1];
400 /* Helper for hashing a candidate chain header. */
402 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
404 static inline hashval_t hash (const cand_chain *);
405 static inline bool equal (const cand_chain *, const cand_chain *);
408 inline hashval_t
409 cand_chain_hasher::hash (const cand_chain *p)
411 tree base_expr = p->base_expr;
412 return iterative_hash_expr (base_expr, 0);
415 inline bool
416 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
418 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
421 /* Hash table embodying a mapping from base exprs to chains of candidates. */
422 static hash_table<cand_chain_hasher> *base_cand_map;
424 /* Pointer map used by tree_to_aff_combination_expand. */
425 static hash_map<tree, name_expansion *> *name_expansions;
426 /* Pointer map embodying a mapping from bases to alternative bases. */
427 static hash_map<tree, tree> *alt_base_map;
429 /* Given BASE, use the tree affine combination facilities to
430 find the underlying tree expression for BASE, with any
431 immediate offset excluded.
433 N.B. we should eliminate this backtracking with better forward
434 analysis in a future release. */
436 static tree
437 get_alternative_base (tree base)
439 tree *result = alt_base_map->get (base);
441 if (result == NULL)
443 tree expr;
444 aff_tree aff;
446 tree_to_aff_combination_expand (base, TREE_TYPE (base),
447 &aff, &name_expansions);
448 aff.offset = 0;
449 expr = aff_combination_to_tree (&aff);
451 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
453 return expr == base ? NULL : expr;
456 return *result;
459 /* Look in the candidate table for a CAND_PHI that defines BASE and
460 return it if found; otherwise return NULL. */
462 static cand_idx
463 find_phi_def (tree base)
465 slsr_cand_t c;
467 if (TREE_CODE (base) != SSA_NAME)
468 return 0;
470 c = base_cand_from_table (base);
472 if (!c || c->kind != CAND_PHI)
473 return 0;
475 return c->cand_num;
478 /* Helper routine for find_basis_for_candidate. May be called twice:
479 once for the candidate's base expr, and optionally again either for
480 the candidate's phi definition or for a CAND_REF's alternative base
481 expression. */
483 static slsr_cand_t
484 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
486 cand_chain mapping_key;
487 cand_chain_t chain;
488 slsr_cand_t basis = NULL;
490 // Limit potential of N^2 behavior for long candidate chains.
491 int iters = 0;
492 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
494 mapping_key.base_expr = base_expr;
495 chain = base_cand_map->find (&mapping_key);
497 for (; chain && iters < max_iters; chain = chain->next, ++iters)
499 slsr_cand_t one_basis = chain->cand;
501 if (one_basis->kind != c->kind
502 || one_basis->cand_stmt == c->cand_stmt
503 || !operand_equal_p (one_basis->stride, c->stride, 0)
504 || !types_compatible_p (one_basis->cand_type, c->cand_type)
505 || !dominated_by_p (CDI_DOMINATORS,
506 gimple_bb (c->cand_stmt),
507 gimple_bb (one_basis->cand_stmt)))
508 continue;
510 if (!basis || basis->cand_num < one_basis->cand_num)
511 basis = one_basis;
514 return basis;
517 /* Use the base expr from candidate C to look for possible candidates
518 that can serve as a basis for C. Each potential basis must also
519 appear in a block that dominates the candidate statement and have
520 the same stride and type. If more than one possible basis exists,
521 the one with highest index in the vector is chosen; this will be
522 the most immediately dominating basis. */
524 static int
525 find_basis_for_candidate (slsr_cand_t c)
527 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
529 /* If a candidate doesn't have a basis using its base expression,
530 it may have a basis hidden by one or more intervening phis. */
531 if (!basis && c->def_phi)
533 basic_block basis_bb, phi_bb;
534 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
535 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
537 if (basis)
539 /* A hidden basis must dominate the phi-definition of the
540 candidate's base name. */
541 phi_bb = gimple_bb (phi_cand->cand_stmt);
542 basis_bb = gimple_bb (basis->cand_stmt);
544 if (phi_bb == basis_bb
545 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
547 basis = NULL;
548 c->basis = 0;
551 /* If we found a hidden basis, estimate additional dead-code
552 savings if the phi and its feeding statements can be removed. */
553 if (basis && has_single_use (gimple_phi_result (phi_cand->cand_stmt)))
554 c->dead_savings += phi_cand->dead_savings;
558 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
560 tree alt_base_expr = get_alternative_base (c->base_expr);
561 if (alt_base_expr)
562 basis = find_basis_for_base_expr (c, alt_base_expr);
565 if (basis)
567 c->sibling = basis->dependent;
568 basis->dependent = c->cand_num;
569 return basis->cand_num;
572 return 0;
575 /* Record a mapping from BASE to C, indicating that C may potentially serve
576 as a basis using that base expression. BASE may be the same as
577 C->BASE_EXPR; alternatively BASE can be a different tree that shares the
578 underlying expression of C->BASE_EXPR. */
580 static void
581 record_potential_basis (slsr_cand_t c, tree base)
583 cand_chain_t node;
584 cand_chain **slot;
586 gcc_assert (base);
588 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
589 node->base_expr = base;
590 node->cand = c;
591 node->next = NULL;
592 slot = base_cand_map->find_slot (node, INSERT);
594 if (*slot)
596 cand_chain_t head = (cand_chain_t) (*slot);
597 node->next = head->next;
598 head->next = node;
600 else
601 *slot = node;
604 /* Allocate storage for a new candidate and initialize its fields.
605 Attempt to find a basis for the candidate.
607 For CAND_REF, an alternative base may also be recorded and used
608 to find a basis. This helps cases where the expression hidden
609 behind BASE (which is usually an SSA_NAME) has immediate offset,
610 e.g.
612 a2[i][j] = 1;
613 a2[i + 20][j] = 2; */
615 static slsr_cand_t
616 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
617 const widest_int &index, tree stride, tree ctype,
618 unsigned savings)
620 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
621 sizeof (slsr_cand));
622 c->cand_stmt = gs;
623 c->base_expr = base;
624 c->stride = stride;
625 c->index = index;
626 c->cand_type = ctype;
627 c->kind = kind;
628 c->cand_num = cand_vec.length () + 1;
629 c->next_interp = 0;
630 c->dependent = 0;
631 c->sibling = 0;
632 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
633 c->dead_savings = savings;
635 cand_vec.safe_push (c);
637 if (kind == CAND_PHI)
638 c->basis = 0;
639 else
640 c->basis = find_basis_for_candidate (c);
642 record_potential_basis (c, base);
643 if (flag_expensive_optimizations && kind == CAND_REF)
645 tree alt_base = get_alternative_base (base);
646 if (alt_base)
647 record_potential_basis (c, alt_base);
650 return c;
653 /* Determine the target cost of statement GS when compiling according
654 to SPEED. */
656 static int
657 stmt_cost (gimple *gs, bool speed)
659 tree lhs, rhs1, rhs2;
660 machine_mode lhs_mode;
662 gcc_assert (is_gimple_assign (gs));
663 lhs = gimple_assign_lhs (gs);
664 rhs1 = gimple_assign_rhs1 (gs);
665 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
667 switch (gimple_assign_rhs_code (gs))
669 case MULT_EXPR:
670 rhs2 = gimple_assign_rhs2 (gs);
672 if (tree_fits_shwi_p (rhs2))
673 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
675 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
676 return mul_cost (speed, lhs_mode);
678 case PLUS_EXPR:
679 case POINTER_PLUS_EXPR:
680 case MINUS_EXPR:
681 return add_cost (speed, lhs_mode);
683 case NEGATE_EXPR:
684 return neg_cost (speed, lhs_mode);
686 CASE_CONVERT:
687 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
689 /* Note that we don't assign costs to copies that in most cases
690 will go away. */
691 case SSA_NAME:
692 return 0;
694 default:
698 gcc_unreachable ();
699 return 0;
702 /* Look up the defining statement for BASE_IN and return a pointer
703 to its candidate in the candidate table, if any; otherwise NULL.
704 Only CAND_ADD and CAND_MULT candidates are returned. */
706 static slsr_cand_t
707 base_cand_from_table (tree base_in)
709 slsr_cand_t *result;
711 gimple *def = SSA_NAME_DEF_STMT (base_in);
712 if (!def)
713 return (slsr_cand_t) NULL;
715 result = stmt_cand_map->get (def);
717 if (result && (*result)->kind != CAND_REF)
718 return *result;
720 return (slsr_cand_t) NULL;
723 /* Add an entry to the statement-to-candidate mapping. */
725 static void
726 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
728 gcc_assert (!stmt_cand_map->put (gs, c));
731 /* Given PHI which contains a phi statement, determine whether it
732 satisfies all the requirements of a phi candidate. If so, create
733 a candidate. Note that a CAND_PHI never has a basis itself, but
734 is used to help find a basis for subsequent candidates. */
736 static void
737 slsr_process_phi (gphi *phi, bool speed)
739 unsigned i;
740 tree arg0_base = NULL_TREE, base_type;
741 slsr_cand_t c;
742 struct loop *cand_loop = gimple_bb (phi)->loop_father;
743 unsigned savings = 0;
745 /* A CAND_PHI requires each of its arguments to have the same
746 derived base name. (See the module header commentary for a
747 definition of derived base names.) Furthermore, all feeding
748 definitions must be in the same position in the loop hierarchy
749 as PHI. */
751 for (i = 0; i < gimple_phi_num_args (phi); i++)
753 slsr_cand_t arg_cand;
754 tree arg = gimple_phi_arg_def (phi, i);
755 tree derived_base_name = NULL_TREE;
756 gimple *arg_stmt = NULL;
757 basic_block arg_bb = NULL;
759 if (TREE_CODE (arg) != SSA_NAME)
760 return;
762 arg_cand = base_cand_from_table (arg);
764 if (arg_cand)
766 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
768 if (!arg_cand->next_interp)
769 return;
771 arg_cand = lookup_cand (arg_cand->next_interp);
774 if (!integer_onep (arg_cand->stride))
775 return;
777 derived_base_name = arg_cand->base_expr;
778 arg_stmt = arg_cand->cand_stmt;
779 arg_bb = gimple_bb (arg_stmt);
781 /* Gather potential dead code savings if the phi statement
782 can be removed later on. */
783 if (has_single_use (arg))
785 if (gimple_code (arg_stmt) == GIMPLE_PHI)
786 savings += arg_cand->dead_savings;
787 else
788 savings += stmt_cost (arg_stmt, speed);
791 else if (SSA_NAME_IS_DEFAULT_DEF (arg))
793 derived_base_name = arg;
794 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
797 if (!arg_bb || arg_bb->loop_father != cand_loop)
798 return;
800 if (i == 0)
801 arg0_base = derived_base_name;
802 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
803 return;
806 /* Create the candidate. "alloc_cand_and_find_basis" is named
807 misleadingly for this case, as no basis will be sought for a
808 CAND_PHI. */
809 base_type = TREE_TYPE (arg0_base);
811 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
812 0, integer_one_node, base_type, savings);
814 /* Add the candidate to the statement-candidate mapping. */
815 add_cand_for_stmt (phi, c);
818 /* Given PBASE which is a pointer to tree, look up the defining
819 statement for it and check whether the candidate is in the
820 form of:
822 X = B + (1 * S), S is integer constant
823 X = B + (i * S), S is integer one
825 If so, set PBASE to the candidate's base_expr and return the
826 widest_int value (i * S).
827 Otherwise, just return a widest_int of zero. */
829 static widest_int
830 backtrace_base_for_ref (tree *pbase)
832 tree base_in = *pbase;
833 slsr_cand_t base_cand;
835 STRIP_NOPS (base_in);
837 /* Strip off widening conversion(s) to handle cases where
838 e.g. 'B' is widened from an 'int' in order to calculate
839 a 64-bit address. */
840 if (CONVERT_EXPR_P (base_in)
841 && legal_cast_p_1 (base_in, TREE_OPERAND (base_in, 0)))
842 base_in = get_unwidened (base_in, NULL_TREE);
844 if (TREE_CODE (base_in) != SSA_NAME)
845 return 0;
847 base_cand = base_cand_from_table (base_in);
849 while (base_cand && base_cand->kind != CAND_PHI)
851 if (base_cand->kind == CAND_ADD
852 && base_cand->index == 1
853 && TREE_CODE (base_cand->stride) == INTEGER_CST)
855 /* X = B + (1 * S), S is integer constant. */
856 *pbase = base_cand->base_expr;
857 return wi::to_widest (base_cand->stride);
859 else if (base_cand->kind == CAND_ADD
860 && TREE_CODE (base_cand->stride) == INTEGER_CST
861 && integer_onep (base_cand->stride))
863 /* X = B + (i * S), S is integer one. */
864 *pbase = base_cand->base_expr;
865 return base_cand->index;
868 if (base_cand->next_interp)
869 base_cand = lookup_cand (base_cand->next_interp);
870 else
871 base_cand = NULL;
874 return 0;
877 /* Look for the following pattern:
879 *PBASE: MEM_REF (T1, C1)
881 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
883 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
885 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
887 *PINDEX: C4 * BITS_PER_UNIT
889 If not present, leave the input values unchanged and return FALSE.
890 Otherwise, modify the input values as follows and return TRUE:
892 *PBASE: T1
893 *POFFSET: MULT_EXPR (T2, C3)
894 *PINDEX: C1 + (C2 * C3) + C4
896 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
897 will be further restructured to:
899 *PBASE: T1
900 *POFFSET: MULT_EXPR (T2', C3)
901 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
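/* A hedged worked instance of the restructuring above (constant names
   follow the comment; the numbers are illustrative only).  Suppose
   get_inner_reference yields

     *PBASE:   MEM_REF (p, 8)        T1 = p,  C1 = 8
     *POFFSET: (i + 3) * 4           T2 = i,  C2 = 3,  C3 = 4
     *PINDEX:  0                     C4 = 0

   Then on success the values become

     *PBASE:   p
     *POFFSET: i * 4
     *PINDEX:  8 + 3 * 4 + 0 = 20

   assuming T2 is not itself defined by a CAND_ADD (i.e. C5 = 0).  */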
903 static bool
904 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
905 tree *ptype)
907 tree base = *pbase, offset = *poffset;
908 widest_int index = *pindex;
909 tree mult_op0, t1, t2, type;
910 widest_int c1, c2, c3, c4, c5;
912 if (!base
913 || !offset
914 || TREE_CODE (base) != MEM_REF
915 || TREE_CODE (offset) != MULT_EXPR
916 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
917 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
918 return false;
920 t1 = TREE_OPERAND (base, 0);
921 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
922 type = TREE_TYPE (TREE_OPERAND (base, 1));
924 mult_op0 = TREE_OPERAND (offset, 0);
925 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
927 if (TREE_CODE (mult_op0) == PLUS_EXPR)
929 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
931 t2 = TREE_OPERAND (mult_op0, 0);
932 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
934 else
935 return false;
937 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
939 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
941 t2 = TREE_OPERAND (mult_op0, 0);
942 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
944 else
945 return false;
947 else
949 t2 = mult_op0;
950 c2 = 0;
953 c4 = index >> LOG2_BITS_PER_UNIT;
954 c5 = backtrace_base_for_ref (&t2);
956 *pbase = t1;
957 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
958 wide_int_to_tree (sizetype, c3));
959 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
960 *ptype = type;
962 return true;
965 /* Given GS which contains a data reference, create a CAND_REF entry in
966 the candidate table and attempt to find a basis. */
968 static void
969 slsr_process_ref (gimple *gs)
971 tree ref_expr, base, offset, type;
972 HOST_WIDE_INT bitsize, bitpos;
973 machine_mode mode;
974 int unsignedp, reversep, volatilep;
975 slsr_cand_t c;
977 if (gimple_vdef (gs))
978 ref_expr = gimple_assign_lhs (gs);
979 else
980 ref_expr = gimple_assign_rhs1 (gs);
982 if (!handled_component_p (ref_expr)
983 || TREE_CODE (ref_expr) == BIT_FIELD_REF
984 || (TREE_CODE (ref_expr) == COMPONENT_REF
985 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
986 return;
988 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
989 &unsignedp, &reversep, &volatilep);
990 if (reversep)
991 return;
992 widest_int index = bitpos;
994 if (!restructure_reference (&base, &offset, &index, &type))
995 return;
997 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
998 type, 0);
1000 /* Add the candidate to the statement-candidate mapping. */
1001 add_cand_for_stmt (gs, c);
1004 /* Create a candidate entry for a statement GS, where GS multiplies
1005 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1006 about the two SSA names into the new candidate. Return the new
1007 candidate. */
1009 static slsr_cand_t
1010 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1012 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1013 widest_int index;
1014 unsigned savings = 0;
1015 slsr_cand_t c;
1016 slsr_cand_t base_cand = base_cand_from_table (base_in);
1018 /* Look at all interpretations of the base candidate, if necessary,
1019 to find information to propagate into this candidate. */
1020 while (base_cand && !base && base_cand->kind != CAND_PHI)
1023 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1025 /* Y = (B + i') * 1
1026 X = Y * Z
1027 ================
1028 X = (B + i') * Z */
1029 base = base_cand->base_expr;
1030 index = base_cand->index;
1031 stride = stride_in;
1032 ctype = base_cand->cand_type;
1033 if (has_single_use (base_in))
1034 savings = (base_cand->dead_savings
1035 + stmt_cost (base_cand->cand_stmt, speed));
1037 else if (base_cand->kind == CAND_ADD
1038 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1040 /* Y = B + (i' * S), S constant
1041 X = Y * Z
1042 ============================
1043 X = B + ((i' * S) * Z) */
1044 base = base_cand->base_expr;
1045 index = base_cand->index * wi::to_widest (base_cand->stride);
1046 stride = stride_in;
1047 ctype = base_cand->cand_type;
1048 if (has_single_use (base_in))
1049 savings = (base_cand->dead_savings
1050 + stmt_cost (base_cand->cand_stmt, speed));
1053 if (base_cand->next_interp)
1054 base_cand = lookup_cand (base_cand->next_interp);
1055 else
1056 base_cand = NULL;
1059 if (!base)
1061 /* No interpretations had anything useful to propagate, so
1062 produce X = (Y + 0) * Z. */
1063 base = base_in;
1064 index = 0;
1065 stride = stride_in;
1066 ctype = TREE_TYPE (base_in);
1069 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1070 ctype, savings);
1071 return c;
1074 /* Create a candidate entry for a statement GS, where GS multiplies
1075 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1076 information about BASE_IN into the new candidate. Return the new
1077 candidate. */
1079 static slsr_cand_t
1080 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1082 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1083 widest_int index, temp;
1084 unsigned savings = 0;
1085 slsr_cand_t c;
1086 slsr_cand_t base_cand = base_cand_from_table (base_in);
1088 /* Look at all interpretations of the base candidate, if necessary,
1089 to find information to propagate into this candidate. */
1090 while (base_cand && !base && base_cand->kind != CAND_PHI)
1092 if (base_cand->kind == CAND_MULT
1093 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1095 /* Y = (B + i') * S, S constant
1096 X = Y * c
1097 ============================
1098 X = (B + i') * (S * c) */
1099 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1100 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1102 base = base_cand->base_expr;
1103 index = base_cand->index;
1104 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1105 ctype = base_cand->cand_type;
1106 if (has_single_use (base_in))
1107 savings = (base_cand->dead_savings
1108 + stmt_cost (base_cand->cand_stmt, speed));
1111 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1113 /* Y = B + (i' * 1)
1114 X = Y * c
1115 ===========================
1116 X = (B + i') * c */
1117 base = base_cand->base_expr;
1118 index = base_cand->index;
1119 stride = stride_in;
1120 ctype = base_cand->cand_type;
1121 if (has_single_use (base_in))
1122 savings = (base_cand->dead_savings
1123 + stmt_cost (base_cand->cand_stmt, speed));
1125 else if (base_cand->kind == CAND_ADD
1126 && base_cand->index == 1
1127 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1129 /* Y = B + (1 * S), S constant
1130 X = Y * c
1131 ===========================
1132 X = (B + S) * c */
1133 base = base_cand->base_expr;
1134 index = wi::to_widest (base_cand->stride);
1135 stride = stride_in;
1136 ctype = base_cand->cand_type;
1137 if (has_single_use (base_in))
1138 savings = (base_cand->dead_savings
1139 + stmt_cost (base_cand->cand_stmt, speed));
1142 if (base_cand->next_interp)
1143 base_cand = lookup_cand (base_cand->next_interp);
1144 else
1145 base_cand = NULL;
1148 if (!base)
1150 /* No interpretations had anything useful to propagate, so
1151 produce X = (Y + 0) * c. */
1152 base = base_in;
1153 index = 0;
1154 stride = stride_in;
1155 ctype = TREE_TYPE (base_in);
1158 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1159 ctype, savings);
1160 return c;
1163 /* Given GS which is a multiply of scalar integers, make an appropriate
1164 entry in the candidate table. If this is a multiply of two SSA names,
1165 create two CAND_MULT interpretations and attempt to find a basis for
1166 each of them. Otherwise, create a single CAND_MULT and attempt to
1167 find a basis. */
1169 static void
1170 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1172 slsr_cand_t c, c2;
1174 /* If this is a multiply of an SSA name with itself, it is highly
1175 unlikely that we will get a strength reduction opportunity, so
1176 don't record it as a candidate. This simplifies the logic for
1177 finding a basis, so if this is removed that must be considered. */
1178 if (rhs1 == rhs2)
1179 return;
1181 if (TREE_CODE (rhs2) == SSA_NAME)
1183 /* Record an interpretation of this statement in the candidate table
1184 assuming RHS1 is the base expression and RHS2 is the stride. */
1185 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1187 /* Add the first interpretation to the statement-candidate mapping. */
1188 add_cand_for_stmt (gs, c);
1190 /* Record another interpretation of this statement assuming RHS1
1191 is the stride and RHS2 is the base expression. */
1192 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1193 c->next_interp = c2->cand_num;
1195 else
1197 /* Record an interpretation for the multiply-immediate. */
1198 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1200 /* Add the interpretation to the statement-candidate mapping. */
1201 add_cand_for_stmt (gs, c);
1205 /* Create a candidate entry for a statement GS, where GS adds two
1206 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1207 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1208 information about the two SSA names into the new candidate.
1209 Return the new candidate. */
1211 static slsr_cand_t
1212 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1213 bool subtract_p, bool speed)
1215 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL;
1216 widest_int index;
1217 unsigned savings = 0;
1218 slsr_cand_t c;
1219 slsr_cand_t base_cand = base_cand_from_table (base_in);
1220 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1222 /* The most useful transformation is a multiply-immediate feeding
1223 an add or subtract. Look for that first. */
1224 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1226 if (addend_cand->kind == CAND_MULT
1227 && addend_cand->index == 0
1228 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1230 /* Z = (B + 0) * S, S constant
1231 X = Y +/- Z
1232 ===========================
1233 X = Y + ((+/-1 * S) * B) */
1234 base = base_in;
1235 index = wi::to_widest (addend_cand->stride);
1236 if (subtract_p)
1237 index = -index;
1238 stride = addend_cand->base_expr;
1239 ctype = TREE_TYPE (base_in);
1240 if (has_single_use (addend_in))
1241 savings = (addend_cand->dead_savings
1242 + stmt_cost (addend_cand->cand_stmt, speed));
1245 if (addend_cand->next_interp)
1246 addend_cand = lookup_cand (addend_cand->next_interp);
1247 else
1248 addend_cand = NULL;
1251 while (base_cand && !base && base_cand->kind != CAND_PHI)
1253 if (base_cand->kind == CAND_ADD
1254 && (base_cand->index == 0
1255 || operand_equal_p (base_cand->stride,
1256 integer_zero_node, 0)))
1258 /* Y = B + (i' * S), i' * S = 0
1259 X = Y +/- Z
1260 ============================
1261 X = B + (+/-1 * Z) */
1262 base = base_cand->base_expr;
1263 index = subtract_p ? -1 : 1;
1264 stride = addend_in;
1265 ctype = base_cand->cand_type;
1266 if (has_single_use (base_in))
1267 savings = (base_cand->dead_savings
1268 + stmt_cost (base_cand->cand_stmt, speed));
1270 else if (subtract_p)
1272 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1274 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1276 if (subtrahend_cand->kind == CAND_MULT
1277 && subtrahend_cand->index == 0
1278 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1280 /* Z = (B + 0) * S, S constant
1281 X = Y - Z
1282 ===========================
1283 Value: X = Y + ((-1 * S) * B) */
1284 base = base_in;
1285 index = wi::to_widest (subtrahend_cand->stride);
1286 index = -index;
1287 stride = subtrahend_cand->base_expr;
1288 ctype = TREE_TYPE (base_in);
1289 if (has_single_use (addend_in))
1290 savings = (subtrahend_cand->dead_savings
1291 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1294 if (subtrahend_cand->next_interp)
1295 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1296 else
1297 subtrahend_cand = NULL;
1301 if (base_cand->next_interp)
1302 base_cand = lookup_cand (base_cand->next_interp);
1303 else
1304 base_cand = NULL;
1307 if (!base)
1309 /* No interpretations had anything useful to propagate, so
1310 produce X = Y + (1 * Z). */
1311 base = base_in;
1312 index = subtract_p ? -1 : 1;
1313 stride = addend_in;
1314 ctype = TREE_TYPE (base_in);
1317 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1318 ctype, savings);
1319 return c;
1322 /* Create a candidate entry for a statement GS, where GS adds SSA
1323 name BASE_IN to constant INDEX_IN. Propagate any known information
1324 about BASE_IN into the new candidate. Return the new candidate. */
1326 static slsr_cand_t
1327 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1328 bool speed)
1330 enum cand_kind kind = CAND_ADD;
1331 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1332 widest_int index, multiple;
1333 unsigned savings = 0;
1334 slsr_cand_t c;
1335 slsr_cand_t base_cand = base_cand_from_table (base_in);
1337 while (base_cand && !base && base_cand->kind != CAND_PHI)
1339 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1341 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1342 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1343 sign, &multiple))
1345 /* Y = (B + i') * S, S constant, c = kS for some integer k
1346 X = Y + c
1347 ============================
1348 X = (B + (i'+ k)) * S
1350 Y = B + (i' * S), S constant, c = kS for some integer k
1351 X = Y + c
1352 ============================
1353 X = (B + (i'+ k)) * S */
1354 kind = base_cand->kind;
1355 base = base_cand->base_expr;
1356 index = base_cand->index + multiple;
1357 stride = base_cand->stride;
1358 ctype = base_cand->cand_type;
1359 if (has_single_use (base_in))
1360 savings = (base_cand->dead_savings
1361 + stmt_cost (base_cand->cand_stmt, speed));
1364 if (base_cand->next_interp)
1365 base_cand = lookup_cand (base_cand->next_interp);
1366 else
1367 base_cand = NULL;
1370 if (!base)
1372 /* No interpretations had anything useful to propagate, so
1373 produce X = Y + (c * 1). */
1374 kind = CAND_ADD;
1375 base = base_in;
1376 index = index_in;
1377 stride = integer_one_node;
1378 ctype = TREE_TYPE (base_in);
1381 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1382 ctype, savings);
1383 return c;
1386 /* Given GS which is an add or subtract of scalar integers or pointers,
1387 make at least one appropriate entry in the candidate table. */
1389 static void
1390 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1392 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1393 slsr_cand_t c = NULL, c2;
1395 if (TREE_CODE (rhs2) == SSA_NAME)
1397 /* First record an interpretation assuming RHS1 is the base expression
1398 and RHS2 is the stride. But it doesn't make sense for the
1399 stride to be a pointer, so don't record a candidate in that case. */
1400 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1402 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1404 /* Add the first interpretation to the statement-candidate
1405 mapping. */
1406 add_cand_for_stmt (gs, c);
1409 /* If the two RHS operands are identical, or this is a subtract,
1410 we're done. */
1411 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1412 return;
1414 /* Otherwise, record another interpretation assuming RHS2 is the
1415 base expression and RHS1 is the stride, again provided that the
1416 stride is not a pointer. */
1417 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1419 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1420 if (c)
1421 c->next_interp = c2->cand_num;
1422 else
1423 add_cand_for_stmt (gs, c2);
1426 else
1428 /* Record an interpretation for the add-immediate. */
1429 widest_int index = wi::to_widest (rhs2);
1430 if (subtract_p)
1431 index = -index;
1433 c = create_add_imm_cand (gs, rhs1, index, speed);
1435 /* Add the interpretation to the statement-candidate mapping. */
1436 add_cand_for_stmt (gs, c);
1440 /* Given GS which is a negate of a scalar integer, make an appropriate
1441 entry in the candidate table. A negate is equivalent to a multiply
1442 by -1. */
1444 static void
1445 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1447 /* Record a CAND_MULT interpretation for the multiply by -1. */
1448 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1450 /* Add the interpretation to the statement-candidate mapping. */
1451 add_cand_for_stmt (gs, c);
1454 /* Helper function for legal_cast_p, operating on two trees. Checks
1455 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1456 for more details. */
1458 static bool
1459 legal_cast_p_1 (tree lhs, tree rhs)
1461 tree lhs_type, rhs_type;
1462 unsigned lhs_size, rhs_size;
1463 bool lhs_wraps, rhs_wraps;
1465 lhs_type = TREE_TYPE (lhs);
1466 rhs_type = TREE_TYPE (rhs);
1467 lhs_size = TYPE_PRECISION (lhs_type);
1468 rhs_size = TYPE_PRECISION (rhs_type);
1469 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1470 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1472 if (lhs_size < rhs_size
1473 || (rhs_wraps && !lhs_wraps)
1474 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1475 return false;
1477 return true;
1480 /* Return TRUE if GS is a statement that defines an SSA name from
1481 a conversion and is legal for us to combine with an add and multiply
1482 in the candidate table. For example, suppose we have:
1484 A = B + i;
1485 C = (type) A;
1486 D = C * S;
1488 Without the type-cast, we would create a CAND_MULT for D with base B,
1489 index i, and stride S. We want to record this candidate only if it
1490 is equivalent to applying the type cast following the multiply:
1492 A = B + i;
1493 E = A * S;
1494 D = (type) E;
1496 We will record the type with the candidate for D. This allows us
1497 to use a similar previous candidate as a basis. If we have earlier seen
1499 A' = B + i';
1500 C' = (type) A';
1501 D' = C' * S;
1503 we can replace D with
1505 D = D' + (i - i') * S;
1507 But if moving the type-cast would change semantics, we mustn't do this.
1509 This is legitimate for casts from a non-wrapping integral type to
1510 any integral type of the same or larger size. It is not legitimate
1511 to convert a wrapping type to a non-wrapping type, or to a wrapping
1512 type of a different size. I.e., with a wrapping type, we must
1513 assume that the addition B + i could wrap, in which case performing
1514 the multiply before or after one of the "illegal" type casts will
1515 have different semantics. */
1517 static bool
1518 legal_cast_p (gimple *gs, tree rhs)
1520 if (!is_gimple_assign (gs)
1521 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1522 return false;
1524 return legal_cast_p_1 (gimple_assign_lhs (gs), rhs);
1527 /* Given GS which is a cast to a scalar integer type, determine whether
1528 the cast is legal for strength reduction. If so, make at least one
1529 appropriate entry in the candidate table. */
1531 static void
1532 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1534 tree lhs, ctype;
1535 slsr_cand_t base_cand, c = NULL, c2;
1536 unsigned savings = 0;
1538 if (!legal_cast_p (gs, rhs1))
1539 return;
1541 lhs = gimple_assign_lhs (gs);
1542 base_cand = base_cand_from_table (rhs1);
1543 ctype = TREE_TYPE (lhs);
1545 if (base_cand && base_cand->kind != CAND_PHI)
1547 while (base_cand)
1549 /* Propagate all data from the base candidate except the type,
1550 which comes from the cast, and the base candidate's cast,
1551 which is no longer applicable. */
1552 if (has_single_use (rhs1))
1553 savings = (base_cand->dead_savings
1554 + stmt_cost (base_cand->cand_stmt, speed));
1556 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1557 base_cand->base_expr,
1558 base_cand->index, base_cand->stride,
1559 ctype, savings);
1560 if (base_cand->next_interp)
1561 base_cand = lookup_cand (base_cand->next_interp);
1562 else
1563 base_cand = NULL;
1566 else
1568 /* If nothing is known about the RHS, create fresh CAND_ADD and
1569 CAND_MULT interpretations:
1571 X = Y + (0 * 1)
1572 X = (Y + 0) * 1
1574 The first of these is somewhat arbitrary, but the choice of
1575 1 for the stride simplifies the logic for propagating casts
1576 into their uses. */
1577 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1578 0, integer_one_node, ctype, 0);
1579 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1580 0, integer_one_node, ctype, 0);
1581 c->next_interp = c2->cand_num;
1584 /* Add the first (or only) interpretation to the statement-candidate
1585 mapping. */
1586 add_cand_for_stmt (gs, c);
1589 /* Given GS which is a copy of a scalar integer type, make at least one
1590 appropriate entry in the candidate table.
1592 This interface is included for completeness, but is unnecessary
1593 if this pass immediately follows a pass that performs copy
1594 propagation, such as DOM. */
1596 static void
1597 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1599 slsr_cand_t base_cand, c = NULL, c2;
1600 unsigned savings = 0;
1602 base_cand = base_cand_from_table (rhs1);
1604 if (base_cand && base_cand->kind != CAND_PHI)
1606 while (base_cand)
1608 /* Propagate all data from the base candidate. */
1609 if (has_single_use (rhs1))
1610 savings = (base_cand->dead_savings
1611 + stmt_cost (base_cand->cand_stmt, speed));
1613 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1614 base_cand->base_expr,
1615 base_cand->index, base_cand->stride,
1616 base_cand->cand_type, savings);
1617 if (base_cand->next_interp)
1618 base_cand = lookup_cand (base_cand->next_interp);
1619 else
1620 base_cand = NULL;
1623 else
1625 /* If nothing is known about the RHS, create fresh CAND_ADD and
1626 CAND_MULT interpretations:
1628 X = Y + (0 * 1)
1629 X = (Y + 0) * 1
1631 The first of these is somewhat arbitrary, but the choice of
1632 1 for the stride simplifies the logic for propagating casts
1633 into their uses. */
1634 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1,
1635 0, integer_one_node, TREE_TYPE (rhs1), 0);
1636 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1,
1637 0, integer_one_node, TREE_TYPE (rhs1), 0);
1638 c->next_interp = c2->cand_num;
1641 /* Add the first (or only) interpretation to the statement-candidate
1642 mapping. */
1643 add_cand_for_stmt (gs, c);
1646 class find_candidates_dom_walker : public dom_walker
1648 public:
1649 find_candidates_dom_walker (cdi_direction direction)
1650 : dom_walker (direction) {}
1651 virtual edge before_dom_children (basic_block);
1654 /* Find strength-reduction candidates in block BB. */
1656 edge
1657 find_candidates_dom_walker::before_dom_children (basic_block bb)
1659 bool speed = optimize_bb_for_speed_p (bb);
1661 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1662 gsi_next (&gsi))
1663 slsr_process_phi (gsi.phi (), speed);
1665 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1666 gsi_next (&gsi))
1668 gimple *gs = gsi_stmt (gsi);
1670 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1671 slsr_process_ref (gs);
1673 else if (is_gimple_assign (gs)
1674 && SCALAR_INT_MODE_P
1675 (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))))
1677 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1679 switch (gimple_assign_rhs_code (gs))
1681 case MULT_EXPR:
1682 case PLUS_EXPR:
1683 rhs1 = gimple_assign_rhs1 (gs);
1684 rhs2 = gimple_assign_rhs2 (gs);
1685 /* Should never happen, but currently some buggy situations
1686 in earlier phases put constants in rhs1. */
1687 if (TREE_CODE (rhs1) != SSA_NAME)
1688 continue;
1689 break;
1691 /* Possible future opportunity: rhs1 of a ptr+ can be
1692 an ADDR_EXPR. */
1693 case POINTER_PLUS_EXPR:
1694 case MINUS_EXPR:
1695 rhs2 = gimple_assign_rhs2 (gs);
1696 gcc_fallthrough ();
1698 CASE_CONVERT:
1699 case SSA_NAME:
1700 case NEGATE_EXPR:
1701 rhs1 = gimple_assign_rhs1 (gs);
1702 if (TREE_CODE (rhs1) != SSA_NAME)
1703 continue;
1704 break;
1706 default:
1710 switch (gimple_assign_rhs_code (gs))
1712 case MULT_EXPR:
1713 slsr_process_mul (gs, rhs1, rhs2, speed);
1714 break;
1716 case PLUS_EXPR:
1717 case POINTER_PLUS_EXPR:
1718 case MINUS_EXPR:
1719 slsr_process_add (gs, rhs1, rhs2, speed);
1720 break;
1722 case NEGATE_EXPR:
1723 slsr_process_neg (gs, rhs1, speed);
1724 break;
1726 CASE_CONVERT:
1727 slsr_process_cast (gs, rhs1, speed);
1728 break;
1730 case SSA_NAME:
1731 slsr_process_copy (gs, rhs1, speed);
1732 break;
1734 default:
1739 return NULL;
1742 /* Dump a candidate for debug. */
1744 static void
1745 dump_candidate (slsr_cand_t c)
1747 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1748 gimple_bb (c->cand_stmt)->index);
1749 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1750 switch (c->kind)
1752 case CAND_MULT:
1753 fputs (" MULT : (", dump_file);
1754 print_generic_expr (dump_file, c->base_expr, 0);
1755 fputs (" + ", dump_file);
1756 print_decs (c->index, dump_file);
1757 fputs (") * ", dump_file);
1758 print_generic_expr (dump_file, c->stride, 0);
1759 fputs (" : ", dump_file);
1760 break;
1761 case CAND_ADD:
1762 fputs (" ADD : ", dump_file);
1763 print_generic_expr (dump_file, c->base_expr, 0);
1764 fputs (" + (", dump_file);
1765 print_decs (c->index, dump_file);
1766 fputs (" * ", dump_file);
1767 print_generic_expr (dump_file, c->stride, 0);
1768 fputs (") : ", dump_file);
1769 break;
1770 case CAND_REF:
1771 fputs (" REF : ", dump_file);
1772 print_generic_expr (dump_file, c->base_expr, 0);
1773 fputs (" + (", dump_file);
1774 print_generic_expr (dump_file, c->stride, 0);
1775 fputs (") + ", dump_file);
1776 print_decs (c->index, dump_file);
1777 fputs (" : ", dump_file);
1778 break;
1779 case CAND_PHI:
1780 fputs (" PHI : ", dump_file);
1781 print_generic_expr (dump_file, c->base_expr, 0);
1782 fputs (" + (unknown * ", dump_file);
1783 print_generic_expr (dump_file, c->stride, 0);
1784 fputs (") : ", dump_file);
1785 break;
1786 default:
1787 gcc_unreachable ();
1789 print_generic_expr (dump_file, c->cand_type, 0);
1790 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1791 c->basis, c->dependent, c->sibling);
1792 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1793 c->next_interp, c->dead_savings);
1794 if (c->def_phi)
1795 fprintf (dump_file, " phi: %d\n", c->def_phi);
1796 fputs ("\n", dump_file);
1799 /* Dump the candidate vector for debug. */
1801 static void
1802 dump_cand_vec (void)
1804 unsigned i;
1805 slsr_cand_t c;
1807 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1809 FOR_EACH_VEC_ELT (cand_vec, i, c)
1810 dump_candidate (c);
1813 /* Callback used to dump the candidate chains hash table. */
1816 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1818 const_cand_chain_t chain = *slot;
1819 cand_chain_t p;
1821 print_generic_expr (dump_file, chain->base_expr, 0);
1822 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1824 for (p = chain->next; p; p = p->next)
1825 fprintf (dump_file, " -> %d", p->cand->cand_num);
1827 fputs ("\n", dump_file);
1828 return 1;
1831 /* Dump the candidate chains. */
1833 static void
1834 dump_cand_chains (void)
1836 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1837 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1838 (NULL);
1839 fputs ("\n", dump_file);
1842 /* Dump the increment vector for debug. */
1844 static void
1845 dump_incr_vec (void)
1847 if (dump_file && (dump_flags & TDF_DETAILS))
1849 unsigned i;
1851 fprintf (dump_file, "\nIncrement vector:\n\n");
1853 for (i = 0; i < incr_vec_len; i++)
1855 fprintf (dump_file, "%3d increment: ", i);
1856 print_decs (incr_vec[i].incr, dump_file);
1857 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1858 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1859 fputs ("\n initializer: ", dump_file);
1860 print_generic_expr (dump_file, incr_vec[i].initializer, 0);
1861 fputs ("\n\n", dump_file);
1866 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1867 data reference. */
1869 static void
1870 replace_ref (tree *expr, slsr_cand_t c)
1872 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1873 unsigned HOST_WIDE_INT misalign;
1874 unsigned align;
1876 /* Ensure the memory reference carries the minimum alignment
1877 requirement for the data type. See PR58041. */
1878 get_object_alignment_1 (*expr, &align, &misalign);
1879 if (misalign != 0)
1880 align = least_bit_hwi (misalign);
1881 if (align < TYPE_ALIGN (acc_type))
1882 acc_type = build_aligned_type (acc_type, align);
1884 add_expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (c->base_expr),
1885 c->base_expr, c->stride);
1886 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1887 wide_int_to_tree (c->cand_type, c->index));
1889 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1890 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1891 TREE_OPERAND (mem_ref, 0)
1892 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1893 /*simple_p=*/true, NULL,
1894 /*before=*/true, GSI_SAME_STMT);
1895 copy_ref_info (mem_ref, *expr);
1896 *expr = mem_ref;
1897 update_stmt (c->cand_stmt);
1900 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1901 dependent of candidate C with an equivalent strength-reduced data
1902 reference. */
1904 static void
1905 replace_refs (slsr_cand_t c)
1907 if (dump_file && (dump_flags & TDF_DETAILS))
1909 fputs ("Replacing reference: ", dump_file);
1910 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1913 if (gimple_vdef (c->cand_stmt))
1915 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
1916 replace_ref (lhs, c);
1918 else
1920 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
1921 replace_ref (rhs, c);
1924 if (dump_file && (dump_flags & TDF_DETAILS))
1926 fputs ("With: ", dump_file);
1927 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
1928 fputs ("\n", dump_file);
1931 if (c->sibling)
1932 replace_refs (lookup_cand (c->sibling));
1934 if (c->dependent)
1935 replace_refs (lookup_cand (c->dependent));
1938 /* Return TRUE if candidate C is dependent upon a PHI. */
1940 static bool
1941 phi_dependent_cand_p (slsr_cand_t c)
1943 /* A candidate is not necessarily dependent upon a PHI just because
1944 it has a phi definition for its base name. It may have a basis
1945 that relies upon the same phi definition, in which case the PHI
1946 is irrelevant to this candidate. */
1947 return (c->def_phi
1948 && c->basis
1949 && lookup_cand (c->basis)->def_phi != c->def_phi);
1952 /* Calculate the increment required for candidate C relative to
1953 its basis. */
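/* For illustration (values chosen arbitrarily): with a basis
   Y = (B + 2) * S and a candidate X = (B + 7) * S, the increment is
   7 - 2 = 5, reflecting that X can be rewritten as Y + 5 * S. */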
1955 static widest_int
1956 cand_increment (slsr_cand_t c)
1958 slsr_cand_t basis;
1960 /* If the candidate doesn't have a basis, just return its own
1961 index. This is useful in record_increments to help us find
1962 an existing initializer. Also, if the candidate's basis is
1963 hidden by a phi, then its own index will be the increment
1964 from the newly introduced phi basis. */
1965 if (!c->basis || phi_dependent_cand_p (c))
1966 return c->index;
1968 basis = lookup_cand (c->basis);
1969 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
1970 return c->index - basis->index;
1973 /* Calculate the increment required for candidate C relative to
1974 its basis. If we aren't going to generate pointer arithmetic
1975 for this candidate, return the absolute value of that increment
1976 instead. */
1978 static inline widest_int
1979 cand_abs_increment (slsr_cand_t c)
1981 widest_int increment = cand_increment (c);
1983 if (!address_arithmetic_p && wi::neg_p (increment))
1984 increment = -increment;
1986 return increment;
1989 /* Return TRUE iff candidate C has already been replaced under
1990 another interpretation. */
1992 static inline bool
1993 cand_already_replaced (slsr_cand_t c)
1995 return (gimple_bb (c->cand_stmt) == 0);
1998 /* Common logic used by replace_unconditional_candidate and
1999 replace_conditional_candidate. */
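/* In outline: the candidate's statement is rewritten as BASIS_NAME
   plus or minus the constant BUMP, with a cast of BASIS_NAME
   inserted first if the types disagree; a bump of zero degenerates
   into a copy from BASIS_NAME. Bumps that don't fit in a signed
   HWI, and statements that are already casts, copies, or simple
   adds, are left alone. */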
2001 static void
2002 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2004 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2005 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2007 /* It is highly unlikely, but possible, that the resulting
2008 bump doesn't fit in a HWI. Abandon the replacement
2009 in this case. This does not affect siblings or dependents
2010 of C. Restriction to signed HWI is conservative for unsigned
2011 types but allows for safe negation without twisted logic. */
2012 if (wi::fits_shwi_p (bump)
2013 && bump.to_shwi () != HOST_WIDE_INT_MIN
2014 /* It is not useful to replace casts, copies, or adds of
2015 an SSA name and a constant. */
2016 && cand_code != SSA_NAME
2017 && !CONVERT_EXPR_CODE_P (cand_code)
2018 && cand_code != PLUS_EXPR
2019 && cand_code != POINTER_PLUS_EXPR
2020 && cand_code != MINUS_EXPR)
2022 enum tree_code code = PLUS_EXPR;
2023 tree bump_tree;
2024 gimple *stmt_to_print = NULL;
2026 /* If the basis name and the candidate's LHS have incompatible
2027 types, introduce a cast. */
2028 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2029 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2030 if (wi::neg_p (bump))
2032 code = MINUS_EXPR;
2033 bump = -bump;
2036 bump_tree = wide_int_to_tree (target_type, bump);
2038 if (dump_file && (dump_flags & TDF_DETAILS))
2040 fputs ("Replacing: ", dump_file);
2041 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
2044 if (bump == 0)
2046 tree lhs = gimple_assign_lhs (c->cand_stmt);
2047 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2048 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2049 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2050 gsi_replace (&gsi, copy_stmt, false);
2051 c->cand_stmt = copy_stmt;
2052 if (dump_file && (dump_flags & TDF_DETAILS))
2053 stmt_to_print = copy_stmt;
2055 else
2057 tree rhs1, rhs2;
2058 if (cand_code != NEGATE_EXPR) {
2059 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2060 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2062 if (cand_code != NEGATE_EXPR
2063 && ((operand_equal_p (rhs1, basis_name, 0)
2064 && operand_equal_p (rhs2, bump_tree, 0))
2065 || (operand_equal_p (rhs1, bump_tree, 0)
2066 && operand_equal_p (rhs2, basis_name, 0))))
2068 if (dump_file && (dump_flags & TDF_DETAILS))
2070 fputs ("(duplicate, not actually replacing)", dump_file);
2071 stmt_to_print = c->cand_stmt;
2074 else
2076 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2077 gimple_assign_set_rhs_with_ops (&gsi, code,
2078 basis_name, bump_tree);
2079 update_stmt (gsi_stmt (gsi));
2080 c->cand_stmt = gsi_stmt (gsi);
2081 if (dump_file && (dump_flags & TDF_DETAILS))
2082 stmt_to_print = gsi_stmt (gsi);
2086 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fputs ("With: ", dump_file);
2089 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
2090 fputs ("\n", dump_file);
2095 /* Replace candidate C with an add or subtract. Note that we only
2096 operate on CAND_MULTs with known strides, so we will never generate
2097 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2098 X = Y + ((i - i') * S), as described in the module commentary. The
2099 folded value ((i - i') * S) is referred to here as the "bump." */
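/* Worked example with made-up numbers: given a basis Y = (B + 2) * 4
   and candidate X = (B + 5) * 4, the bump is (5 - 2) * 4 = 12 and X
   is replaced by X = Y + 12. */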
2101 static void
2102 replace_unconditional_candidate (slsr_cand_t c)
2104 slsr_cand_t basis;
2106 if (cand_already_replaced (c))
2107 return;
2109 basis = lookup_cand (c->basis);
2110 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2112 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
2115 /* Return the index in the increment vector of the given INCREMENT,
2116 or -1 if not found. The latter can occur if more than
2117 MAX_INCR_VEC_LEN increments have been found. */
2119 static inline int
2120 incr_vec_index (const widest_int &increment)
2122 unsigned i;
2124 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2127 if (i < incr_vec_len)
2128 return i;
2129 else
2130 return -1;
2133 /* Create a new statement along edge E to add BASIS_NAME to the product
2134 of INCREMENT and the stride of candidate C. Create and return a new
2135 SSA name to be used as the LHS of the new statement.
2136 KNOWN_STRIDE is true iff C's stride is a constant. */
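/* For example, with a known stride S and an increment of 3 the new
   statement is "lhs = basis_name + (3 * S)" with the product folded
   to a constant; with an unknown stride, the addend is the recorded
   initializer T_0 when one exists, or the stride itself for
   increments of 1 and -1. */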
2138 static tree
2139 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2140 widest_int increment, edge e, location_t loc,
2141 bool known_stride)
2143 basic_block insert_bb;
2144 gimple_stmt_iterator gsi;
2145 tree lhs, basis_type;
2146 gassign *new_stmt;
2148 /* If the add candidate along this incoming edge has the same
2149 index as C's hidden basis, the hidden basis represents this
2150 edge correctly. */
2151 if (increment == 0)
2152 return basis_name;
2154 basis_type = TREE_TYPE (basis_name);
2155 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2157 /* Occasionally people convert integers to pointers without a
2158 cast, leading us into trouble if we aren't careful. */
2159 enum tree_code plus_code
2160 = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;
2162 if (known_stride)
2164 tree bump_tree;
2165 enum tree_code code = plus_code;
2166 widest_int bump = increment * wi::to_widest (c->stride);
2167 if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
2169 code = MINUS_EXPR;
2170 bump = -bump;
2173 tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
2174 bump_tree = wide_int_to_tree (stride_type, bump);
2175 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2177 else
2179 int i;
2180 bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
2181 i = incr_vec_index (negate_incr ? -increment : increment);
2182 gcc_assert (i >= 0);
2184 if (incr_vec[i].initializer)
2186 enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
2187 new_stmt = gimple_build_assign (lhs, code, basis_name,
2188 incr_vec[i].initializer);
2190 else if (increment == 1)
2191 new_stmt = gimple_build_assign (lhs, plus_code, basis_name, c->stride);
2192 else if (increment == -1)
2193 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name,
2194 c->stride);
2195 else
2196 gcc_unreachable ();
2199 insert_bb = single_succ_p (e->src) ? e->src : split_edge (e);
2200 gsi = gsi_last_bb (insert_bb);
2202 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
2203 gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
2204 else
2205 gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
2207 gimple_set_location (new_stmt, loc);
2209 if (dump_file && (dump_flags & TDF_DETAILS))
2211 fprintf (dump_file, "Inserting in block %d: ", insert_bb->index);
2212 print_gimple_stmt (dump_file, new_stmt, 0, 0);
2215 return lhs;
2218 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2219 is hidden by the phi node FROM_PHI, create a new phi node in the same
2220 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2221 with its phi arguments representing conditional adjustments to the
2222 hidden basis along conditional incoming paths. Those adjustments are
2223 made by creating add statements (and sometimes recursively creating
2224 phis) along those incoming paths. LOC is the location to attach to
2225 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2226 constant. */
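/* Illustration with arbitrary indexes: if the hidden basis has index
   1 and the phi merges add candidates with indexes 3 and 5, adds of
   2 * S and 4 * S are created along the respective incoming edges,
   and a new phi merging their results becomes the basis for C. */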
2228 static tree
2229 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2230 location_t loc, bool known_stride)
2232 int i;
2233 tree name, phi_arg;
2234 gphi *phi;
2235 slsr_cand_t basis = lookup_cand (c->basis);
2236 int nargs = gimple_phi_num_args (from_phi);
2237 basic_block phi_bb = gimple_bb (from_phi);
2238 slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
2239 auto_vec<tree> phi_args (nargs);
2241 /* Process each argument of the existing phi that represents
2242 conditionally-executed add candidates. */
2243 for (i = 0; i < nargs; i++)
2245 edge e = (*phi_bb->preds)[i];
2246 tree arg = gimple_phi_arg_def (from_phi, i);
2247 tree feeding_def;
2249 /* If the phi argument is the base name of the CAND_PHI, then
2250 this incoming arc should use the hidden basis. */
2251 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2252 if (basis->index == 0)
2253 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2254 else
2256 widest_int incr = -basis->index;
2257 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2258 e, loc, known_stride);
2260 else
2262 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2264 /* If there is another phi along this incoming edge, we must
2265 process it in the same fashion to ensure that all basis
2266 adjustments are made along its incoming edges. */
2267 if (gimple_code (arg_def) == GIMPLE_PHI)
2268 feeding_def = create_phi_basis (c, arg_def, basis_name,
2269 loc, known_stride);
2270 else
2272 slsr_cand_t arg_cand = base_cand_from_table (arg);
2273 widest_int diff = arg_cand->index - basis->index;
2274 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2275 e, loc, known_stride);
2279 /* Because of recursion, we need to save the arguments in a vector
2280 so we can create the PHI statement all at once. Otherwise the
2281 storage for the half-created PHI can be reclaimed. */
2282 phi_args.safe_push (feeding_def);
2285 /* Create the new phi basis. */
2286 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2287 phi = create_phi_node (name, phi_bb);
2288 SSA_NAME_DEF_STMT (name) = phi;
2290 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2292 edge e = (*phi_bb->preds)[i];
2293 add_phi_arg (phi, phi_arg, e, loc);
2296 update_stmt (phi);
2298 if (dump_file && (dump_flags & TDF_DETAILS))
2300 fputs ("Introducing new phi basis: ", dump_file);
2301 print_gimple_stmt (dump_file, phi, 0, 0);
2304 return name;
2307 /* Given a candidate C whose basis is hidden by at least one intervening
2308 phi, introduce a matching number of new phis to represent its basis
2309 adjusted by conditional increments along possible incoming paths. Then
2310 replace C as though it were an unconditional candidate, using the new
2311 basis. */
2313 static void
2314 replace_conditional_candidate (slsr_cand_t c)
2316 tree basis_name, name;
2317 slsr_cand_t basis;
2318 location_t loc;
2320 /* Look up the LHS SSA name from C's basis. This will be the
2321 RHS1 of the adds we will introduce to create new phi arguments. */
2322 basis = lookup_cand (c->basis);
2323 basis_name = gimple_assign_lhs (basis->cand_stmt);
2325 /* Create a new phi statement which will represent C's true basis
2326 after the transformation is complete. */
2327 loc = gimple_location (c->cand_stmt);
2328 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2329 basis_name, loc, KNOWN_STRIDE);
2330 /* Replace C with an add of the new basis phi and a constant. */
2331 widest_int bump = c->index * wi::to_widest (c->stride);
2333 replace_mult_candidate (c, name, bump);
2336 /* Compute the expected costs of inserting basis adjustments for
2337 candidate C with phi-definition PHI. The cost of inserting
2338 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2339 which are themselves phi results, recursively calculate costs
2340 for those phis as well. */
2342 static int
2343 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2345 unsigned i;
2346 int cost = 0;
2347 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2349 /* If we work our way back to a phi that isn't dominated by the hidden
2350 basis, this isn't a candidate for replacement. Indicate this by
2351 returning an unreasonably high cost. It's not easy to detect
2352 these situations when determining the basis, so we defer the
2353 decision until now. */
2354 basic_block phi_bb = gimple_bb (phi);
2355 slsr_cand_t basis = lookup_cand (c->basis);
2356 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2358 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2359 return COST_INFINITE;
2361 for (i = 0; i < gimple_phi_num_args (phi); i++)
2363 tree arg = gimple_phi_arg_def (phi, i);
2365 if (arg != phi_cand->base_expr)
2367 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2369 if (gimple_code (arg_def) == GIMPLE_PHI)
2370 cost += phi_add_costs (arg_def, c, one_add_cost);
2371 else
2373 slsr_cand_t arg_cand = base_cand_from_table (arg);
2375 if (arg_cand->index != c->index)
2376 cost += one_add_cost;
2381 return cost;
2384 /* For candidate C, each sibling of candidate C, and each dependent of
2385 candidate C, determine whether the candidate is dependent upon a
2386 phi that hides its basis. If not, replace the candidate unconditionally.
2387 Otherwise, determine whether the cost of introducing compensation code
2388 for the candidate is offset by the gains from strength reduction. If
2389 so, replace the candidate and introduce the compensation code. */
2391 static void
2392 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2394 if (phi_dependent_cand_p (c))
2396 if (c->kind == CAND_MULT)
2398 /* A candidate dependent upon a phi will replace a multiply by
2399 a constant with an add, and will insert at most one add for
2400 each phi argument. Add these costs with the potential dead-code
2401 savings to determine profitability. */
2402 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2403 int mult_savings = stmt_cost (c->cand_stmt, speed);
2404 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2405 tree phi_result = gimple_phi_result (phi);
2406 int one_add_cost = add_cost (speed,
2407 TYPE_MODE (TREE_TYPE (phi_result)));
2408 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2409 int cost = add_costs - mult_savings - c->dead_savings;
2411 if (dump_file && (dump_flags & TDF_DETAILS))
2413 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2414 fprintf (dump_file, " add_costs = %d\n", add_costs);
2415 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2416 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2417 fprintf (dump_file, " cost = %d\n", cost);
2418 if (cost <= COST_NEUTRAL)
2419 fputs (" Replacing...\n", dump_file);
2420 else
2421 fputs (" Not replaced.\n", dump_file);
2424 if (cost <= COST_NEUTRAL)
2425 replace_conditional_candidate (c);
2428 else
2429 replace_unconditional_candidate (c);
2431 if (c->sibling)
2432 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2434 if (c->dependent)
2435 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
2438 /* Count the number of candidates in the tree rooted at C that have
2439 not already been replaced under other interpretations. */
2441 static int
2442 count_candidates (slsr_cand_t c)
2444 unsigned count = cand_already_replaced (c) ? 0 : 1;
2446 if (c->sibling)
2447 count += count_candidates (lookup_cand (c->sibling));
2449 if (c->dependent)
2450 count += count_candidates (lookup_cand (c->dependent));
2452 return count;
2455 /* Increase the count of INCREMENT by one in the increment vector.
2456 INCREMENT is associated with candidate C. If INCREMENT is to be
2457 conditionally executed as part of a conditional candidate replacement,
2458 IS_PHI_ADJUST is true, otherwise false. If an initializer
2459 T_0 = stride * I is provided by a candidate that dominates all
2460 candidates with the same increment, also record T_0 for subsequent use. */
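/* For example, if several candidates share the increment 5 and the
   dominating one is an add whose addend T_0 already holds
   stride * 5, T_0 is recorded so that later replacements can reuse
   it instead of inserting a fresh multiply. (Numbers are purely
   illustrative.) */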
2462 static void
2463 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2465 bool found = false;
2466 unsigned i;
2468 /* Treat increments that differ only in sign as identical so as to
2469 share initializers, unless we are generating pointer arithmetic. */
2470 if (!address_arithmetic_p && wi::neg_p (increment))
2471 increment = -increment;
2473 for (i = 0; i < incr_vec_len; i++)
2475 if (incr_vec[i].incr == increment)
2477 incr_vec[i].count++;
2478 found = true;
2480 /* If we previously recorded an initializer that doesn't
2481 dominate this candidate, it's not going to be useful to
2482 us after all. */
2483 if (incr_vec[i].initializer
2484 && !dominated_by_p (CDI_DOMINATORS,
2485 gimple_bb (c->cand_stmt),
2486 incr_vec[i].init_bb))
2488 incr_vec[i].initializer = NULL_TREE;
2489 incr_vec[i].init_bb = NULL;
2492 break;
2496 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2498 /* The first time we see an increment, create the entry for it.
2499 If this is the root candidate which doesn't have a basis, set
2500 the count to zero. We're only processing it so it can possibly
2501 provide an initializer for other candidates. */
2502 incr_vec[incr_vec_len].incr = increment;
2503 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2504 incr_vec[incr_vec_len].cost = COST_INFINITE;
2506 /* Optimistically record the first occurrence of this increment
2507 as providing an initializer (if it does); we will revise this
2508 opinion later if it doesn't dominate all other occurrences.
2509 Exception: increments of 0, 1 never need initializers;
2510 and phi adjustments don't ever provide initializers. */
2511 if (c->kind == CAND_ADD
2512 && !is_phi_adjust
2513 && c->index == increment
2514 && (increment > 1 || increment < 0)
2515 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2516 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2518 tree t0 = NULL_TREE;
2519 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2520 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2521 if (operand_equal_p (rhs1, c->base_expr, 0))
2522 t0 = rhs2;
2523 else if (operand_equal_p (rhs2, c->base_expr, 0))
2524 t0 = rhs1;
2525 if (t0
2526 && SSA_NAME_DEF_STMT (t0)
2527 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2529 incr_vec[incr_vec_len].initializer = t0;
2530 incr_vec[incr_vec_len++].init_bb
2531 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2533 else
2535 incr_vec[incr_vec_len].initializer = NULL_TREE;
2536 incr_vec[incr_vec_len++].init_bb = NULL;
2539 else
2541 incr_vec[incr_vec_len].initializer = NULL_TREE;
2542 incr_vec[incr_vec_len++].init_bb = NULL;
2547 /* Given phi statement PHI that hides a candidate from its BASIS, find
2548 the increments along each incoming arc (recursively handling additional
2549 phis that may be present) and record them. These increments are the
2550 difference in index between the index-adjusting statements and the
2551 index of the basis. */
2553 static void
2554 record_phi_increments (slsr_cand_t basis, gimple *phi)
2556 unsigned i;
2557 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2559 for (i = 0; i < gimple_phi_num_args (phi); i++)
2561 tree arg = gimple_phi_arg_def (phi, i);
2563 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2565 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2567 if (gimple_code (arg_def) == GIMPLE_PHI)
2568 record_phi_increments (basis, arg_def);
2569 else
2571 slsr_cand_t arg_cand = base_cand_from_table (arg);
2572 widest_int diff = arg_cand->index - basis->index;
2573 record_increment (arg_cand, diff, PHI_ADJUST);
2579 /* Determine how many times each unique increment occurs in the set
2580 of candidates rooted at C's parent, recording the data in the
2581 increment vector. For each unique increment I, if an initializer
2582 T_0 = stride * I is provided by a candidate that dominates all
2583 candidates with the same increment, also record T_0 for subsequent
2584 use. */
2586 static void
2587 record_increments (slsr_cand_t c)
2589 if (!cand_already_replaced (c))
2591 if (!phi_dependent_cand_p (c))
2592 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2593 else
2595 /* A candidate with a basis hidden by a phi will have one
2596 increment for its relationship to the index represented by
2597 the phi, and potentially additional increments along each
2598 incoming edge. For the root of the dependency tree (which
2599 has no basis), process just the initial index in case it has
2600 an initializer that can be used by subsequent candidates. */
2601 record_increment (c, c->index, NOT_PHI_ADJUST);
2603 if (c->basis)
2604 record_phi_increments (lookup_cand (c->basis),
2605 lookup_cand (c->def_phi)->cand_stmt);
2609 if (c->sibling)
2610 record_increments (lookup_cand (c->sibling));
2612 if (c->dependent)
2613 record_increments (lookup_cand (c->dependent));
2616 /* Add up and return the costs of introducing add statements that
2617 require the increment INCR on behalf of candidate C and phi
2618 statement PHI. Accumulate into *SAVINGS the potential savings
2619 from removing existing statements that feed PHI and have no other
2620 uses. */
2622 static int
2623 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2624 int *savings)
2626 unsigned i;
2627 int cost = 0;
2628 slsr_cand_t basis = lookup_cand (c->basis);
2629 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2631 for (i = 0; i < gimple_phi_num_args (phi); i++)
2633 tree arg = gimple_phi_arg_def (phi, i);
2635 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2637 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2639 if (gimple_code (arg_def) == GIMPLE_PHI)
2641 int feeding_savings = 0;
2642 cost += phi_incr_cost (c, incr, arg_def, &feeding_savings);
2643 if (has_single_use (gimple_phi_result (arg_def)))
2644 *savings += feeding_savings;
2646 else
2648 slsr_cand_t arg_cand = base_cand_from_table (arg);
2649 widest_int diff = arg_cand->index - basis->index;
2651 if (incr == diff)
2653 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2654 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2655 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2656 if (has_single_use (lhs))
2657 *savings += stmt_cost (arg_cand->cand_stmt, true);
2663 return cost;
2666 /* Return the first candidate in the tree rooted at C that has not
2667 already been replaced, favoring siblings over dependents. */
2669 static slsr_cand_t
2670 unreplaced_cand_in_tree (slsr_cand_t c)
2672 if (!cand_already_replaced (c))
2673 return c;
2675 if (c->sibling)
2677 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2678 if (sib)
2679 return sib;
2682 if (c->dependent)
2684 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2685 if (dep)
2686 return dep;
2689 return NULL;
2692 /* Return TRUE if the candidates in the tree rooted at C should be
2693 optimized for speed, else FALSE. We estimate this based on the block
2694 containing the most dominant candidate in the tree that has not yet
2695 been replaced. */
2697 static bool
2698 optimize_cands_for_speed_p (slsr_cand_t c)
2700 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2701 gcc_assert (c2);
2702 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2705 /* Add COST_IN to the lowest cost of any dependent path starting at
2706 candidate C or any of its siblings, counting only candidates along
2707 such paths with increment INCR. Assume that replacing a candidate
2708 reduces cost by REPL_SAVINGS. Also account for savings from any
2709 statements that would go dead. If COUNT_PHIS is true, include
2710 costs of introducing feeding statements for conditional candidates. */
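/* For instance (figures invented for illustration), if COST_IN is 3
   and REPL_SAVINGS is 2, a dependent path containing two candidates
   whose increment matches INCR yields 3 - 2 - 2 = -1, a net savings
   that would make the increment worth replacing. */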
2712 static int
2713 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2714 const widest_int &incr, bool count_phis)
2716 int local_cost, sib_cost, savings = 0;
2717 widest_int cand_incr = cand_abs_increment (c);
2719 if (cand_already_replaced (c))
2720 local_cost = cost_in;
2721 else if (incr == cand_incr)
2722 local_cost = cost_in - repl_savings - c->dead_savings;
2723 else
2724 local_cost = cost_in - c->dead_savings;
2726 if (count_phis
2727 && phi_dependent_cand_p (c)
2728 && !cand_already_replaced (c))
2730 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2731 local_cost += phi_incr_cost (c, incr, phi, &savings);
2733 if (has_single_use (gimple_phi_result (phi)))
2734 local_cost -= savings;
2737 if (c->dependent)
2738 local_cost = lowest_cost_path (local_cost, repl_savings,
2739 lookup_cand (c->dependent), incr,
2740 count_phis);
2742 if (c->sibling)
2744 sib_cost = lowest_cost_path (cost_in, repl_savings,
2745 lookup_cand (c->sibling), incr,
2746 count_phis);
2747 local_cost = MIN (local_cost, sib_cost);
2750 return local_cost;
2753 /* Compute the total savings that would accrue from all replacements
2754 in the candidate tree rooted at C, counting only candidates with
2755 increment INCR. Assume that replacing a candidate reduces cost
2756 by REPL_SAVINGS. Also account for savings from statements that
2757 would go dead. */
2759 static int
2760 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2761 bool count_phis)
2763 int savings = 0;
2764 widest_int cand_incr = cand_abs_increment (c);
2766 if (incr == cand_incr && !cand_already_replaced (c))
2767 savings += repl_savings + c->dead_savings;
2769 if (count_phis
2770 && phi_dependent_cand_p (c)
2771 && !cand_already_replaced (c))
2773 int phi_savings = 0;
2774 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2775 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2777 if (has_single_use (gimple_phi_result (phi)))
2778 savings += phi_savings;
2781 if (c->dependent)
2782 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2783 count_phis);
2785 if (c->sibling)
2786 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2787 count_phis);
2789 return savings;
2792 /* Use target-specific costs to determine and record which increments
2793 in the current candidate tree are profitable to replace, assuming
2794 MODE and SPEED. FIRST_DEP is the first dependent of the root of
2795 the candidate tree.
2797 One slight limitation here is that we don't account for the possible
2798 introduction of casts in some cases. See replace_one_candidate for
2799 the cases where these are introduced. This should probably be cleaned
2800 up sometime. */
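/* Roughly: an increment of 0, 1, or -1 (the latter only for
   non-pointer arithmetic) is always considered neutral, while any
   other increment on a multiply chain must recoup the cost of the
   inserted initializer through per-candidate savings of
   (mul_cost - add_cost) plus any dead-code savings. */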
2802 static void
2803 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
2805 unsigned i;
2807 for (i = 0; i < incr_vec_len; i++)
2809 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
2811 /* If somehow this increment is bigger than a HWI, we won't
2812 be optimizing candidates that use it. And if the increment
2813 has a count of zero, nothing will be done with it. */
2814 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
2815 incr_vec[i].cost = COST_INFINITE;
2817 /* Increments of 0, 1, and -1 are always profitable to replace,
2818 because they always replace a multiply or add with an add or
2819 copy, and may cause one or more existing instructions to go
2820 dead. Exception: -1 can't be assumed to be profitable for
2821 pointer addition. */
2822 else if (incr == 0
2823 || incr == 1
2824 || (incr == -1
2825 && !POINTER_TYPE_P (first_dep->cand_type)))
2826 incr_vec[i].cost = COST_NEUTRAL;
2828 /* FORNOW: If we need to add an initializer, give up if a cast from
2829 the candidate's type to its stride's type can lose precision.
2830 This could eventually be handled better by expressly retaining the
2831 result of a cast to a wider type in the stride. Example:
2833 short int _1;
2834 _2 = (int) _1;
2835 _3 = _2 * 10;
2836 _4 = x + _3; ADD: x + (10 * _1) : int
2837 _5 = _2 * 15;
2838 _6 = x + _5; ADD: x + (15 * _1) : int
2840 Right now replacing _6 would cause insertion of an initializer
2841 of the form "short int T = _1 * 5;" followed by a cast to
2842 int, which could overflow incorrectly. Had we recorded _2 or
2843 (int)_1 as the stride, this wouldn't happen. However, doing
2844 this breaks other opportunities, so this will require some
2845 care. */
2846 else if (!incr_vec[i].initializer
2847 && TREE_CODE (first_dep->stride) != INTEGER_CST
2848 && !legal_cast_p_1 (first_dep->stride,
2849 gimple_assign_lhs (first_dep->cand_stmt)))
2851 incr_vec[i].cost = COST_INFINITE;
2853 /* If we need to add an initializer, make sure we don't introduce
2854 a multiply by a pointer type, which can happen in certain cast
2855 scenarios. FIXME: When cleaning up these cast issues, we can
2856 afford to introduce the multiply provided we cast out to an
2857 unsigned int of appropriate size. */
2858 else if (!incr_vec[i].initializer
2859 && TREE_CODE (first_dep->stride) != INTEGER_CST
2860 && POINTER_TYPE_P (TREE_TYPE (first_dep->stride)))
2862 incr_vec[i].cost = COST_INFINITE;
2864 /* For any other increment, if this is a multiply candidate, we
2865 must introduce a temporary T and initialize it with
2866 T_0 = stride * increment. When optimizing for speed, walk the
2867 candidate tree to calculate the best cost reduction along any
2868 path; if it offsets the fixed cost of inserting the initializer,
2869 replacing the increment is profitable. When optimizing for
2870 size, instead calculate the total cost reduction from replacing
2871 all candidates with this increment. */
2872 else if (first_dep->kind == CAND_MULT)
2874 int cost = mult_by_coeff_cost (incr, mode, speed);
2875 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
2876 if (speed)
2877 cost = lowest_cost_path (cost, repl_savings, first_dep,
2878 incr_vec[i].incr, COUNT_PHIS);
2879 else
2880 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
2881 COUNT_PHIS);
2883 incr_vec[i].cost = cost;
2886 /* If this is an add candidate, the initializer may already
2887 exist, so only calculate the cost of the initializer if it
2888 doesn't. We are replacing one add with another here, so the
2889 known replacement savings is zero. We will account for removal
2890 of dead instructions in lowest_cost_path or total_savings. */
2891 else
2893 int cost = 0;
2894 if (!incr_vec[i].initializer)
2895 cost = mult_by_coeff_cost (incr, mode, speed);
2897 if (speed)
2898 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
2899 DONT_COUNT_PHIS);
2900 else
2901 cost -= total_savings (0, first_dep, incr_vec[i].incr,
2902 DONT_COUNT_PHIS);
2904 incr_vec[i].cost = cost;
2909 /* Return the nearest common dominator of BB1 and BB2. If the blocks
2910 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
2911 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
2912 return C2 in *WHERE; and if the NCD matches neither, return NULL in
2913 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
2915 static basic_block
2916 ncd_for_two_cands (basic_block bb1, basic_block bb2,
2917 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
2919 basic_block ncd;
2921 if (!bb1)
2923 *where = c2;
2924 return bb2;
2927 if (!bb2)
2929 *where = c1;
2930 return bb1;
2933 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
2935 /* If both candidates are in the same block, the earlier
2936 candidate wins. */
2937 if (bb1 == ncd && bb2 == ncd)
2939 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
2940 *where = c2;
2941 else
2942 *where = c1;
2945 /* Otherwise, if one of them produced a candidate in the
2946 dominator, that one wins. */
2947 else if (bb1 == ncd)
2948 *where = c1;
2950 else if (bb2 == ncd)
2951 *where = c2;
2953 /* If neither matches the dominator, neither wins. */
2954 else
2955 *where = NULL;
2957 return ncd;
2960 /* Consider all candidates that feed PHI. Find the nearest common
2961 dominator of those candidates requiring the given increment INCR.
2962 Further find and return the nearest common dominator of this result
2963 with block NCD. If the returned block contains one or more of the
2964 candidates, return the earliest candidate in the block in *WHERE. */
2966 static basic_block
2967 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
2968 basic_block ncd, slsr_cand_t *where)
2970 unsigned i;
2971 slsr_cand_t basis = lookup_cand (c->basis);
2972 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2974 for (i = 0; i < gimple_phi_num_args (phi); i++)
2976 tree arg = gimple_phi_arg_def (phi, i);
2978 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2980 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2982 if (gimple_code (arg_def) == GIMPLE_PHI)
2983 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
2984 where);
2985 else
2987 slsr_cand_t arg_cand = base_cand_from_table (arg);
2988 widest_int diff = arg_cand->index - basis->index;
2989 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
2991 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
2992 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
2997 return ncd;
3000 /* Consider the candidate C together with any candidates that feed
3001 C's phi dependence (if any). Find and return the nearest common
3002 dominator of those candidates requiring the given increment INCR.
3003 If the returned block contains one or more of the candidates,
3004 return the earliest candidate in the block in *WHERE. */
3006 static basic_block
3007 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3009 basic_block ncd = NULL;
3011 if (cand_abs_increment (c) == incr)
3013 ncd = gimple_bb (c->cand_stmt);
3014 *where = c;
3017 if (phi_dependent_cand_p (c))
3018 ncd = ncd_with_phi (c, incr,
3019 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3020 ncd, where);
3022 return ncd;
3025 /* Consider all candidates in the tree rooted at C for which INCR
3026 represents the required increment of C relative to its basis.
3027 Find and return the basic block that most nearly dominates all
3028 such candidates. If the returned block contains one or more of
3029 the candidates, return the earliest candidate in the block in
3030 *WHERE. */
3032 static basic_block
3033 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3034 slsr_cand_t *where)
3036 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3037 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3039 /* First find the NCD of all siblings and dependents. */
3040 if (c->sibling)
3041 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3042 incr, &sib_where);
3043 if (c->dependent)
3044 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3045 incr, &dep_where);
3046 if (!sib_ncd && !dep_ncd)
3048 new_where = NULL;
3049 ncd = NULL;
3051 else if (sib_ncd && !dep_ncd)
3053 new_where = sib_where;
3054 ncd = sib_ncd;
3056 else if (dep_ncd && !sib_ncd)
3058 new_where = dep_where;
3059 ncd = dep_ncd;
3061 else
3062 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3063 dep_where, &new_where);
3065 /* If the candidate's increment doesn't match the one we're interested
3066 in (nor do any increments for feeding defs of a phi-dependence),
3067 then the result depends only on siblings and dependents. */
3068 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3070 if (!this_ncd || cand_already_replaced (c))
3072 *where = new_where;
3073 return ncd;
3076 /* Otherwise, compare this candidate with the result from all siblings
3077 and dependents. */
3078 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3080 return ncd;
3083 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3085 static inline bool
3086 profitable_increment_p (unsigned index)
3088 return (incr_vec[index].cost <= COST_NEUTRAL);
3091 /* For each profitable increment in the increment vector not equal to
3092 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3093 dominator of all statements in the candidate chain rooted at C
3094 that require that increment, and insert an initializer
3095 T_0 = stride * increment at that location. Record T_0 with the
3096 increment record. */
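/* For example, for a profitable increment of 5 with stride S and no
   recorded initializer, a statement T_0 = S * 5 is built and placed
   at the latest point that still dominates every candidate needing
   it (the numbers are illustrative only). */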
3098 static void
3099 insert_initializers (slsr_cand_t c)
3101 unsigned i;
3103 for (i = 0; i < incr_vec_len; i++)
3105 basic_block bb;
3106 slsr_cand_t where = NULL;
3107 gassign *init_stmt;
3108 tree stride_type, new_name, incr_tree;
3109 widest_int incr = incr_vec[i].incr;
3111 if (!profitable_increment_p (i)
3112 || incr == 1
3113 || (incr == -1
3114 && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
3115 || incr == 0)
3116 continue;
3118 /* We may have already identified an existing initializer that
3119 will suffice. */
3120 if (incr_vec[i].initializer)
3122 if (dump_file && (dump_flags & TDF_DETAILS))
3124 fputs ("Using existing initializer: ", dump_file);
3125 print_gimple_stmt (dump_file,
3126 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3127 0, 0);
3129 continue;
3132 /* Find the block that most closely dominates all candidates
3133 with this increment. If there is at least one candidate in
3134 that block, the earliest one will be returned in WHERE. */
3135 bb = nearest_common_dominator_for_cands (c, incr, &where);
3137 /* Create a new SSA name to hold the initializer's value. */
3138 stride_type = TREE_TYPE (c->stride);
3139 new_name = make_temp_ssa_name (stride_type, NULL, "slsr");
3140 incr_vec[i].initializer = new_name;
3142 /* Create the initializer and insert it in the latest possible
3143 dominating position. */
3144 incr_tree = wide_int_to_tree (stride_type, incr);
3145 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3146 c->stride, incr_tree);
3147 if (where)
3149 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3150 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3151 gimple_set_location (init_stmt, gimple_location (where->cand_stmt));
3153 else
3155 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3156 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3158 if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
3159 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3160 else
3161 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3163 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3166 if (dump_file && (dump_flags & TDF_DETAILS))
3168 fputs ("Inserting initializer: ", dump_file);
3169 print_gimple_stmt (dump_file, init_stmt, 0, 0);
3174 /* Return TRUE iff all required increments for candidates feeding PHI
3175 are profitable to replace on behalf of candidate C. */
3177 static bool
3178 all_phi_incrs_profitable (slsr_cand_t c, gimple *phi)
3180 unsigned i;
3181 slsr_cand_t basis = lookup_cand (c->basis);
3182 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3184 for (i = 0; i < gimple_phi_num_args (phi); i++)
3186 tree arg = gimple_phi_arg_def (phi, i);
3188 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3190 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3192 if (gimple_code (arg_def) == GIMPLE_PHI)
3194 if (!all_phi_incrs_profitable (c, arg_def))
3195 return false;
3197 else
3199 int j;
3200 slsr_cand_t arg_cand = base_cand_from_table (arg);
3201 widest_int increment = arg_cand->index - basis->index;
3203 if (!address_arithmetic_p && wi::neg_p (increment))
3204 increment = -increment;
3206 j = incr_vec_index (increment);
3208 if (dump_file && (dump_flags & TDF_DETAILS))
3210 fprintf (dump_file, " Conditional candidate %d, phi: ",
3211 c->cand_num);
3212 print_gimple_stmt (dump_file, phi, 0, 0);
3213 fputs (" increment: ", dump_file);
3214 print_decs (increment, dump_file);
3215 if (j < 0)
3216 fprintf (dump_file,
3217 "\n Not replaced; incr_vec overflow.\n");
3218 else {
3219 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3220 if (profitable_increment_p (j))
3221 fputs (" Replacing...\n", dump_file);
3222 else
3223 fputs (" Not replaced.\n", dump_file);
3227 if (j < 0 || !profitable_increment_p (j))
3228 return false;
3233 return true;
3236 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3237 type TO_TYPE, and insert it in front of the statement represented
3238 by candidate C. Return
3239 the new SSA name. */
3241 static tree
3242 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3244 tree cast_lhs;
3245 gassign *cast_stmt;
3246 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3248 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3249 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3250 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3251 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3253 if (dump_file && (dump_flags & TDF_DETAILS))
3255 fputs (" Inserting: ", dump_file);
3256 print_gimple_stmt (dump_file, cast_stmt, 0, 0);
3259 return cast_lhs;
3262 /* Replace the RHS of the statement represented by candidate C with
3263 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3264 leave C unchanged or just interchange its operands. The original
3265 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3266 If the replacement was made and we are doing a details dump,
3267 return the revised statement, else NULL. */
3269 static gimple *
3270 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3271 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3272 slsr_cand_t c)
3274 if (new_code != old_code
3275 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3276 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3277 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3278 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3280 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3281 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3282 update_stmt (gsi_stmt (gsi));
3283 c->cand_stmt = gsi_stmt (gsi);
3285 if (dump_file && (dump_flags & TDF_DETAILS))
3286 return gsi_stmt (gsi);
3289 else if (dump_file && (dump_flags & TDF_DETAILS))
3290 fputs (" (duplicate, not actually replacing)\n", dump_file);
3292 return NULL;
3295 /* Strength-reduce the statement represented by candidate C by replacing
3296 it with an equivalent addition or subtraction. I is the index into
3297 the increment vector identifying C's increment. A new SSA name is
3298 created if a cast needs to be introduced. BASIS_NAME
3299 is the rhs1 to use in creating the add/subtract. */
3301 static void
3302 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3304 gimple *stmt_to_print = NULL;
3305 tree orig_rhs1, orig_rhs2;
3306 tree rhs2;
3307 enum tree_code orig_code, repl_code;
3308 widest_int cand_incr;
3310 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3311 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3312 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3313 cand_incr = cand_increment (c);
3315 if (dump_file && (dump_flags & TDF_DETAILS))
3317 fputs ("Replacing: ", dump_file);
3318 print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
3319 stmt_to_print = c->cand_stmt;
3322 if (address_arithmetic_p)
3323 repl_code = POINTER_PLUS_EXPR;
3324 else
3325 repl_code = PLUS_EXPR;
3327 /* If the increment has an initializer T_0, replace the candidate
3328 statement with an add of the basis name and the initializer. */
3329 if (incr_vec[i].initializer)
3331 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3332 tree orig_type = TREE_TYPE (orig_rhs2);
3334 if (types_compatible_p (orig_type, init_type))
3335 rhs2 = incr_vec[i].initializer;
3336 else
3337 rhs2 = introduce_cast_before_cand (c, orig_type,
3338 incr_vec[i].initializer);
3340 if (incr_vec[i].incr != cand_incr)
3342 gcc_assert (repl_code == PLUS_EXPR);
3343 repl_code = MINUS_EXPR;
3346 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3347 orig_code, orig_rhs1, orig_rhs2,
3351 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3352 with a subtract of the stride from the basis name, a copy
3353 from the basis name, or an add of the stride to the basis
3354 name, respectively. It may be necessary to introduce a
3355 cast (or reuse an existing cast). */
3356 else if (cand_incr == 1)
3358 tree stride_type = TREE_TYPE (c->stride);
3359 tree orig_type = TREE_TYPE (orig_rhs2);
3361 if (types_compatible_p (orig_type, stride_type))
3362 rhs2 = c->stride;
3363 else
3364 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3366 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3367 orig_code, orig_rhs1, orig_rhs2,
3371 else if (cand_incr == -1)
3373 tree stride_type = TREE_TYPE (c->stride);
3374 tree orig_type = TREE_TYPE (orig_rhs2);
3375 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3377 if (types_compatible_p (orig_type, stride_type))
3378 rhs2 = c->stride;
3379 else
3380 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3382 if (orig_code != MINUS_EXPR
3383 || !operand_equal_p (basis_name, orig_rhs1, 0)
3384 || !operand_equal_p (rhs2, orig_rhs2, 0))
3386 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3387 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3388 update_stmt (gsi_stmt (gsi));
3389 c->cand_stmt = gsi_stmt (gsi);
3391 if (dump_file && (dump_flags & TDF_DETAILS))
3392 stmt_to_print = gsi_stmt (gsi);
3394 else if (dump_file && (dump_flags & TDF_DETAILS))
3395 fputs (" (duplicate, not actually replacing)\n", dump_file);
3398 else if (cand_incr == 0)
3400 tree lhs = gimple_assign_lhs (c->cand_stmt);
3401 tree lhs_type = TREE_TYPE (lhs);
3402 tree basis_type = TREE_TYPE (basis_name);
3404 if (types_compatible_p (lhs_type, basis_type))
3406 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3407 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3408 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3409 gsi_replace (&gsi, copy_stmt, false);
3410 c->cand_stmt = copy_stmt;
3412 if (dump_file && (dump_flags & TDF_DETAILS))
3413 stmt_to_print = copy_stmt;
3415 else
3417 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3418 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3419 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3420 gsi_replace (&gsi, cast_stmt, false);
3421 c->cand_stmt = cast_stmt;
3423 if (dump_file && (dump_flags & TDF_DETAILS))
3424 stmt_to_print = cast_stmt;
3427 else
3428 gcc_unreachable ();
3430 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3432 fputs ("With: ", dump_file);
3433 print_gimple_stmt (dump_file, stmt_to_print, 0, 0);
3434 fputs ("\n", dump_file);
3438 /* For each candidate in the tree rooted at C, replace it with
3439 an increment if such has been shown to be profitable. */
3441 static void
3442 replace_profitable_candidates (slsr_cand_t c)
3444 if (!cand_already_replaced (c))
3446 widest_int increment = cand_abs_increment (c);
3447 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3448 int i;
3450 i = incr_vec_index (increment);
3452 /* Only process profitable increments. Nothing useful can be done
3453 to a cast or copy. */
3454 if (i >= 0
3455 && profitable_increment_p (i)
3456 && orig_code != SSA_NAME
3457 && !CONVERT_EXPR_CODE_P (orig_code))
3459 if (phi_dependent_cand_p (c))
3461 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
3463 if (all_phi_incrs_profitable (c, phi))
3465 /* Look up the LHS SSA name from C's basis. This will be
3466 the RHS1 of the adds we will introduce to create new
3467 phi arguments. */
3468 slsr_cand_t basis = lookup_cand (c->basis);
3469 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3471 /* Create a new phi statement that will represent C's true
3472 basis after the transformation is complete. */
3473 location_t loc = gimple_location (c->cand_stmt);
3474 tree name = create_phi_basis (c, phi, basis_name,
3475 loc, UNKNOWN_STRIDE);
3477 /* Replace C with an add of the new basis phi and the
3478 increment. */
3479 replace_one_candidate (c, i, name);
3482 else
3484 slsr_cand_t basis = lookup_cand (c->basis);
3485 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3486 replace_one_candidate (c, i, basis_name);
3491 if (c->sibling)
3492 replace_profitable_candidates (lookup_cand (c->sibling));
3494 if (c->dependent)
3495 replace_profitable_candidates (lookup_cand (c->dependent));
3498 /* Analyze costs of related candidates in the candidate vector,
3499 and make beneficial replacements. */
3501 static void
3502 analyze_candidates_and_replace (void)
3504 unsigned i;
3505 slsr_cand_t c;
3507 /* Each candidate that has a null basis and a non-null
3508 dependent is the root of a tree of related statements.
3509 Analyze each tree to determine a subset of those
3510 statements that can be replaced with maximum benefit. */
3511 FOR_EACH_VEC_ELT (cand_vec, i, c)
3513 slsr_cand_t first_dep;
3515 if (c->basis != 0 || c->dependent == 0)
3516 continue;
3518 if (dump_file && (dump_flags & TDF_DETAILS))
3519 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3520 c->cand_num);
3522 first_dep = lookup_cand (c->dependent);
3524 /* If this is a chain of CAND_REFs, unconditionally replace
3525 each of them with a strength-reduced data reference. */
3526 if (c->kind == CAND_REF)
3527 replace_refs (c);
3529 /* If the common stride of all related candidates is a known
3530 constant, each candidate without a phi-dependence can be
3531 profitably replaced. Each replaces a multiply by a single
3532 add, with the possibility that a feeding add also goes dead.
3533 A candidate with a phi-dependence is replaced only if the
3534 compensation code it requires is offset by the strength
3535 reduction savings. */
3536 else if (TREE_CODE (c->stride) == INTEGER_CST)
3537 replace_uncond_cands_and_profitable_phis (first_dep);
3539 /* When the stride is an SSA name, it may still be profitable
3540 to replace some or all of the dependent candidates, depending
3541 on whether the introduced increments can be reused, or are
3542 less expensive to calculate than the replaced statements. */
3543 else
3545 machine_mode mode;
3546 bool speed;
3548 /* Determine whether we'll be generating pointer arithmetic
3549 when replacing candidates. */
3550 address_arithmetic_p = (c->kind == CAND_ADD
3551 && POINTER_TYPE_P (c->cand_type));
3553 /* If all candidates have already been replaced under other
3554 interpretations, nothing remains to be done. */
3555 if (!count_candidates (c))
3556 continue;
3558 /* Construct an array of increments for this candidate chain. */
3559 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3560 incr_vec_len = 0;
3561 record_increments (c);
3563 /* Determine which increments are profitable to replace. */
3564 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3565 speed = optimize_cands_for_speed_p (c);
3566 analyze_increments (first_dep, mode, speed);
3568 /* Insert initializers of the form T_0 = stride * increment
3569 for use in profitable replacements. */
3570 insert_initializers (first_dep);
3571 dump_incr_vec ();
3573 /* Perform the replacements. */
3574 replace_profitable_candidates (first_dep);
3575 free (incr_vec);
3580 namespace {
3582 const pass_data pass_data_strength_reduction =
3584 GIMPLE_PASS, /* type */
3585 "slsr", /* name */
3586 OPTGROUP_NONE, /* optinfo_flags */
3587 TV_GIMPLE_SLSR, /* tv_id */
3588 ( PROP_cfg | PROP_ssa ), /* properties_required */
3589 0, /* properties_provided */
3590 0, /* properties_destroyed */
3591 0, /* todo_flags_start */
3592 0, /* todo_flags_finish */
3595 class pass_strength_reduction : public gimple_opt_pass
3597 public:
3598 pass_strength_reduction (gcc::context *ctxt)
3599 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3602 /* opt_pass methods: */
3603 virtual bool gate (function *) { return flag_tree_slsr; }
3604 virtual unsigned int execute (function *);
3606 }; // class pass_strength_reduction
3608 unsigned
3609 pass_strength_reduction::execute (function *fun)
3611 /* Create the obstack where candidates will reside. */
3612 gcc_obstack_init (&cand_obstack);
3614 /* Allocate the candidate vector. */
3615 cand_vec.create (128);
3617 /* Allocate the mapping from statements to candidate indices. */
3618 stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
3620 /* Create the obstack where candidate chains will reside. */
3621 gcc_obstack_init (&chain_obstack);
3623 /* Allocate the mapping from base expressions to candidate chains. */
3624 base_cand_map = new hash_table<cand_chain_hasher> (500);
3626 /* Allocate the mapping from bases to alternative bases. */
3627 alt_base_map = new hash_map<tree, tree>;
3629 /* Initialize the loop optimizer. We need to detect flow across
3630 back edges, and this gives us dominator information as well. */
3631 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3633 /* Walk the CFG in predominator order looking for strength reduction
3634 candidates. */
3635 find_candidates_dom_walker (CDI_DOMINATORS)
3636 .walk (fun->cfg->x_entry_block_ptr);
3638 if (dump_file && (dump_flags & TDF_DETAILS))
3640 dump_cand_vec ();
3641 dump_cand_chains ();
3644 delete alt_base_map;
3645 free_affine_expand_cache (&name_expansions);
3647 /* Analyze costs and make appropriate replacements. */
3648 analyze_candidates_and_replace ();
3650 loop_optimizer_finalize ();
3651 delete base_cand_map;
3652 base_cand_map = NULL;
3653 obstack_free (&chain_obstack, NULL);
3654 delete stmt_cand_map;
3655 cand_vec.release ();
3656 obstack_free (&cand_obstack, NULL);
3658 return 0;
3661 } // anon namespace
3663 gimple_opt_pass *
3664 make_pass_strength_reduction (gcc::context *ctxt)
3666 return new pass_strength_reduction (ctxt);