/* Straight-line strength reduction.
   Copyright (C) 2012-2017 Free Software Foundation, Inc.
   Contributed by Bill Schmidt, IBM <wschmidt@linux.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* There are many algorithms for performing strength reduction on
   loops.  This is not one of them.  IVOPTS handles strength reduction
   of induction variables just fine.  This pass is intended to pick
   up the crumbs it leaves behind, by considering opportunities for
   strength reduction along dominator paths.

   Strength reduction addresses explicit multiplies, and certain
   multiplies implicit in addressing expressions.  It would also be
   possible to apply strength reduction to divisions and modulos,
   but such opportunities are relatively uncommon.

   Strength reduction is also currently restricted to integer operations.
   If desired, it could be extended to floating-point operations under
   control of something like -funsafe-math-optimizations.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "expmed.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "stor-layout.h"
#include "cfgloop.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "params.h"
#include "tree-ssa-address.h"
#include "tree-affine.h"
#include "builtins.h"

/* Information about a strength reduction candidate.  Each statement
   in the candidate table represents an expression of one of the
   following forms (the special case of CAND_REF will be described
   later):

   (CAND_MULT)  S1:  X = (B + i) * S
   (CAND_ADD)   S1:  X = B + (i * S)

   Here X and B are SSA names, i is an integer constant, and S is
   either an SSA name or a constant.  We call B the "base," i the
   "index", and S the "stride."

   Any statement S0 that dominates S1 and is of the form:

   (CAND_MULT)  S0:  Y = (B + i') * S
   (CAND_ADD)   S0:  Y = B + (i' * S)

   is called a "basis" for S1.  In both cases, S1 may be replaced by

   S1':  X = Y + (i - i') * S,

   where (i - i') * S is folded to the extent possible.

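   As a small worked example (purely illustrative), suppose S0
   dominates S1 in

   S0:  Y = (B + 2) * S
   S1:  X = (B + 7) * S

   Then S0 is a basis for S1, (i - i') is 5, and S1 may be rewritten as

   S1':  X = Y + 5 * S,

   replacing the general multiply by an add of 5 * S.
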
   All gimple statements are visited in dominator order, and each
   statement that may contribute to one of the forms of S1 above is
   given at least one entry in the candidate table.  Such statements
   include addition, pointer addition, subtraction, multiplication,
   negation, copies, and nontrivial type casts.  If a statement may
   represent more than one expression of the forms of S1 above,
   multiple "interpretations" are stored in the table and chained
   together.  Examples:

   * An add of two SSA names may treat either operand as the base.
   * A multiply of two SSA names, likewise.
   * A copy or cast may be thought of as either a CAND_MULT with
     i = 0 and S = 1, or as a CAND_ADD with i = 0 or S = 0.

   Candidate records are allocated from an obstack.  They are addressed
   both from a hash table keyed on S1, and from a vector of candidate
   pointers arranged in predominator order.

   Opportunity note
   ----------------
   Currently we don't recognize:

     S0: Y = (S * i') - B
     S1: X = (S * i) - B

   as a strength reduction opportunity, even though this S1 would
   also be replaceable by the S1' above.  This can be added if it
   comes up in practice.

   Strength reduction in addressing
   --------------------------------
   There is another kind of candidate known as CAND_REF.  A CAND_REF
   describes a statement containing a memory reference having
   complex addressing that might benefit from strength reduction.
   Specifically, we are interested in references for which
   get_inner_reference returns a base address, offset, and bitpos as
   follows:

     base:    MEM_REF (T1, C1)
     offset:  MULT_EXPR (PLUS_EXPR (T2, C2), C3)
     bitpos:  C4 * BITS_PER_UNIT

   Here T1 and T2 are arbitrary trees, and C1, C2, C3, C4 are
   arbitrary integer constants.  Note that C2 may be zero, in which
   case the offset will be MULT_EXPR (T2, C3).

   When this pattern is recognized, the original memory reference
   can be replaced with:

     MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, C3)),
              C1 + (C2 * C3) + C4)

   which distributes the multiply to allow constant folding.  When
   two or more addressing expressions can be represented by MEM_REFs
   of this form, differing only in the constants C1, C2, and C4,
   making this substitution produces more efficient addressing during
   the RTL phases.  When there are not at least two expressions with
   the same values of T1, T2, and C3, there is nothing to be gained
   by the replacement.

   Strength reduction of CAND_REFs uses the same infrastructure as
   that used by CAND_MULTs and CAND_ADDs.  We record T1 in the base (B)
   field, MULT_EXPR (T2, C3) in the stride (S) field, and
   C1 + (C2 * C3) + C4 in the index (i) field.  A basis for a CAND_REF
   is thus another CAND_REF with the same B and S values.  When at
   least two CAND_REFs are chained together using the basis relation,
   each of them is replaced as above, resulting in improved code
   generation for addressing.

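   For concreteness (constants chosen purely for illustration), suppose
   get_inner_reference decomposes a reference into

     base:    MEM_REF (T1, 4)
     offset:  MULT_EXPR (PLUS_EXPR (T2, 3), 8)
     bitpos:  4 * BITS_PER_UNIT

   so that C1 = 4, C2 = 3, C3 = 8, and C4 = 4.  The rewritten form is

     MEM_REF (POINTER_PLUS_EXPR (T1, MULT_EXPR (T2, 8)), 32)

   since C1 + (C2 * C3) + C4 = 4 + 24 + 4 = 32.  A second reference
   sharing T1, T2, and C3 but with different constants differs only in
   the final MEM_REF offset, which is where the addressing savings
   come from.
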
   Conditional candidates
   ======================

   Conditional candidates are best illustrated with an example.
   Consider the code sequence:

   (1)  x_0 = ...;
   (2)  a_0 = x_0 * 5;          MULT (B: x_0, i: 0, S: 5)
        if (...)
   (3)    x_1 = x_0 + 1;        ADD  (B: x_0, i: 1, S: 1)
   (4)  x_2 = PHI <x_0, x_1>;   PHI  (B: x_0, i: 0, S: 1)
   (5)  x_3 = x_2 + 1;          ADD  (B: x_2, i: 1, S: 1)
   (6)  a_1 = x_3 * 5;          MULT (B: x_2, i: 1, S: 5)

   Here strength reduction is complicated by the uncertain value of x_2.
   A legitimate transformation is:

   (1)  x_0 = ...;
   (2)  a_0 = x_0 * 5;
        if (...)
          {
   (3)      [x_1 = x_0 + 1;]
   (3a)     t_1 = a_0 + 5;
          }
   (4)  [x_2 = PHI <x_0, x_1>;]
   (4a) t_2 = PHI <a_0, t_1>;
   (5)  [x_3 = x_2 + 1;]
   (6r) a_1 = t_2 + 5;

   where the bracketed instructions may go dead.

   To recognize this opportunity, we have to observe that statement (6)
   has a "hidden basis" (2).  The hidden basis is unlike a normal basis
   in that the statement and the hidden basis have different base SSA
   names (x_2 and x_0, respectively).  The relationship is established
   when a statement's base name (x_2) is defined by a phi statement (4),
   each argument of which (x_0, x_1) has an identical "derived base name."
   If the argument is defined by a candidate (as x_1 is by (3)) that is a
   CAND_ADD having a stride of 1, the derived base name of the argument is
   the base name of the candidate (x_0).  Otherwise, the argument itself
   is its derived base name (as is the case with argument x_0).

   The hidden basis for statement (6) is the nearest dominating candidate
   whose base name is the derived base name (x_0) of the feeding phi (4),
   and whose stride is identical to that of the statement.  We can then
   create the new "phi basis" (4a) and feeding adds along incoming arcs (3a),
   allowing the final replacement of (6) by the strength-reduced (6r).

   To facilitate this, a new kind of candidate (CAND_PHI) is introduced.
   A CAND_PHI is not a candidate for replacement, but is maintained in the
   candidate table to ease discovery of hidden bases.  Any phi statement
   whose arguments share a common derived base name is entered into the
   table with the derived base name, an (arbitrary) index of zero, and a
   stride of 1.  A statement with a hidden basis can then be detected by
   simply looking up its feeding phi definition in the candidate table,
   extracting the derived base name, and searching for a basis in the
   usual manner after substituting the derived base name.

   Note that the transformation is only valid when the original phi and
   the statements that define the phi's arguments are all at the same
   position in the loop hierarchy.  */

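/* As a purely illustrative sketch (the function is hypothetical and
   the exact GIMPLE depends on the surrounding optimizations), source
   code of the following shape gives rise to the conditional-candidate
   pattern above:

     int
     f (int x, int c)
     {
       int a0 = x * 5;
       if (c)
         x = x + 1;
       return a0 + x * 5;
     }

   After SSA construction the second multiply's base name is defined by
   a phi, and its hidden basis is the first multiply.  */
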
215 /* Index into the candidate vector, offset by 1. VECs are zero-based,
216 while cand_idx's are one-based, with zero indicating null. */
217 typedef unsigned cand_idx;
219 /* The kind of candidate. */
220 enum cand_kind
222 CAND_MULT,
223 CAND_ADD,
224 CAND_REF,
225 CAND_PHI
228 struct slsr_cand_d
230 /* The candidate statement S1. */
231 gimple *cand_stmt;
233 /* The base expression B: often an SSA name, but not always. */
234 tree base_expr;
236 /* The stride S. */
237 tree stride;
239 /* The index constant i. */
240 widest_int index;
242 /* The type of the candidate. This is normally the type of base_expr,
243 but casts may have occurred when combining feeding instructions.
244 A candidate can only be a basis for candidates of the same final type.
245 (For CAND_REFs, this is the type to be used for operand 1 of the
246 replacement MEM_REF.) */
247 tree cand_type;
249 /* The type to be used to interpret the stride field when the stride
250 is not a constant. Normally the same as the type of the recorded
251 stride, but when the stride has been cast we need to maintain that
252 knowledge in order to make legal substitutions without losing
253 precision. When the stride is a constant, this will be sizetype. */
254 tree stride_type;
256 /* The kind of candidate (CAND_MULT, etc.). */
257 enum cand_kind kind;
259 /* Index of this candidate in the candidate vector. */
260 cand_idx cand_num;
262 /* Index of the next candidate record for the same statement.
263 A statement may be useful in more than one way (e.g., due to
264 commutativity). So we can have multiple "interpretations"
265 of a statement. */
266 cand_idx next_interp;
268 /* Index of the basis statement S0, if any, in the candidate vector. */
269 cand_idx basis;
271 /* First candidate for which this candidate is a basis, if one exists. */
272 cand_idx dependent;
274 /* Next candidate having the same basis as this one. */
275 cand_idx sibling;
277 /* If this is a conditional candidate, the CAND_PHI candidate
278 that defines the base SSA name B. */
279 cand_idx def_phi;
281 /* Savings that can be expected from eliminating dead code if this
282 candidate is replaced. */
283 int dead_savings;
285 /* For PHI candidates, use a visited flag to keep from processing the
286 same PHI twice from multiple paths. */
287 int visited;
289 /* We sometimes have to cache a phi basis with a phi candidate to
290 avoid processing it twice. Valid only if visited==1. */
291 tree cached_basis;
294 typedef struct slsr_cand_d slsr_cand, *slsr_cand_t;
295 typedef const struct slsr_cand_d *const_slsr_cand_t;
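/* Illustrative example (SSA names hypothetical): for a statement
   x_3 = y_1 * 10 about which nothing else is known, the pass records
   a CAND_MULT whose base_expr is y_1, index is 0, stride is 10, and
   cand_type is the type of y_1.  If a dominating statement
   w_2 = y_1 * 10 was recorded earlier, that record's cand_num becomes
   this record's basis, and this record is linked onto the basis's
   dependent/sibling chain.  */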
297 /* Pointers to candidates are chained together as part of a mapping
298 from base expressions to the candidates that use them. */
300 struct cand_chain_d
302 /* Base expression for the chain of candidates: often, but not
303 always, an SSA name. */
304 tree base_expr;
306 /* Pointer to a candidate. */
307 slsr_cand_t cand;
309 /* Chain pointer. */
310 struct cand_chain_d *next;
314 typedef struct cand_chain_d cand_chain, *cand_chain_t;
315 typedef const struct cand_chain_d *const_cand_chain_t;
317 /* Information about a unique "increment" associated with candidates
318 having an SSA name for a stride. An increment is the difference
319 between the index of the candidate and the index of its basis,
320 i.e., (i - i') as discussed in the module commentary.
322 When we are not going to generate address arithmetic we treat
323 increments that differ only in sign as the same, allowing sharing
324 of the cost of initializers. The absolute value of the increment
325 is stored in the incr_info. */
327 struct incr_info_d
329 /* The increment that relates a candidate to its basis. */
330 widest_int incr;
332 /* How many times the increment occurs in the candidate tree. */
333 unsigned count;
335 /* Cost of replacing candidates using this increment. Negative and
336 zero costs indicate replacement should be performed. */
337 int cost;
339 /* If this increment is profitable but is not -1, 0, or 1, it requires
340 an initializer T_0 = stride * incr to be found or introduced in the
341 nearest common dominator of all candidates. This field holds T_0
342 for subsequent use. */
343 tree initializer;
345 /* If the initializer was found to already exist, this is the block
346 where it was found. */
347 basic_block init_bb;
350 typedef struct incr_info_d incr_info, *incr_info_t;
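/* Illustrative example: if a candidate has index 11 and its basis has
   index 3, the increment relating them is 8.  When address arithmetic
   is not being generated, a candidate related to its basis by -8 is
   counted under the same incr_info entry, since both can share one
   initializer T_0 = stride * 8 (only the absolute value is stored).  */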
352 /* Candidates are maintained in a vector. If candidate X dominates
353 candidate Y, then X appears before Y in the vector; but the
354 converse does not necessarily hold. */
355 static vec<slsr_cand_t> cand_vec;
357 enum cost_consts
359 COST_NEUTRAL = 0,
360 COST_INFINITE = 1000
363 enum stride_status
365 UNKNOWN_STRIDE = 0,
366 KNOWN_STRIDE = 1
369 enum phi_adjust_status
371 NOT_PHI_ADJUST = 0,
372 PHI_ADJUST = 1
375 enum count_phis_status
377 DONT_COUNT_PHIS = 0,
378 COUNT_PHIS = 1
381 /* Constrain how many PHI nodes we will visit for a conditional
382 candidate (depth and breadth). */
383 const int MAX_SPREAD = 16;
385 /* Pointer map embodying a mapping from statements to candidates. */
386 static hash_map<gimple *, slsr_cand_t> *stmt_cand_map;
388 /* Obstack for candidates. */
389 static struct obstack cand_obstack;
391 /* Obstack for candidate chains. */
392 static struct obstack chain_obstack;
394 /* An array INCR_VEC of incr_infos is used during analysis of related
395 candidates having an SSA name for a stride. INCR_VEC_LEN describes
396 its current length. MAX_INCR_VEC_LEN is used to avoid costly
397 pathological cases. */
398 static incr_info_t incr_vec;
399 static unsigned incr_vec_len;
400 const int MAX_INCR_VEC_LEN = 16;
402 /* For a chain of candidates with unknown stride, indicates whether or not
403 we must generate pointer arithmetic when replacing statements. */
404 static bool address_arithmetic_p;
406 /* Forward function declarations. */
407 static slsr_cand_t base_cand_from_table (tree);
408 static tree introduce_cast_before_cand (slsr_cand_t, tree, tree);
409 static bool legal_cast_p_1 (tree, tree);
411 /* Produce a pointer to the IDX'th candidate in the candidate vector. */
413 static slsr_cand_t
414 lookup_cand (cand_idx idx)
416 return cand_vec[idx - 1];
419 /* Helper for hashing a candidate chain header. */
421 struct cand_chain_hasher : nofree_ptr_hash <cand_chain>
423 static inline hashval_t hash (const cand_chain *);
424 static inline bool equal (const cand_chain *, const cand_chain *);
427 inline hashval_t
428 cand_chain_hasher::hash (const cand_chain *p)
430 tree base_expr = p->base_expr;
431 return iterative_hash_expr (base_expr, 0);
434 inline bool
435 cand_chain_hasher::equal (const cand_chain *chain1, const cand_chain *chain2)
437 return operand_equal_p (chain1->base_expr, chain2->base_expr, 0);
440 /* Hash table embodying a mapping from base exprs to chains of candidates. */
441 static hash_table<cand_chain_hasher> *base_cand_map;
443 /* Pointer map used by tree_to_aff_combination_expand. */
444 static hash_map<tree, name_expansion *> *name_expansions;
445 /* Pointer map embodying a mapping from bases to alternative bases. */
446 static hash_map<tree, tree> *alt_base_map;
448 /* Given BASE, use the tree affine combination facilities to
449 find the underlying tree expression for BASE, with any
450 immediate offset excluded.
452 N.B. we should eliminate this backtracking with better forward
453 analysis in a future release. */
455 static tree
456 get_alternative_base (tree base)
458 tree *result = alt_base_map->get (base);
460 if (result == NULL)
462 tree expr;
463 aff_tree aff;
465 tree_to_aff_combination_expand (base, TREE_TYPE (base),
466 &aff, &name_expansions);
467 aff.offset = 0;
468 expr = aff_combination_to_tree (&aff);
470 gcc_assert (!alt_base_map->put (base, base == expr ? NULL : expr));
472 return expr == base ? NULL : expr;
475 return *result;
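/* For example (see also the comment before alloc_cand_and_find_basis),
   the address computations for a2[i][j] and a2[i + 20][j] differ only
   by an immediate offset; excluding that offset from the affine
   expansion gives both references the same alternative base, which is
   what allows one CAND_REF to serve as a basis for the other.  */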
478 /* Look in the candidate table for a CAND_PHI that defines BASE and
479 return it if found; otherwise return NULL. */
481 static cand_idx
482 find_phi_def (tree base)
484 slsr_cand_t c;
486 if (TREE_CODE (base) != SSA_NAME)
487 return 0;
489 c = base_cand_from_table (base);
491 if (!c || c->kind != CAND_PHI)
492 return 0;
494 return c->cand_num;
497 /* Determine whether all uses of NAME are directly or indirectly
498 used by STMT. That is, we want to know whether if STMT goes
499 dead, the definition of NAME also goes dead. */
500 static bool
501 uses_consumed_by_stmt (tree name, gimple *stmt, unsigned recurse = 0)
503 gimple *use_stmt;
504 imm_use_iterator iter;
505 bool retval = true;
507 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
509 if (use_stmt == stmt || is_gimple_debug (use_stmt))
510 continue;
512 if (!is_gimple_assign (use_stmt)
513 || !gimple_get_lhs (use_stmt)
514 || !is_gimple_reg (gimple_get_lhs (use_stmt))
515 || recurse >= 10
516 || !uses_consumed_by_stmt (gimple_get_lhs (use_stmt), stmt,
517 recurse + 1))
519 retval = false;
520 BREAK_FROM_IMM_USE_STMT (iter);
524 return retval;
527 /* Helper routine for find_basis_for_candidate. May be called twice:
528 once for the candidate's base expr, and optionally again either for
529 the candidate's phi definition or for a CAND_REF's alternative base
530 expression. */
532 static slsr_cand_t
533 find_basis_for_base_expr (slsr_cand_t c, tree base_expr)
535 cand_chain mapping_key;
536 cand_chain_t chain;
537 slsr_cand_t basis = NULL;
539 // Limit potential of N^2 behavior for long candidate chains.
540 int iters = 0;
541 int max_iters = PARAM_VALUE (PARAM_MAX_SLSR_CANDIDATE_SCAN);
543 mapping_key.base_expr = base_expr;
544 chain = base_cand_map->find (&mapping_key);
546 for (; chain && iters < max_iters; chain = chain->next, ++iters)
548 slsr_cand_t one_basis = chain->cand;
550 if (one_basis->kind != c->kind
551 || one_basis->cand_stmt == c->cand_stmt
552 || !operand_equal_p (one_basis->stride, c->stride, 0)
553 || !types_compatible_p (one_basis->cand_type, c->cand_type)
554 || !types_compatible_p (one_basis->stride_type, c->stride_type)
555 || !dominated_by_p (CDI_DOMINATORS,
556 gimple_bb (c->cand_stmt),
557 gimple_bb (one_basis->cand_stmt)))
558 continue;
560 if (!basis || basis->cand_num < one_basis->cand_num)
561 basis = one_basis;
564 return basis;
567 /* Use the base expr from candidate C to look for possible candidates
568 that can serve as a basis for C. Each potential basis must also
569 appear in a block that dominates the candidate statement and have
570 the same stride and type. If more than one possible basis exists,
571 the one with highest index in the vector is chosen; this will be
572 the most immediately dominating basis. */
574 static int
575 find_basis_for_candidate (slsr_cand_t c)
577 slsr_cand_t basis = find_basis_for_base_expr (c, c->base_expr);
579 /* If a candidate doesn't have a basis using its base expression,
580 it may have a basis hidden by one or more intervening phis. */
581 if (!basis && c->def_phi)
583 basic_block basis_bb, phi_bb;
584 slsr_cand_t phi_cand = lookup_cand (c->def_phi);
585 basis = find_basis_for_base_expr (c, phi_cand->base_expr);
587 if (basis)
589 /* A hidden basis must dominate the phi-definition of the
590 candidate's base name. */
591 phi_bb = gimple_bb (phi_cand->cand_stmt);
592 basis_bb = gimple_bb (basis->cand_stmt);
594 if (phi_bb == basis_bb
595 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
597 basis = NULL;
598 c->basis = 0;
601 /* If we found a hidden basis, estimate additional dead-code
602 savings if the phi and its feeding statements can be removed. */
603 tree feeding_var = gimple_phi_result (phi_cand->cand_stmt);
604 if (basis && uses_consumed_by_stmt (feeding_var, c->cand_stmt))
605 c->dead_savings += phi_cand->dead_savings;
609 if (flag_expensive_optimizations && !basis && c->kind == CAND_REF)
611 tree alt_base_expr = get_alternative_base (c->base_expr);
612 if (alt_base_expr)
613 basis = find_basis_for_base_expr (c, alt_base_expr);
616 if (basis)
618 c->sibling = basis->dependent;
619 basis->dependent = c->cand_num;
620 return basis->cand_num;
623 return 0;
626 /* Record a mapping from BASE to C, indicating that C may potentially serve
627 as a basis using that base expression. BASE may be the same as
628 C->BASE_EXPR; alternatively BASE can be a different tree that shares the
629 underlying expression of C->BASE_EXPR. */
631 static void
632 record_potential_basis (slsr_cand_t c, tree base)
634 cand_chain_t node;
635 cand_chain **slot;
637 gcc_assert (base);
639 node = (cand_chain_t) obstack_alloc (&chain_obstack, sizeof (cand_chain));
640 node->base_expr = base;
641 node->cand = c;
642 node->next = NULL;
643 slot = base_cand_map->find_slot (node, INSERT);
645 if (*slot)
647 cand_chain_t head = (cand_chain_t) (*slot);
648 node->next = head->next;
649 head->next = node;
651 else
652 *slot = node;
655 /* Allocate storage for a new candidate and initialize its fields.
656 Attempt to find a basis for the candidate.
658 For CAND_REF, an alternative base may also be recorded and used
659 to find a basis. This helps cases where the expression hidden
660 behind BASE (which is usually an SSA_NAME) has immediate offset,
661 e.g.
663 a2[i][j] = 1;
664 a2[i + 20][j] = 2; */
666 static slsr_cand_t
667 alloc_cand_and_find_basis (enum cand_kind kind, gimple *gs, tree base,
668 const widest_int &index, tree stride, tree ctype,
669 tree stype, unsigned savings)
671 slsr_cand_t c = (slsr_cand_t) obstack_alloc (&cand_obstack,
672 sizeof (slsr_cand));
673 c->cand_stmt = gs;
674 c->base_expr = base;
675 c->stride = stride;
676 c->index = index;
677 c->cand_type = ctype;
678 c->stride_type = stype;
679 c->kind = kind;
680 c->cand_num = cand_vec.length () + 1;
681 c->next_interp = 0;
682 c->dependent = 0;
683 c->sibling = 0;
684 c->def_phi = kind == CAND_MULT ? find_phi_def (base) : 0;
685 c->dead_savings = savings;
686 c->visited = 0;
687 c->cached_basis = NULL_TREE;
689 cand_vec.safe_push (c);
691 if (kind == CAND_PHI)
692 c->basis = 0;
693 else
694 c->basis = find_basis_for_candidate (c);
696 record_potential_basis (c, base);
697 if (flag_expensive_optimizations && kind == CAND_REF)
699 tree alt_base = get_alternative_base (base);
700 if (alt_base)
701 record_potential_basis (c, alt_base);
704 return c;
707 /* Determine the target cost of statement GS when compiling according
708 to SPEED. */
710 static int
711 stmt_cost (gimple *gs, bool speed)
713 tree lhs, rhs1, rhs2;
714 machine_mode lhs_mode;
716 gcc_assert (is_gimple_assign (gs));
717 lhs = gimple_assign_lhs (gs);
718 rhs1 = gimple_assign_rhs1 (gs);
719 lhs_mode = TYPE_MODE (TREE_TYPE (lhs));
721 switch (gimple_assign_rhs_code (gs))
723 case MULT_EXPR:
724 rhs2 = gimple_assign_rhs2 (gs);
726 if (tree_fits_shwi_p (rhs2))
727 return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
729 gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
730 return mul_cost (speed, lhs_mode);
732 case PLUS_EXPR:
733 case POINTER_PLUS_EXPR:
734 case MINUS_EXPR:
735 return add_cost (speed, lhs_mode);
737 case NEGATE_EXPR:
738 return neg_cost (speed, lhs_mode);
740 CASE_CONVERT:
741 return convert_cost (lhs_mode, TYPE_MODE (TREE_TYPE (rhs1)), speed);
743 /* Note that we don't assign costs to copies that in most cases
744 will go away. */
745 case SSA_NAME:
746 return 0;
748 default:
752 gcc_unreachable ();
753 return 0;
756 /* Look up the defining statement for BASE_IN and return a pointer
757 to its candidate in the candidate table, if any; otherwise NULL.
758 Only CAND_ADD and CAND_MULT candidates are returned. */
760 static slsr_cand_t
761 base_cand_from_table (tree base_in)
763 slsr_cand_t *result;
765 gimple *def = SSA_NAME_DEF_STMT (base_in);
766 if (!def)
767 return (slsr_cand_t) NULL;
769 result = stmt_cand_map->get (def);
771 if (result && (*result)->kind != CAND_REF)
772 return *result;
774 return (slsr_cand_t) NULL;
777 /* Add an entry to the statement-to-candidate mapping. */
779 static void
780 add_cand_for_stmt (gimple *gs, slsr_cand_t c)
782 gcc_assert (!stmt_cand_map->put (gs, c));
785 /* Given PHI which contains a phi statement, determine whether it
786 satisfies all the requirements of a phi candidate. If so, create
787 a candidate. Note that a CAND_PHI never has a basis itself, but
788 is used to help find a basis for subsequent candidates. */
790 static void
791 slsr_process_phi (gphi *phi, bool speed)
793 unsigned i;
794 tree arg0_base = NULL_TREE, base_type;
795 slsr_cand_t c;
796 struct loop *cand_loop = gimple_bb (phi)->loop_father;
797 unsigned savings = 0;
799 /* A CAND_PHI requires each of its arguments to have the same
800 derived base name. (See the module header commentary for a
801 definition of derived base names.) Furthermore, all feeding
802 definitions must be in the same position in the loop hierarchy
803 as PHI. */
805 for (i = 0; i < gimple_phi_num_args (phi); i++)
807 slsr_cand_t arg_cand;
808 tree arg = gimple_phi_arg_def (phi, i);
809 tree derived_base_name = NULL_TREE;
810 gimple *arg_stmt = NULL;
811 basic_block arg_bb = NULL;
813 if (TREE_CODE (arg) != SSA_NAME)
814 return;
816 arg_cand = base_cand_from_table (arg);
818 if (arg_cand)
820 while (arg_cand->kind != CAND_ADD && arg_cand->kind != CAND_PHI)
822 if (!arg_cand->next_interp)
823 return;
825 arg_cand = lookup_cand (arg_cand->next_interp);
828 if (!integer_onep (arg_cand->stride))
829 return;
831 derived_base_name = arg_cand->base_expr;
832 arg_stmt = arg_cand->cand_stmt;
833 arg_bb = gimple_bb (arg_stmt);
835 /* Gather potential dead code savings if the phi statement
836 can be removed later on. */
837 if (uses_consumed_by_stmt (arg, phi))
839 if (gimple_code (arg_stmt) == GIMPLE_PHI)
840 savings += arg_cand->dead_savings;
841 else
842 savings += stmt_cost (arg_stmt, speed);
845 else if (SSA_NAME_IS_DEFAULT_DEF (arg))
847 derived_base_name = arg;
848 arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
851 if (!arg_bb || arg_bb->loop_father != cand_loop)
852 return;
854 if (i == 0)
855 arg0_base = derived_base_name;
856 else if (!operand_equal_p (derived_base_name, arg0_base, 0))
857 return;
860 /* Create the candidate. "alloc_cand_and_find_basis" is named
861 misleadingly for this case, as no basis will be sought for a
862 CAND_PHI. */
863 base_type = TREE_TYPE (arg0_base);
865 c = alloc_cand_and_find_basis (CAND_PHI, phi, arg0_base,
866 0, integer_one_node, base_type,
867 sizetype, savings);
869 /* Add the candidate to the statement-candidate mapping. */
870 add_cand_for_stmt (phi, c);
873 /* Given PBASE which is a pointer to tree, look up the defining
874 statement for it and check whether the candidate is in the
875 form of:
877 X = B + (1 * S), S is integer constant
878 X = B + (i * S), S is integer one
880 If so, set PBASE to the candidate's base_expr and return the
881 widest_int (i * S).
882 Otherwise, just return a zero widest_int. */
884 static widest_int
885 backtrace_base_for_ref (tree *pbase)
887 tree base_in = *pbase;
888 slsr_cand_t base_cand;
890 STRIP_NOPS (base_in);
892 /* Strip off widening conversion(s) to handle cases where
893 e.g. 'B' is widened from an 'int' in order to calculate
894 a 64-bit address. */
895 if (CONVERT_EXPR_P (base_in)
896 && legal_cast_p_1 (TREE_TYPE (base_in),
897 TREE_TYPE (TREE_OPERAND (base_in, 0))))
898 base_in = get_unwidened (base_in, NULL_TREE);
900 if (TREE_CODE (base_in) != SSA_NAME)
901 return 0;
903 base_cand = base_cand_from_table (base_in);
905 while (base_cand && base_cand->kind != CAND_PHI)
907 if (base_cand->kind == CAND_ADD
908 && base_cand->index == 1
909 && TREE_CODE (base_cand->stride) == INTEGER_CST)
911 /* X = B + (1 * S), S is integer constant. */
912 *pbase = base_cand->base_expr;
913 return wi::to_widest (base_cand->stride);
915 else if (base_cand->kind == CAND_ADD
916 && TREE_CODE (base_cand->stride) == INTEGER_CST
917 && integer_onep (base_cand->stride))
919 /* X = B + (i * S), S is integer one. */
920 *pbase = base_cand->base_expr;
921 return base_cand->index;
924 if (base_cand->next_interp)
925 base_cand = lookup_cand (base_cand->next_interp);
926 else
927 base_cand = NULL;
930 return 0;
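/* Illustrative example (names hypothetical): if *PBASE is an SSA name
   x_2 defined by x_2 = b_1 + 12, recorded as ADD (B: b_1, i: 12, S: 1),
   this function sets *PBASE to b_1 and returns 12; the caller
   (restructure_reference) then folds 12 * C3 into the constant index
   of the rewritten MEM_REF.  */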
933 /* Look for the following pattern:
935 *PBASE: MEM_REF (T1, C1)
937 *POFFSET: MULT_EXPR (T2, C3) [C2 is zero]
939 MULT_EXPR (PLUS_EXPR (T2, C2), C3)
941 MULT_EXPR (MINUS_EXPR (T2, -C2), C3)
943 *PINDEX: C4 * BITS_PER_UNIT
945 If not present, leave the input values unchanged and return FALSE.
946 Otherwise, modify the input values as follows and return TRUE:
948 *PBASE: T1
949 *POFFSET: MULT_EXPR (T2, C3)
950 *PINDEX: C1 + (C2 * C3) + C4
952 When T2 is recorded by a CAND_ADD in the form of (T2' + C5), it
953 will be further restructured to:
955 *PBASE: T1
956 *POFFSET: MULT_EXPR (T2', C3)
957 *PINDEX: C1 + (C2 * C3) + C4 + (C5 * C3) */
959 static bool
960 restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
961 tree *ptype)
963 tree base = *pbase, offset = *poffset;
964 widest_int index = *pindex;
965 tree mult_op0, t1, t2, type;
966 widest_int c1, c2, c3, c4, c5;
968 if (!base
969 || !offset
970 || TREE_CODE (base) != MEM_REF
971 || TREE_CODE (offset) != MULT_EXPR
972 || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
973 || wi::umod_floor (index, BITS_PER_UNIT) != 0)
974 return false;
976 t1 = TREE_OPERAND (base, 0);
977 c1 = widest_int::from (mem_ref_offset (base), SIGNED);
978 type = TREE_TYPE (TREE_OPERAND (base, 1));
980 mult_op0 = TREE_OPERAND (offset, 0);
981 c3 = wi::to_widest (TREE_OPERAND (offset, 1));
983 if (TREE_CODE (mult_op0) == PLUS_EXPR)
985 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
987 t2 = TREE_OPERAND (mult_op0, 0);
988 c2 = wi::to_widest (TREE_OPERAND (mult_op0, 1));
990 else
991 return false;
993 else if (TREE_CODE (mult_op0) == MINUS_EXPR)
995 if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
997 t2 = TREE_OPERAND (mult_op0, 0);
998 c2 = -wi::to_widest (TREE_OPERAND (mult_op0, 1));
1000 else
1001 return false;
1003 else
1005 t2 = mult_op0;
1006 c2 = 0;
1009 c4 = index >> LOG2_BITS_PER_UNIT;
1010 c5 = backtrace_base_for_ref (&t2);
1012 *pbase = t1;
1013 *poffset = fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, t2),
1014 wide_int_to_tree (sizetype, c3));
1015 *pindex = c1 + c2 * c3 + c4 + c5 * c3;
1016 *ptype = type;
1018 return true;
1021 /* Given GS which contains a data reference, create a CAND_REF entry in
1022 the candidate table and attempt to find a basis. */
1024 static void
1025 slsr_process_ref (gimple *gs)
1027 tree ref_expr, base, offset, type;
1028 HOST_WIDE_INT bitsize, bitpos;
1029 machine_mode mode;
1030 int unsignedp, reversep, volatilep;
1031 slsr_cand_t c;
1033 if (gimple_vdef (gs))
1034 ref_expr = gimple_assign_lhs (gs);
1035 else
1036 ref_expr = gimple_assign_rhs1 (gs);
1038 if (!handled_component_p (ref_expr)
1039 || TREE_CODE (ref_expr) == BIT_FIELD_REF
1040 || (TREE_CODE (ref_expr) == COMPONENT_REF
1041 && DECL_BIT_FIELD (TREE_OPERAND (ref_expr, 1))))
1042 return;
1044 base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
1045 &unsignedp, &reversep, &volatilep);
1046 if (reversep)
1047 return;
1048 widest_int index = bitpos;
1050 if (!restructure_reference (&base, &offset, &index, &type))
1051 return;
1053 c = alloc_cand_and_find_basis (CAND_REF, gs, base, index, offset,
1054 type, sizetype, 0);
1056 /* Add the candidate to the statement-candidate mapping. */
1057 add_cand_for_stmt (gs, c);
1060 /* Create a candidate entry for a statement GS, where GS multiplies
1061 two SSA names BASE_IN and STRIDE_IN. Propagate any known information
1062 about the two SSA names into the new candidate. Return the new
1063 candidate. */
1065 static slsr_cand_t
1066 create_mul_ssa_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1068 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1069 tree stype = NULL_TREE;
1070 widest_int index;
1071 unsigned savings = 0;
1072 slsr_cand_t c;
1073 slsr_cand_t base_cand = base_cand_from_table (base_in);
1075 /* Look at all interpretations of the base candidate, if necessary,
1076 to find information to propagate into this candidate. */
1077 while (base_cand && !base && base_cand->kind != CAND_PHI)
1080 if (base_cand->kind == CAND_MULT && integer_onep (base_cand->stride))
1082 /* Y = (B + i') * 1
1083 X = Y * Z
1084 ================
1085 X = (B + i') * Z */
1086 base = base_cand->base_expr;
1087 index = base_cand->index;
1088 stride = stride_in;
1089 ctype = base_cand->cand_type;
1090 stype = TREE_TYPE (stride_in);
1091 if (has_single_use (base_in))
1092 savings = (base_cand->dead_savings
1093 + stmt_cost (base_cand->cand_stmt, speed));
1095 else if (base_cand->kind == CAND_ADD
1096 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1098 /* Y = B + (i' * S), S constant
1099 X = Y * Z
1100 ============================
1101 X = B + ((i' * S) * Z) */
1102 base = base_cand->base_expr;
1103 index = base_cand->index * wi::to_widest (base_cand->stride);
1104 stride = stride_in;
1105 ctype = base_cand->cand_type;
1106 stype = TREE_TYPE (stride_in);
1107 if (has_single_use (base_in))
1108 savings = (base_cand->dead_savings
1109 + stmt_cost (base_cand->cand_stmt, speed));
1112 if (base_cand->next_interp)
1113 base_cand = lookup_cand (base_cand->next_interp);
1114 else
1115 base_cand = NULL;
1118 if (!base)
1120 /* No interpretations had anything useful to propagate, so
1121 produce X = (Y + 0) * Z. */
1122 base = base_in;
1123 index = 0;
1124 stride = stride_in;
1125 ctype = TREE_TYPE (base_in);
1126 stype = TREE_TYPE (stride_in);
1129 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1130 ctype, stype, savings);
1131 return c;
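/* Illustrative example (names hypothetical): if y_2 is a copy or cast
   of b_1, it carries an interpretation MULT (B: b_1, i: 0, S: 1), so a
   statement x_3 = y_2 * z_5 is recorded as MULT (B: b_1, i: 0, S: z_5).
   Expressing the candidate in terms of b_1 rather than y_2 lets it
   share a basis with other statements built on b_1.  */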
1134 /* Create a candidate entry for a statement GS, where GS multiplies
1135 SSA name BASE_IN by constant STRIDE_IN. Propagate any known
1136 information about BASE_IN into the new candidate. Return the new
1137 candidate. */
1139 static slsr_cand_t
1140 create_mul_imm_cand (gimple *gs, tree base_in, tree stride_in, bool speed)
1142 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1143 widest_int index, temp;
1144 unsigned savings = 0;
1145 slsr_cand_t c;
1146 slsr_cand_t base_cand = base_cand_from_table (base_in);
1148 /* Look at all interpretations of the base candidate, if necessary,
1149 to find information to propagate into this candidate. */
1150 while (base_cand && !base && base_cand->kind != CAND_PHI)
1152 if (base_cand->kind == CAND_MULT
1153 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1155 /* Y = (B + i') * S, S constant
1156 X = Y * c
1157 ============================
1158 X = (B + i') * (S * c) */
1159 temp = wi::to_widest (base_cand->stride) * wi::to_widest (stride_in);
1160 if (wi::fits_to_tree_p (temp, TREE_TYPE (stride_in)))
1162 base = base_cand->base_expr;
1163 index = base_cand->index;
1164 stride = wide_int_to_tree (TREE_TYPE (stride_in), temp);
1165 ctype = base_cand->cand_type;
1166 if (has_single_use (base_in))
1167 savings = (base_cand->dead_savings
1168 + stmt_cost (base_cand->cand_stmt, speed));
1171 else if (base_cand->kind == CAND_ADD && integer_onep (base_cand->stride))
1173 /* Y = B + (i' * 1)
1174 X = Y * c
1175 ===========================
1176 X = (B + i') * c */
1177 base = base_cand->base_expr;
1178 index = base_cand->index;
1179 stride = stride_in;
1180 ctype = base_cand->cand_type;
1181 if (has_single_use (base_in))
1182 savings = (base_cand->dead_savings
1183 + stmt_cost (base_cand->cand_stmt, speed));
1185 else if (base_cand->kind == CAND_ADD
1186 && base_cand->index == 1
1187 && TREE_CODE (base_cand->stride) == INTEGER_CST)
1189 /* Y = B + (1 * S), S constant
1190 X = Y * c
1191 ===========================
1192 X = (B + S) * c */
1193 base = base_cand->base_expr;
1194 index = wi::to_widest (base_cand->stride);
1195 stride = stride_in;
1196 ctype = base_cand->cand_type;
1197 if (has_single_use (base_in))
1198 savings = (base_cand->dead_savings
1199 + stmt_cost (base_cand->cand_stmt, speed));
1202 if (base_cand->next_interp)
1203 base_cand = lookup_cand (base_cand->next_interp);
1204 else
1205 base_cand = NULL;
1208 if (!base)
1210 /* No interpretations had anything useful to propagate, so
1211 produce X = (Y + 0) * c. */
1212 base = base_in;
1213 index = 0;
1214 stride = stride_in;
1215 ctype = TREE_TYPE (base_in);
1218 c = alloc_cand_and_find_basis (CAND_MULT, gs, base, index, stride,
1219 ctype, sizetype, savings);
1220 return c;
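/* Illustrative example (names hypothetical): given
     y_2 = (b_1 + 3) * 4    recorded as MULT (B: b_1, i: 3, S: 4)
     x_3 = y_2 * 5
   the combined interpretation is MULT (B: b_1, i: 3, S: 20), provided
   the constant 20 fits in the type of the stride.  */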
1223 /* Given GS which is a multiply of scalar integers, make an appropriate
1224 entry in the candidate table. If this is a multiply of two SSA names,
1225 create two CAND_MULT interpretations and attempt to find a basis for
1226 each of them. Otherwise, create a single CAND_MULT and attempt to
1227 find a basis. */
1229 static void
1230 slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
1232 slsr_cand_t c, c2;
1234 /* If this is a multiply of an SSA name with itself, it is highly
1235 unlikely that we will get a strength reduction opportunity, so
1236 don't record it as a candidate. This simplifies the logic for
1237 finding a basis, so if this is removed that must be considered. */
1238 if (rhs1 == rhs2)
1239 return;
1241 if (TREE_CODE (rhs2) == SSA_NAME)
1243 /* Record an interpretation of this statement in the candidate table
1244 assuming RHS1 is the base expression and RHS2 is the stride. */
1245 c = create_mul_ssa_cand (gs, rhs1, rhs2, speed);
1247 /* Add the first interpretation to the statement-candidate mapping. */
1248 add_cand_for_stmt (gs, c);
1250 /* Record another interpretation of this statement assuming RHS1
1251 is the stride and RHS2 is the base expression. */
1252 c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
1253 c->next_interp = c2->cand_num;
1255 else
1257 /* Record an interpretation for the multiply-immediate. */
1258 c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
1260 /* Add the interpretation to the statement-candidate mapping. */
1261 add_cand_for_stmt (gs, c);
1265 /* Create a candidate entry for a statement GS, where GS adds two
1266 SSA names BASE_IN and ADDEND_IN if SUBTRACT_P is false, and
1267 subtracts ADDEND_IN from BASE_IN otherwise. Propagate any known
1268 information about the two SSA names into the new candidate.
1269 Return the new candidate. */
1271 static slsr_cand_t
1272 create_add_ssa_cand (gimple *gs, tree base_in, tree addend_in,
1273 bool subtract_p, bool speed)
1275 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1276 tree stype = NULL_TREE;
1277 widest_int index;
1278 unsigned savings = 0;
1279 slsr_cand_t c;
1280 slsr_cand_t base_cand = base_cand_from_table (base_in);
1281 slsr_cand_t addend_cand = base_cand_from_table (addend_in);
1283 /* The most useful transformation is a multiply-immediate feeding
1284 an add or subtract. Look for that first. */
1285 while (addend_cand && !base && addend_cand->kind != CAND_PHI)
1287 if (addend_cand->kind == CAND_MULT
1288 && addend_cand->index == 0
1289 && TREE_CODE (addend_cand->stride) == INTEGER_CST)
1291 /* Z = (B + 0) * S, S constant
1292 X = Y +/- Z
1293 ===========================
1294 X = Y + ((+/-1 * S) * B) */
1295 base = base_in;
1296 index = wi::to_widest (addend_cand->stride);
1297 if (subtract_p)
1298 index = -index;
1299 stride = addend_cand->base_expr;
1300 ctype = TREE_TYPE (base_in);
1301 stype = addend_cand->cand_type;
1302 if (has_single_use (addend_in))
1303 savings = (addend_cand->dead_savings
1304 + stmt_cost (addend_cand->cand_stmt, speed));
1307 if (addend_cand->next_interp)
1308 addend_cand = lookup_cand (addend_cand->next_interp);
1309 else
1310 addend_cand = NULL;
1313 while (base_cand && !base && base_cand->kind != CAND_PHI)
1315 if (base_cand->kind == CAND_ADD
1316 && (base_cand->index == 0
1317 || operand_equal_p (base_cand->stride,
1318 integer_zero_node, 0)))
1320 /* Y = B + (i' * S), i' * S = 0
1321 X = Y +/- Z
1322 ============================
1323 X = B + (+/-1 * Z) */
1324 base = base_cand->base_expr;
1325 index = subtract_p ? -1 : 1;
1326 stride = addend_in;
1327 ctype = base_cand->cand_type;
1328 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1329 : TREE_TYPE (addend_in));
1330 if (has_single_use (base_in))
1331 savings = (base_cand->dead_savings
1332 + stmt_cost (base_cand->cand_stmt, speed));
1334 else if (subtract_p)
1336 slsr_cand_t subtrahend_cand = base_cand_from_table (addend_in);
1338 while (subtrahend_cand && !base && subtrahend_cand->kind != CAND_PHI)
1340 if (subtrahend_cand->kind == CAND_MULT
1341 && subtrahend_cand->index == 0
1342 && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
1344 /* Z = (B + 0) * S, S constant
1345 X = Y - Z
1346 ===========================
1347 Value: X = Y + ((-1 * S) * B) */
1348 base = base_in;
1349 index = wi::to_widest (subtrahend_cand->stride);
1350 index = -index;
1351 stride = subtrahend_cand->base_expr;
1352 ctype = TREE_TYPE (base_in);
1353 stype = subtrahend_cand->cand_type;
1354 if (has_single_use (addend_in))
1355 savings = (subtrahend_cand->dead_savings
1356 + stmt_cost (subtrahend_cand->cand_stmt, speed));
1359 if (subtrahend_cand->next_interp)
1360 subtrahend_cand = lookup_cand (subtrahend_cand->next_interp);
1361 else
1362 subtrahend_cand = NULL;
1366 if (base_cand->next_interp)
1367 base_cand = lookup_cand (base_cand->next_interp);
1368 else
1369 base_cand = NULL;
1372 if (!base)
1374 /* No interpretations had anything useful to propagate, so
1375 produce X = Y + (1 * Z). */
1376 base = base_in;
1377 index = subtract_p ? -1 : 1;
1378 stride = addend_in;
1379 ctype = TREE_TYPE (base_in);
1380 stype = (TREE_CODE (addend_in) == INTEGER_CST ? sizetype
1381 : TREE_TYPE (addend_in));
1384 c = alloc_cand_and_find_basis (CAND_ADD, gs, base, index, stride,
1385 ctype, stype, savings);
1386 return c;
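/* Illustrative example (names hypothetical): given
     z_4 = b_1 * 4          recorded as MULT (B: b_1, i: 0, S: 4)
     x_5 = y_2 - z_4
   the preferred interpretation is ADD (B: y_2, i: -4, S: b_1),
   i.e. X = Y + ((-1 * S) * B), so the multiply feeding the subtract
   can be strength-reduced relative to a similar earlier statement.  */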
1389 /* Create a candidate entry for a statement GS, where GS adds SSA
1390 name BASE_IN to constant INDEX_IN. Propagate any known information
1391 about BASE_IN into the new candidate. Return the new candidate. */
1393 static slsr_cand_t
1394 create_add_imm_cand (gimple *gs, tree base_in, const widest_int &index_in,
1395 bool speed)
1397 enum cand_kind kind = CAND_ADD;
1398 tree base = NULL_TREE, stride = NULL_TREE, ctype = NULL_TREE;
1399 tree stype = NULL_TREE;
1400 widest_int index, multiple;
1401 unsigned savings = 0;
1402 slsr_cand_t c;
1403 slsr_cand_t base_cand = base_cand_from_table (base_in);
1405 while (base_cand && !base && base_cand->kind != CAND_PHI)
1407 signop sign = TYPE_SIGN (TREE_TYPE (base_cand->stride));
1409 if (TREE_CODE (base_cand->stride) == INTEGER_CST
1410 && wi::multiple_of_p (index_in, wi::to_widest (base_cand->stride),
1411 sign, &multiple))
1413 /* Y = (B + i') * S, S constant, c = kS for some integer k
1414 X = Y + c
1415 ============================
1416 X = (B + (i'+ k)) * S
1418 Y = B + (i' * S), S constant, c = kS for some integer k
1419 X = Y + c
1420 ============================
1421 X = (B + (i'+ k)) * S */
1422 kind = base_cand->kind;
1423 base = base_cand->base_expr;
1424 index = base_cand->index + multiple;
1425 stride = base_cand->stride;
1426 ctype = base_cand->cand_type;
1427 stype = base_cand->stride_type;
1428 if (has_single_use (base_in))
1429 savings = (base_cand->dead_savings
1430 + stmt_cost (base_cand->cand_stmt, speed));
1433 if (base_cand->next_interp)
1434 base_cand = lookup_cand (base_cand->next_interp);
1435 else
1436 base_cand = NULL;
1439 if (!base)
1441 /* No interpretations had anything useful to propagate, so
1442 produce X = Y + (c * 1). */
1443 kind = CAND_ADD;
1444 base = base_in;
1445 index = index_in;
1446 stride = integer_one_node;
1447 ctype = TREE_TYPE (base_in);
1448 stype = sizetype;
1451 c = alloc_cand_and_find_basis (kind, gs, base, index, stride,
1452 ctype, stype, savings);
1453 return c;
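/* Illustrative example (names hypothetical): given
     y_2 = (b_1 + 2) * 3    recorded as MULT (B: b_1, i: 2, S: 3)
     x_3 = y_2 + 9
   the constant 9 is a multiple of the stride (9 = 3 * 3), so the new
   interpretation is MULT (B: b_1, i: 5, S: 3), i.e. X = (B + (i' + k)) * S
   with k = 3.  */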
1456 /* Given GS which is an add or subtract of scalar integers or pointers,
1457 make at least one appropriate entry in the candidate table. */
1459 static void
1460 slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
1462 bool subtract_p = gimple_assign_rhs_code (gs) == MINUS_EXPR;
1463 slsr_cand_t c = NULL, c2;
1465 if (TREE_CODE (rhs2) == SSA_NAME)
1467 /* First record an interpretation assuming RHS1 is the base expression
1468 and RHS2 is the stride. But it doesn't make sense for the
1469 stride to be a pointer, so don't record a candidate in that case. */
1470 if (!POINTER_TYPE_P (TREE_TYPE (rhs2)))
1472 c = create_add_ssa_cand (gs, rhs1, rhs2, subtract_p, speed);
1474 /* Add the first interpretation to the statement-candidate
1475 mapping. */
1476 add_cand_for_stmt (gs, c);
1479 /* If the two RHS operands are identical, or this is a subtract,
1480 we're done. */
1481 if (operand_equal_p (rhs1, rhs2, 0) || subtract_p)
1482 return;
1484 /* Otherwise, record another interpretation assuming RHS2 is the
1485 base expression and RHS1 is the stride, again provided that the
1486 stride is not a pointer. */
1487 if (!POINTER_TYPE_P (TREE_TYPE (rhs1)))
1489 c2 = create_add_ssa_cand (gs, rhs2, rhs1, false, speed);
1490 if (c)
1491 c->next_interp = c2->cand_num;
1492 else
1493 add_cand_for_stmt (gs, c2);
1496 else
1498 /* Record an interpretation for the add-immediate. */
1499 widest_int index = wi::to_widest (rhs2);
1500 if (subtract_p)
1501 index = -index;
1503 c = create_add_imm_cand (gs, rhs1, index, speed);
1505 /* Add the interpretation to the statement-candidate mapping. */
1506 add_cand_for_stmt (gs, c);
1510 /* Given GS which is a negate of a scalar integer, make an appropriate
1511 entry in the candidate table. A negate is equivalent to a multiply
1512 by -1. */
1514 static void
1515 slsr_process_neg (gimple *gs, tree rhs1, bool speed)
1517 /* Record a CAND_MULT interpretation for the multiply by -1. */
1518 slsr_cand_t c = create_mul_imm_cand (gs, rhs1, integer_minus_one_node, speed);
1520 /* Add the interpretation to the statement-candidate mapping. */
1521 add_cand_for_stmt (gs, c);
1524 /* Helper function for legal_cast_p, operating on two trees. Checks
1525 whether it's allowable to cast from RHS to LHS. See legal_cast_p
1526 for more details. */
1528 static bool
1529 legal_cast_p_1 (tree lhs_type, tree rhs_type)
1531 unsigned lhs_size, rhs_size;
1532 bool lhs_wraps, rhs_wraps;
1534 lhs_size = TYPE_PRECISION (lhs_type);
1535 rhs_size = TYPE_PRECISION (rhs_type);
1536 lhs_wraps = ANY_INTEGRAL_TYPE_P (lhs_type) && TYPE_OVERFLOW_WRAPS (lhs_type);
1537 rhs_wraps = ANY_INTEGRAL_TYPE_P (rhs_type) && TYPE_OVERFLOW_WRAPS (rhs_type);
1539 if (lhs_size < rhs_size
1540 || (rhs_wraps && !lhs_wraps)
1541 || (rhs_wraps && lhs_wraps && rhs_size != lhs_size))
1542 return false;
1544 return true;
1547 /* Return TRUE if GS is a statement that defines an SSA name from
1548 a conversion and is legal for us to combine with an add and multiply
1549 in the candidate table. For example, suppose we have:
1551 A = B + i;
1552 C = (type) A;
1553 D = C * S;
1555 Without the type-cast, we would create a CAND_MULT for D with base B,
1556 index i, and stride S. We want to record this candidate only if it
1557 is equivalent to applying the type cast following the multiply:
1559 A = B + i;
1560 E = A * S;
1561 D = (type) E;
1563 We will record the type with the candidate for D. This allows us
1564 to use a similar previous candidate as a basis. If we have earlier seen
1566 A' = B + i';
1567 C' = (type) A';
1568 D' = C' * S;
1570 we can replace D with
1572 D = D' + (i - i') * S;
1574 But if moving the type-cast would change semantics, we mustn't do this.
1576 This is legitimate for casts from a non-wrapping integral type to
1577 any integral type of the same or larger size. It is not legitimate
1578 to convert a wrapping type to a non-wrapping type, or to a wrapping
1579 type of a different size. I.e., with a wrapping type, we must
1580 assume that the addition B + i could wrap, in which case performing
1581 the multiply before or after one of the "illegal" type casts will
1582 have different semantics. */
1584 static bool
1585 legal_cast_p (gimple *gs, tree rhs)
1587 if (!is_gimple_assign (gs)
1588 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)))
1589 return false;
1591 return legal_cast_p_1 (TREE_TYPE (gimple_assign_lhs (gs)), TREE_TYPE (rhs));
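/* For example, on a typical target where int is 32 bits and long is
   64 bits (and signed overflow is undefined), a cast from int to long
   is legal for these purposes, since it goes from a non-wrapping type
   to a wider one.  A cast from unsigned int to int is not, because
   the unsigned addition B + i might have wrapped, and moving the
   multiply across the cast would then change the result.  */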
1594 /* Given GS which is a cast to a scalar integer type, determine whether
1595 the cast is legal for strength reduction. If so, make at least one
1596 appropriate entry in the candidate table. */
1598 static void
1599 slsr_process_cast (gimple *gs, tree rhs1, bool speed)
1601 tree lhs, ctype;
1602 slsr_cand_t base_cand, c = NULL, c2;
1603 unsigned savings = 0;
1605 if (!legal_cast_p (gs, rhs1))
1606 return;
1608 lhs = gimple_assign_lhs (gs);
1609 base_cand = base_cand_from_table (rhs1);
1610 ctype = TREE_TYPE (lhs);
1612 if (base_cand && base_cand->kind != CAND_PHI)
1614 while (base_cand)
1616 /* Propagate all data from the base candidate except the type,
1617 which comes from the cast, and the base candidate's cast,
1618 which is no longer applicable. */
1619 if (has_single_use (rhs1))
1620 savings = (base_cand->dead_savings
1621 + stmt_cost (base_cand->cand_stmt, speed));
1623 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1624 base_cand->base_expr,
1625 base_cand->index, base_cand->stride,
1626 ctype, base_cand->stride_type,
1627 savings);
1628 if (base_cand->next_interp)
1629 base_cand = lookup_cand (base_cand->next_interp);
1630 else
1631 base_cand = NULL;
1634 else
1636 /* If nothing is known about the RHS, create fresh CAND_ADD and
1637 CAND_MULT interpretations:
1639 X = Y + (0 * 1)
1640 X = (Y + 0) * 1
1642 The first of these is somewhat arbitrary, but the choice of
1643 1 for the stride simplifies the logic for propagating casts
1644 into their uses. */
1645 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1646 integer_one_node, ctype, sizetype, 0);
1647 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1648 integer_one_node, ctype, sizetype, 0);
1649 c->next_interp = c2->cand_num;
1652 /* Add the first (or only) interpretation to the statement-candidate
1653 mapping. */
1654 add_cand_for_stmt (gs, c);
1657 /* Given GS which is a copy of a scalar integer type, make at least one
1658 appropriate entry in the candidate table.
1660 This interface is included for completeness, but is unnecessary
1661 if this pass immediately follows a pass that performs copy
1662 propagation, such as DOM. */
1664 static void
1665 slsr_process_copy (gimple *gs, tree rhs1, bool speed)
1667 slsr_cand_t base_cand, c = NULL, c2;
1668 unsigned savings = 0;
1670 base_cand = base_cand_from_table (rhs1);
1672 if (base_cand && base_cand->kind != CAND_PHI)
1674 while (base_cand)
1676 /* Propagate all data from the base candidate. */
1677 if (has_single_use (rhs1))
1678 savings = (base_cand->dead_savings
1679 + stmt_cost (base_cand->cand_stmt, speed));
1681 c = alloc_cand_and_find_basis (base_cand->kind, gs,
1682 base_cand->base_expr,
1683 base_cand->index, base_cand->stride,
1684 base_cand->cand_type,
1685 base_cand->stride_type, savings);
1686 if (base_cand->next_interp)
1687 base_cand = lookup_cand (base_cand->next_interp);
1688 else
1689 base_cand = NULL;
1692 else
1694 /* If nothing is known about the RHS, create fresh CAND_ADD and
1695 CAND_MULT interpretations:
1697 X = Y + (0 * 1)
1698 X = (Y + 0) * 1
1700 The first of these is somewhat arbitrary, but the choice of
1701 1 for the stride simplifies the logic for propagating casts
1702 into their uses. */
1703 c = alloc_cand_and_find_basis (CAND_ADD, gs, rhs1, 0,
1704 integer_one_node, TREE_TYPE (rhs1),
1705 sizetype, 0);
1706 c2 = alloc_cand_and_find_basis (CAND_MULT, gs, rhs1, 0,
1707 integer_one_node, TREE_TYPE (rhs1),
1708 sizetype, 0);
1709 c->next_interp = c2->cand_num;
1712 /* Add the first (or only) interpretation to the statement-candidate
1713 mapping. */
1714 add_cand_for_stmt (gs, c);
1717 class find_candidates_dom_walker : public dom_walker
1719 public:
1720 find_candidates_dom_walker (cdi_direction direction)
1721 : dom_walker (direction) {}
1722 virtual edge before_dom_children (basic_block);
1725 /* Find strength-reduction candidates in block BB. */
1727 edge
1728 find_candidates_dom_walker::before_dom_children (basic_block bb)
1730 bool speed = optimize_bb_for_speed_p (bb);
1732 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1733 gsi_next (&gsi))
1734 slsr_process_phi (gsi.phi (), speed);
1736 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1737 gsi_next (&gsi))
1739 gimple *gs = gsi_stmt (gsi);
1741 if (gimple_vuse (gs) && gimple_assign_single_p (gs))
1742 slsr_process_ref (gs);
1744 else if (is_gimple_assign (gs)
1745 && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))
1746 || POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (gs)))))
1748 tree rhs1 = NULL_TREE, rhs2 = NULL_TREE;
1750 switch (gimple_assign_rhs_code (gs))
1752 case MULT_EXPR:
1753 case PLUS_EXPR:
1754 rhs1 = gimple_assign_rhs1 (gs);
1755 rhs2 = gimple_assign_rhs2 (gs);
1756 /* Should never happen, but currently some buggy situations
1757 in earlier phases put constants in rhs1. */
1758 if (TREE_CODE (rhs1) != SSA_NAME)
1759 continue;
1760 break;
1762 /* Possible future opportunity: rhs1 of a ptr+ can be
1763 an ADDR_EXPR. */
1764 case POINTER_PLUS_EXPR:
1765 case MINUS_EXPR:
1766 rhs2 = gimple_assign_rhs2 (gs);
1767 gcc_fallthrough ();
1769 CASE_CONVERT:
1770 case SSA_NAME:
1771 case NEGATE_EXPR:
1772 rhs1 = gimple_assign_rhs1 (gs);
1773 if (TREE_CODE (rhs1) != SSA_NAME)
1774 continue;
1775 break;
1777 default:
1781 switch (gimple_assign_rhs_code (gs))
1783 case MULT_EXPR:
1784 slsr_process_mul (gs, rhs1, rhs2, speed);
1785 break;
1787 case PLUS_EXPR:
1788 case POINTER_PLUS_EXPR:
1789 case MINUS_EXPR:
1790 slsr_process_add (gs, rhs1, rhs2, speed);
1791 break;
1793 case NEGATE_EXPR:
1794 slsr_process_neg (gs, rhs1, speed);
1795 break;
1797 CASE_CONVERT:
1798 slsr_process_cast (gs, rhs1, speed);
1799 break;
1801 case SSA_NAME:
1802 slsr_process_copy (gs, rhs1, speed);
1803 break;
1805 default:
1810 return NULL;
1813 /* Dump a candidate for debug. */
1815 static void
1816 dump_candidate (slsr_cand_t c)
1818 fprintf (dump_file, "%3d [%d] ", c->cand_num,
1819 gimple_bb (c->cand_stmt)->index);
1820 print_gimple_stmt (dump_file, c->cand_stmt, 0);
1821 switch (c->kind)
1823 case CAND_MULT:
1824 fputs (" MULT : (", dump_file);
1825 print_generic_expr (dump_file, c->base_expr);
1826 fputs (" + ", dump_file);
1827 print_decs (c->index, dump_file);
1828 fputs (") * ", dump_file);
1829 if (TREE_CODE (c->stride) != INTEGER_CST
1830 && c->stride_type != TREE_TYPE (c->stride))
1832 fputs ("(", dump_file);
1833 print_generic_expr (dump_file, c->stride_type);
1834 fputs (")", dump_file);
1836 print_generic_expr (dump_file, c->stride);
1837 fputs (" : ", dump_file);
1838 break;
1839 case CAND_ADD:
1840 fputs (" ADD : ", dump_file);
1841 print_generic_expr (dump_file, c->base_expr);
1842 fputs (" + (", dump_file);
1843 print_decs (c->index, dump_file);
1844 fputs (" * ", dump_file);
1845 if (TREE_CODE (c->stride) != INTEGER_CST
1846 && c->stride_type != TREE_TYPE (c->stride))
1848 fputs ("(", dump_file);
1849 print_generic_expr (dump_file, c->stride_type);
1850 fputs (")", dump_file);
1852 print_generic_expr (dump_file, c->stride);
1853 fputs (") : ", dump_file);
1854 break;
1855 case CAND_REF:
1856 fputs (" REF : ", dump_file);
1857 print_generic_expr (dump_file, c->base_expr);
1858 fputs (" + (", dump_file);
1859 print_generic_expr (dump_file, c->stride);
1860 fputs (") + ", dump_file);
1861 print_decs (c->index, dump_file);
1862 fputs (" : ", dump_file);
1863 break;
1864 case CAND_PHI:
1865 fputs (" PHI : ", dump_file);
1866 print_generic_expr (dump_file, c->base_expr);
1867 fputs (" + (unknown * ", dump_file);
1868 print_generic_expr (dump_file, c->stride);
1869 fputs (") : ", dump_file);
1870 break;
1871 default:
1872 gcc_unreachable ();
1874 print_generic_expr (dump_file, c->cand_type);
1875 fprintf (dump_file, "\n basis: %d dependent: %d sibling: %d\n",
1876 c->basis, c->dependent, c->sibling);
1877 fprintf (dump_file, " next-interp: %d dead-savings: %d\n",
1878 c->next_interp, c->dead_savings);
1879 if (c->def_phi)
1880 fprintf (dump_file, " phi: %d\n", c->def_phi);
1881 fputs ("\n", dump_file);
1884 /* Dump the candidate vector for debug. */
1886 static void
1887 dump_cand_vec (void)
1889 unsigned i;
1890 slsr_cand_t c;
1892 fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
1894 FOR_EACH_VEC_ELT (cand_vec, i, c)
1895 dump_candidate (c);
1898 /* Callback used to dump the candidate chains hash table. */
1900 int
1901 ssa_base_cand_dump_callback (cand_chain **slot, void *ignored ATTRIBUTE_UNUSED)
1903 const_cand_chain_t chain = *slot;
1904 cand_chain_t p;
1906 print_generic_expr (dump_file, chain->base_expr);
1907 fprintf (dump_file, " -> %d", chain->cand->cand_num);
1909 for (p = chain->next; p; p = p->next)
1910 fprintf (dump_file, " -> %d", p->cand->cand_num);
1912 fputs ("\n", dump_file);
1913 return 1;
1916 /* Dump the candidate chains. */
1918 static void
1919 dump_cand_chains (void)
1921 fprintf (dump_file, "\nStrength reduction candidate chains:\n\n");
1922 base_cand_map->traverse_noresize <void *, ssa_base_cand_dump_callback>
1923 (NULL);
1924 fputs ("\n", dump_file);
1927 /* Dump the increment vector for debug. */
1929 static void
1930 dump_incr_vec (void)
1932 if (dump_file && (dump_flags & TDF_DETAILS))
1934 unsigned i;
1936 fprintf (dump_file, "\nIncrement vector:\n\n");
1938 for (i = 0; i < incr_vec_len; i++)
1940 fprintf (dump_file, "%3d increment: ", i);
1941 print_decs (incr_vec[i].incr, dump_file);
1942 fprintf (dump_file, "\n count: %d", incr_vec[i].count);
1943 fprintf (dump_file, "\n cost: %d", incr_vec[i].cost);
1944 fputs ("\n initializer: ", dump_file);
1945 print_generic_expr (dump_file, incr_vec[i].initializer);
1946 fputs ("\n\n", dump_file);
1951 /* Replace *EXPR in candidate C with an equivalent strength-reduced
1952 data reference. */
1954 static void
1955 replace_ref (tree *expr, slsr_cand_t c)
1957 tree add_expr, mem_ref, acc_type = TREE_TYPE (*expr);
1958 unsigned HOST_WIDE_INT misalign;
1959 unsigned align;
1961 /* Ensure the memory reference carries the minimum alignment
1962 requirement for the data type. See PR58041. */
1963 get_object_alignment_1 (*expr, &align, &misalign);
1964 if (misalign != 0)
1965 align = least_bit_hwi (misalign);
1966 if (align < TYPE_ALIGN (acc_type))
1967 acc_type = build_aligned_type (acc_type, align);
1969 add_expr = fold_build2 (POINTER_PLUS_EXPR, c->cand_type,
1970 c->base_expr, c->stride);
1971 mem_ref = fold_build2 (MEM_REF, acc_type, add_expr,
1972 wide_int_to_tree (c->cand_type, c->index));
1974 /* Gimplify the base addressing expression for the new MEM_REF tree. */
1975 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
1976 TREE_OPERAND (mem_ref, 0)
1977 = force_gimple_operand_gsi (&gsi, TREE_OPERAND (mem_ref, 0),
1978 /*simple_p=*/true, NULL,
1979 /*before=*/true, GSI_SAME_STMT);
1980 copy_ref_info (mem_ref, *expr);
1981 *expr = mem_ref;
1982 update_stmt (c->cand_stmt);
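/* Illustrative sketch, not part of this pass: the effect of replace_ref at
   the source level, with hypothetical names.  The address is split into a
   gimplified base (base + stride) and a constant MEM_REF index, so later
   references can share the strength-reduced address computation.  */
static int
slsr_ref_sketch (char *base, long stride)
{
  int before = *(int *) (base + stride + 8);	/* original reference      */
  char *addr = base + stride;			/* POINTER_PLUS_EXPR base  */
  int after = *(int *) (addr + 8);		/* MEM_REF with index 8    */
  return before + after;
}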
1985 /* Replace CAND_REF candidate C, each sibling of candidate C, and each
1986 dependent of candidate C with an equivalent strength-reduced data
1987 reference. */
1989 static void
1990 replace_refs (slsr_cand_t c)
1992 if (dump_file && (dump_flags & TDF_DETAILS))
1994 fputs ("Replacing reference: ", dump_file);
1995 print_gimple_stmt (dump_file, c->cand_stmt, 0);
1998 if (gimple_vdef (c->cand_stmt))
2000 tree *lhs = gimple_assign_lhs_ptr (c->cand_stmt);
2001 replace_ref (lhs, c);
2003 else
2005 tree *rhs = gimple_assign_rhs1_ptr (c->cand_stmt);
2006 replace_ref (rhs, c);
2009 if (dump_file && (dump_flags & TDF_DETAILS))
2011 fputs ("With: ", dump_file);
2012 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2013 fputs ("\n", dump_file);
2016 if (c->sibling)
2017 replace_refs (lookup_cand (c->sibling));
2019 if (c->dependent)
2020 replace_refs (lookup_cand (c->dependent));
2023 /* Return TRUE if candidate C is dependent upon a PHI. */
2025 static bool
2026 phi_dependent_cand_p (slsr_cand_t c)
2028 /* A candidate is not necessarily dependent upon a PHI just because
2029 it has a phi definition for its base name. It may have a basis
2030 that relies upon the same phi definition, in which case the PHI
2031 is irrelevant to this candidate. */
2032 return (c->def_phi
2033 && c->basis
2034 && lookup_cand (c->basis)->def_phi != c->def_phi);
2037 /* Calculate the increment required for candidate C relative to
2038 its basis. */
2040 static widest_int
2041 cand_increment (slsr_cand_t c)
2043 slsr_cand_t basis;
2045 /* If the candidate doesn't have a basis, just return its own
2046 index. This is useful in record_increments to help us find
2047 an existing initializer. Also, if the candidate's basis is
2048 hidden by a phi, then its own index will be the increment
2049 from the newly introduced phi basis. */
2050 if (!c->basis || phi_dependent_cand_p (c))
2051 return c->index;
2053 basis = lookup_cand (c->basis);
2054 gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
2055 return c->index - basis->index;
2058 /* Calculate the increment required for candidate C relative to
2059 its basis. If we aren't going to generate pointer arithmetic
2060 for this candidate, return the absolute value of that increment
2061 instead. */
2063 static inline widest_int
2064 cand_abs_increment (slsr_cand_t c)
2066 widest_int increment = cand_increment (c);
2068 if (!address_arithmetic_p && wi::neg_p (increment))
2069 increment = -increment;
2071 return increment;
2074 /* Return TRUE iff candidate C has already been replaced under
2075 another interpretation. */
2077 static inline bool
2078 cand_already_replaced (slsr_cand_t c)
2080 return (gimple_bb (c->cand_stmt) == 0);
2083 /* Common logic used by replace_unconditional_candidate and
2084 replace_conditional_candidate. */
2086 static void
2087 replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
2089 tree target_type = TREE_TYPE (gimple_assign_lhs (c->cand_stmt));
2090 enum tree_code cand_code = gimple_assign_rhs_code (c->cand_stmt);
2092 /* It is not useful to replace casts, copies, negates, or adds of
2093 an SSA name and a constant. */
2094 if (cand_code == SSA_NAME
2095 || CONVERT_EXPR_CODE_P (cand_code)
2096 || cand_code == PLUS_EXPR
2097 || cand_code == POINTER_PLUS_EXPR
2098 || cand_code == MINUS_EXPR
2099 || cand_code == NEGATE_EXPR)
2100 return;
2102 enum tree_code code = PLUS_EXPR;
2103 tree bump_tree;
2104 gimple *stmt_to_print = NULL;
2106 if (wi::neg_p (bump))
2108 code = MINUS_EXPR;
2109 bump = -bump;
2112 /* It is possible that the resulting bump doesn't fit in target_type.
2113 Abandon the replacement in this case. This does not affect
2114 siblings or dependents of C. */
2115 if (bump != wi::ext (bump, TYPE_PRECISION (target_type),
2116 TYPE_SIGN (target_type)))
2117 return;
2119 bump_tree = wide_int_to_tree (target_type, bump);
2121 /* If the basis name and the candidate's LHS have incompatible types,
2122 introduce a cast. */
2123 if (!useless_type_conversion_p (target_type, TREE_TYPE (basis_name)))
2124 basis_name = introduce_cast_before_cand (c, target_type, basis_name);
2126 if (dump_file && (dump_flags & TDF_DETAILS))
2128 fputs ("Replacing: ", dump_file);
2129 print_gimple_stmt (dump_file, c->cand_stmt, 0);
2132 if (bump == 0)
2134 tree lhs = gimple_assign_lhs (c->cand_stmt);
2135 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
2136 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2137 slsr_cand_t cc = c;
2138 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
2139 gsi_replace (&gsi, copy_stmt, false);
2140 c->cand_stmt = copy_stmt;
2141 while (cc->next_interp)
2143 cc = lookup_cand (cc->next_interp);
2144 cc->cand_stmt = copy_stmt;
2146 if (dump_file && (dump_flags & TDF_DETAILS))
2147 stmt_to_print = copy_stmt;
2149 else
2151 tree rhs1, rhs2;
2152 if (cand_code != NEGATE_EXPR) {
2153 rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2154 rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2156 if (cand_code != NEGATE_EXPR
2157 && ((operand_equal_p (rhs1, basis_name, 0)
2158 && operand_equal_p (rhs2, bump_tree, 0))
2159 || (operand_equal_p (rhs1, bump_tree, 0)
2160 && operand_equal_p (rhs2, basis_name, 0))))
2162 if (dump_file && (dump_flags & TDF_DETAILS))
2164 fputs ("(duplicate, not actually replacing)", dump_file);
2165 stmt_to_print = c->cand_stmt;
2168 else
2170 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
2171 slsr_cand_t cc = c;
2172 gimple_assign_set_rhs_with_ops (&gsi, code, basis_name, bump_tree);
2173 update_stmt (gsi_stmt (gsi));
2174 c->cand_stmt = gsi_stmt (gsi);
2175 while (cc->next_interp)
2177 cc = lookup_cand (cc->next_interp);
2178 cc->cand_stmt = gsi_stmt (gsi);
2180 if (dump_file && (dump_flags & TDF_DETAILS))
2181 stmt_to_print = gsi_stmt (gsi);
2185 if (dump_file && (dump_flags & TDF_DETAILS))
2187 fputs ("With: ", dump_file);
2188 print_gimple_stmt (dump_file, stmt_to_print, 0);
2189 fputs ("\n", dump_file);
2193 /* Replace candidate C with an add or subtract. Note that we only
2194 operate on CAND_MULTs with known strides, so we will never generate
2195 a POINTER_PLUS_EXPR. Each candidate X = (B + i) * S is replaced by
2196 X = Y + ((i - i') * S), as described in the module commentary. The
2197 folded value ((i - i') * S) is referred to here as the "bump." */
2199 static void
2200 replace_unconditional_candidate (slsr_cand_t c)
2202 slsr_cand_t basis;
2204 if (cand_already_replaced (c))
2205 return;
2207 basis = lookup_cand (c->basis);
2208 widest_int bump = cand_increment (c) * wi::to_widest (c->stride);
2210 replace_mult_candidate (c, gimple_assign_lhs (basis->cand_stmt), bump);
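/* Illustrative sketch, not part of this pass: the replacement above at the
   source level, with hypothetical names and a known stride of 4.  The basis
   is y = (b + 2) * 4; the candidate x = (b + 7) * 4 has increment 7 - 2 = 5,
   so its bump is 5 * 4 = 20 and the multiply becomes a single add.  */
static long
slsr_bump_sketch (long b)
{
  long y = (b + 2) * 4;		/* basis:     Y = (B + i') * S      */
  long x = y + 20;		/* candidate: X = Y + (i - i') * S  */
  return x - y;
}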
2213 /* Return the index in the increment vector of the given INCREMENT,
2214 or -1 if not found. The latter can occur if more than
2215 MAX_INCR_VEC_LEN increments have been found. */
2217 static inline int
2218 incr_vec_index (const widest_int &increment)
2220 unsigned i;
2222 for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
2225 if (i < incr_vec_len)
2226 return i;
2227 else
2228 return -1;
2231 /* Create a new statement along edge E to add BASIS_NAME to the product
2232 of INCREMENT and the stride of candidate C. Create and return a new
2233 SSA name to be used as the LHS of the new statement.
2234 KNOWN_STRIDE is true iff C's stride is a constant. */
2236 static tree
2237 create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
2238 widest_int increment, edge e, location_t loc,
2239 bool known_stride)
2241 tree lhs, basis_type;
2242 gassign *new_stmt, *cast_stmt = NULL;
2244 /* If the add candidate along this incoming edge has the same
2245 index as C's hidden basis, the hidden basis represents this
2246 edge correctly. */
2247 if (increment == 0)
2248 return basis_name;
2250 basis_type = TREE_TYPE (basis_name);
2251 lhs = make_temp_ssa_name (basis_type, NULL, "slsr");
2253 /* Occasionally people convert integers to pointers without a
2254 cast, leading us into trouble if we aren't careful. */
2255 enum tree_code plus_code
2256 = POINTER_TYPE_P (basis_type) ? POINTER_PLUS_EXPR : PLUS_EXPR;
2258 if (known_stride)
2260 tree bump_tree;
2261 enum tree_code code = plus_code;
2262 widest_int bump = increment * wi::to_widest (c->stride);
2263 if (wi::neg_p (bump) && !POINTER_TYPE_P (basis_type))
2265 code = MINUS_EXPR;
2266 bump = -bump;
2269 tree stride_type = POINTER_TYPE_P (basis_type) ? sizetype : basis_type;
2270 bump_tree = wide_int_to_tree (stride_type, bump);
2271 new_stmt = gimple_build_assign (lhs, code, basis_name, bump_tree);
2273 else
2275 int i;
2276 bool negate_incr = !POINTER_TYPE_P (basis_type) && wi::neg_p (increment);
2277 i = incr_vec_index (negate_incr ? -increment : increment);
2278 gcc_assert (i >= 0);
2280 if (incr_vec[i].initializer)
2282 enum tree_code code = negate_incr ? MINUS_EXPR : plus_code;
2283 new_stmt = gimple_build_assign (lhs, code, basis_name,
2284 incr_vec[i].initializer);
2286 else {
2287 tree stride;
2289 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
2291 tree cast_stride = make_temp_ssa_name (c->stride_type, NULL,
2292 "slsr");
2293 cast_stmt = gimple_build_assign (cast_stride, NOP_EXPR,
2294 c->stride);
2295 stride = cast_stride;
2297 else
2298 stride = c->stride;
2300 if (increment == 1)
2301 new_stmt = gimple_build_assign (lhs, plus_code, basis_name, stride);
2302 else if (increment == -1)
2303 new_stmt = gimple_build_assign (lhs, MINUS_EXPR, basis_name, stride);
2304 else
2305 gcc_unreachable ();
2309 if (cast_stmt)
2311 gimple_set_location (cast_stmt, loc);
2312 gsi_insert_on_edge (e, cast_stmt);
2315 gimple_set_location (new_stmt, loc);
2316 gsi_insert_on_edge (e, new_stmt);
2318 if (dump_file && (dump_flags & TDF_DETAILS))
2320 if (cast_stmt)
2322 fprintf (dump_file, "Inserting cast on edge %d->%d: ",
2323 e->src->index, e->dest->index);
2324 print_gimple_stmt (dump_file, cast_stmt, 0);
2326 fprintf (dump_file, "Inserting on edge %d->%d: ", e->src->index,
2327 e->dest->index);
2328 print_gimple_stmt (dump_file, new_stmt, 0);
2331 return lhs;
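/* Illustrative sketch, not part of this pass: the kind of statement the
   function above inserts on an incoming edge, with hypothetical names and
   a known stride of 4.  A positive edge increment becomes an add of
   increment * stride; a negative one becomes a subtract.  */
static long
slsr_edge_add_sketch (long basis, int cond)
{
  long t;
  if (cond)
    t = basis + 12;	/* increment  3:  basis + 3 * 4  */
  else
    t = basis - 8;	/* increment -2:  basis - 2 * 4  */
  return t;
}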
2334 /* Clear the visited field for a tree of PHI candidates. */
2336 static void
2337 clear_visited (gphi *phi)
2339 unsigned i;
2340 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2342 if (phi_cand->visited)
2344 phi_cand->visited = 0;
2346 for (i = 0; i < gimple_phi_num_args (phi); i++)
2348 tree arg = gimple_phi_arg_def (phi, i);
2349 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2350 if (gimple_code (arg_def) == GIMPLE_PHI)
2351 clear_visited (as_a <gphi *> (arg_def));
2356 /* Recursive helper function for create_phi_basis. */
2358 static tree
2359 create_phi_basis_1 (slsr_cand_t c, gimple *from_phi, tree basis_name,
2360 location_t loc, bool known_stride)
2362 int i;
2363 tree name, phi_arg;
2364 gphi *phi;
2365 slsr_cand_t basis = lookup_cand (c->basis);
2366 int nargs = gimple_phi_num_args (from_phi);
2367 basic_block phi_bb = gimple_bb (from_phi);
2368 slsr_cand_t phi_cand = *stmt_cand_map->get (from_phi);
2369 auto_vec<tree> phi_args (nargs);
2371 if (phi_cand->visited)
2372 return phi_cand->cached_basis;
2373 phi_cand->visited = 1;
2375 /* Process each argument of the existing phi that represents
2376 conditionally-executed add candidates. */
2377 for (i = 0; i < nargs; i++)
2379 edge e = (*phi_bb->preds)[i];
2380 tree arg = gimple_phi_arg_def (from_phi, i);
2381 tree feeding_def;
2383 /* If the phi argument is the base name of the CAND_PHI, then
2384 this incoming arc should use the hidden basis. */
2385 if (operand_equal_p (arg, phi_cand->base_expr, 0))
2386 if (basis->index == 0)
2387 feeding_def = gimple_assign_lhs (basis->cand_stmt);
2388 else
2390 widest_int incr = -basis->index;
2391 feeding_def = create_add_on_incoming_edge (c, basis_name, incr,
2392 e, loc, known_stride);
2394 else
2396 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2398 /* If there is another phi along this incoming edge, we must
2399 process it in the same fashion to ensure that all basis
2400 adjustments are made along its incoming edges. */
2401 if (gimple_code (arg_def) == GIMPLE_PHI)
2402 feeding_def = create_phi_basis_1 (c, arg_def, basis_name,
2403 loc, known_stride);
2404 else
2406 slsr_cand_t arg_cand = base_cand_from_table (arg);
2407 widest_int diff = arg_cand->index - basis->index;
2408 feeding_def = create_add_on_incoming_edge (c, basis_name, diff,
2409 e, loc, known_stride);
2413 /* Because of recursion, we need to save the arguments in a vector
2414 so we can create the PHI statement all at once. Otherwise the
2415 storage for the half-created PHI can be reclaimed. */
2416 phi_args.safe_push (feeding_def);
2419 /* Create the new phi basis. */
2420 name = make_temp_ssa_name (TREE_TYPE (basis_name), NULL, "slsr");
2421 phi = create_phi_node (name, phi_bb);
2422 SSA_NAME_DEF_STMT (name) = phi;
2424 FOR_EACH_VEC_ELT (phi_args, i, phi_arg)
2426 edge e = (*phi_bb->preds)[i];
2427 add_phi_arg (phi, phi_arg, e, loc);
2430 update_stmt (phi);
2432 if (dump_file && (dump_flags & TDF_DETAILS))
2434 fputs ("Introducing new phi basis: ", dump_file);
2435 print_gimple_stmt (dump_file, phi, 0);
2438 phi_cand->cached_basis = name;
2439 return name;
2442 /* Given a candidate C with BASIS_NAME being the LHS of C's basis which
2443 is hidden by the phi node FROM_PHI, create a new phi node in the same
2444 block as FROM_PHI. The new phi is suitable for use as a basis by C,
2445 with its phi arguments representing conditional adjustments to the
2446 hidden basis along conditional incoming paths. Those adjustments are
2447 made by creating add statements (and sometimes recursively creating
2448 phis) along those incoming paths. LOC is the location to attach to
2449 the introduced statements. KNOWN_STRIDE is true iff C's stride is a
2450 constant. */
2452 static tree
2453 create_phi_basis (slsr_cand_t c, gimple *from_phi, tree basis_name,
2454 location_t loc, bool known_stride)
2456 tree retval = create_phi_basis_1 (c, from_phi, basis_name, loc,
2457 known_stride);
2458 gcc_assert (retval);
2459 clear_visited (as_a <gphi *> (from_phi));
2460 return retval;
2463 /* Given a candidate C whose basis is hidden by at least one intervening
2464 phi, introduce a matching number of new phis to represent its basis
2465 adjusted by conditional increments along possible incoming paths. Then
2466 replace C as though it were an unconditional candidate, using the new
2467 basis. */
2469 static void
2470 replace_conditional_candidate (slsr_cand_t c)
2472 tree basis_name, name;
2473 slsr_cand_t basis;
2474 location_t loc;
2476 /* Look up the LHS SSA name from C's basis. This will be the
2477 RHS1 of the adds we will introduce to create new phi arguments. */
2478 basis = lookup_cand (c->basis);
2479 basis_name = gimple_assign_lhs (basis->cand_stmt);
2481 /* Create a new phi statement which will represent C's true basis
2482 after the transformation is complete. */
2483 loc = gimple_location (c->cand_stmt);
2484 name = create_phi_basis (c, lookup_cand (c->def_phi)->cand_stmt,
2485 basis_name, loc, KNOWN_STRIDE);
2487 /* Replace C with an add of the new basis phi and a constant. */
2488 widest_int bump = c->index * wi::to_widest (c->stride);
2490 replace_mult_candidate (c, name, bump);
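/* Illustrative sketch, not part of this pass: a conditional candidate at
   the source level, with hypothetical names and a known stride of 4.  The
   multiply x = a * 4 is hidden behind a PHI for a; a new PHI over
   edge-adjusted copies of the basis y = b * 4 takes its place, so the
   multiply is replaced by adds along the incoming paths.  */
static long
slsr_cond_sketch (long b, int cond)
{
  long y = b * 4;			/* basis                         */
  /* Before: a = cond ? b + 2 : b + 5;  x = a * 4;  */
  long t = cond ? y + 8 : y + 20;	/* new phi of edge adjustments   */
  long x = t;				/* replaces x = a * 4            */
  return x + y;
}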
2493 /* Recursive helper function for phi_add_costs. SPREAD is a measure of
2494 how many PHI nodes we have visited at this point in the tree walk. */
2496 static int
2497 phi_add_costs_1 (gimple *phi, slsr_cand_t c, int one_add_cost, int *spread)
2499 unsigned i;
2500 int cost = 0;
2501 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2503 if (phi_cand->visited)
2504 return 0;
2506 phi_cand->visited = 1;
2507 (*spread)++;
2509 /* If we work our way back to a phi that isn't dominated by the hidden
2510 basis, this isn't a candidate for replacement. Indicate this by
2511 returning an unreasonably high cost. It's not easy to detect
2512 these situations when determining the basis, so we defer the
2513 decision until now. */
2514 basic_block phi_bb = gimple_bb (phi);
2515 slsr_cand_t basis = lookup_cand (c->basis);
2516 basic_block basis_bb = gimple_bb (basis->cand_stmt);
2518 if (phi_bb == basis_bb || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
2519 return COST_INFINITE;
2521 for (i = 0; i < gimple_phi_num_args (phi); i++)
2523 tree arg = gimple_phi_arg_def (phi, i);
2525 if (arg != phi_cand->base_expr)
2527 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2529 if (gimple_code (arg_def) == GIMPLE_PHI)
2531 cost += phi_add_costs_1 (arg_def, c, one_add_cost, spread);
2533 if (cost >= COST_INFINITE || *spread > MAX_SPREAD)
2534 return COST_INFINITE;
2536 else
2538 slsr_cand_t arg_cand = base_cand_from_table (arg);
2540 if (arg_cand->index != c->index)
2541 cost += one_add_cost;
2546 return cost;
2549 /* Compute the expected costs of inserting basis adjustments for
2550 candidate C with phi-definition PHI. The cost of inserting
2551 one adjustment is given by ONE_ADD_COST. If PHI has arguments
2552 which are themselves phi results, recursively calculate costs
2553 for those phis as well. */
2555 static int
2556 phi_add_costs (gimple *phi, slsr_cand_t c, int one_add_cost)
2558 int spread = 0;
2559 int retval = phi_add_costs_1 (phi, c, one_add_cost, &spread);
2560 clear_visited (as_a <gphi *> (phi));
2561 return retval;
2563 /* For candidate C, each sibling of candidate C, and each dependent of
2564 candidate C, determine whether the candidate is dependent upon a
2565 phi that hides its basis. If not, replace the candidate unconditionally.
2566 Otherwise, determine whether the cost of introducing compensation code
2567 for the candidate is offset by the gains from strength reduction. If
2568 so, replace the candidate and introduce the compensation code. */
2570 static void
2571 replace_uncond_cands_and_profitable_phis (slsr_cand_t c)
2573 if (phi_dependent_cand_p (c))
2575 /* A multiply candidate with a stride of 1 is just an artifice
2576 of a copy or cast; there is no value in replacing it. */
2577 if (c->kind == CAND_MULT && wi::to_widest (c->stride) != 1)
2579 /* A candidate dependent upon a phi will replace a multiply by
2580 a constant with an add, and will insert at most one add for
2581 each phi argument. Add these costs with the potential dead-code
2582 savings to determine profitability. */
2583 bool speed = optimize_bb_for_speed_p (gimple_bb (c->cand_stmt));
2584 int mult_savings = stmt_cost (c->cand_stmt, speed);
2585 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2586 tree phi_result = gimple_phi_result (phi);
2587 int one_add_cost = add_cost (speed,
2588 TYPE_MODE (TREE_TYPE (phi_result)));
2589 int add_costs = one_add_cost + phi_add_costs (phi, c, one_add_cost);
2590 int cost = add_costs - mult_savings - c->dead_savings;
2592 if (dump_file && (dump_flags & TDF_DETAILS))
2594 fprintf (dump_file, " Conditional candidate %d:\n", c->cand_num);
2595 fprintf (dump_file, " add_costs = %d\n", add_costs);
2596 fprintf (dump_file, " mult_savings = %d\n", mult_savings);
2597 fprintf (dump_file, " dead_savings = %d\n", c->dead_savings);
2598 fprintf (dump_file, " cost = %d\n", cost);
2599 if (cost <= COST_NEUTRAL)
2600 fputs (" Replacing...\n", dump_file);
2601 else
2602 fputs (" Not replaced.\n", dump_file);
2605 if (cost <= COST_NEUTRAL)
2606 replace_conditional_candidate (c);
2609 else
2610 replace_unconditional_candidate (c);
2612 if (c->sibling)
2613 replace_uncond_cands_and_profitable_phis (lookup_cand (c->sibling));
2615 if (c->dependent)
2616 replace_uncond_cands_and_profitable_phis (lookup_cand (c->dependent));
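/* Illustrative sketch, not part of this pass: the profitability test above
   with hypothetical costs.  If the multiply being removed costs 4, one add
   costs 1, and two phi arguments each need a compensating add, then
   add_costs = 1 + 2 * 1 = 3, mult_savings = 4, and with dead_savings = 0
   the net cost 3 - 4 - 0 = -1 is at or below COST_NEUTRAL, so the
   candidate is replaced.  */
static int
slsr_phi_profit_sketch (void)
{
  const int mult_cost = 4, one_add_cost = 1, phi_adds = 2, dead_savings = 0;
  int add_costs = one_add_cost + phi_adds * one_add_cost;
  return add_costs - mult_cost - dead_savings;	/* -1: profitable  */
}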
2619 /* Count the number of candidates in the tree rooted at C that have
2620 not already been replaced under other interpretations. */
2622 static int
2623 count_candidates (slsr_cand_t c)
2625 unsigned count = cand_already_replaced (c) ? 0 : 1;
2627 if (c->sibling)
2628 count += count_candidates (lookup_cand (c->sibling));
2630 if (c->dependent)
2631 count += count_candidates (lookup_cand (c->dependent));
2633 return count;
2636 /* Increase the count of INCREMENT by one in the increment vector.
2637 INCREMENT is associated with candidate C. If INCREMENT is to be
2638 conditionally executed as part of a conditional candidate replacement,
2639 IS_PHI_ADJUST is true, otherwise false. If an initializer
2640 T_0 = stride * I is provided by a candidate that dominates all
2641 candidates with the same increment, also record T_0 for subsequent use. */
2643 static void
2644 record_increment (slsr_cand_t c, widest_int increment, bool is_phi_adjust)
2646 bool found = false;
2647 unsigned i;
2649 /* Treat increments that differ only in sign as identical so as to
2650 share initializers, unless we are generating pointer arithmetic. */
2651 if (!address_arithmetic_p && wi::neg_p (increment))
2652 increment = -increment;
2654 for (i = 0; i < incr_vec_len; i++)
2656 if (incr_vec[i].incr == increment)
2658 incr_vec[i].count++;
2659 found = true;
2661 /* If we previously recorded an initializer that doesn't
2662 dominate this candidate, it's not going to be useful to
2663 us after all. */
2664 if (incr_vec[i].initializer
2665 && !dominated_by_p (CDI_DOMINATORS,
2666 gimple_bb (c->cand_stmt),
2667 incr_vec[i].init_bb))
2669 incr_vec[i].initializer = NULL_TREE;
2670 incr_vec[i].init_bb = NULL;
2673 break;
2677 if (!found && incr_vec_len < MAX_INCR_VEC_LEN - 1)
2679 /* The first time we see an increment, create the entry for it.
2680 If this is the root candidate which doesn't have a basis, set
2681 the count to zero. We're only processing it so it can possibly
2682 provide an initializer for other candidates. */
2683 incr_vec[incr_vec_len].incr = increment;
2684 incr_vec[incr_vec_len].count = c->basis || is_phi_adjust ? 1 : 0;
2685 incr_vec[incr_vec_len].cost = COST_INFINITE;
2687 /* Optimistically record the first occurrence of this increment
2688 as providing an initializer (if it does); we will revise this
2689 opinion later if it doesn't dominate all other occurrences.
2690 Exception: increments of 0, 1 never need initializers;
2691 and phi adjustments don't ever provide initializers. */
2692 if (c->kind == CAND_ADD
2693 && !is_phi_adjust
2694 && c->index == increment
2695 && (increment > 1 || increment < 0)
2696 && (gimple_assign_rhs_code (c->cand_stmt) == PLUS_EXPR
2697 || gimple_assign_rhs_code (c->cand_stmt) == POINTER_PLUS_EXPR))
2699 tree t0 = NULL_TREE;
2700 tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
2701 tree rhs2 = gimple_assign_rhs2 (c->cand_stmt);
2702 if (operand_equal_p (rhs1, c->base_expr, 0))
2703 t0 = rhs2;
2704 else if (operand_equal_p (rhs2, c->base_expr, 0))
2705 t0 = rhs1;
2706 if (t0
2707 && SSA_NAME_DEF_STMT (t0)
2708 && gimple_bb (SSA_NAME_DEF_STMT (t0)))
2710 incr_vec[incr_vec_len].initializer = t0;
2711 incr_vec[incr_vec_len++].init_bb
2712 = gimple_bb (SSA_NAME_DEF_STMT (t0));
2714 else
2716 incr_vec[incr_vec_len].initializer = NULL_TREE;
2717 incr_vec[incr_vec_len++].init_bb = NULL;
2720 else
2722 incr_vec[incr_vec_len].initializer = NULL_TREE;
2723 incr_vec[incr_vec_len++].init_bb = NULL;
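/* Illustrative sketch, not part of this pass: an existing statement serving
   as an initializer, with hypothetical names and an SSA-name stride s.  The
   add x = b + t0 with t0 = 5 * s records t0 as the initializer for
   increment 5; a later candidate needing the same increment can reuse it,
   provided t0's definition dominates that candidate.  */
static long
slsr_initializer_reuse_sketch (long b, long y, long s)
{
  long t0 = 5 * s;	/* recorded initializer for increment 5  */
  long x = b + t0;	/* CAND_ADD whose rhs provides t0        */
  long z = y + t0;	/* later candidate reusing t0            */
  return x + z;
}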
2728 /* Recursive helper function for record_phi_increments. */
2730 static void
2731 record_phi_increments_1 (slsr_cand_t basis, gimple *phi)
2733 unsigned i;
2734 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2736 if (phi_cand->visited)
2737 return;
2738 phi_cand->visited = 1;
2740 for (i = 0; i < gimple_phi_num_args (phi); i++)
2742 tree arg = gimple_phi_arg_def (phi, i);
2744 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2746 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2748 if (gimple_code (arg_def) == GIMPLE_PHI)
2749 record_phi_increments_1 (basis, arg_def);
2750 else
2752 slsr_cand_t arg_cand = base_cand_from_table (arg);
2753 widest_int diff = arg_cand->index - basis->index;
2754 record_increment (arg_cand, diff, PHI_ADJUST);
2760 /* Given phi statement PHI that hides a candidate from its BASIS, find
2761 the increments along each incoming arc (recursively handling additional
2762 phis that may be present) and record them. These increments are the
2763 difference in index between the index-adjusting statements and the
2764 index of the basis. */
2766 static void
2767 record_phi_increments (slsr_cand_t basis, gimple *phi)
2769 record_phi_increments_1 (basis, phi);
2770 clear_visited (as_a <gphi *> (phi));
2773 /* Determine how many times each unique increment occurs in the set
2774 of candidates rooted at C's parent, recording the data in the
2775 increment vector. For each unique increment I, if an initializer
2776 T_0 = stride * I is provided by a candidate that dominates all
2777 candidates with the same increment, also record T_0 for subsequent
2778 use. */
2780 static void
2781 record_increments (slsr_cand_t c)
2783 if (!cand_already_replaced (c))
2785 if (!phi_dependent_cand_p (c))
2786 record_increment (c, cand_increment (c), NOT_PHI_ADJUST);
2787 else
2789 /* A candidate with a basis hidden by a phi will have one
2790 increment for its relationship to the index represented by
2791 the phi, and potentially additional increments along each
2792 incoming edge. For the root of the dependency tree (which
2793 has no basis), process just the initial index in case it has
2794 an initializer that can be used by subsequent candidates. */
2795 record_increment (c, c->index, NOT_PHI_ADJUST);
2797 if (c->basis)
2798 record_phi_increments (lookup_cand (c->basis),
2799 lookup_cand (c->def_phi)->cand_stmt);
2803 if (c->sibling)
2804 record_increments (lookup_cand (c->sibling));
2806 if (c->dependent)
2807 record_increments (lookup_cand (c->dependent));
2810 /* Recursive helper function for phi_incr_cost. */
2812 static int
2813 phi_incr_cost_1 (slsr_cand_t c, const widest_int &incr, gimple *phi,
2814 int *savings)
2816 unsigned i;
2817 int cost = 0;
2818 slsr_cand_t basis = lookup_cand (c->basis);
2819 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
2821 if (phi_cand->visited)
2822 return 0;
2823 phi_cand->visited = 1;
2825 for (i = 0; i < gimple_phi_num_args (phi); i++)
2827 tree arg = gimple_phi_arg_def (phi, i);
2829 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
2831 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
2833 if (gimple_code (arg_def) == GIMPLE_PHI)
2835 int feeding_savings = 0;
2836 tree feeding_var = gimple_phi_result (arg_def);
2837 cost += phi_incr_cost_1 (c, incr, arg_def, &feeding_savings);
2838 if (uses_consumed_by_stmt (feeding_var, phi))
2839 *savings += feeding_savings;
2841 else
2843 slsr_cand_t arg_cand = base_cand_from_table (arg);
2844 widest_int diff = arg_cand->index - basis->index;
2846 if (incr == diff)
2848 tree basis_lhs = gimple_assign_lhs (basis->cand_stmt);
2849 tree lhs = gimple_assign_lhs (arg_cand->cand_stmt);
2850 cost += add_cost (true, TYPE_MODE (TREE_TYPE (basis_lhs)));
2851 if (uses_consumed_by_stmt (lhs, phi))
2852 *savings += stmt_cost (arg_cand->cand_stmt, true);
2858 return cost;
2861 /* Add up and return the costs of introducing add statements that
2862 require the increment INCR on behalf of candidate C and phi
2863 statement PHI. Accumulate into *SAVINGS the potential savings
2864 from removing existing statements that feed PHI and have no other
2865 uses. */
2867 static int
2868 phi_incr_cost (slsr_cand_t c, const widest_int &incr, gimple *phi,
2869 int *savings)
2871 int retval = phi_incr_cost_1 (c, incr, phi, savings);
2872 clear_visited (as_a <gphi *> (phi));
2873 return retval;
2876 /* Return the first candidate in the tree rooted at C that has not
2877 already been replaced, favoring siblings over dependents. */
2879 static slsr_cand_t
2880 unreplaced_cand_in_tree (slsr_cand_t c)
2882 if (!cand_already_replaced (c))
2883 return c;
2885 if (c->sibling)
2887 slsr_cand_t sib = unreplaced_cand_in_tree (lookup_cand (c->sibling));
2888 if (sib)
2889 return sib;
2892 if (c->dependent)
2894 slsr_cand_t dep = unreplaced_cand_in_tree (lookup_cand (c->dependent));
2895 if (dep)
2896 return dep;
2899 return NULL;
2902 /* Return TRUE if the candidates in the tree rooted at C should be
2903 optimized for speed, else FALSE. We estimate this based on the block
2904 containing the most dominant candidate in the tree that has not yet
2905 been replaced. */
2907 static bool
2908 optimize_cands_for_speed_p (slsr_cand_t c)
2910 slsr_cand_t c2 = unreplaced_cand_in_tree (c);
2911 gcc_assert (c2);
2912 return optimize_bb_for_speed_p (gimple_bb (c2->cand_stmt));
2915 /* Add COST_IN to the lowest cost of any dependent path starting at
2916 candidate C or any of its siblings, counting only candidates along
2917 such paths with increment INCR. Assume that replacing a candidate
2918 reduces cost by REPL_SAVINGS. Also account for savings from any
2919 statements that would go dead. If COUNT_PHIS is true, include
2920 costs of introducing feeding statements for conditional candidates. */
2922 static int
2923 lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c,
2924 const widest_int &incr, bool count_phis)
2926 int local_cost, sib_cost, savings = 0;
2927 widest_int cand_incr = cand_abs_increment (c);
2929 if (cand_already_replaced (c))
2930 local_cost = cost_in;
2931 else if (incr == cand_incr)
2932 local_cost = cost_in - repl_savings - c->dead_savings;
2933 else
2934 local_cost = cost_in - c->dead_savings;
2936 if (count_phis
2937 && phi_dependent_cand_p (c)
2938 && !cand_already_replaced (c))
2940 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2941 local_cost += phi_incr_cost (c, incr, phi, &savings);
2943 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
2944 local_cost -= savings;
2947 if (c->dependent)
2948 local_cost = lowest_cost_path (local_cost, repl_savings,
2949 lookup_cand (c->dependent), incr,
2950 count_phis);
2952 if (c->sibling)
2954 sib_cost = lowest_cost_path (cost_in, repl_savings,
2955 lookup_cand (c->sibling), incr,
2956 count_phis);
2957 local_cost = MIN (local_cost, sib_cost);
2960 return local_cost;
2963 /* Compute the total savings that would accrue from all replacements
2964 in the candidate tree rooted at C, counting only candidates with
2965 increment INCR. Assume that replacing a candidate reduces cost
2966 by REPL_SAVINGS. Also account for savings from statements that
2967 would go dead. */
2969 static int
2970 total_savings (int repl_savings, slsr_cand_t c, const widest_int &incr,
2971 bool count_phis)
2973 int savings = 0;
2974 widest_int cand_incr = cand_abs_increment (c);
2976 if (incr == cand_incr && !cand_already_replaced (c))
2977 savings += repl_savings + c->dead_savings;
2979 if (count_phis
2980 && phi_dependent_cand_p (c)
2981 && !cand_already_replaced (c))
2983 int phi_savings = 0;
2984 gimple *phi = lookup_cand (c->def_phi)->cand_stmt;
2985 savings -= phi_incr_cost (c, incr, phi, &phi_savings);
2987 if (uses_consumed_by_stmt (gimple_phi_result (phi), c->cand_stmt))
2988 savings += phi_savings;
2991 if (c->dependent)
2992 savings += total_savings (repl_savings, lookup_cand (c->dependent), incr,
2993 count_phis);
2995 if (c->sibling)
2996 savings += total_savings (repl_savings, lookup_cand (c->sibling), incr,
2997 count_phis);
2999 return savings;
3002 /* Use target-specific costs to determine and record which increments
3003 in the current candidate tree are profitable to replace, assuming
3004 MODE and SPEED. FIRST_DEP is the first dependent of the root of
3005 the candidate tree.
3007 One slight limitation here is that we don't account for the possible
3008 introduction of casts in some cases. See replace_one_candidate for
3009 the cases where these are introduced. This should probably be cleaned
3010 up sometime. */
3012 static void
3013 analyze_increments (slsr_cand_t first_dep, machine_mode mode, bool speed)
3015 unsigned i;
3017 for (i = 0; i < incr_vec_len; i++)
3019 HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
3021 /* If somehow this increment is bigger than a HWI, we won't
3022 be optimizing candidates that use it. And if the increment
3023 has a count of zero, nothing will be done with it. */
3024 if (!wi::fits_shwi_p (incr_vec[i].incr) || !incr_vec[i].count)
3025 incr_vec[i].cost = COST_INFINITE;
3027 /* Increments of 0, 1, and -1 are always profitable to replace,
3028 because they always replace a multiply or add with an add or
3029 copy, and may cause one or more existing instructions to go
3030 dead. Exception: -1 can't be assumed to be profitable for
3031 pointer addition. */
3032 else if (incr == 0
3033 || incr == 1
3034 || (incr == -1
3035 && !POINTER_TYPE_P (first_dep->cand_type)))
3036 incr_vec[i].cost = COST_NEUTRAL;
3038 /* If we need to add an initializer, give up if a cast from the
3039 candidate's type to its stride's type can lose precision.
3040 Note that this already takes into account that the stride may
3041 have been cast to a wider type, in which case this test won't
3042 fire. Example:
3044 short int _1;
3045 _2 = (int) _1;
3046 _3 = _2 * 10;
3047 _4 = x + _3; ADD: x + (10 * (int)_1) : int
3048 _5 = _2 * 15;
3049 _6 = x + _5; ADD: x + (15 * (int)_1) : int
3051 Although the stride was a short int initially, the stride
3052 used in the analysis has been widened to an int, and such
3053 widening will be done in the initializer as well. */
3054 else if (!incr_vec[i].initializer
3055 && TREE_CODE (first_dep->stride) != INTEGER_CST
3056 && !legal_cast_p_1 (first_dep->stride_type,
3057 TREE_TYPE (gimple_assign_lhs
3058 (first_dep->cand_stmt))))
3059 incr_vec[i].cost = COST_INFINITE;
3061 /* If we need to add an initializer, make sure we don't introduce
3062 a multiply by a pointer type, which can happen in certain cast
3063 scenarios. */
3064 else if (!incr_vec[i].initializer
3065 && TREE_CODE (first_dep->stride) != INTEGER_CST
3066 && POINTER_TYPE_P (first_dep->stride_type))
3067 incr_vec[i].cost = COST_INFINITE;
3069 /* For any other increment, if this is a multiply candidate, we
3070 must introduce a temporary T and initialize it with
3071 T_0 = stride * increment. When optimizing for speed, walk the
3072 candidate tree to calculate the best cost reduction along any
3073 path; if it offsets the fixed cost of inserting the initializer,
3074 replacing the increment is profitable. When optimizing for
3075 size, instead calculate the total cost reduction from replacing
3076 all candidates with this increment. */
3077 else if (first_dep->kind == CAND_MULT)
3079 int cost = mult_by_coeff_cost (incr, mode, speed);
3080 int repl_savings = mul_cost (speed, mode) - add_cost (speed, mode);
3081 if (speed)
3082 cost = lowest_cost_path (cost, repl_savings, first_dep,
3083 incr_vec[i].incr, COUNT_PHIS);
3084 else
3085 cost -= total_savings (repl_savings, first_dep, incr_vec[i].incr,
3086 COUNT_PHIS);
3088 incr_vec[i].cost = cost;
3091 /* If this is an add candidate, the initializer may already
3092 exist, so only calculate the cost of the initializer if it
3093 doesn't. We are replacing one add with another here, so the
3094 known replacement savings is zero. We will account for removal
3095 of dead instructions in lowest_cost_path or total_savings. */
3096 else
3098 int cost = 0;
3099 if (!incr_vec[i].initializer)
3100 cost = mult_by_coeff_cost (incr, mode, speed);
3102 if (speed)
3103 cost = lowest_cost_path (cost, 0, first_dep, incr_vec[i].incr,
3104 DONT_COUNT_PHIS);
3105 else
3106 cost -= total_savings (0, first_dep, incr_vec[i].incr,
3107 DONT_COUNT_PHIS);
3109 incr_vec[i].cost = cost;
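/* Illustrative sketch, not part of this pass: the size-optimization branch
   above with hypothetical costs.  If a multiply costs 4 and an add costs 1,
   each replaced multiply saves 3; with three candidates sharing the
   increment, the total savings of 9 outweigh the initializer's multiply
   cost of 4, so the recorded cost 4 - 9 = -5 marks the increment
   profitable.  */
static int
slsr_incr_cost_sketch (void)
{
  const int mul_cost = 4, add_cost = 1, n_cands = 3;
  int repl_savings = mul_cost - add_cost;	/* per replaced multiply     */
  int init_cost = mul_cost;			/* T_0 = stride * increment  */
  return init_cost - n_cands * repl_savings;	/* <= COST_NEUTRAL: profit   */
}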
3114 /* Return the nearest common dominator of BB1 and BB2. If the blocks
3115 are identical, return the earlier of C1 and C2 in *WHERE. Otherwise,
3116 if the NCD matches BB1, return C1 in *WHERE; if the NCD matches BB2,
3117 return C2 in *WHERE; and if the NCD matches neither, return NULL in
3118 *WHERE. Note: It is possible for one of C1 and C2 to be NULL. */
3120 static basic_block
3121 ncd_for_two_cands (basic_block bb1, basic_block bb2,
3122 slsr_cand_t c1, slsr_cand_t c2, slsr_cand_t *where)
3124 basic_block ncd;
3126 if (!bb1)
3128 *where = c2;
3129 return bb2;
3132 if (!bb2)
3134 *where = c1;
3135 return bb1;
3138 ncd = nearest_common_dominator (CDI_DOMINATORS, bb1, bb2);
3140 /* If both candidates are in the same block, the earlier
3141 candidate wins. */
3142 if (bb1 == ncd && bb2 == ncd)
3144 if (!c1 || (c2 && c2->cand_num < c1->cand_num))
3145 *where = c2;
3146 else
3147 *where = c1;
3150 /* Otherwise, if one of them produced a candidate in the
3151 dominator, that one wins. */
3152 else if (bb1 == ncd)
3153 *where = c1;
3155 else if (bb2 == ncd)
3156 *where = c2;
3158 /* If neither matches the dominator, neither wins. */
3159 else
3160 *where = NULL;
3162 return ncd;
3165 /* Consider all candidates that feed PHI. Find the nearest common
3166 dominator of those candidates requiring the given increment INCR.
3167 Further find and return the nearest common dominator of this result
3168 with block NCD. If the returned block contains one or more of the
3169 candidates, return the earliest candidate in the block in *WHERE. */
3171 static basic_block
3172 ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
3173 basic_block ncd, slsr_cand_t *where)
3175 unsigned i;
3176 slsr_cand_t basis = lookup_cand (c->basis);
3177 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3179 for (i = 0; i < gimple_phi_num_args (phi); i++)
3181 tree arg = gimple_phi_arg_def (phi, i);
3183 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3185 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3187 if (gimple_code (arg_def) == GIMPLE_PHI)
3188 ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
3189 where);
3190 else
3192 slsr_cand_t arg_cand = base_cand_from_table (arg);
3193 widest_int diff = arg_cand->index - basis->index;
3194 basic_block pred = gimple_phi_arg_edge (phi, i)->src;
3196 if ((incr == diff) || (!address_arithmetic_p && incr == -diff))
3197 ncd = ncd_for_two_cands (ncd, pred, *where, NULL, where);
3202 return ncd;
3205 /* Consider the candidate C together with any candidates that feed
3206 C's phi dependence (if any). Find and return the nearest common
3207 dominator of those candidates requiring the given increment INCR.
3208 If the returned block contains one or more of the candidates,
3209 return the earliest candidate in the block in *WHERE. */
3211 static basic_block
3212 ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
3214 basic_block ncd = NULL;
3216 if (cand_abs_increment (c) == incr)
3218 ncd = gimple_bb (c->cand_stmt);
3219 *where = c;
3222 if (phi_dependent_cand_p (c))
3223 ncd = ncd_with_phi (c, incr,
3224 as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
3225 ncd, where);
3227 return ncd;
3230 /* Consider all candidates in the tree rooted at C for which INCR
3231 represents the required increment of C relative to its basis.
3232 Find and return the basic block that most nearly dominates all
3233 such candidates. If the returned block contains one or more of
3234 the candidates, return the earliest candidate in the block in
3235 *WHERE. */
3237 static basic_block
3238 nearest_common_dominator_for_cands (slsr_cand_t c, const widest_int &incr,
3239 slsr_cand_t *where)
3241 basic_block sib_ncd = NULL, dep_ncd = NULL, this_ncd = NULL, ncd;
3242 slsr_cand_t sib_where = NULL, dep_where = NULL, this_where = NULL, new_where;
3244 /* First find the NCD of all siblings and dependents. */
3245 if (c->sibling)
3246 sib_ncd = nearest_common_dominator_for_cands (lookup_cand (c->sibling),
3247 incr, &sib_where);
3248 if (c->dependent)
3249 dep_ncd = nearest_common_dominator_for_cands (lookup_cand (c->dependent),
3250 incr, &dep_where);
3251 if (!sib_ncd && !dep_ncd)
3253 new_where = NULL;
3254 ncd = NULL;
3256 else if (sib_ncd && !dep_ncd)
3258 new_where = sib_where;
3259 ncd = sib_ncd;
3261 else if (dep_ncd && !sib_ncd)
3263 new_where = dep_where;
3264 ncd = dep_ncd;
3266 else
3267 ncd = ncd_for_two_cands (sib_ncd, dep_ncd, sib_where,
3268 dep_where, &new_where);
3270 /* If the candidate's increment doesn't match the one we're interested
3271 in (nor do any increments for feeding defs of a phi-dependence),
3272 then the result depends only on siblings and dependents. */
3273 this_ncd = ncd_of_cand_and_phis (c, incr, &this_where);
3275 if (!this_ncd || cand_already_replaced (c))
3277 *where = new_where;
3278 return ncd;
3281 /* Otherwise, compare this candidate with the result from all siblings
3282 and dependents. */
3283 ncd = ncd_for_two_cands (ncd, this_ncd, new_where, this_where, where);
3285 return ncd;
3288 /* Return TRUE if the increment indexed by INDEX is profitable to replace. */
3290 static inline bool
3291 profitable_increment_p (unsigned index)
3293 return (incr_vec[index].cost <= COST_NEUTRAL);
3296 /* For each profitable increment in the increment vector not equal to
3297 0 or 1 (or -1, for non-pointer arithmetic), find the nearest common
3298 dominator of all statements in the candidate chain rooted at C
3299 that require that increment, and insert an initializer
3300 T_0 = stride * increment at that location. Record T_0 with the
3301 increment record. */
3303 static void
3304 insert_initializers (slsr_cand_t c)
3306 unsigned i;
3308 for (i = 0; i < incr_vec_len; i++)
3310 basic_block bb;
3311 slsr_cand_t where = NULL;
3312 gassign *init_stmt;
3313 gassign *cast_stmt = NULL;
3314 tree new_name, incr_tree, init_stride;
3315 widest_int incr = incr_vec[i].incr;
3317 if (!profitable_increment_p (i)
3318 || incr == 1
3319 || (incr == -1
3320 && (!POINTER_TYPE_P (lookup_cand (c->basis)->cand_type)))
3321 || incr == 0)
3322 continue;
3324 /* We may have already identified an existing initializer that
3325 will suffice. */
3326 if (incr_vec[i].initializer)
3328 if (dump_file && (dump_flags & TDF_DETAILS))
3330 fputs ("Using existing initializer: ", dump_file);
3331 print_gimple_stmt (dump_file,
3332 SSA_NAME_DEF_STMT (incr_vec[i].initializer),
3333 0, 0);
3335 continue;
3338 /* Find the block that most closely dominates all candidates
3339 with this increment. If there is at least one candidate in
3340 that block, the earliest one will be returned in WHERE. */
3341 bb = nearest_common_dominator_for_cands (c, incr, &where);
3343 /* If the NCD is not dominated by the block containing the
3344 definition of the stride, we can't legally insert a
3345 single initializer. Mark the increment as unprofitable
3346 so we don't make any replacements. FIXME: Multiple
3347 initializers could be placed with more analysis. */
3348 gimple *stride_def = SSA_NAME_DEF_STMT (c->stride);
3349 basic_block stride_bb = gimple_bb (stride_def);
3351 if (stride_bb && !dominated_by_p (CDI_DOMINATORS, bb, stride_bb))
3353 if (dump_file && (dump_flags & TDF_DETAILS))
3354 fprintf (dump_file,
3355 "Initializer #%d cannot be legally placed\n", i);
3356 incr_vec[i].cost = COST_INFINITE;
3357 continue;
3360 /* If the nominal stride has a different type than the recorded
3361 stride type, build a cast from the nominal stride to that type. */
3362 if (!types_compatible_p (TREE_TYPE (c->stride), c->stride_type))
3364 init_stride = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3365 cast_stmt = gimple_build_assign (init_stride, NOP_EXPR, c->stride);
3367 else
3368 init_stride = c->stride;
3370 /* Create a new SSA name to hold the initializer's value. */
3371 new_name = make_temp_ssa_name (c->stride_type, NULL, "slsr");
3372 incr_vec[i].initializer = new_name;
3374 /* Create the initializer and insert it in the latest possible
3375 dominating position. */
3376 incr_tree = wide_int_to_tree (c->stride_type, incr);
3377 init_stmt = gimple_build_assign (new_name, MULT_EXPR,
3378 init_stride, incr_tree);
3379 if (where)
3381 gimple_stmt_iterator gsi = gsi_for_stmt (where->cand_stmt);
3382 location_t loc = gimple_location (where->cand_stmt);
3384 if (cast_stmt)
3386 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3387 gimple_set_location (cast_stmt, loc);
3390 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3391 gimple_set_location (init_stmt, loc);
3393 else
3395 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3396 gimple *basis_stmt = lookup_cand (c->basis)->cand_stmt;
3397 location_t loc = gimple_location (basis_stmt);
3399 if (!gsi_end_p (gsi) && stmt_ends_bb_p (gsi_stmt (gsi)))
3401 if (cast_stmt)
3403 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3404 gimple_set_location (cast_stmt, loc);
3406 gsi_insert_before (&gsi, init_stmt, GSI_SAME_STMT);
3408 else
3410 if (cast_stmt)
3412 gsi_insert_after (&gsi, cast_stmt, GSI_NEW_STMT);
3413 gimple_set_location (cast_stmt, loc);
3415 gsi_insert_after (&gsi, init_stmt, GSI_SAME_STMT);
3418 gimple_set_location (init_stmt, gimple_location (basis_stmt));
3421 if (dump_file && (dump_flags & TDF_DETAILS))
3423 if (cast_stmt)
3425 fputs ("Inserting stride cast: ", dump_file);
3426 print_gimple_stmt (dump_file, cast_stmt, 0);
3428 fputs ("Inserting initializer: ", dump_file);
3429 print_gimple_stmt (dump_file, init_stmt, 0);
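/* Illustrative sketch, not part of this pass: an inserted initializer at
   the source level, with hypothetical names and an SSA-name stride s.  A
   single t0 = s * 5 placed at the nearest common dominator serves every
   candidate requiring increment 5, each of which then becomes an add or
   subtract off its basis.  */
static long
slsr_inserted_init_sketch (long b, long s, int cond)
{
  long y = b * s;			/* basis                           */
  long t0 = s * 5;			/* inserted initializer            */
  long u = cond ? y + t0 : y - t0;	/* was (b + 5) * s or (b - 5) * s  */
  return u;
}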
3434 /* Recursive helper function for all_phi_incrs_profitable. */
3436 static bool
3437 all_phi_incrs_profitable_1 (slsr_cand_t c, gphi *phi, int *spread)
3439 unsigned i;
3440 slsr_cand_t basis = lookup_cand (c->basis);
3441 slsr_cand_t phi_cand = *stmt_cand_map->get (phi);
3443 if (phi_cand->visited)
3444 return true;
3446 phi_cand->visited = 1;
3447 (*spread)++;
3449 /* If the basis doesn't dominate the PHI (including when the PHI is
3450 in the same block as the basis), we won't be able to create a PHI
3451 using the basis here. */
3452 basic_block basis_bb = gimple_bb (basis->cand_stmt);
3453 basic_block phi_bb = gimple_bb (phi);
3455 if (phi_bb == basis_bb
3456 || !dominated_by_p (CDI_DOMINATORS, phi_bb, basis_bb))
3457 return false;
3459 for (i = 0; i < gimple_phi_num_args (phi); i++)
3461 /* If the PHI arg resides in a block not dominated by the basis,
3462 we won't be able to create a PHI using the basis here. */
3463 basic_block pred_bb = gimple_phi_arg_edge (phi, i)->src;
3465 if (!dominated_by_p (CDI_DOMINATORS, pred_bb, basis_bb))
3466 return false;
3468 tree arg = gimple_phi_arg_def (phi, i);
3470 if (!operand_equal_p (arg, phi_cand->base_expr, 0))
3472 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
3474 if (gimple_code (arg_def) == GIMPLE_PHI)
3476 if (!all_phi_incrs_profitable_1 (c, as_a <gphi *> (arg_def),
3477 spread)
3478 || *spread > MAX_SPREAD)
3479 return false;
3481 else
3483 int j;
3484 slsr_cand_t arg_cand = base_cand_from_table (arg);
3485 widest_int increment = arg_cand->index - basis->index;
3487 if (!address_arithmetic_p && wi::neg_p (increment))
3488 increment = -increment;
3490 j = incr_vec_index (increment);
3492 if (dump_file && (dump_flags & TDF_DETAILS))
3494 fprintf (dump_file, " Conditional candidate %d, phi: ",
3495 c->cand_num);
3496 print_gimple_stmt (dump_file, phi, 0);
3497 fputs (" increment: ", dump_file);
3498 print_decs (increment, dump_file);
3499 if (j < 0)
3500 fprintf (dump_file,
3501 "\n Not replaced; incr_vec overflow.\n");
3502 else {
3503 fprintf (dump_file, "\n cost: %d\n", incr_vec[j].cost);
3504 if (profitable_increment_p (j))
3505 fputs (" Replacing...\n", dump_file);
3506 else
3507 fputs (" Not replaced.\n", dump_file);
3511 if (j < 0 || !profitable_increment_p (j))
3512 return false;
3517 return true;
3520 /* Return TRUE iff all required increments for candidates feeding PHI
3521 are profitable (and legal!) to replace on behalf of candidate C. */
3523 static bool
3524 all_phi_incrs_profitable (slsr_cand_t c, gphi *phi)
3526 int spread = 0;
3527 bool retval = all_phi_incrs_profitable_1 (c, phi, &spread);
3528 clear_visited (phi);
3529 return retval;
3532 /* Create a NOP_EXPR that copies FROM_EXPR into a new SSA name of
3533 type TO_TYPE, and insert it in front of the statement represented
3534 by candidate C. Return the new SSA name. */
3537 static tree
3538 introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
3540 tree cast_lhs;
3541 gassign *cast_stmt;
3542 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3544 cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
3545 cast_stmt = gimple_build_assign (cast_lhs, NOP_EXPR, from_expr);
3546 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3547 gsi_insert_before (&gsi, cast_stmt, GSI_SAME_STMT);
3549 if (dump_file && (dump_flags & TDF_DETAILS))
3551 fputs (" Inserting: ", dump_file);
3552 print_gimple_stmt (dump_file, cast_stmt, 0);
3555 return cast_lhs;
3558 /* Replace the RHS of the statement represented by candidate C with
3559 NEW_CODE, NEW_RHS1, and NEW_RHS2, provided that to do so doesn't
3560 leave C unchanged or just interchange its operands. The original
3561 operation and operands are in OLD_CODE, OLD_RHS1, and OLD_RHS2.
3562 If the replacement was made and we are doing a details dump,
3563 return the revised statement, else NULL. */
3565 static gimple *
3566 replace_rhs_if_not_dup (enum tree_code new_code, tree new_rhs1, tree new_rhs2,
3567 enum tree_code old_code, tree old_rhs1, tree old_rhs2,
3568 slsr_cand_t c)
3570 if (new_code != old_code
3571 || ((!operand_equal_p (new_rhs1, old_rhs1, 0)
3572 || !operand_equal_p (new_rhs2, old_rhs2, 0))
3573 && (!operand_equal_p (new_rhs1, old_rhs2, 0)
3574 || !operand_equal_p (new_rhs2, old_rhs1, 0))))
3576 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3577 slsr_cand_t cc = c;
3578 gimple_assign_set_rhs_with_ops (&gsi, new_code, new_rhs1, new_rhs2);
3579 update_stmt (gsi_stmt (gsi));
3580 c->cand_stmt = gsi_stmt (gsi);
3581 while (cc->next_interp)
3583 cc = lookup_cand (cc->next_interp);
3584 cc->cand_stmt = gsi_stmt (gsi);
3587 if (dump_file && (dump_flags & TDF_DETAILS))
3588 return gsi_stmt (gsi);
3591 else if (dump_file && (dump_flags & TDF_DETAILS))
3592 fputs (" (duplicate, not actually replacing)\n", dump_file);
3594 return NULL;
3597 /* Strength-reduce the statement represented by candidate C by replacing
3598 it with an equivalent addition or subtraction. I is the index into
3599 the increment vector identifying C's increment. BASIS_NAME
3601 is the rhs1 to use in creating the add/subtract. */
3603 static void
3604 replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
3606 gimple *stmt_to_print = NULL;
3607 tree orig_rhs1, orig_rhs2;
3608 tree rhs2;
3609 enum tree_code orig_code, repl_code;
3610 widest_int cand_incr;
3612 orig_code = gimple_assign_rhs_code (c->cand_stmt);
3613 orig_rhs1 = gimple_assign_rhs1 (c->cand_stmt);
3614 orig_rhs2 = gimple_assign_rhs2 (c->cand_stmt);
3615 cand_incr = cand_increment (c);
3617 if (dump_file && (dump_flags & TDF_DETAILS))
3619 fputs ("Replacing: ", dump_file);
3620 print_gimple_stmt (dump_file, c->cand_stmt, 0);
3621 stmt_to_print = c->cand_stmt;
3624 if (address_arithmetic_p)
3625 repl_code = POINTER_PLUS_EXPR;
3626 else
3627 repl_code = PLUS_EXPR;
3629 /* If the increment has an initializer T_0, replace the candidate
3630 statement with an add of the basis name and the initializer. */
3631 if (incr_vec[i].initializer)
3633 tree init_type = TREE_TYPE (incr_vec[i].initializer);
3634 tree orig_type = TREE_TYPE (orig_rhs2);
3636 if (types_compatible_p (orig_type, init_type))
3637 rhs2 = incr_vec[i].initializer;
3638 else
3639 rhs2 = introduce_cast_before_cand (c, orig_type,
3640 incr_vec[i].initializer);
3642 if (incr_vec[i].incr != cand_incr)
3644 gcc_assert (repl_code == PLUS_EXPR);
3645 repl_code = MINUS_EXPR;
3648 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3649 orig_code, orig_rhs1, orig_rhs2,
3653 /* Otherwise, the increment is one of -1, 0, and 1. Replace
3654 with a subtract of the stride from the basis name, a copy
3655 from the basis name, or an add of the stride to the basis
3656 name, respectively. It may be necessary to introduce a
3657 cast (or reuse an existing cast). */
3658 else if (cand_incr == 1)
3660 tree stride_type = TREE_TYPE (c->stride);
3661 tree orig_type = TREE_TYPE (orig_rhs2);
3663 if (types_compatible_p (orig_type, stride_type))
3664 rhs2 = c->stride;
3665 else
3666 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3668 stmt_to_print = replace_rhs_if_not_dup (repl_code, basis_name, rhs2,
3669 orig_code, orig_rhs1, orig_rhs2,
3673 else if (cand_incr == -1)
3675 tree stride_type = TREE_TYPE (c->stride);
3676 tree orig_type = TREE_TYPE (orig_rhs2);
3677 gcc_assert (repl_code != POINTER_PLUS_EXPR);
3679 if (types_compatible_p (orig_type, stride_type))
3680 rhs2 = c->stride;
3681 else
3682 rhs2 = introduce_cast_before_cand (c, orig_type, c->stride);
3684 if (orig_code != MINUS_EXPR
3685 || !operand_equal_p (basis_name, orig_rhs1, 0)
3686 || !operand_equal_p (rhs2, orig_rhs2, 0))
3688 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3689 slsr_cand_t cc = c;
3690 gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, basis_name, rhs2);
3691 update_stmt (gsi_stmt (gsi));
3692 c->cand_stmt = gsi_stmt (gsi);
3693 while (cc->next_interp)
3695 cc = lookup_cand (cc->next_interp);
3696 cc->cand_stmt = gsi_stmt (gsi);
3699 if (dump_file && (dump_flags & TDF_DETAILS))
3700 stmt_to_print = gsi_stmt (gsi);
3702 else if (dump_file && (dump_flags & TDF_DETAILS))
3703 fputs (" (duplicate, not actually replacing)\n", dump_file);
3706 else if (cand_incr == 0)
3708 tree lhs = gimple_assign_lhs (c->cand_stmt);
3709 tree lhs_type = TREE_TYPE (lhs);
3710 tree basis_type = TREE_TYPE (basis_name);
3712 if (types_compatible_p (lhs_type, basis_type))
3714 gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
3715 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3716 slsr_cand_t cc = c;
3717 gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
3718 gsi_replace (&gsi, copy_stmt, false);
3719 c->cand_stmt = copy_stmt;
3720 while (cc->next_interp)
3722 cc = lookup_cand (cc->next_interp);
3723 cc->cand_stmt = copy_stmt;
3726 if (dump_file && (dump_flags & TDF_DETAILS))
3727 stmt_to_print = copy_stmt;
3729 else
3731 gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
3732 gassign *cast_stmt = gimple_build_assign (lhs, NOP_EXPR, basis_name);
3733 slsr_cand_t cc = c;
3734 gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
3735 gsi_replace (&gsi, cast_stmt, false);
3736 c->cand_stmt = cast_stmt;
3737 while (cc->next_interp)
3739 cc = lookup_cand (cc->next_interp);
3740 cc->cand_stmt = cast_stmt;
3743 if (dump_file && (dump_flags & TDF_DETAILS))
3744 stmt_to_print = cast_stmt;
3747 else
3748 gcc_unreachable ();
3750 if (dump_file && (dump_flags & TDF_DETAILS) && stmt_to_print)
3752 fputs ("With: ", dump_file);
3753 print_gimple_stmt (dump_file, stmt_to_print, 0);
3754 fputs ("\n", dump_file);
3755 }
3756 }
3758 /* For each candidate in the tree rooted at C, replace its statement with
3759 a cheaper form based on its basis and increment, where shown profitable. */
3761 static void
3762 replace_profitable_candidates (slsr_cand_t c)
3763 {
3764 if (!cand_already_replaced (c))
3765 {
3766 widest_int increment = cand_abs_increment (c);
3767 enum tree_code orig_code = gimple_assign_rhs_code (c->cand_stmt);
3768 int i;
3770 i = incr_vec_index (increment);
3772 /* Only process profitable increments. Nothing useful can be done
3773 to a cast or copy. */
3774 if (i >= 0
3775 && profitable_increment_p (i)
3776 && orig_code != SSA_NAME
3777 && !CONVERT_EXPR_CODE_P (orig_code))
3778 {
3779 if (phi_dependent_cand_p (c))
3780 {
3781 gphi *phi = as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt);
3783 if (all_phi_incrs_profitable (c, phi))
3784 {
3785 /* Look up the LHS SSA name from C's basis. This will be
3786 the RHS1 of the adds we will introduce to create new
3787 phi arguments. */
3788 slsr_cand_t basis = lookup_cand (c->basis);
3789 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3791 /* Create a new phi statement that will represent C's true
3792 basis after the transformation is complete. */
3793 location_t loc = gimple_location (c->cand_stmt);
3794 tree name = create_phi_basis (c, phi, basis_name,
3795 loc, UNKNOWN_STRIDE);
3797 /* Replace C with an add of the new basis phi and the
3798 increment. */
3799 replace_one_candidate (c, i, name);
3800 }
3801 }
3802 else
3803 {
3804 slsr_cand_t basis = lookup_cand (c->basis);
3805 tree basis_name = gimple_assign_lhs (basis->cand_stmt);
3806 replace_one_candidate (c, i, basis_name);
3807 }
3808 }
3809 }
3811 if (c->sibling)
3812 replace_profitable_candidates (lookup_cand (c->sibling));
3814 if (c->dependent)
3815 replace_profitable_candidates (lookup_cand (c->dependent));
3816 }
3818 /* Analyze costs of related candidates in the candidate vector,
3819 and make beneficial replacements. */
3821 static void
3822 analyze_candidates_and_replace (void)
3823 {
3824 unsigned i;
3825 slsr_cand_t c;
3827 /* Each candidate that has a null basis and a non-null
3828 dependent is the root of a tree of related statements.
3829 Analyze each tree to determine a subset of those
3830 statements that can be replaced with maximum benefit. */
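/* A hypothetical tree, for illustration only: a root candidate
   x_1 = b_2 * s_3 (null basis) may have a dependent x_4 = (b_2 + 2) * s_3,
   which in turn may carry its own dependents and siblings sharing the
   same basis; the loop below costs and transforms each such tree as a
   unit.  */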
3831 FOR_EACH_VEC_ELT (cand_vec, i, c)
3832 {
3833 slsr_cand_t first_dep;
3835 if (c->basis != 0 || c->dependent == 0)
3836 continue;
3838 if (dump_file && (dump_flags & TDF_DETAILS))
3839 fprintf (dump_file, "\nProcessing dependency tree rooted at %d.\n",
3840 c->cand_num);
3842 first_dep = lookup_cand (c->dependent);
3844 /* If this is a chain of CAND_REFs, unconditionally replace
3845 each of them with a strength-reduced data reference. */
3846 if (c->kind == CAND_REF)
3847 replace_refs (c);
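/* Sketch of the CAND_REF case (illustrative source, hypothetical names):
   references such as a[i], a[i + 1], and a[i + 2] that share a base and
   stride can be rewritten so the implicit multiply in the addressing
   arithmetic is computed once, with the later references expressed as
   fixed offsets from that strength-reduced address.  */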
3849 /* If the common stride of all related candidates is a known
3850 constant, each candidate without a phi-dependence can be
3851 profitably replaced. Each replaces a multiply by a single
3852 add, with the possibility that a feeding add also goes dead.
3853 A candidate with a phi-dependence is replaced only if the
3854 compensation code it requires is offset by the strength
3855 reduction savings. */
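/* Sketch of the constant-stride case (hypothetical GIMPLE):

     x_1 = b_2 * 4;
     ...
     t_3 = b_2 + 1;
     x_4 = t_3 * 4;

   Here x_4 has basis x_1 with increment 1, so it is replaced by
   x_4 = x_1 + 4; if t_3 then has no remaining uses, the feeding add
   t_3 = b_2 + 1 goes dead and is cleaned up later.  */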
3856 else if (TREE_CODE (c->stride) == INTEGER_CST)
3857 replace_uncond_cands_and_profitable_phis (first_dep);
3859 /* When the stride is an SSA name, it may still be profitable
3860 to replace some or all of the dependent candidates, depending
3861 on whether the introduced increments can be reused, or are
3862 less expensive to calculate than the replaced statements. */
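/* Sketch with an unknown stride s_5 (hypothetical GIMPLE): for basis
   x_1 = b_2 * s_5 and candidates x_3 = (b_2 + 2) * s_5 and
   x_4 = (b_2 + 4) * s_5, with x_3 dominating x_4, both replacements
   need the value 2 * s_5.  A single initializer t_0 = 2 * s_5 can be
   inserted and reused, giving x_3 = x_1 + t_0 and x_4 = x_3 + t_0;
   whether that pays off depends on the cost of the one multiply for
   t_0 against the multiplies removed.  */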
3863 else
3864 {
3865 machine_mode mode;
3866 bool speed;
3868 /* Determine whether we'll be generating pointer arithmetic
3869 when replacing candidates. */
3870 address_arithmetic_p = (c->kind == CAND_ADD
3871 && POINTER_TYPE_P (c->cand_type));
3873 /* If all candidates have already been replaced under other
3874 interpretations, nothing remains to be done. */
3875 if (!count_candidates (c))
3876 continue;
3878 /* Construct an array of increments for this candidate chain. */
3879 incr_vec = XNEWVEC (incr_info, MAX_INCR_VEC_LEN);
3880 incr_vec_len = 0;
3881 record_increments (c);
3883 /* Determine which increments are profitable to replace. */
3884 mode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (c->cand_stmt)));
3885 speed = optimize_cands_for_speed_p (c);
3886 analyze_increments (first_dep, mode, speed);
3888 /* Insert initializers of the form T_0 = stride * increment
3889 for use in profitable replacements. */
3890 insert_initializers (first_dep);
3891 dump_incr_vec ();
3893 /* Perform the replacements. */
3894 replace_profitable_candidates (first_dep);
3895 free (incr_vec);
3896 }
3897 }
3899 /* For conditional candidates, we may have uncommitted insertions
3900 on edges to clean up. */
3901 gsi_commit_edge_inserts ();
3902 }
3904 namespace {
3906 const pass_data pass_data_strength_reduction =
3907 {
3908 GIMPLE_PASS, /* type */
3909 "slsr", /* name */
3910 OPTGROUP_NONE, /* optinfo_flags */
3911 TV_GIMPLE_SLSR, /* tv_id */
3912 ( PROP_cfg | PROP_ssa ), /* properties_required */
3913 0, /* properties_provided */
3914 0, /* properties_destroyed */
3915 0, /* todo_flags_start */
3916 0, /* todo_flags_finish */
3917 };
3919 class pass_strength_reduction : public gimple_opt_pass
3920 {
3921 public:
3922 pass_strength_reduction (gcc::context *ctxt)
3923 : gimple_opt_pass (pass_data_strength_reduction, ctxt)
3924 {}
3926 /* opt_pass methods: */
3927 virtual bool gate (function *) { return flag_tree_slsr; }
3928 virtual unsigned int execute (function *);
3930 }; // class pass_strength_reduction
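/* Usage note (not part of the pass itself): the gate above means the
   pass runs only when -ftree-slsr is in effect, which the option
   machinery turns on by default once optimization is enabled.  Its
   decisions can be inspected with a detailed dump, for example

     gcc -O2 -fdump-tree-slsr-details foo.c

   where foo.c is any test input; the resulting .slsr dump shows the
   candidate table, the increment analysis, and each replacement.  */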
3932 unsigned
3933 pass_strength_reduction::execute (function *fun)
3934 {
3935 /* Create the obstack where candidates will reside. */
3936 gcc_obstack_init (&cand_obstack);
3938 /* Allocate the candidate vector. */
3939 cand_vec.create (128);
3941 /* Allocate the mapping from statements to candidate indices. */
3942 stmt_cand_map = new hash_map<gimple *, slsr_cand_t>;
3944 /* Create the obstack where candidate chains will reside. */
3945 gcc_obstack_init (&chain_obstack);
3947 /* Allocate the mapping from base expressions to candidate chains. */
3948 base_cand_map = new hash_table<cand_chain_hasher> (500);
3950 /* Allocate the mapping from bases to alternative bases. */
3951 alt_base_map = new hash_map<tree, tree>;
3953 /* Initialize the loop optimizer. We need to detect flow across
3954 back edges, and this gives us dominator information as well. */
3955 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
3957 /* Walk the CFG in predominator order looking for strength reduction
3958 candidates. */
3959 find_candidates_dom_walker (CDI_DOMINATORS)
3960 .walk (fun->cfg->x_entry_block_ptr);
3962 if (dump_file && (dump_flags & TDF_DETAILS))
3963 {
3964 dump_cand_vec ();
3965 dump_cand_chains ();
3966 }
3968 delete alt_base_map;
3969 free_affine_expand_cache (&name_expansions);
3971 /* Analyze costs and make appropriate replacements. */
3972 analyze_candidates_and_replace ();
3974 loop_optimizer_finalize ();
3975 delete base_cand_map;
3976 base_cand_map = NULL;
3977 obstack_free (&chain_obstack, NULL);
3978 delete stmt_cand_map;
3979 cand_vec.release ();
3980 obstack_free (&cand_obstack, NULL);
3982 return 0;
3983 }
3985 } // anon namespace
3987 gimple_opt_pass *
3988 make_pass_strength_reduction (gcc::context *ctxt)
3989 {
3990 return new pass_strength_reduction (ctxt);
3991 }
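/* End-to-end sketch (illustrative only; the exact output depends on
   target costs and on what earlier passes have already done):

     void
     f (int *p, int n, int s)
     {
       p[0] = n * s;
       p[1] = (n + 1) * s;
       p[2] = (n + 2) * s;
     }

   Here (n + 1) * s and (n + 2) * s are candidates whose bases dominate
   them, so the pass can rewrite each later product as the previous
   product plus s, leaving a single multiply.  */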