Fix PR42930.
[official-gcc/constexpr.git] / gcc / tree-ssa-math-opts.c
blobc46a57fa04829e7140bf03b62f2240d7baf76772
1 /* Global, SSA-based optimizations using mathematical identities.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Currently, the only mini-pass in this file tries to CSE reciprocal
21 operations. These are common in sequences such as this one:
23 modulus = sqrt(x*x + y*y + z*z);
24 x = x / modulus;
25 y = y / modulus;
26 z = z / modulus;
28 that can be optimized to
30 modulus = sqrt(x*x + y*y + z*z);
31 rmodulus = 1.0 / modulus;
32 x = x * rmodulus;
33 y = y * rmodulus;
34 z = z * rmodulus;
36 We do this for loop invariant divisors, and with this pass whenever
37 we notice that a division has the same divisor multiple times.
39 Of course, like in PRE, we don't insert a division if a dominator
40 already has one. However, this cannot be done as an extension of
41 PRE for several reasons.
43 First of all, with some experiments it was found out that the
44 transformation is not always useful if there are only two divisions
45 by the same divisor. This is probably because modern processors
46 can pipeline the divisions; on older, in-order processors it should
47 still be effective to optimize two divisions by the same number.
48 We make this a param, and it shall be called N in the remainder of
49 this comment.
51 Second, if trapping math is active, we have less freedom on where
52 to insert divisions: we can only do so in basic blocks that already
53 contain one. (If divisions don't trap, instead, we can insert
54 divisions elsewhere, which will be in blocks that are common dominators
55 of those that have the division).
57 We really don't want to compute the reciprocal unless a division will
58 be found. To do this, we won't insert the division in a basic block
59 that has less than N divisions *post-dominating* it.
61 The algorithm constructs a subset of the dominator tree, holding the
62 blocks containing the divisions and the common dominators to them,
63 and walk it twice. The first walk is in post-order, and it annotates
64 each block with the number of divisions that post-dominate it: this
65 gives information on where divisions can be inserted profitably.
66 The second walk is in pre-order, and it inserts divisions as explained
67 above, and replaces divisions by multiplications.
69 In the best case, the cost of the pass is O(n_statements). In the
70 worst-case, the cost is due to creating the dominator tree subset,
71 with a cost of O(n_basic_blocks ^ 2); however this can only happen
72 for n_statements / n_basic_blocks statements. So, the amortized cost
73 of creating the dominator tree subset is O(n_basic_blocks) and the
74 worst-case cost of the pass is O(n_statements * n_basic_blocks).
76 More practically, the cost will be small because there are few
77 divisions, and they tend to be in the same basic block, so insert_bb
78 is called very few times.
80 If we did this using domwalk.c, an efficient implementation would have
81 to work on all the variables in a single pass, because we could not
82 work on just a subset of the dominator tree, as we do now, and the
83 cost would also be something like O(n_statements * n_basic_blocks).
84 The data structures would be more complex in order to work on all the
85 variables in a single pass. */
87 #include "config.h"
88 #include "system.h"
89 #include "coretypes.h"
90 #include "tm.h"
91 #include "flags.h"
92 #include "tree.h"
93 #include "tree-flow.h"
94 #include "real.h"
95 #include "timevar.h"
96 #include "tree-pass.h"
97 #include "alloc-pool.h"
98 #include "basic-block.h"
99 #include "target.h"
100 #include "diagnostic.h"
101 #include "rtl.h"
102 #include "expr.h"
103 #include "optabs.h"
105 /* This structure represents one basic block that either computes a
106 division, or is a common dominator for basic block that compute a
107 division. */
108 struct occurrence {
109 /* The basic block represented by this structure. */
110 basic_block bb;
112 /* If non-NULL, the SSA_NAME holding the definition for a reciprocal
113 inserted in BB. */
114 tree recip_def;
116 /* If non-NULL, the GIMPLE_ASSIGN for a reciprocal computation that
117 was inserted in BB. */
118 gimple recip_def_stmt;
120 /* Pointer to a list of "struct occurrence"s for blocks dominated
121 by BB. */
122 struct occurrence *children;
124 /* Pointer to the next "struct occurrence"s in the list of blocks
125 sharing a common dominator. */
126 struct occurrence *next;
128 /* The number of divisions that are in BB before compute_merit. The
129 number of divisions that are in BB or post-dominate it after
130 compute_merit. */
131 int num_divisions;
133 /* True if the basic block has a division, false if it is a common
134 dominator for basic blocks that do. If it is false and trapping
135 math is active, BB is not a candidate for inserting a reciprocal. */
136 bool bb_has_division;
140 /* The instance of "struct occurrence" representing the highest
141 interesting block in the dominator tree. */
142 static struct occurrence *occ_head;
144 /* Allocation pool for getting instances of "struct occurrence". */
145 static alloc_pool occ_pool;
149 /* Allocate and return a new struct occurrence for basic block BB, and
150 whose children list is headed by CHILDREN. */
151 static struct occurrence *
152 occ_new (basic_block bb, struct occurrence *children)
154 struct occurrence *occ;
156 bb->aux = occ = (struct occurrence *) pool_alloc (occ_pool);
157 memset (occ, 0, sizeof (struct occurrence));
159 occ->bb = bb;
160 occ->children = children;
161 return occ;
165 /* Insert NEW_OCC into our subset of the dominator tree. P_HEAD points to a
166 list of "struct occurrence"s, one per basic block, having IDOM as
167 their common dominator.
169 We try to insert NEW_OCC as deep as possible in the tree, and we also
170 insert any other block that is a common dominator for BB and one
171 block already in the tree. */
173 static void
174 insert_bb (struct occurrence *new_occ, basic_block idom,
175 struct occurrence **p_head)
177 struct occurrence *occ, **p_occ;
179 for (p_occ = p_head; (occ = *p_occ) != NULL; )
181 basic_block bb = new_occ->bb, occ_bb = occ->bb;
182 basic_block dom = nearest_common_dominator (CDI_DOMINATORS, occ_bb, bb);
183 if (dom == bb)
185 /* BB dominates OCC_BB. OCC becomes NEW_OCC's child: remove OCC
186 from its list. */
187 *p_occ = occ->next;
188 occ->next = new_occ->children;
189 new_occ->children = occ;
191 /* Try the next block (it may as well be dominated by BB). */
194 else if (dom == occ_bb)
196 /* OCC_BB dominates BB. Tail recurse to look deeper. */
197 insert_bb (new_occ, dom, &occ->children);
198 return;
201 else if (dom != idom)
203 gcc_assert (!dom->aux);
205 /* There is a dominator between IDOM and BB, add it and make
206 two children out of NEW_OCC and OCC. First, remove OCC from
207 its list. */
208 *p_occ = occ->next;
209 new_occ->next = occ;
210 occ->next = NULL;
212 /* None of the previous blocks has DOM as a dominator: if we tail
213 recursed, we would reexamine them uselessly. Just switch BB with
214 DOM, and go on looking for blocks dominated by DOM. */
215 new_occ = occ_new (dom, new_occ);
218 else
220 /* Nothing special, go on with the next element. */
221 p_occ = &occ->next;
225 /* No place was found as a child of IDOM. Make BB a sibling of IDOM. */
226 new_occ->next = *p_head;
227 *p_head = new_occ;
230 /* Register that we found a division in BB. */
232 static inline void
233 register_division_in (basic_block bb)
235 struct occurrence *occ;
237 occ = (struct occurrence *) bb->aux;
238 if (!occ)
240 occ = occ_new (bb, NULL);
241 insert_bb (occ, ENTRY_BLOCK_PTR, &occ_head);
244 occ->bb_has_division = true;
245 occ->num_divisions++;
249 /* Compute the number of divisions that postdominate each block in OCC and
250 its children. */
252 static void
253 compute_merit (struct occurrence *occ)
255 struct occurrence *occ_child;
256 basic_block dom = occ->bb;
258 for (occ_child = occ->children; occ_child; occ_child = occ_child->next)
260 basic_block bb;
261 if (occ_child->children)
262 compute_merit (occ_child);
264 if (flag_exceptions)
265 bb = single_noncomplex_succ (dom);
266 else
267 bb = dom;
269 if (dominated_by_p (CDI_POST_DOMINATORS, bb, occ_child->bb))
270 occ->num_divisions += occ_child->num_divisions;
275 /* Return whether USE_STMT is a floating-point division by DEF. */
276 static inline bool
277 is_division_by (gimple use_stmt, tree def)
279 return is_gimple_assign (use_stmt)
280 && gimple_assign_rhs_code (use_stmt) == RDIV_EXPR
281 && gimple_assign_rhs2 (use_stmt) == def
282 /* Do not recognize x / x as valid division, as we are getting
283 confused later by replacing all immediate uses x in such
284 a stmt. */
285 && gimple_assign_rhs1 (use_stmt) != def;
288 /* Walk the subset of the dominator tree rooted at OCC, setting the
289 RECIP_DEF field to a definition of 1.0 / DEF that can be used in
290 the given basic block. The field may be left NULL, of course,
291 if it is not possible or profitable to do the optimization.
293 DEF_BSI is an iterator pointing at the statement defining DEF.
294 If RECIP_DEF is set, a dominator already has a computation that can
295 be used. */
297 static void
298 insert_reciprocals (gimple_stmt_iterator *def_gsi, struct occurrence *occ,
299 tree def, tree recip_def, int threshold)
301 tree type;
302 gimple new_stmt;
303 gimple_stmt_iterator gsi;
304 struct occurrence *occ_child;
306 if (!recip_def
307 && (occ->bb_has_division || !flag_trapping_math)
308 && occ->num_divisions >= threshold)
310 /* Make a variable with the replacement and substitute it. */
311 type = TREE_TYPE (def);
312 recip_def = make_rename_temp (type, "reciptmp");
313 new_stmt = gimple_build_assign_with_ops (RDIV_EXPR, recip_def,
314 build_one_cst (type), def);
316 if (occ->bb_has_division)
318 /* Case 1: insert before an existing division. */
319 gsi = gsi_after_labels (occ->bb);
320 while (!gsi_end_p (gsi) && !is_division_by (gsi_stmt (gsi), def))
321 gsi_next (&gsi);
323 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
325 else if (def_gsi && occ->bb == def_gsi->bb)
327 /* Case 2: insert right after the definition. Note that this will
328 never happen if the definition statement can throw, because in
329 that case the sole successor of the statement's basic block will
330 dominate all the uses as well. */
331 gsi_insert_after (def_gsi, new_stmt, GSI_NEW_STMT);
333 else
335 /* Case 3: insert in a basic block not containing defs/uses. */
336 gsi = gsi_after_labels (occ->bb);
337 gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
340 occ->recip_def_stmt = new_stmt;
343 occ->recip_def = recip_def;
344 for (occ_child = occ->children; occ_child; occ_child = occ_child->next)
345 insert_reciprocals (def_gsi, occ_child, def, recip_def, threshold);
349 /* Replace the division at USE_P with a multiplication by the reciprocal, if
350 possible. */
352 static inline void
353 replace_reciprocal (use_operand_p use_p)
355 gimple use_stmt = USE_STMT (use_p);
356 basic_block bb = gimple_bb (use_stmt);
357 struct occurrence *occ = (struct occurrence *) bb->aux;
359 if (optimize_bb_for_speed_p (bb)
360 && occ->recip_def && use_stmt != occ->recip_def_stmt)
362 gimple_assign_set_rhs_code (use_stmt, MULT_EXPR);
363 SET_USE (use_p, occ->recip_def);
364 fold_stmt_inplace (use_stmt);
365 update_stmt (use_stmt);
370 /* Free OCC and return one more "struct occurrence" to be freed. */
372 static struct occurrence *
373 free_bb (struct occurrence *occ)
375 struct occurrence *child, *next;
377 /* First get the two pointers hanging off OCC. */
378 next = occ->next;
379 child = occ->children;
380 occ->bb->aux = NULL;
381 pool_free (occ_pool, occ);
383 /* Now ensure that we don't recurse unless it is necessary. */
384 if (!child)
385 return next;
386 else
388 while (next)
389 next = free_bb (next);
391 return child;
396 /* Look for floating-point divisions among DEF's uses, and try to
397 replace them by multiplications with the reciprocal. Add
398 as many statements computing the reciprocal as needed.
400 DEF must be a GIMPLE register of a floating-point type. */
402 static void
403 execute_cse_reciprocals_1 (gimple_stmt_iterator *def_gsi, tree def)
405 use_operand_p use_p;
406 imm_use_iterator use_iter;
407 struct occurrence *occ;
408 int count = 0, threshold;
410 gcc_assert (FLOAT_TYPE_P (TREE_TYPE (def)) && is_gimple_reg (def));
412 FOR_EACH_IMM_USE_FAST (use_p, use_iter, def)
414 gimple use_stmt = USE_STMT (use_p);
415 if (is_division_by (use_stmt, def))
417 register_division_in (gimple_bb (use_stmt));
418 count++;
422 /* Do the expensive part only if we can hope to optimize something. */
423 threshold = targetm.min_divisions_for_recip_mul (TYPE_MODE (TREE_TYPE (def)));
424 if (count >= threshold)
426 gimple use_stmt;
427 for (occ = occ_head; occ; occ = occ->next)
429 compute_merit (occ);
430 insert_reciprocals (def_gsi, occ, def, NULL, threshold);
433 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
435 if (is_division_by (use_stmt, def))
437 FOR_EACH_IMM_USE_ON_STMT (use_p, use_iter)
438 replace_reciprocal (use_p);
443 for (occ = occ_head; occ; )
444 occ = free_bb (occ);
446 occ_head = NULL;
449 static bool
450 gate_cse_reciprocals (void)
452 return optimize && flag_reciprocal_math;
455 /* Go through all the floating-point SSA_NAMEs, and call
456 execute_cse_reciprocals_1 on each of them. */
457 static unsigned int
458 execute_cse_reciprocals (void)
460 basic_block bb;
461 tree arg;
463 occ_pool = create_alloc_pool ("dominators for recip",
464 sizeof (struct occurrence),
465 n_basic_blocks / 3 + 1);
467 calculate_dominance_info (CDI_DOMINATORS);
468 calculate_dominance_info (CDI_POST_DOMINATORS);
470 #ifdef ENABLE_CHECKING
471 FOR_EACH_BB (bb)
472 gcc_assert (!bb->aux);
473 #endif
475 for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = TREE_CHAIN (arg))
476 if (gimple_default_def (cfun, arg)
477 && FLOAT_TYPE_P (TREE_TYPE (arg))
478 && is_gimple_reg (arg))
479 execute_cse_reciprocals_1 (NULL, gimple_default_def (cfun, arg));
481 FOR_EACH_BB (bb)
483 gimple_stmt_iterator gsi;
484 gimple phi;
485 tree def;
487 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
489 phi = gsi_stmt (gsi);
490 def = PHI_RESULT (phi);
491 if (FLOAT_TYPE_P (TREE_TYPE (def))
492 && is_gimple_reg (def))
493 execute_cse_reciprocals_1 (NULL, def);
496 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
498 gimple stmt = gsi_stmt (gsi);
500 if (gimple_has_lhs (stmt)
501 && (def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF)) != NULL
502 && FLOAT_TYPE_P (TREE_TYPE (def))
503 && TREE_CODE (def) == SSA_NAME)
504 execute_cse_reciprocals_1 (&gsi, def);
507 if (optimize_bb_for_size_p (bb))
508 continue;
510 /* Scan for a/func(b) and convert it to reciprocal a*rfunc(b). */
511 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
513 gimple stmt = gsi_stmt (gsi);
514 tree fndecl;
516 if (is_gimple_assign (stmt)
517 && gimple_assign_rhs_code (stmt) == RDIV_EXPR)
519 tree arg1 = gimple_assign_rhs2 (stmt);
520 gimple stmt1;
522 if (TREE_CODE (arg1) != SSA_NAME)
523 continue;
525 stmt1 = SSA_NAME_DEF_STMT (arg1);
527 if (is_gimple_call (stmt1)
528 && gimple_call_lhs (stmt1)
529 && (fndecl = gimple_call_fndecl (stmt1))
530 && (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
531 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD))
533 enum built_in_function code;
534 bool md_code, fail;
535 imm_use_iterator ui;
536 use_operand_p use_p;
538 code = DECL_FUNCTION_CODE (fndecl);
539 md_code = DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD;
541 fndecl = targetm.builtin_reciprocal (code, md_code, false);
542 if (!fndecl)
543 continue;
545 /* Check that all uses of the SSA name are divisions,
546 otherwise replacing the defining statement will do
547 the wrong thing. */
548 fail = false;
549 FOR_EACH_IMM_USE_FAST (use_p, ui, arg1)
551 gimple stmt2 = USE_STMT (use_p);
552 if (is_gimple_debug (stmt2))
553 continue;
554 if (!is_gimple_assign (stmt2)
555 || gimple_assign_rhs_code (stmt2) != RDIV_EXPR
556 || gimple_assign_rhs1 (stmt2) == arg1
557 || gimple_assign_rhs2 (stmt2) != arg1)
559 fail = true;
560 break;
563 if (fail)
564 continue;
566 gimple_replace_lhs (stmt1, arg1);
567 gimple_call_set_fndecl (stmt1, fndecl);
568 update_stmt (stmt1);
570 FOR_EACH_IMM_USE_STMT (stmt, ui, arg1)
572 gimple_assign_set_rhs_code (stmt, MULT_EXPR);
573 fold_stmt_inplace (stmt);
574 update_stmt (stmt);
581 free_dominance_info (CDI_DOMINATORS);
582 free_dominance_info (CDI_POST_DOMINATORS);
583 free_alloc_pool (occ_pool);
584 return 0;
587 struct gimple_opt_pass pass_cse_reciprocals =
590 GIMPLE_PASS,
591 "recip", /* name */
592 gate_cse_reciprocals, /* gate */
593 execute_cse_reciprocals, /* execute */
594 NULL, /* sub */
595 NULL, /* next */
596 0, /* static_pass_number */
597 TV_NONE, /* tv_id */
598 PROP_ssa, /* properties_required */
599 0, /* properties_provided */
600 0, /* properties_destroyed */
601 0, /* todo_flags_start */
602 TODO_dump_func | TODO_update_ssa | TODO_verify_ssa
603 | TODO_verify_stmts /* todo_flags_finish */
607 /* Records an occurrence at statement USE_STMT in the vector of trees
608 STMTS if it is dominated by *TOP_BB or dominates it or this basic block
609 is not yet initialized. Returns true if the occurrence was pushed on
610 the vector. Adjusts *TOP_BB to be the basic block dominating all
611 statements in the vector. */
613 static bool
614 maybe_record_sincos (VEC(gimple, heap) **stmts,
615 basic_block *top_bb, gimple use_stmt)
617 basic_block use_bb = gimple_bb (use_stmt);
618 if (*top_bb
619 && (*top_bb == use_bb
620 || dominated_by_p (CDI_DOMINATORS, use_bb, *top_bb)))
621 VEC_safe_push (gimple, heap, *stmts, use_stmt);
622 else if (!*top_bb
623 || dominated_by_p (CDI_DOMINATORS, *top_bb, use_bb))
625 VEC_safe_push (gimple, heap, *stmts, use_stmt);
626 *top_bb = use_bb;
628 else
629 return false;
631 return true;
634 /* Look for sin, cos and cexpi calls with the same argument NAME and
635 create a single call to cexpi CSEing the result in this case.
636 We first walk over all immediate uses of the argument collecting
637 statements that we can CSE in a vector and in a second pass replace
638 the statement rhs with a REALPART or IMAGPART expression on the
639 result of the cexpi call we insert before the use statement that
640 dominates all other candidates. */
642 static void
643 execute_cse_sincos_1 (tree name)
645 gimple_stmt_iterator gsi;
646 imm_use_iterator use_iter;
647 tree fndecl, res, type;
648 gimple def_stmt, use_stmt, stmt;
649 int seen_cos = 0, seen_sin = 0, seen_cexpi = 0;
650 VEC(gimple, heap) *stmts = NULL;
651 basic_block top_bb = NULL;
652 int i;
654 type = TREE_TYPE (name);
655 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, name)
657 if (gimple_code (use_stmt) != GIMPLE_CALL
658 || !gimple_call_lhs (use_stmt)
659 || !(fndecl = gimple_call_fndecl (use_stmt))
660 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
661 continue;
663 switch (DECL_FUNCTION_CODE (fndecl))
665 CASE_FLT_FN (BUILT_IN_COS):
666 seen_cos |= maybe_record_sincos (&stmts, &top_bb, use_stmt) ? 1 : 0;
667 break;
669 CASE_FLT_FN (BUILT_IN_SIN):
670 seen_sin |= maybe_record_sincos (&stmts, &top_bb, use_stmt) ? 1 : 0;
671 break;
673 CASE_FLT_FN (BUILT_IN_CEXPI):
674 seen_cexpi |= maybe_record_sincos (&stmts, &top_bb, use_stmt) ? 1 : 0;
675 break;
677 default:;
681 if (seen_cos + seen_sin + seen_cexpi <= 1)
683 VEC_free(gimple, heap, stmts);
684 return;
687 /* Simply insert cexpi at the beginning of top_bb but not earlier than
688 the name def statement. */
689 fndecl = mathfn_built_in (type, BUILT_IN_CEXPI);
690 if (!fndecl)
691 return;
692 res = make_rename_temp (TREE_TYPE (TREE_TYPE (fndecl)), "sincostmp");
693 stmt = gimple_build_call (fndecl, 1, name);
694 gimple_call_set_lhs (stmt, res);
696 def_stmt = SSA_NAME_DEF_STMT (name);
697 if (!SSA_NAME_IS_DEFAULT_DEF (name)
698 && gimple_code (def_stmt) != GIMPLE_PHI
699 && gimple_bb (def_stmt) == top_bb)
701 gsi = gsi_for_stmt (def_stmt);
702 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
704 else
706 gsi = gsi_after_labels (top_bb);
707 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
709 update_stmt (stmt);
711 /* And adjust the recorded old call sites. */
712 for (i = 0; VEC_iterate(gimple, stmts, i, use_stmt); ++i)
714 tree rhs = NULL;
715 fndecl = gimple_call_fndecl (use_stmt);
717 switch (DECL_FUNCTION_CODE (fndecl))
719 CASE_FLT_FN (BUILT_IN_COS):
720 rhs = fold_build1 (REALPART_EXPR, type, res);
721 break;
723 CASE_FLT_FN (BUILT_IN_SIN):
724 rhs = fold_build1 (IMAGPART_EXPR, type, res);
725 break;
727 CASE_FLT_FN (BUILT_IN_CEXPI):
728 rhs = res;
729 break;
731 default:;
732 gcc_unreachable ();
735 /* Replace call with a copy. */
736 stmt = gimple_build_assign (gimple_call_lhs (use_stmt), rhs);
738 gsi = gsi_for_stmt (use_stmt);
739 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
740 gsi_remove (&gsi, true);
743 VEC_free(gimple, heap, stmts);
746 /* Go through all calls to sin, cos and cexpi and call execute_cse_sincos_1
747 on the SSA_NAME argument of each of them. */
749 static unsigned int
750 execute_cse_sincos (void)
752 basic_block bb;
754 calculate_dominance_info (CDI_DOMINATORS);
756 FOR_EACH_BB (bb)
758 gimple_stmt_iterator gsi;
760 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
762 gimple stmt = gsi_stmt (gsi);
763 tree fndecl;
765 if (is_gimple_call (stmt)
766 && gimple_call_lhs (stmt)
767 && (fndecl = gimple_call_fndecl (stmt))
768 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
770 tree arg;
772 switch (DECL_FUNCTION_CODE (fndecl))
774 CASE_FLT_FN (BUILT_IN_COS):
775 CASE_FLT_FN (BUILT_IN_SIN):
776 CASE_FLT_FN (BUILT_IN_CEXPI):
777 arg = gimple_call_arg (stmt, 0);
778 if (TREE_CODE (arg) == SSA_NAME)
779 execute_cse_sincos_1 (arg);
780 break;
782 default:;
788 free_dominance_info (CDI_DOMINATORS);
789 return 0;
792 static bool
793 gate_cse_sincos (void)
795 /* Make sure we have either sincos or cexp. */
796 return (TARGET_HAS_SINCOS
797 || TARGET_C99_FUNCTIONS)
798 && optimize;
801 struct gimple_opt_pass pass_cse_sincos =
804 GIMPLE_PASS,
805 "sincos", /* name */
806 gate_cse_sincos, /* gate */
807 execute_cse_sincos, /* execute */
808 NULL, /* sub */
809 NULL, /* next */
810 0, /* static_pass_number */
811 TV_NONE, /* tv_id */
812 PROP_ssa, /* properties_required */
813 0, /* properties_provided */
814 0, /* properties_destroyed */
815 0, /* todo_flags_start */
816 TODO_dump_func | TODO_update_ssa | TODO_verify_ssa
817 | TODO_verify_stmts /* todo_flags_finish */
821 /* A symbolic number is used to detect byte permutation and selection
822 patterns. Therefore the field N contains an artificial number
823 consisting of byte size markers:
825 0 - byte has the value 0
826 1..size - byte contains the content of the byte
827 number indexed with that value minus one */
829 struct symbolic_number {
830 unsigned HOST_WIDEST_INT n;
831 int size;
834 /* Perform a SHIFT or ROTATE operation by COUNT bits on symbolic
835 number N. Return false if the requested operation is not permitted
836 on a symbolic number. */
838 static inline bool
839 do_shift_rotate (enum tree_code code,
840 struct symbolic_number *n,
841 int count)
843 if (count % 8 != 0)
844 return false;
846 /* Zero out the extra bits of N in order to avoid them being shifted
847 into the significant bits. */
848 if (n->size < (int)sizeof (HOST_WIDEST_INT))
849 n->n &= ((unsigned HOST_WIDEST_INT)1 << (n->size * BITS_PER_UNIT)) - 1;
851 switch (code)
853 case LSHIFT_EXPR:
854 n->n <<= count;
855 break;
856 case RSHIFT_EXPR:
857 n->n >>= count;
858 break;
859 case LROTATE_EXPR:
860 n->n = (n->n << count) | (n->n >> ((n->size * BITS_PER_UNIT) - count));
861 break;
862 case RROTATE_EXPR:
863 n->n = (n->n >> count) | (n->n << ((n->size * BITS_PER_UNIT) - count));
864 break;
865 default:
866 return false;
868 return true;
871 /* Perform sanity checking for the symbolic number N and the gimple
872 statement STMT. */
874 static inline bool
875 verify_symbolic_number_p (struct symbolic_number *n, gimple stmt)
877 tree lhs_type;
879 lhs_type = gimple_expr_type (stmt);
881 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
882 return false;
884 if (TYPE_PRECISION (lhs_type) != n->size * BITS_PER_UNIT)
885 return false;
887 return true;
890 /* find_bswap_1 invokes itself recursively with N and tries to perform
891 the operation given by the rhs of STMT on the result. If the
892 operation could successfully be executed the function returns the
893 tree expression of the source operand and NULL otherwise. */
895 static tree
896 find_bswap_1 (gimple stmt, struct symbolic_number *n, int limit)
898 enum tree_code code;
899 tree rhs1, rhs2 = NULL;
900 gimple rhs1_stmt, rhs2_stmt;
901 tree source_expr1;
902 enum gimple_rhs_class rhs_class;
904 if (!limit || !is_gimple_assign (stmt))
905 return NULL_TREE;
907 rhs1 = gimple_assign_rhs1 (stmt);
909 if (TREE_CODE (rhs1) != SSA_NAME)
910 return NULL_TREE;
912 code = gimple_assign_rhs_code (stmt);
913 rhs_class = gimple_assign_rhs_class (stmt);
914 rhs1_stmt = SSA_NAME_DEF_STMT (rhs1);
916 if (rhs_class == GIMPLE_BINARY_RHS)
917 rhs2 = gimple_assign_rhs2 (stmt);
919 /* Handle unary rhs and binary rhs with integer constants as second
920 operand. */
922 if (rhs_class == GIMPLE_UNARY_RHS
923 || (rhs_class == GIMPLE_BINARY_RHS
924 && TREE_CODE (rhs2) == INTEGER_CST))
926 if (code != BIT_AND_EXPR
927 && code != LSHIFT_EXPR
928 && code != RSHIFT_EXPR
929 && code != LROTATE_EXPR
930 && code != RROTATE_EXPR
931 && code != NOP_EXPR
932 && code != CONVERT_EXPR)
933 return NULL_TREE;
935 source_expr1 = find_bswap_1 (rhs1_stmt, n, limit - 1);
937 /* If find_bswap_1 returned NULL STMT is a leaf node and we have
938 to initialize the symbolic number. */
939 if (!source_expr1)
941 /* Set up the symbolic number N by setting each byte to a
942 value between 1 and the byte size of rhs1. The highest
943 order byte is set to 1 and the lowest order byte to
944 n.size. */
945 n->size = TYPE_PRECISION (TREE_TYPE (rhs1));
946 if (n->size % BITS_PER_UNIT != 0)
947 return NULL_TREE;
948 n->size /= BITS_PER_UNIT;
949 n->n = (sizeof (HOST_WIDEST_INT) < 8 ? 0 :
950 (unsigned HOST_WIDEST_INT)0x01020304 << 32 | 0x05060708);
951 n->n >>= (sizeof (HOST_WIDEST_INT) - n->size) * BITS_PER_UNIT;
953 source_expr1 = rhs1;
956 switch (code)
958 case BIT_AND_EXPR:
960 int i;
961 unsigned HOST_WIDEST_INT val = widest_int_cst_value (rhs2);
962 unsigned HOST_WIDEST_INT tmp = val;
964 /* Only constants masking full bytes are allowed. */
965 for (i = 0; i < n->size; i++, tmp >>= BITS_PER_UNIT)
966 if ((tmp & 0xff) != 0 && (tmp & 0xff) != 0xff)
967 return NULL_TREE;
969 n->n &= val;
971 break;
972 case LSHIFT_EXPR:
973 case RSHIFT_EXPR:
974 case LROTATE_EXPR:
975 case RROTATE_EXPR:
976 if (!do_shift_rotate (code, n, (int)TREE_INT_CST_LOW (rhs2)))
977 return NULL_TREE;
978 break;
979 CASE_CONVERT:
981 int type_size;
983 type_size = TYPE_PRECISION (gimple_expr_type (stmt));
984 if (type_size % BITS_PER_UNIT != 0)
985 return NULL_TREE;
987 if (type_size / BITS_PER_UNIT < (int)(sizeof (HOST_WIDEST_INT)))
989 /* If STMT casts to a smaller type mask out the bits not
990 belonging to the target type. */
991 n->size = type_size / BITS_PER_UNIT;
992 n->n &= ((unsigned HOST_WIDEST_INT)1 << type_size) - 1;
995 break;
996 default:
997 return NULL_TREE;
999 return verify_symbolic_number_p (n, stmt) ? source_expr1 : NULL;
1002 /* Handle binary rhs. */
1004 if (rhs_class == GIMPLE_BINARY_RHS)
1006 struct symbolic_number n1, n2;
1007 tree source_expr2;
1009 if (code != BIT_IOR_EXPR)
1010 return NULL_TREE;
1012 if (TREE_CODE (rhs2) != SSA_NAME)
1013 return NULL_TREE;
1015 rhs2_stmt = SSA_NAME_DEF_STMT (rhs2);
1017 switch (code)
1019 case BIT_IOR_EXPR:
1020 source_expr1 = find_bswap_1 (rhs1_stmt, &n1, limit - 1);
1022 if (!source_expr1)
1023 return NULL_TREE;
1025 source_expr2 = find_bswap_1 (rhs2_stmt, &n2, limit - 1);
1027 if (source_expr1 != source_expr2
1028 || n1.size != n2.size)
1029 return NULL_TREE;
1031 n->size = n1.size;
1032 n->n = n1.n | n2.n;
1034 if (!verify_symbolic_number_p (n, stmt))
1035 return NULL_TREE;
1037 break;
1038 default:
1039 return NULL_TREE;
1041 return source_expr1;
1043 return NULL_TREE;
1046 /* Check if STMT completes a bswap implementation consisting of ORs,
1047 SHIFTs and ANDs. Return the source tree expression on which the
1048 byte swap is performed and NULL if no bswap was found. */
1050 static tree
1051 find_bswap (gimple stmt)
1053 /* The number which the find_bswap result should match in order to
1054 have a full byte swap. The insignificant bytes are masked out
1055 before using it. */
1056 unsigned HOST_WIDEST_INT cmp =
1057 sizeof (HOST_WIDEST_INT) < 8 ? 0 :
1058 (unsigned HOST_WIDEST_INT)0x08070605 << 32 | 0x04030201;
1060 struct symbolic_number n;
1061 tree source_expr;
1063 /* The last parameter determines the depth search limit. It usually
1064 correlates directly to the number of bytes to be touched. We
1065 increase that number by one here in order to also cover signed ->
1066 unsigned conversions of the src operand as can be seen in
1067 libgcc. */
1068 source_expr = find_bswap_1 (stmt, &n,
1069 TREE_INT_CST_LOW (
1070 TYPE_SIZE_UNIT (gimple_expr_type (stmt))) + 1);
1072 if (!source_expr)
1073 return NULL_TREE;
1075 /* Zero out the extra bits of N and CMP. */
1076 if (n.size < (int)sizeof (HOST_WIDEST_INT))
1078 unsigned HOST_WIDEST_INT mask =
1079 ((unsigned HOST_WIDEST_INT)1 << (n.size * BITS_PER_UNIT)) - 1;
1081 n.n &= mask;
1082 cmp &= mask;
1085 /* A complete byte swap should make the symbolic number to start
1086 with the largest digit in the highest order byte. */
1087 if (cmp != n.n)
1088 return NULL_TREE;
1090 return source_expr;
1093 /* Find manual byte swap implementations and turn them into a bswap
1094 builtin invokation. */
1096 static unsigned int
1097 execute_optimize_bswap (void)
1099 basic_block bb;
1100 bool bswap32_p, bswap64_p;
1101 bool changed = false;
1102 tree bswap32_type = NULL_TREE, bswap64_type = NULL_TREE;
1104 if (BITS_PER_UNIT != 8)
1105 return 0;
1107 if (sizeof (HOST_WIDEST_INT) < 8)
1108 return 0;
1110 bswap32_p = (built_in_decls[BUILT_IN_BSWAP32]
1111 && optab_handler (bswap_optab, SImode)->insn_code !=
1112 CODE_FOR_nothing);
1113 bswap64_p = (built_in_decls[BUILT_IN_BSWAP64]
1114 && (optab_handler (bswap_optab, DImode)->insn_code !=
1115 CODE_FOR_nothing
1116 || (bswap32_p && word_mode == SImode)));
1118 if (!bswap32_p && !bswap64_p)
1119 return 0;
1121 /* Determine the argument type of the builtins. The code later on
1122 assumes that the return and argument type are the same. */
1123 if (bswap32_p)
1125 tree fndecl = built_in_decls[BUILT_IN_BSWAP32];
1126 bswap32_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
1129 if (bswap64_p)
1131 tree fndecl = built_in_decls[BUILT_IN_BSWAP64];
1132 bswap64_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
1135 FOR_EACH_BB (bb)
1137 gimple_stmt_iterator gsi;
1139 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1141 gimple stmt = gsi_stmt (gsi);
1142 tree bswap_src, bswap_type;
1143 tree bswap_tmp;
1144 tree fndecl = NULL_TREE;
1145 int type_size;
1146 gimple call;
1148 if (!is_gimple_assign (stmt)
1149 || gimple_assign_rhs_code (stmt) != BIT_IOR_EXPR)
1150 continue;
1152 type_size = TYPE_PRECISION (gimple_expr_type (stmt));
1154 switch (type_size)
1156 case 32:
1157 if (bswap32_p)
1159 fndecl = built_in_decls[BUILT_IN_BSWAP32];
1160 bswap_type = bswap32_type;
1162 break;
1163 case 64:
1164 if (bswap64_p)
1166 fndecl = built_in_decls[BUILT_IN_BSWAP64];
1167 bswap_type = bswap64_type;
1169 break;
1170 default:
1171 continue;
1174 if (!fndecl)
1175 continue;
1177 bswap_src = find_bswap (stmt);
1179 if (!bswap_src)
1180 continue;
1182 changed = true;
1184 bswap_tmp = bswap_src;
1186 /* Convert the src expression if necessary. */
1187 if (!useless_type_conversion_p (TREE_TYPE (bswap_tmp), bswap_type))
1189 gimple convert_stmt;
1191 bswap_tmp = create_tmp_var (bswap_type, "bswapsrc");
1192 add_referenced_var (bswap_tmp);
1193 bswap_tmp = make_ssa_name (bswap_tmp, NULL);
1195 convert_stmt = gimple_build_assign_with_ops (
1196 CONVERT_EXPR, bswap_tmp, bswap_src, NULL);
1197 gsi_insert_before (&gsi, convert_stmt, GSI_SAME_STMT);
1200 call = gimple_build_call (fndecl, 1, bswap_tmp);
1202 bswap_tmp = gimple_assign_lhs (stmt);
1204 /* Convert the result if necessary. */
1205 if (!useless_type_conversion_p (TREE_TYPE (bswap_tmp), bswap_type))
1207 gimple convert_stmt;
1209 bswap_tmp = create_tmp_var (bswap_type, "bswapdst");
1210 add_referenced_var (bswap_tmp);
1211 bswap_tmp = make_ssa_name (bswap_tmp, NULL);
1212 convert_stmt = gimple_build_assign_with_ops (
1213 CONVERT_EXPR, gimple_assign_lhs (stmt), bswap_tmp, NULL);
1214 gsi_insert_after (&gsi, convert_stmt, GSI_SAME_STMT);
1217 gimple_call_set_lhs (call, bswap_tmp);
1219 if (dump_file)
1221 fprintf (dump_file, "%d bit bswap implementation found at: ",
1222 (int)type_size);
1223 print_gimple_stmt (dump_file, stmt, 0, 0);
1226 gsi_insert_after (&gsi, call, GSI_SAME_STMT);
1227 gsi_remove (&gsi, true);
1231 return (changed ? TODO_dump_func | TODO_update_ssa | TODO_verify_ssa
1232 | TODO_verify_stmts : 0);
1235 static bool
1236 gate_optimize_bswap (void)
1238 return flag_expensive_optimizations && optimize;
1241 struct gimple_opt_pass pass_optimize_bswap =
1244 GIMPLE_PASS,
1245 "bswap", /* name */
1246 gate_optimize_bswap, /* gate */
1247 execute_optimize_bswap, /* execute */
1248 NULL, /* sub */
1249 NULL, /* next */
1250 0, /* static_pass_number */
1251 TV_NONE, /* tv_id */
1252 PROP_ssa, /* properties_required */
1253 0, /* properties_provided */
1254 0, /* properties_destroyed */
1255 0, /* todo_flags_start */
1256 0 /* todo_flags_finish */