1 /* Lower complex number operations to scalar operations.
2 Copyright (C) 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 #include "coretypes.h"
29 #include "tree-flow.h"
30 #include "tree-gimple.h"
31 #include "tree-iterator.h"
32 #include "tree-pass.h"
33 #include "tree-ssa-propagate.h"
36 /* For each complex ssa name, a lattice value. We're interested in finding
37 out whether a complex number is degenerate in some way, having only real
38 or only complex parts. */
/* NOTE(review): the definition of complex_lattice_t itself (presumably an
   enum with UNINITIALIZED, ONLY_REAL, ONLY_IMAG, VARYING, used as 2-bit
   values below) is elided from this excerpt -- TODO confirm against the
   full source.  */
/* Combine two lattice values into a single switchable key; each value
   occupies 2 bits, consistent with the shift below.  */
48 #define PAIR(a, b) ((a) << 2 | (b))
50 DEF_VEC_I(complex_lattice_t
);
51 DEF_VEC_ALLOC_I(complex_lattice_t
, heap
);
/* Lattice value for each SSA name, indexed by SSA_NAME_VERSION.  */
53 static VEC(complex_lattice_t
, heap
) *complex_lattice_values
;
55 /* For each complex variable, a pair of variables for the components. */
/* NOTE(review): indexed as 2*uid for the real part and 2*uid + 1 for the
   imaginary part, per the accesses in create_components and
   extract_component below.  */
56 static VEC(tree
, heap
) *complex_variable_components
;
59 /* Return true if T is not a zero constant. In the case of real values,
60 we're only interested in +0.0. */
/* NOTE(review): this excerpt elides the return type, braces, the
   declaration/initialization of ZEROP, and the final return -- code kept
   byte-identical; presumably returns !zerop with zerop defaulting to
   false for non-constant T -- TODO confirm.  */
63 some_nonzerop (tree t
)
67 if (TREE_CODE (t
) == REAL_CST
)
68 zerop
= REAL_VALUES_IDENTICAL (TREE_REAL_CST (t
), dconst0
);
69 else if (TREE_CODE (t
) == INTEGER_CST
)
70 zerop
= integer_zerop (t
);
75 /* Compute a lattice value from T. It may be a gimple_val, or, as a
76 special exception, a COMPLEX_EXPR. */
/* NOTE(review): case labels (SSA_NAME, COMPLEX_CST, COMPLEX_EXPR per the
   accessors used), the declarations of REAL/IMAG/R/I, and several returns
   are elided in this excerpt -- code kept byte-identical.  */
78 static complex_lattice_t
79 find_lattice_value (tree t
)
83 complex_lattice_t ret
;
85 switch (TREE_CODE (t
))
88 return VEC_index (complex_lattice_t
, complex_lattice_values
,
89 SSA_NAME_VERSION (t
));
92 real
= TREE_REALPART (t
);
93 imag
= TREE_IMAGPART (t
);
97 real
= TREE_OPERAND (t
, 0);
98 imag
= TREE_OPERAND (t
, 1);
/* Classify by which components are (possibly) nonzero; the lattice
   encoding makes r*ONLY_REAL + i*ONLY_IMAG yield the combined value.  */
105 r
= some_nonzerop (real
);
106 i
= some_nonzerop (imag
);
107 ret
= r
*ONLY_REAL
+ i
*ONLY_IMAG
;
109 /* ??? On occasion we could do better than mapping 0+0i to real, but we
110 certainly don't want to leave it UNINITIALIZED, which eventually gets
111 mapped to VARYING. */
112 if (ret
== UNINITIALIZED
)
118 /* Determine if LHS is something for which we're interested in seeing
119 simulation results. */
/* True iff LHS is a gimple register of complex type.  (Return type and
   braces elided in this excerpt.)  */
122 is_complex_reg (tree lhs
)
124 return TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
&& is_gimple_reg (lhs
);
127 /* Mark the incoming parameters to the function as VARYING. */
/* NOTE(review): return type, braces, and the declaration of PARM are
   elided in this excerpt -- code kept byte-identical.  Each complex
   register parameter's default SSA name is pinned to VARYING so the
   propagator never treats an incoming value as degenerate.  */
130 init_parameter_lattice_values (void)
134 for (parm
= DECL_ARGUMENTS (cfun
->decl
); parm
; parm
= TREE_CHAIN (parm
))
135 if (is_complex_reg (parm
) && var_ann (parm
) != NULL
)
137 tree ssa_name
= default_def (parm
);
138 VEC_replace (complex_lattice_t
, complex_lattice_values
,
139 SSA_NAME_VERSION (ssa_name
), VARYING
);
143 /* Initialize DONT_SIMULATE_AGAIN for each stmt and phi. Return false if
144 we found no statements we want to simulate, and thus there's nothing for
145 the entire pass to do. */
/* NOTE(review): the enclosing FOR_EACH_BB loop header, several case
   labels (RETURN_EXPR / MODIFY_EXPR / COND_EXPR per the accessors used),
   braces, and some declarations are elided in this excerpt -- code kept
   byte-identical.  */
148 init_dont_simulate_again (void)
151 block_stmt_iterator bsi
;
153 bool saw_a_complex_op
= false;
157 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
158 DONT_SIMULATE_AGAIN (phi
) = !is_complex_reg (PHI_RESULT (phi
));
160 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
162 tree orig_stmt
, stmt
, rhs
= NULL
;
165 orig_stmt
= stmt
= bsi_stmt (bsi
);
167 /* Most control-altering statements must be initially
168 simulated, else we won't cover the entire cfg. */
169 dsa
= !stmt_ends_bb_p (stmt
);
171 switch (TREE_CODE (stmt
))
174 /* We don't care what the lattice value of <retval> is,
175 since it's never used as an input to another computation. */
177 stmt
= TREE_OPERAND (stmt
, 0);
178 if (!stmt
|| TREE_CODE (stmt
) != MODIFY_EXPR
)
183 dsa
= !is_complex_reg (TREE_OPERAND (stmt
, 0));
184 rhs
= TREE_OPERAND (stmt
, 1);
188 rhs
= TREE_OPERAND (stmt
, 0);
196 switch (TREE_CODE (rhs
))
200 rhs
= TREE_OPERAND (rhs
, 0);
/* Record whether any simulated statement actually touches a complex
   value, so the pass can bail out early when there are none.  */
213 if (TREE_CODE (TREE_TYPE (rhs
)) == COMPLEX_TYPE
)
214 saw_a_complex_op
= true;
221 DONT_SIMULATE_AGAIN (orig_stmt
) = dsa
;
225 return saw_a_complex_op
;
229 /* Evaluate statement STMT against the complex lattice defined above. */
/* NOTE(review): case labels (presumably PLUS/MINUS vs. MULT/DIV vs.
   NEGATE/CONJ groupings per the operand accesses), braces, and the
   declarations of LHS/RHS/VER are elided in this excerpt -- code kept
   byte-identical.  Returns an ssa_prop_result for the SSA propagation
   engine.  */
231 static enum ssa_prop_result
232 complex_visit_stmt (tree stmt
, edge
*taken_edge_p ATTRIBUTE_UNUSED
,
235 complex_lattice_t new_l
, old_l
, op1_l
, op2_l
;
239 if (TREE_CODE (stmt
) != MODIFY_EXPR
)
240 return SSA_PROP_VARYING
;
242 lhs
= TREE_OPERAND (stmt
, 0);
243 rhs
= TREE_OPERAND (stmt
, 1);
245 /* These conditions should be satisfied due to the initial filter
246 set up in init_dont_simulate_again. */
247 gcc_assert (TREE_CODE (lhs
) == SSA_NAME
);
248 gcc_assert (TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
);
251 ver
= SSA_NAME_VERSION (lhs
);
252 old_l
= VEC_index (complex_lattice_t
, complex_lattice_values
, ver
);
254 switch (TREE_CODE (rhs
))
259 new_l
= find_lattice_value (rhs
);
264 op1_l
= find_lattice_value (TREE_OPERAND (rhs
, 0));
265 op2_l
= find_lattice_value (TREE_OPERAND (rhs
, 1));
267 /* We've set up the lattice values such that IOR neatly
269 new_l
= op1_l
| op2_l
;
278 op1_l
= find_lattice_value (TREE_OPERAND (rhs
, 0));
279 op2_l
= find_lattice_value (TREE_OPERAND (rhs
, 1));
281 /* Obviously, if either varies, so does the result. */
282 if (op1_l
== VARYING
|| op2_l
== VARYING
)
284 /* Don't prematurely promote variables if we've not yet seen
286 else if (op1_l
== UNINITIALIZED
)
288 else if (op2_l
== UNINITIALIZED
)
292 /* At this point both numbers have only one component. If the
293 numbers are of opposite kind, the result is imaginary,
294 otherwise the result is real. The add/subtract translates
295 the real/imag from/to 0/1; the ^ performs the comparison. */
296 new_l
= ((op1_l
- ONLY_REAL
) ^ (op2_l
- ONLY_REAL
)) + ONLY_REAL
;
298 /* Don't allow the lattice value to flip-flop indefinitely. */
305 new_l
= find_lattice_value (TREE_OPERAND (rhs
, 0));
313 /* If nothing changed this round, let the propagator know. */
315 return SSA_PROP_NOT_INTERESTING
;
317 VEC_replace (complex_lattice_t
, complex_lattice_values
, ver
, new_l
);
318 return new_l
== VARYING
? SSA_PROP_VARYING
: SSA_PROP_INTERESTING
;
321 /* Evaluate a PHI node against the complex lattice defined above. */
/* NOTE(review): braces, the declarations of LHS/VER/I, and the
   "new_l == old_l" comparison guarding the NOT_INTERESTING return are
   elided in this excerpt -- code kept byte-identical.  */
323 static enum ssa_prop_result
324 complex_visit_phi (tree phi
)
326 complex_lattice_t new_l
, old_l
;
331 lhs
= PHI_RESULT (phi
);
333 /* This condition should be satisfied due to the initial filter
334 set up in init_dont_simulate_again. */
335 gcc_assert (TREE_CODE (TREE_TYPE (lhs
)) == COMPLEX_TYPE
);
337 /* We've set up the lattice values such that IOR neatly models PHI meet. */
338 new_l
= UNINITIALIZED
;
339 for (i
= PHI_NUM_ARGS (phi
) - 1; i
>= 0; --i
)
340 new_l
|= find_lattice_value (PHI_ARG_DEF (phi
, i
));
342 ver
= SSA_NAME_VERSION (lhs
);
343 old_l
= VEC_index (complex_lattice_t
, complex_lattice_values
, ver
);
346 return SSA_PROP_NOT_INTERESTING
;
348 VEC_replace (complex_lattice_t
, complex_lattice_values
, ver
, new_l
);
349 return new_l
== VARYING
? SSA_PROP_VARYING
: SSA_PROP_INTERESTING
;
352 /* For each referenced complex gimple register, set up a pair of registers
353 to hold the components of the complex value. */
/* NOTE(review): return type, braces, the declarations of N/K, the guard
   before the "&&" conditions at original line 373, and the else-branch
   header before the DECL_IGNORED_P(...) = 1 assignments are elided in
   this excerpt -- code kept byte-identical.  */
356 create_components (void)
360 n
= num_referenced_vars
;
/* Two slots per referenced var: 2*k holds the real component temp,
   2*k + 1 the imaginary one.  */
364 complex_variable_components
= VEC_alloc (tree
, heap
, 2*n
);
365 VEC_safe_grow (tree
, heap
, complex_variable_components
, 2*n
);
367 for (k
= 0; k
< n
; ++k
)
369 tree var
= referenced_var (k
);
370 tree r
= NULL
, i
= NULL
;
373 && TREE_CODE (TREE_TYPE (var
)) == COMPLEX_TYPE
374 && is_gimple_reg (var
))
376 tree inner_type
= TREE_TYPE (TREE_TYPE (var
));
378 r
= make_rename_temp (inner_type
, "CR");
379 i
= make_rename_temp (inner_type
, "CI");
380 DECL_SOURCE_LOCATION (r
) = DECL_SOURCE_LOCATION (var
);
381 DECL_SOURCE_LOCATION (i
) = DECL_SOURCE_LOCATION (var
);
382 DECL_ARTIFICIAL (r
) = 1;
383 DECL_ARTIFICIAL (i
) = 1;
/* For user-visible variables, name the temps var$real / var$imag and
   wire up debug expressions so the debugger can still show VAR.  */
385 if (DECL_NAME (var
) && !DECL_IGNORED_P (var
))
387 const char *name
= IDENTIFIER_POINTER (DECL_NAME (var
));
389 DECL_NAME (r
) = get_identifier (ACONCAT ((name
, "$real", NULL
)));
390 DECL_NAME (i
) = get_identifier (ACONCAT ((name
, "$imag", NULL
)));
392 SET_DECL_DEBUG_EXPR (r
, build1 (REALPART_EXPR
, inner_type
, var
));
393 SET_DECL_DEBUG_EXPR (i
, build1 (IMAGPART_EXPR
, inner_type
, var
));
394 DECL_DEBUG_EXPR_IS_FROM (r
) = 1;
395 DECL_DEBUG_EXPR_IS_FROM (i
) = 1;
397 DECL_IGNORED_P (r
) = 0;
398 DECL_IGNORED_P (i
) = 0;
400 TREE_NO_WARNING (r
) = TREE_NO_WARNING (var
);
401 TREE_NO_WARNING (i
) = TREE_NO_WARNING (var
);
/* Otherwise keep the temps invisible and warning-free.  */
405 DECL_IGNORED_P (r
) = 1;
406 DECL_IGNORED_P (i
) = 1;
407 TREE_NO_WARNING (r
) = 1;
408 TREE_NO_WARNING (i
) = 1;
412 VEC_replace (tree
, complex_variable_components
, 2*k
, r
);
413 VEC_replace (tree
, complex_variable_components
, 2*k
+ 1, i
);
417 /* Extract the real or imaginary part of a complex variable or constant.
418 Make sure that it's a proper gimple_val and gimplify it if not.
419 Emit any new code before BSI. */
/* NOTE(review): return type, the trailing parameter after IMAGPART_P,
   case labels (COMPLEX_CST / COMPLEX_EXPR / VAR_DECL-ish / SSA_NAME per
   the accessors used), braces, and some returns are elided in this
   excerpt -- code kept byte-identical.  IMAGPART_P doubles as an index
   (0 = real, 1 = imag) into COMPLEX_EXPR operands and the component
   vector.  */
422 extract_component (block_stmt_iterator
*bsi
, tree t
, bool imagpart_p
,
425 switch (TREE_CODE (t
))
428 return imagpart_p
? TREE_IMAGPART (t
) : TREE_REALPART (t
);
431 return TREE_OPERAND (t
, imagpart_p
);
439 tree inner_type
= TREE_TYPE (TREE_TYPE (t
));
441 t
= build1 ((imagpart_p
? IMAGPART_EXPR
: REALPART_EXPR
),
442 inner_type
, unshare_expr (t
));
445 t
= gimplify_val (bsi
, inner_type
, t
);
/* For an SSA name, peek at its defining statement: a constant component
   can be returned directly instead of reading the component temp.  */
452 tree def
= SSA_NAME_DEF_STMT (t
);
454 if (TREE_CODE (def
) == MODIFY_EXPR
)
456 def
= TREE_OPERAND (def
, 1);
457 if (TREE_CODE (def
) == COMPLEX_CST
)
458 return imagpart_p
? TREE_IMAGPART (def
) : TREE_REALPART (def
);
459 if (TREE_CODE (def
) == COMPLEX_EXPR
)
461 def
= TREE_OPERAND (def
, imagpart_p
);
462 if (TREE_CONSTANT (def
))
467 return VEC_index (tree
, complex_variable_components
,
468 var_ann (SSA_NAME_VAR (t
))->uid
* 2 + imagpart_p
);
476 /* Update the complex components of the ssa name on the lhs of STMT. */
/* NOTE(review): return type, braces, and the declarations of V/X are
   elided in this excerpt -- code kept byte-identical.  Inserts
   "CR = r" and "CI = i" assignments after *BSI, copying STMT's source
   location and lexical block.  */
479 update_complex_components (block_stmt_iterator
*bsi
, tree stmt
, tree r
, tree i
)
481 unsigned int uid
= var_ann (SSA_NAME_VAR (TREE_OPERAND (stmt
, 0)))->uid
;
484 v
= VEC_index (tree
, complex_variable_components
, 2*uid
);
485 x
= build2 (MODIFY_EXPR
, TREE_TYPE (v
), v
, r
);
486 SET_EXPR_LOCUS (x
, EXPR_LOCUS (stmt
));
487 TREE_BLOCK (x
) = TREE_BLOCK (stmt
);
488 bsi_insert_after (bsi
, x
, BSI_NEW_STMT
);
490 v
= VEC_index (tree
, complex_variable_components
, 2*uid
+ 1);
491 x
= build2 (MODIFY_EXPR
, TREE_TYPE (v
), v
, i
);
492 SET_EXPR_LOCUS (x
, EXPR_LOCUS (stmt
));
493 TREE_BLOCK (x
) = TREE_BLOCK (stmt
);
494 bsi_insert_after (bsi
, x
, BSI_NEW_STMT
);
/* As update_complex_components, but insert the component assignments on
   edge E rather than after an iterator.  STMT may be NULL (location and
   block copying appears to be guarded by elided "if (stmt)" lines at
   originals 506/515 -- TODO confirm).  */
498 update_complex_components_on_edge (edge e
, tree stmt
, tree lhs
, tree r
, tree i
)
500 unsigned int uid
= var_ann (SSA_NAME_VAR (lhs
))->uid
;
503 v
= VEC_index (tree
, complex_variable_components
, 2*uid
);
504 x
= build2 (MODIFY_EXPR
, TREE_TYPE (v
), v
, r
);
507 SET_EXPR_LOCUS (x
, EXPR_LOCUS (stmt
));
508 TREE_BLOCK (x
) = TREE_BLOCK (stmt
);
510 bsi_insert_on_edge (e
, x
);
512 v
= VEC_index (tree
, complex_variable_components
, 2*uid
+ 1);
513 x
= build2 (MODIFY_EXPR
, TREE_TYPE (v
), v
, i
);
516 SET_EXPR_LOCUS (x
, EXPR_LOCUS (stmt
));
517 TREE_BLOCK (x
) = TREE_BLOCK (stmt
);
519 bsi_insert_on_edge (e
, x
);
522 /* Update an assignment to a complex variable in place. */
/* NOTE(review): return type, braces, and the declarations of MOD/STMT/
   TYPE are elided in this excerpt -- code kept byte-identical.  Rewrites
   the RHS of the current statement (unwrapping a RETURN_EXPR first) as
   COMPLEX_EXPR <r, i> and refreshes the component temps.  */
525 update_complex_assignment (block_stmt_iterator
*bsi
, tree r
, tree i
)
530 mod
= stmt
= bsi_stmt (*bsi
);
531 if (TREE_CODE (stmt
) == RETURN_EXPR
)
532 mod
= TREE_OPERAND (mod
, 0);
534 update_complex_components (bsi
, stmt
, r
, i
);
536 type
= TREE_TYPE (TREE_OPERAND (mod
, 1));
537 TREE_OPERAND (mod
, 1) = build (COMPLEX_EXPR
, type
, r
, i
);
541 /* Generate code at the entry point of the function to initialize the
542 component variables for a complex parameter. */
/* NOTE(review): return type, braces, and the declarations of PARM/
   SSA_NAME/R/I are elided in this excerpt -- code kept byte-identical.
   The component assignments are queued on the single entry edge.  */
545 update_parameter_components (void)
547 edge entry_edge
= single_succ_edge (ENTRY_BLOCK_PTR
);
550 for (parm
= DECL_ARGUMENTS (cfun
->decl
); parm
; parm
= TREE_CHAIN (parm
))
552 tree type
= TREE_TYPE (parm
);
555 if (TREE_CODE (type
) != COMPLEX_TYPE
|| !is_gimple_reg (parm
))
558 type
= TREE_TYPE (type
);
559 ssa_name
= default_def (parm
);
561 r
= build1 (REALPART_EXPR
, type
, ssa_name
);
562 i
= build1 (IMAGPART_EXPR
, type
, ssa_name
);
563 update_complex_components_on_edge (entry_edge
, NULL
, ssa_name
, r
, i
);
567 /* Generate code to set the component variables of a complex variable
568 to match the PHI statements in block BB. */
/* NOTE(review): return type, braces, and the declarations of PHI/I/N/R
   are elided in this excerpt -- code kept byte-identical.  For each
   complex PHI, the real/imag components of each argument are assigned on
   the corresponding incoming edge.  */
571 update_phi_components (basic_block bb
)
575 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
576 if (is_complex_reg (PHI_RESULT (phi
)))
579 tree lhs
= PHI_RESULT (phi
);
581 for (i
= 0, n
= PHI_NUM_ARGS (phi
); i
< n
; ++i
)
583 edge e
= PHI_ARG_EDGE (phi
, i
);
584 tree arg
= PHI_ARG_DEF (phi
, i
);
587 /* Avoid no-op assignments. This also prevents inserting stmts
588 onto abnormal edges, assuming the PHI isn't already broken. */
589 if (TREE_CODE (arg
) == SSA_NAME
590 && SSA_NAME_VAR (arg
) == SSA_NAME_VAR (lhs
))
593 r
= extract_component (NULL
, arg
, 0, false);
594 i
= extract_component (NULL
, arg
, 1, false);
595 update_complex_components_on_edge (e
, NULL
, lhs
, r
, i
);
600 /* Mark each virtual op in STMT for ssa update. */
/* NOTE(review): return type, braces, and the declarations of SYM/ITER
   are elided in this excerpt -- code kept byte-identical.  */
603 update_all_vops (tree stmt
)
608 FOR_EACH_SSA_TREE_OPERAND (sym
, stmt
, iter
, SSA_OP_ALL_VIRTUALS
)
610 if (TREE_CODE (sym
) == SSA_NAME
)
611 sym
= SSA_NAME_VAR (sym
);
612 mark_sym_for_renaming (sym
);
616 /* Expand a complex move to scalars. */
/* NOTE(review): the trailing LHS/RHS parameters, braces, else-branch
   headers, and the declarations of R/I/X/E/EI are elided in this
   excerpt -- code kept byte-identical.  Three visible shapes:
   (1) lhs is an SSA name: control-altering stmts update the component
       temps on the fallthru edge; calls/side-effecting rhs update after
       the stmt; otherwise rewrite into a COMPLEX_EXPR assignment;
   (2) rhs is an SSA name storing into a memory lhs: split into two
       scalar stores to REALPART/IMAGPART of lhs.  */
619 expand_complex_move (block_stmt_iterator
*bsi
, tree stmt
, tree type
,
622 tree inner_type
= TREE_TYPE (type
);
625 if (TREE_CODE (lhs
) == SSA_NAME
)
627 if (is_ctrl_altering_stmt (bsi_stmt (*bsi
)))
632 /* The value is not assigned on the exception edges, so we need not
633 concern ourselves there. We do need to update on the fallthru
635 FOR_EACH_EDGE (e
, ei
, bsi
->bb
->succs
)
636 if (e
->flags
& EDGE_FALLTHRU
)
641 r
= build1 (REALPART_EXPR
, inner_type
, lhs
);
642 i
= build1 (IMAGPART_EXPR
, inner_type
, lhs
);
643 update_complex_components_on_edge (e
, stmt
, lhs
, r
, i
);
645 else if (TREE_CODE (rhs
) == CALL_EXPR
|| TREE_SIDE_EFFECTS (rhs
))
647 r
= build1 (REALPART_EXPR
, inner_type
, lhs
);
648 i
= build1 (IMAGPART_EXPR
, inner_type
, lhs
);
649 update_complex_components (bsi
, stmt
, r
, i
);
653 update_all_vops (bsi_stmt (*bsi
));
654 r
= extract_component (bsi
, rhs
, 0, true);
655 i
= extract_component (bsi
, rhs
, 1, true);
656 update_complex_assignment (bsi
, r
, i
);
659 else if (TREE_CODE (rhs
) == SSA_NAME
&& !TREE_SIDE_EFFECTS (lhs
))
663 r
= extract_component (bsi
, rhs
, 0, false);
664 i
= extract_component (bsi
, rhs
, 1, false);
666 x
= build1 (REALPART_EXPR
, inner_type
, unshare_expr (lhs
));
667 x
= build2 (MODIFY_EXPR
, inner_type
, x
, r
);
668 bsi_insert_before (bsi
, x
, BSI_SAME_STMT
);
/* If the original statement is still at the iterator, retarget it at
   the imaginary store; otherwise (a RETURN_EXPR was involved) insert a
   separate imaginary store and return LHS.  */
670 if (stmt
== bsi_stmt (*bsi
))
672 x
= build1 (IMAGPART_EXPR
, inner_type
, unshare_expr (lhs
));
673 TREE_OPERAND (stmt
, 0) = x
;
674 TREE_OPERAND (stmt
, 1) = i
;
675 TREE_TYPE (stmt
) = inner_type
;
679 x
= build1 (IMAGPART_EXPR
, inner_type
, unshare_expr (lhs
));
680 x
= build2 (MODIFY_EXPR
, inner_type
, x
, i
);
681 bsi_insert_before (bsi
, x
, BSI_SAME_STMT
);
683 stmt
= bsi_stmt (*bsi
);
684 gcc_assert (TREE_CODE (stmt
) == RETURN_EXPR
);
685 TREE_OPERAND (stmt
, 0) = lhs
;
688 update_all_vops (stmt
);
693 /* Expand complex addition to scalars:
694 a + b = (ar + br) + i(ai + bi)
695 a - b = (ar - br) + i(ai - bi)
/* NOTE(review): the CODE parameter line, braces, the declarations of
   RR/RI, the per-case assignments of the untouched component, `break'
   statements, and a gcc_unreachable default are elided in this
   excerpt -- code kept byte-identical.  The lattice pair drives which
   component operations can be skipped.  */
699 expand_complex_addition (block_stmt_iterator
*bsi
, tree inner_type
,
700 tree ar
, tree ai
, tree br
, tree bi
,
702 complex_lattice_t al
, complex_lattice_t bl
)
706 switch (PAIR (al
, bl
))
708 case PAIR (ONLY_REAL
, ONLY_REAL
):
709 rr
= gimplify_build2 (bsi
, code
, inner_type
, ar
, br
);
713 case PAIR (ONLY_REAL
, ONLY_IMAG
):
715 if (code
== MINUS_EXPR
)
716 ri
= gimplify_build2 (bsi
, MINUS_EXPR
, inner_type
, ai
, bi
);
721 case PAIR (ONLY_IMAG
, ONLY_REAL
):
722 if (code
== MINUS_EXPR
)
723 rr
= gimplify_build2 (bsi
, MINUS_EXPR
, inner_type
, ar
, br
);
729 case PAIR (ONLY_IMAG
, ONLY_IMAG
):
731 ri
= gimplify_build2 (bsi
, code
, inner_type
, ai
, bi
);
734 case PAIR (VARYING
, ONLY_REAL
):
735 rr
= gimplify_build2 (bsi
, code
, inner_type
, ar
, br
);
739 case PAIR (VARYING
, ONLY_IMAG
):
741 ri
= gimplify_build2 (bsi
, MINUS_EXPR
, inner_type
, ai
, bi
);
744 case PAIR (ONLY_REAL
, VARYING
):
745 if (code
== MINUS_EXPR
)
747 rr
= gimplify_build2 (bsi
, code
, inner_type
, ar
, br
);
751 case PAIR (ONLY_IMAG
, VARYING
):
752 if (code
== MINUS_EXPR
)
755 ri
= gimplify_build2 (bsi
, MINUS_EXPR
, inner_type
, ai
, bi
);
758 case PAIR (VARYING
, VARYING
):
760 rr
= gimplify_build2 (bsi
, code
, inner_type
, ar
, br
);
761 ri
= gimplify_build2 (bsi
, code
, inner_type
, ai
, bi
);
768 update_complex_assignment (bsi
, rr
, ri
);
771 /* Expand a complex multiplication or division to a libcall to the c99
772 compliant routines. */
/* NOTE(review): braces, an else-branch gcc_unreachable for codes other
   than MULT/RDIV, update_all_vops/ SSA-name guard lines around the final
   component update are elided in this excerpt -- code kept
   byte-identical.  Arguments are consed in reverse so the list reads
   (ar, ai, br, bi); the builtin is selected by offsetting from the
   MIN complex-float mode.  */
775 expand_complex_libcall (block_stmt_iterator
*bsi
, tree ar
, tree ai
,
776 tree br
, tree bi
, enum tree_code code
)
778 enum machine_mode mode
;
779 enum built_in_function bcode
;
780 tree args
, fn
, stmt
, type
;
782 args
= tree_cons (NULL
, bi
, NULL
);
783 args
= tree_cons (NULL
, br
, args
);
784 args
= tree_cons (NULL
, ai
, args
);
785 args
= tree_cons (NULL
, ar
, args
);
787 stmt
= bsi_stmt (*bsi
);
788 type
= TREE_TYPE (TREE_OPERAND (stmt
, 1));
790 mode
= TYPE_MODE (type
);
791 gcc_assert (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
);
792 if (code
== MULT_EXPR
)
793 bcode
= BUILT_IN_COMPLEX_MUL_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
;
794 else if (code
== RDIV_EXPR
)
795 bcode
= BUILT_IN_COMPLEX_DIV_MIN
+ mode
- MIN_MODE_COMPLEX_FLOAT
;
798 fn
= built_in_decls
[bcode
];
800 TREE_OPERAND (stmt
, 1)
801 = build3 (CALL_EXPR
, type
, build_fold_addr_expr (fn
), args
, NULL
);
806 tree lhs
= TREE_OPERAND (stmt
, 0);
807 update_complex_components (bsi
, stmt
,
808 build1 (REALPART_EXPR
, type
, lhs
),
809 build1 (IMAGPART_EXPR
, type
, lhs
));
813 /* Expand complex multiplication to scalars:
814 a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
/* NOTE(review): the guard around the operand-swap canonicalization
   (presumably "if (al < bl)"), braces, `break's, declarations of
   RR/RI/T1..T4, and several constant-folding shortcuts are elided in
   this excerpt -- code kept byte-identical.  */
818 expand_complex_multiplication (block_stmt_iterator
*bsi
, tree inner_type
,
819 tree ar
, tree ai
, tree br
, tree bi
,
820 complex_lattice_t al
, complex_lattice_t bl
)
/* Canonicalize the operand order so the switch below only needs to
   handle one orientation of each mixed lattice pair.  */
826 complex_lattice_t tl
;
827 rr
= ar
, ar
= br
, br
= rr
;
828 ri
= ai
, ai
= bi
, bi
= ri
;
829 tl
= al
, al
= bl
, bl
= tl
;
832 switch (PAIR (al
, bl
))
834 case PAIR (ONLY_REAL
, ONLY_REAL
):
835 rr
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, br
);
839 case PAIR (ONLY_IMAG
, ONLY_REAL
):
841 if (TREE_CODE (ai
) == REAL_CST
842 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai
), dconst1
))
845 ri
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, br
);
848 case PAIR (ONLY_IMAG
, ONLY_IMAG
):
849 rr
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, bi
);
850 rr
= gimplify_build1 (bsi
, NEGATE_EXPR
, inner_type
, rr
);
854 case PAIR (VARYING
, ONLY_REAL
):
855 rr
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, br
);
856 ri
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, br
);
859 case PAIR (VARYING
, ONLY_IMAG
):
860 rr
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, bi
);
861 rr
= gimplify_build1 (bsi
, NEGATE_EXPR
, inner_type
, rr
);
862 ri
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, bi
);
865 case PAIR (VARYING
, VARYING
):
866 if (flag_complex_method
== 2 && SCALAR_FLOAT_TYPE_P (inner_type
))
868 expand_complex_libcall (bsi
, ar
, ai
, br
, bi
, MULT_EXPR
);
875 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, br
);
876 t2
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, bi
);
877 t3
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, bi
);
879 /* Avoid expanding redundant multiplication for the common
880 case of squaring a complex number. */
881 if (ar
== br
&& ai
== bi
)
884 t4
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, br
);
886 rr
= gimplify_build2 (bsi
, MINUS_EXPR
, inner_type
, t1
, t2
);
887 ri
= gimplify_build2 (bsi
, PLUS_EXPR
, inner_type
, t3
, t4
);
895 update_complex_assignment (bsi
, rr
, ri
);
898 /* Expand complex division to scalars, straightforward algorithm.
899 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
/* NOTE(review): the comment line defining t (presumably
   "t = br*br + bi*bi"), the CODE parameter line, return type, and braces
   are elided in this excerpt -- code kept byte-identical.  DIV below is
   exactly br*br + bi*bi.  */
904 expand_complex_div_straight (block_stmt_iterator
*bsi
, tree inner_type
,
905 tree ar
, tree ai
, tree br
, tree bi
,
908 tree rr
, ri
, div
, t1
, t2
, t3
;
910 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, br
, br
);
911 t2
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, bi
, bi
);
912 div
= gimplify_build2 (bsi
, PLUS_EXPR
, inner_type
, t1
, t2
);
914 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, br
);
915 t2
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, bi
);
916 t3
= gimplify_build2 (bsi
, PLUS_EXPR
, inner_type
, t1
, t2
);
917 rr
= gimplify_build2 (bsi
, code
, inner_type
, t3
, div
);
919 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, br
);
920 t2
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, bi
);
921 t3
= gimplify_build2 (bsi
, MINUS_EXPR
, inner_type
, t1
, t2
);
922 ri
= gimplify_build2 (bsi
, code
, inner_type
, t3
, div
);
924 update_complex_assignment (bsi
, rr
, ri
);
927 /* Expand complex division to scalars, modified algorithm to minimize
928 overflow with wide input ranges. */
/* NOTE(review): the CODE parameter line, braces, edge/basic-block
   declarations, and several statements inside the branch bodies are
   elided in this excerpt -- code kept byte-identical.  This is the
   Smith-style division: branch on |br| < |bi| and scale by the ratio of
   the smaller to the larger component to avoid overflow.  When the
   comparison folds to a constant, only the corresponding straight-line
   sequence is emitted (no CFG surgery).  */
931 expand_complex_div_wide (block_stmt_iterator
*bsi
, tree inner_type
,
932 tree ar
, tree ai
, tree br
, tree bi
,
935 tree rr
, ri
, ratio
, div
, t1
, t2
, tr
, ti
, cond
;
936 basic_block bb_cond
, bb_true
, bb_false
, bb_join
;
938 /* Examine |br| < |bi|, and branch. */
939 t1
= gimplify_build1 (bsi
, ABS_EXPR
, inner_type
, br
);
940 t2
= gimplify_build1 (bsi
, ABS_EXPR
, inner_type
, bi
);
941 cond
= fold_build2 (LT_EXPR
, boolean_type_node
, t1
, t2
);
944 bb_cond
= bb_true
= bb_false
= bb_join
= NULL
;
945 rr
= ri
= tr
= ti
= NULL
;
/* Non-constant condition: materialize a COND_EXPR and split the block
   into cond/true/false/join.  */
946 if (!TREE_CONSTANT (cond
))
950 cond
= build (COND_EXPR
, void_type_node
, cond
, NULL
, NULL
);
951 bsi_insert_before (bsi
, cond
, BSI_SAME_STMT
);
953 /* Split the original block, and create the TRUE and FALSE blocks. */
954 e
= split_block (bsi
->bb
, cond
);
957 bb_true
= create_empty_bb (bb_cond
);
958 bb_false
= create_empty_bb (bb_true
);
960 t1
= build (GOTO_EXPR
, void_type_node
, tree_block_label (bb_true
));
961 t2
= build (GOTO_EXPR
, void_type_node
, tree_block_label (bb_false
));
962 COND_EXPR_THEN (cond
) = t1
;
963 COND_EXPR_ELSE (cond
) = t2
;
965 /* Wire the blocks together. */
966 e
->flags
= EDGE_TRUE_VALUE
;
967 redirect_edge_succ (e
, bb_true
);
968 make_edge (bb_cond
, bb_false
, EDGE_FALSE_VALUE
);
969 make_edge (bb_true
, bb_join
, EDGE_FALLTHRU
);
970 make_edge (bb_false
, bb_join
, EDGE_FALLTHRU
);
972 /* Update dominance info. Note that bb_join's data was
973 updated by split_block. */
974 if (dom_info_available_p (CDI_DOMINATORS
))
976 set_immediate_dominator (CDI_DOMINATORS
, bb_true
, bb_cond
);
977 set_immediate_dominator (CDI_DOMINATORS
, bb_false
, bb_cond
);
/* Shared result temporaries written in both branches, read at the
   join point.  */
980 rr
= make_rename_temp (inner_type
, NULL
);
981 ri
= make_rename_temp (inner_type
, NULL
);
984 /* In the TRUE branch, we compute
986 div = (br * ratio) + bi;
987 tr = (ar * ratio) + ai;
988 ti = (ai * ratio) - ar;
991 if (bb_true
|| integer_nonzerop (cond
))
995 *bsi
= bsi_last (bb_true
);
996 bsi_insert_after (bsi
, build_empty_stmt (), BSI_NEW_STMT
);
999 ratio
= gimplify_build2 (bsi
, code
, inner_type
, br
, bi
);
1001 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, br
, ratio
);
1002 div
= gimplify_build2 (bsi
, PLUS_EXPR
, inner_type
, t1
, bi
);
1004 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, ratio
);
1005 tr
= gimplify_build2 (bsi
, PLUS_EXPR
, inner_type
, t1
, ai
);
1007 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, ratio
);
1008 ti
= gimplify_build2 (bsi
, MINUS_EXPR
, inner_type
, t1
, ar
);
1010 tr
= gimplify_build2 (bsi
, code
, inner_type
, tr
, div
);
1011 ti
= gimplify_build2 (bsi
, code
, inner_type
, ti
, div
);
/* When we actually branched, store the results into the shared
   temps so the join sees them.  */
1015 t1
= build (MODIFY_EXPR
, inner_type
, rr
, tr
);
1016 bsi_insert_before (bsi
, t1
, BSI_SAME_STMT
);
1017 t1
= build (MODIFY_EXPR
, inner_type
, ri
, ti
);
1018 bsi_insert_before (bsi
, t1
, BSI_SAME_STMT
);
1023 /* In the FALSE branch, we compute
1025 divisor = (d * ratio) + c;
1026 tr = (b * ratio) + a;
1027 ti = b - (a * ratio);
1030 if (bb_false
|| integer_zerop (cond
))
1034 *bsi
= bsi_last (bb_false
);
1035 bsi_insert_after (bsi
, build_empty_stmt (), BSI_NEW_STMT
);
1038 ratio
= gimplify_build2 (bsi
, code
, inner_type
, bi
, br
);
1040 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, bi
, ratio
);
1041 div
= gimplify_build2 (bsi
, PLUS_EXPR
, inner_type
, t1
, br
);
1043 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ai
, ratio
);
1044 tr
= gimplify_build2 (bsi
, PLUS_EXPR
, inner_type
, t1
, ar
);
1046 t1
= gimplify_build2 (bsi
, MULT_EXPR
, inner_type
, ar
, ratio
);
1047 ti
= gimplify_build2 (bsi
, MINUS_EXPR
, inner_type
, ai
, t1
);
1049 tr
= gimplify_build2 (bsi
, code
, inner_type
, tr
, div
);
1050 ti
= gimplify_build2 (bsi
, code
, inner_type
, ti
, div
);
1054 t1
= build (MODIFY_EXPR
, inner_type
, rr
, tr
);
1055 bsi_insert_before (bsi
, t1
, BSI_SAME_STMT
);
1056 t1
= build (MODIFY_EXPR
, inner_type
, ri
, ti
);
1057 bsi_insert_before (bsi
, t1
, BSI_SAME_STMT
);
/* Reposition the iterator at the join block before rewriting the
   original assignment.  */
1063 *bsi
= bsi_start (bb_join
);
1067 update_complex_assignment (bsi
, rr
, ri
);
1070 /* Expand complex division to scalars. */
/* NOTE(review): return type, braces, the declarations of RR/RI,
   `break's, per-case assignments of the untouched component, and the
   flag_complex_method case labels (0/1/2 per the comments) are elided in
   this excerpt -- code kept byte-identical.  Degenerate divisor lattices
   reduce to scalar divides; the general case dispatches on
   -fcx-limited-range / -fcx-fortran-rules style method selection.  */
1073 expand_complex_division (block_stmt_iterator
*bsi
, tree inner_type
,
1074 tree ar
, tree ai
, tree br
, tree bi
,
1075 enum tree_code code
,
1076 complex_lattice_t al
, complex_lattice_t bl
)
1080 switch (PAIR (al
, bl
))
1082 case PAIR (ONLY_REAL
, ONLY_REAL
):
1083 rr
= gimplify_build2 (bsi
, code
, inner_type
, ar
, br
);
1087 case PAIR (ONLY_REAL
, ONLY_IMAG
):
1089 ri
= gimplify_build2 (bsi
, code
, inner_type
, ar
, bi
);
1090 ri
= gimplify_build1 (bsi
, NEGATE_EXPR
, inner_type
, ri
);
1093 case PAIR (ONLY_IMAG
, ONLY_REAL
):
1095 ri
= gimplify_build2 (bsi
, code
, inner_type
, ai
, br
);
1098 case PAIR (ONLY_IMAG
, ONLY_IMAG
):
1099 rr
= gimplify_build2 (bsi
, code
, inner_type
, ai
, bi
);
1103 case PAIR (VARYING
, ONLY_REAL
):
1104 rr
= gimplify_build2 (bsi
, code
, inner_type
, ar
, br
);
1105 ri
= gimplify_build2 (bsi
, code
, inner_type
, ai
, br
);
1108 case PAIR (VARYING
, ONLY_IMAG
):
1109 rr
= gimplify_build2 (bsi
, code
, inner_type
, ai
, bi
);
1110 ri
= gimplify_build2 (bsi
, code
, inner_type
, ar
, bi
);
1111 ri
= gimplify_build1 (bsi
, NEGATE_EXPR
, inner_type
, ri
);
1113 case PAIR (ONLY_REAL
, VARYING
):
1114 case PAIR (ONLY_IMAG
, VARYING
):
1115 case PAIR (VARYING
, VARYING
):
1116 switch (flag_complex_method
)
1119 /* straightforward implementation of complex divide acceptable. */
1120 expand_complex_div_straight (bsi
, inner_type
, ar
, ai
, br
, bi
, code
);
1124 if (SCALAR_FLOAT_TYPE_P (inner_type
))
1126 expand_complex_libcall (bsi
, ar
, ai
, br
, bi
, code
);
1132 /* wide ranges of inputs must work for complex divide. */
1133 expand_complex_div_wide (bsi
, inner_type
, ar
, ai
, br
, bi
, code
);
1145 update_complex_assignment (bsi
, rr
, ri
);
1148 /* Expand complex negation to scalars:
/* -a = (-ar) + i(-ai).  NOTE(review): the formula line, return type,
   parameter tail (presumably ar/ai), braces, and RR/RI declarations are
   elided in this excerpt -- code kept byte-identical.  */
1153 expand_complex_negation (block_stmt_iterator
*bsi
, tree inner_type
,
1158 rr
= gimplify_build1 (bsi
, NEGATE_EXPR
, inner_type
, ar
);
1159 ri
= gimplify_build1 (bsi
, NEGATE_EXPR
, inner_type
, ai
);
1161 update_complex_assignment (bsi
, rr
, ri
);
1164 /* Expand complex conjugate to scalars:
/* ~a = (ar) + i(-ai) -- the real part is passed through unchanged.
   NOTE(review): the formula line, return type, parameter tail, braces,
   and the RI declaration are elided in this excerpt -- code kept
   byte-identical.  */
1169 expand_complex_conjugate (block_stmt_iterator
*bsi
, tree inner_type
,
1174 ri
= gimplify_build1 (bsi
, NEGATE_EXPR
, inner_type
, ai
);
1176 update_complex_assignment (bsi
, ar
, ri
);
1179 /* Expand complex comparison (EQ or NE only). */
/* NOTE(review): return type, braces, the case labels of the final switch
   (RETURN_EXPR / COND_EXPR-ish / MODIFY_EXPR per the accessors used),
   and a default are elided in this excerpt -- code kept byte-identical.
   EQ combines the component comparisons with AND, NE with OR, then the
   scalar boolean is substituted back into the consuming statement.  */
1182 expand_complex_comparison (block_stmt_iterator
*bsi
, tree ar
, tree ai
,
1183 tree br
, tree bi
, enum tree_code code
)
1185 tree cr
, ci
, cc
, stmt
, expr
, type
;
1187 cr
= gimplify_build2 (bsi
, code
, boolean_type_node
, ar
, br
);
1188 ci
= gimplify_build2 (bsi
, code
, boolean_type_node
, ai
, bi
);
1189 cc
= gimplify_build2 (bsi
,
1190 (code
== EQ_EXPR
? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
),
1191 boolean_type_node
, cr
, ci
);
1193 stmt
= expr
= bsi_stmt (*bsi
);
1195 switch (TREE_CODE (stmt
))
1198 expr
= TREE_OPERAND (stmt
, 0);
1201 type
= TREE_TYPE (TREE_OPERAND (expr
, 1));
1202 TREE_OPERAND (expr
, 1) = fold_convert (type
, cc
);
1205 TREE_OPERAND (stmt
, 0) = cc
;
1214 /* Process one statement. If we identify a complex operation, expand it. */
/* NOTE(review): return type, braces, many case labels (the RETURN_EXPR
   unwrap, the operation filter switch, the MODIFY_EXPR/comparison
   handling, and the dispatch switch at the end), `return's, and the
   lattice-based VARYING promotion bodies are elided in this excerpt --
   code kept byte-identical.  Flow: unwrap RETURN_EXPR; filter to the
   tree codes handled; extract ar/ai (and br/bi for binary ops, reusing
   components when both operands are the same value); look up lattice
   values; dispatch to the expand_complex_* helper.  */
1217 expand_complex_operations_1 (block_stmt_iterator
*bsi
)
1219 tree stmt
= bsi_stmt (*bsi
);
1220 tree rhs
, type
, inner_type
;
1221 tree ac
, ar
, ai
, bc
, br
, bi
;
1222 complex_lattice_t al
, bl
;
1223 enum tree_code code
;
1225 switch (TREE_CODE (stmt
))
1228 stmt
= TREE_OPERAND (stmt
, 0);
1231 if (TREE_CODE (stmt
) != MODIFY_EXPR
)
1236 rhs
= TREE_OPERAND (stmt
, 1);
1240 rhs
= TREE_OPERAND (stmt
, 0);
1247 type
= TREE_TYPE (rhs
);
1248 code
= TREE_CODE (rhs
);
1250 /* Initial filter for operations we handle. */
1256 case TRUNC_DIV_EXPR
:
1258 case FLOOR_DIV_EXPR
:
1259 case ROUND_DIV_EXPR
:
1263 if (TREE_CODE (type
) != COMPLEX_TYPE
)
1265 inner_type
= TREE_TYPE (type
);
/* For comparisons the result type is scalar; the complex type lives
   on the operands instead.  */
1270 inner_type
= TREE_TYPE (TREE_OPERAND (rhs
, 1));
1271 if (TREE_CODE (inner_type
) != COMPLEX_TYPE
)
1277 tree lhs
= TREE_OPERAND (stmt
, 0);
1278 tree rhs
= TREE_OPERAND (stmt
, 1);
1280 if (TREE_CODE (type
) == COMPLEX_TYPE
)
1281 expand_complex_move (bsi
, stmt
, type
, lhs
, rhs
);
1282 else if ((TREE_CODE (rhs
) == REALPART_EXPR
1283 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
1284 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
1286 TREE_OPERAND (stmt
, 1)
1287 = extract_component (bsi
, TREE_OPERAND (rhs
, 0),
1288 TREE_CODE (rhs
) == IMAGPART_EXPR
, false);
1295 /* Extract the components of the two complex values. Make sure and
1296 handle the common case of the same value used twice specially. */
1297 ac
= TREE_OPERAND (rhs
, 0);
1298 ar
= extract_component (bsi
, ac
, 0, true);
1299 ai
= extract_component (bsi
, ac
, 1, true);
1301 if (TREE_CODE_CLASS (code
) == tcc_unary
)
1302 bc
= br
= bi
= NULL
;
1305 bc
= TREE_OPERAND (rhs
, 1);
1310 br
= extract_component (bsi
, bc
, 0, true);
1311 bi
= extract_component (bsi
, bc
, 1, true);
1317 al
= find_lattice_value (ac
);
1318 if (al
== UNINITIALIZED
)
1321 if (TREE_CODE_CLASS (code
) == tcc_unary
)
1327 bl
= find_lattice_value (bc
);
1328 if (bl
== UNINITIALIZED
)
1339 expand_complex_addition (bsi
, inner_type
, ar
, ai
, br
, bi
, code
, al
, bl
);
1343 expand_complex_multiplication (bsi
, inner_type
, ar
, ai
, br
, bi
, al
, bl
);
1346 case TRUNC_DIV_EXPR
:
1348 case FLOOR_DIV_EXPR
:
1349 case ROUND_DIV_EXPR
:
1351 expand_complex_division (bsi
, inner_type
, ar
, ai
, br
, bi
, code
, al
, bl
);
1355 expand_complex_negation (bsi
, inner_type
, ar
, ai
);
1359 expand_complex_conjugate (bsi
, inner_type
, ar
, ai
);
1364 expand_complex_comparison (bsi
, ar
, ai
, br
, bi
, code
);
1373 /* Entry point for complex operation lowering during optimization. */
/* NOTE(review): return type, braces, the early-return after
   init_dont_simulate_again, the FOR_EACH_BB loop header, and the
   `continue' for newly-created blocks are elided in this excerpt -- code
   kept byte-identical.  Pipeline: build the lattice via the SSA
   propagator, create component temps, then walk every pre-existing block
   expanding complex PHIs and statements; edge insertions are committed
   at the end.  */
1376 tree_lower_complex (void)
1378 int old_last_basic_block
;
1379 block_stmt_iterator bsi
;
1382 if (!init_dont_simulate_again ())
1385 complex_lattice_values
= VEC_alloc (complex_lattice_t
, heap
, num_ssa_names
);
1386 VEC_safe_grow (complex_lattice_t
, heap
,
1387 complex_lattice_values
, num_ssa_names
);
1388 memset (VEC_address (complex_lattice_t
, complex_lattice_values
), 0,
1389 num_ssa_names
* sizeof(complex_lattice_t
));
1390 init_parameter_lattice_values ();
1392 ssa_propagate (complex_visit_stmt
, complex_visit_phi
);
1394 create_components ();
1395 update_parameter_components ();
/* Blocks created while expanding (e.g. by the wide-division CFG
   surgery) must not be re-walked.  */
1397 old_last_basic_block
= last_basic_block
;
1400 if (bb
->index
>= old_last_basic_block
)
1402 update_phi_components (bb
);
1403 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
1404 expand_complex_operations_1 (&bsi
);
1407 bsi_commit_edge_inserts ();
1409 VEC_free (tree
, heap
, complex_variable_components
);
1410 VEC_free (complex_lattice_t
, heap
, complex_lattice_values
);
/* Pass descriptor for the optimizing complex-lowering pass.  Several
   initializer fields (gate, tv_id, properties, letter) are elided in
   this excerpt -- code kept byte-identical.  */
1413 struct tree_opt_pass pass_lower_complex
=
1415 "cplxlower", /* name */
1417 tree_lower_complex
, /* execute */
1420 0, /* static_pass_number */
1422 PROP_ssa
, /* properties_required */
1423 0, /* properties_provided */
1424 0, /* properties_destroyed */
1425 0, /* todo_flags_start */
1426 TODO_dump_func
| TODO_ggc_collect
1428 | TODO_verify_stmts
, /* todo_flags_finish */
1433 /* Entry point for complex operation lowering without optimization. */
/* NOTE(review): return type, braces, the FOR_EACH_BB loop header, and
   the `continue' guard body are elided in this excerpt -- code kept
   byte-identical.  Unlike tree_lower_complex, no lattice propagation is
   run; statements are expanded directly.  */
1436 tree_lower_complex_O0 (void)
1438 int old_last_basic_block
= last_basic_block
;
1439 block_stmt_iterator bsi
;
1444 if (bb
->index
>= old_last_basic_block
)
1446 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); bsi_next (&bsi
))
1447 expand_complex_operations_1 (&bsi
);
/* Gate: run the -O0 variant only when not optimizing.  (Return type and
   braces elided in this excerpt.)  */
1452 gate_no_optimization (void)
1454 return optimize
== 0;
/* Pass descriptor for the non-optimizing (-O0) complex-lowering pass.
   Several initializer fields are elided in this excerpt -- code kept
   byte-identical.  */
1457 struct tree_opt_pass pass_lower_complex_O0
=
1459 "cplxlower0", /* name */
1460 gate_no_optimization
, /* gate */
1461 tree_lower_complex_O0
, /* execute */
1464 0, /* static_pass_number */
1466 PROP_cfg
, /* properties_required */
1467 0, /* properties_provided */
1468 0, /* properties_destroyed */
1469 0, /* todo_flags_start */
1470 TODO_dump_func
| TODO_ggc_collect
1471 | TODO_verify_stmts
, /* todo_flags_finish */