/* Lower complex number operations to scalar operations.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-hasher.h"
/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only imaginary parts.  */

enum
{
  UNINITIALIZED = 0,
  ONLY_REAL = 1,
  ONLY_IMAG = 2,
  VARYING = 3
};

/* The type complex_lattice_t holds combinations of the above
   constants.  */
typedef int complex_lattice_t;

#define PAIR(a, b)  ((a) << 2 | (b))
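/* Illustrative note, assuming the encoding above: the lattice is a two-bit
   set, so the meet of two values is simply bitwise OR -- ONLY_REAL |
   ONLY_IMAG == VARYING and UNINITIALIZED is the identity -- which is why
   the propagation code below merges values with "|".  PAIR packs both
   operand lattices into one small integer so a switch can dispatch on the
   combination, e.g. PAIR (ONLY_REAL, ONLY_IMAG) == (1 << 2) | 2 == 6,
   matching a "case PAIR (ONLY_REAL, ONLY_IMAG):" label.  */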
static vec<complex_lattice_t> complex_lattice_values;
/* For each complex variable, a pair of variables for the components exists in
   the hashtable.  */
static int_tree_htab_type complex_variable_components;

/* For each complex SSA_NAME, a pair of ssa names for the components.  */
static vec<tree> complex_ssa_name_components;
/* Lookup UID in the complex_variable_components hashtable and return the
   associated tree.  */

static tree
cvc_lookup (unsigned int uid)
{
  struct int_tree_map *h, in;
  in.uid = uid;
  h = complex_variable_components.find_with_hash (&in, uid);
  return h ? h->to : NULL;
}
/* Insert the pair UID, TO into the complex_variable_components hashtable.  */

static void
cvc_insert (unsigned int uid, tree to)
{
  struct int_tree_map *h;
  struct int_tree_map **loc;

  h = XNEW (struct int_tree_map);
  h->uid = uid;
  h->to = to;
  loc = complex_variable_components.find_slot_with_hash (h, uid, INSERT);
  *loc = h;
}
/* Return true if T is not a zero constant.  In the case of real values,
   we're only interested in +0.0.  */

static int
some_nonzerop (tree t)
{
  int zerop = false;

  /* Operations with real or imaginary part of a complex number zero
     cannot be treated the same as operations with a real or imaginary
     operand if we care about the signs of zeros in the result.  */
  if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros)
    zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
  else if (TREE_CODE (t) == FIXED_CST)
    zerop = fixed_zerop (t);
  else if (TREE_CODE (t) == INTEGER_CST)
    zerop = integer_zerop (t);

  return !zerop;
}
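/* Illustrative example of the signed-zero restriction above: with
   -fno-signed-zeros a constant real part of 0.0 lets the pass treat the
   number as purely imaginary and drop the real-part arithmetic.  When
   signed zeros matter, 0.0 and -0.0 are distinct results (e.g. 0.0 * -1.0
   is -0.0), so a 0.0 REAL_CST is deliberately reported as nonzero here and
   the full expansion is kept.  */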
/* Compute a lattice value from the components of a complex type REAL
   and IMAG.  */

static complex_lattice_t
find_lattice_value_parts (tree real, tree imag)
{
  int r, i;
  complex_lattice_t ret;

  r = some_nonzerop (real);
  i = some_nonzerop (imag);
  ret = r * ONLY_REAL + i * ONLY_IMAG;

  /* ??? On occasion we could do better than mapping 0+0i to real, but we
     certainly don't want to leave it UNINITIALIZED, which eventually gets
     mapped to VARYING.  */
  if (ret == UNINITIALIZED)
    ret = ONLY_REAL;

  return ret;
}
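/* Illustrative example: for the integer complex constant 3 + 0i,
   some_nonzerop yields r = 1 and i = 0, so ret = 1 * ONLY_REAL
   + 0 * ONLY_IMAG = ONLY_REAL; for 0 + 0i both components are zero and the
   UNINITIALIZED result is nudged to ONLY_REAL as the comment above says.  */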
/* Compute a lattice value from gimple_val T.  */

static complex_lattice_t
find_lattice_value (tree t)
{
  tree real, imag;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      return complex_lattice_values[SSA_NAME_VERSION (t)];

    case COMPLEX_CST:
      real = TREE_REALPART (t);
      imag = TREE_IMAGPART (t);
      break;

    default:
      gcc_unreachable ();
    }

  return find_lattice_value_parts (real, imag);
}
/* Determine if LHS is something for which we're interested in seeing
   simulation results.  */

static bool
is_complex_reg (tree lhs)
{
  return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
}
/* Mark the incoming parameters to the function as VARYING.  */

static void
init_parameter_lattice_values (void)
{
  tree parm, ssa_name;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
    if (is_complex_reg (parm)
	&& (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE)
      complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING;
}
/* Initialize simulation state for each statement.  Return false if we
   found no statements we want to simulate, and thus there's nothing
   for the entire pass to do.  */

static bool
init_dont_simulate_again (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple phi;
  bool saw_a_complex_op = false;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  prop_set_simulate_again (phi,
				   is_complex_reg (gimple_phi_result (phi)));
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt;
	  tree op0, op1;
	  bool sim_again_p;

	  stmt = gsi_stmt (gsi);
	  op0 = op1 = NULL_TREE;

	  /* Most control-altering statements must be initially
	     simulated, else we won't cover the entire cfg.  */
	  sim_again_p = stmt_ends_bb_p (stmt);

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_CALL:
	      if (gimple_call_lhs (stmt))
		sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
	      break;

	    case GIMPLE_ASSIGN:
	      sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
	      if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
	      else
		op0 = gimple_assign_rhs1 (stmt);
	      if (gimple_num_ops (stmt) > 2)
		op1 = gimple_assign_rhs2 (stmt);
	      break;

	    case GIMPLE_COND:
	      op0 = gimple_cond_lhs (stmt);
	      op1 = gimple_cond_rhs (stmt);
	      break;

	    default:
	      break;
	    }

	  if (op0 || op1)
	    switch (gimple_expr_code (stmt))
	      {
	      case EQ_EXPR:
	      case NE_EXPR:
	      case PLUS_EXPR:
	      case MINUS_EXPR:
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case CEIL_DIV_EXPR:
	      case FLOOR_DIV_EXPR:
	      case ROUND_DIV_EXPR:
	      case RDIV_EXPR:
		if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      case NEGATE_EXPR:
	      case CONJ_EXPR:
		if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		/* The total store transformation performed during
		   gimplification creates such uninitialized loads
		   and we need to lower the statement to be able
		   to fix things up.  */
		if (TREE_CODE (op0) == SSA_NAME
		    && ssa_undefined_value_p (op0))
		  saw_a_complex_op = true;
		break;

	      default:
		break;
	      }

	  prop_set_simulate_again (stmt, sim_again_p);
	}
    }

  return saw_a_complex_op;
}
/* Evaluate statement STMT against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
		    tree *result_p)
{
  complex_lattice_t new_l, old_l, op1_l, op2_l;
  unsigned int ver;
  tree lhs;

  lhs = gimple_get_lhs (stmt);
  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
  if (!lhs)
    return SSA_PROP_VARYING;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  *result_p = lhs;
  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  switch (gimple_expr_code (stmt))
    {
    case SSA_NAME:
    case COMPLEX_CST:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    case COMPLEX_EXPR:
      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
					gimple_assign_rhs2 (stmt));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* We've set up the lattice values such that IOR neatly
	 models addition.  */
      new_l = op1_l | op2_l;
      break;

    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)
	new_l = VARYING;
      /* Don't prematurely promote variables if we've not yet seen
	 their inputs.  */
      else if (op1_l == UNINITIALIZED)
	new_l = op2_l;
      else if (op2_l == UNINITIALIZED)
	new_l = op1_l;
      else
	{
	  /* At this point both numbers have only one component.  If the
	     numbers are of opposite kind, the result is imaginary,
	     otherwise the result is real.  The add/subtract translates
	     the real/imag from/to 0/1; the ^ performs the comparison.  */
	  new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;
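	  /* Worked example (illustrative, assuming ONLY_REAL == 1 and
	     ONLY_IMAG == 2): real * imag gives ((1-1) ^ (2-1)) + 1 == 2
	     == ONLY_IMAG, while imag * imag gives ((2-1) ^ (2-1)) + 1
	     == 1 == ONLY_REAL, matching i*i == -1 staying on the real
	     axis.  */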
	  /* Don't allow the lattice value to flip-flop indefinitely.  */
	  new_l |= old_l;
	}
      break;

    case NEGATE_EXPR:
    case CONJ_EXPR:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    default:
      new_l = VARYING;
      break;
    }

  /* If nothing changed this round, let the propagator know.  */
  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
/* Evaluate a PHI node against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_phi (gimple phi)
{
  complex_lattice_t new_l, old_l;
  unsigned int ver;
  tree lhs;
  int i;

  lhs = gimple_phi_result (phi);

  /* This condition should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  /* We've set up the lattice values such that IOR neatly models PHI meet.  */
  new_l = UNINITIALIZED;
  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));

  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
/* Create one backing variable for a complex component of ORIG.  */

static tree
create_one_component_var (tree type, tree orig, const char *prefix,
			  const char *suffix, enum tree_code code)
{
  tree r = create_tmp_var (type, prefix);

  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
  DECL_ARTIFICIAL (r) = 1;

  if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));

      DECL_NAME (r) = get_identifier (ACONCAT ((name, suffix, NULL)));

      SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
      DECL_HAS_DEBUG_EXPR_P (r) = 1;
      DECL_IGNORED_P (r) = 0;
      TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
    }
  else
    {
      DECL_IGNORED_P (r) = 1;
      TREE_NO_WARNING (r) = 1;
    }

  return r;
}
/* Retrieve a value for a complex component of VAR.  */

static tree
get_component_var (tree var, bool imag_p)
{
  size_t decl_index = DECL_UID (var) * 2 + imag_p;
  tree ret = cvc_lookup (decl_index);

  if (ret == NULL)
    {
      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
				      imag_p ? "CI" : "CR",
				      imag_p ? "$imag" : "$real",
				      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
      cvc_insert (decl_index, ret);
    }

  return ret;
}
/* Retrieve a value for a complex component of SSA_NAME.  */

static tree
get_component_ssa_name (tree ssa_name, bool imag_p)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree ret;

  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    {
      tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
      if (SCALAR_FLOAT_TYPE_P (inner_type))
	return build_real (inner_type, dconst0);
      else
	return build_int_cst (inner_type, 0);
    }

  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  ret = complex_ssa_name_components[ssa_name_index];
  if (ret == NULL)
    {
      if (SSA_NAME_VAR (ssa_name))
	ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
      else
	ret = TREE_TYPE (TREE_TYPE (ssa_name));
      ret = make_ssa_name (ret, NULL);

      /* Copy some properties from the original.  In particular, whether it
	 is used in an abnormal phi, and whether it's uninitialized.  */
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
      if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
	  && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL)
	{
	  SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);
	}

      complex_ssa_name_components[ssa_name_index] = ret;
    }

  return ret;
}
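/* Illustrative note: both component caches use the same packing scheme --
   the real component of a decl or SSA version N lives at index 2*N and the
   imaginary one at 2*N + 1 (the bool imag_p is simply added in) -- which is
   why complex_ssa_name_components is sized at 2 * num_ssa_names when the
   pass starts up below.  */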
/* Set a value for a complex component of SSA_NAME, return a
   gimple_seq of stuff that needs doing.  */

static gimple_seq
set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree comp;
  gimple last;
  gimple_seq list;

  /* We know the value must be zero, else there's a bug in our lattice
     analysis.  But the value may well be a variable known to contain
     zero.  We should be safe ignoring it.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    return NULL;

  /* If we've already assigned an SSA_NAME to this component, then this
     means that our walk of the basic blocks found a use before the set.
     This is fine.  Now we should create an initialization for the value
     we created earlier.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  comp = complex_ssa_name_components[ssa_name_index];
  if (comp)
    ;

  /* If we've nothing assigned, and the value we're given is already stable,
     then install that as the value for this SSA_NAME.  This preemptively
     copy-propagates the value, which avoids unnecessary memory allocation.  */
  else if (is_gimple_min_invariant (value)
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }
  else if (TREE_CODE (value) == SSA_NAME
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      /* Replace an anonymous base value with the variable from cvc_lookup.
	 This should result in better debug info.  */
      if (SSA_NAME_VAR (ssa_name)
	  && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value)))
	  && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
	{
	  comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
	  replace_ssa_name_symbol (value, comp);
	}

      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }

  /* Finally, we need to stabilize the result by installing the value into
     a new ssa name.  */
  else
    comp = get_component_ssa_name (ssa_name, imag_p);

  /* Do all the work to assign VALUE to COMP.  */
  list = NULL;
  value = force_gimple_operand (value, &list, false, NULL);
  last = gimple_build_assign (comp, value);
  gimple_seq_add_stmt (&list, last);
  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);

  return list;
}
/* Extract the real or imaginary part of a complex variable or constant.
   Make sure that it's a proper gimple_val and gimplify it if not.
   Emit any new code before gsi.  */

static tree
extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
		   bool gimple_p)
{
  switch (TREE_CODE (t))
    {
    case COMPLEX_CST:
      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);

    case VAR_DECL:
    case RESULT_DECL:
    case PARM_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case VIEW_CONVERT_EXPR:
    case MEM_REF:
      {
	tree inner_type = TREE_TYPE (TREE_TYPE (t));

	t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
		    inner_type, unshare_expr (t));

	if (gimple_p)
	  t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
					GSI_SAME_STMT);

	return t;
      }

    case SSA_NAME:
      return get_component_ssa_name (t, imagpart_p);

    default:
      gcc_unreachable ();
    }
}
/* Update the complex components of the ssa name on the lhs of STMT.  */

static void
update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
			   tree i)
{
  tree lhs;
  gimple_seq list;

  lhs = gimple_get_lhs (stmt);

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
}
static void
update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
{
  gimple_seq list;

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_on_edge (e, list);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_on_edge (e, list);
}
/* Update an assignment to a complex variable in place.  */

static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
  gimple stmt;

  gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
  stmt = gsi_stmt (*gsi);

  if (maybe_clean_eh_stmt (stmt))
    gimple_purge_dead_eh_edges (gimple_bb (stmt));

  if (gimple_in_ssa_p (cfun))
    update_complex_components (gsi, gsi_stmt (*gsi), r, i);
}
/* Generate code at the entry point of the function to initialize the
   component variables for a complex parameter.  */

static void
update_parameter_components (void)
{
  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
  tree parm;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree ssa_name, r, i;

      if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
	continue;

      type = TREE_TYPE (type);
      ssa_name = ssa_default_def (cfun, parm);
      if (ssa_name == NULL_TREE)
	continue;

      r = build1 (REALPART_EXPR, type, ssa_name);
      i = build1 (IMAGPART_EXPR, type, ssa_name);
      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
    }
}
/* Generate code to set the component variables of a complex variable
   to match the PHI statements in block BB.  */

static void
update_phi_components (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      if (is_complex_reg (gimple_phi_result (phi)))
	{
	  tree lr, li;
	  gimple pr = NULL, pi = NULL;
	  unsigned int i, n;

	  lr = get_component_ssa_name (gimple_phi_result (phi), false);
	  if (TREE_CODE (lr) == SSA_NAME)
	    pr = create_phi_node (lr, bb);

	  li = get_component_ssa_name (gimple_phi_result (phi), true);
	  if (TREE_CODE (li) == SSA_NAME)
	    pi = create_phi_node (li, bb);

	  for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
	    {
	      tree comp, arg = gimple_phi_arg_def (phi, i);
	      if (pr)
		{
		  comp = extract_component (NULL, arg, false, false);
		  SET_PHI_ARG_DEF (pr, i, comp);
		}
	      if (pi)
		{
		  comp = extract_component (NULL, arg, true, false);
		  SET_PHI_ARG_DEF (pi, i, comp);
		}
	    }
	}
    }
}
/* Expand a complex move to scalars.  */

static void
expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
  tree inner_type = TREE_TYPE (type);
  tree r, i, lhs, rhs;
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
	rhs = gimple_assign_rhs1 (stmt);
      else
	rhs = NULL_TREE;
    }
  else if (is_gimple_call (stmt))
    {
      lhs = gimple_call_lhs (stmt);
      rhs = NULL_TREE;
    }
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  /* The value is not assigned on the exception edges, so we need not
	     concern ourselves there.  We do need to update on the fallthru
	     edge.  Find it.  */
	  e = find_fallthru_edge (gsi_bb (*gsi)->succs);

	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components_on_edge (e, lhs, r, i);
	}
      else if (is_gimple_call (stmt)
	       || gimple_has_side_effects (stmt)
	       || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
	{
	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components (gsi, stmt, r, i);
	}
      else
	{
	  if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
	    {
	      r = extract_component (gsi, rhs, 0, true);
	      i = extract_component (gsi, rhs, 1, true);
	    }
	  else
	    {
	      r = gimple_assign_rhs1 (stmt);
	      i = gimple_assign_rhs2 (stmt);
	    }
	  update_complex_assignment (gsi, r, i);
	}
    }
  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
    {
      tree x;
      gimple t;

      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      t = gimple_build_assign (x, r);
      gsi_insert_before (gsi, t, GSI_SAME_STMT);

      if (stmt == gsi_stmt (*gsi))
	{
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  gimple_assign_set_lhs (stmt, x);
	  gimple_assign_set_rhs1 (stmt, i);
	}
      else
	{
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  t = gimple_build_assign (x, i);
	  gsi_insert_before (gsi, t, GSI_SAME_STMT);

	  stmt = gsi_stmt (*gsi);
	  gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
	  gimple_return_set_retval (stmt, lhs);
	}
    }
}
/* Expand complex addition to scalars:
	a + b = (ar + br) + i(ai + bi)
	a - b = (ar - br) + i(ai - bi)
*/

static void
expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code,
			 complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ar;
      if (code == MINUS_EXPR)
	ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
      else
	ri = bi;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (code == MINUS_EXPR)
	rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
      else
	rr = br;
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (ONLY_REAL, VARYING):
      if (code == MINUS_EXPR)
	goto general;
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = bi;
      break;

    case PAIR (ONLY_IMAG, VARYING):
      if (code == MINUS_EXPR)
	goto general;
      rr = br;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, VARYING):
    general:
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}
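/* Illustrative example: for z = a + b where propagation proved a ONLY_REAL
   and b ONLY_IMAG, no scalar additions are emitted at all -- the statement
   simply becomes COMPLEX_EXPR <ar, bi>.  Only the fully VARYING case above
   needs both component additions.  */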
/* Expand a complex multiplication or division to a libcall to the c99
   compliant routines.  */

static void
expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
			tree br, tree bi, enum tree_code code)
{
  enum machine_mode mode;
  enum built_in_function bcode;
  tree fn, type, lhs;
  gimple old_stmt, stmt;

  old_stmt = gsi_stmt (*gsi);
  lhs = gimple_assign_lhs (old_stmt);
  type = TREE_TYPE (lhs);

  mode = TYPE_MODE (type);
  gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

  if (code == MULT_EXPR)
    bcode = ((enum built_in_function)
	     (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else if (code == RDIV_EXPR)
    bcode = ((enum built_in_function)
	     (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else
    gcc_unreachable ();
  fn = builtin_decl_explicit (bcode);

  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
  gimple_call_set_lhs (stmt, lhs);
  gsi_replace (gsi, stmt, false);

  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    gimple_purge_dead_eh_edges (gsi_bb (*gsi));

  if (gimple_in_ssa_p (cfun))
    {
      type = TREE_TYPE (type);
      update_complex_components (gsi, stmt,
				 build1 (REALPART_EXPR, type, lhs),
				 build1 (IMAGPART_EXPR, type, lhs));
      SSA_NAME_DEF_STMT (lhs) = stmt;
    }
}
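/* Illustrative note: the builtins selected above resolve to the libgcc
   helpers such as __mulsc3/__muldc3 for multiplication and
   __divsc3/__divdc3 for division, one per complex floating-point mode,
   which is how the full C99 Annex G semantics (infinities, NaNs, signed
   zeros) are preserved when the inline expansion would not be safe.  */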
/* Expand complex multiplication to scalars:
	a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
*/

static void
expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
			       tree ar, tree ai, tree br, tree bi,
			       complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  if (al < bl)
    {
      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
    }

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      if (TREE_CODE (ai) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
	ri = br;
      else
	ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
      break;

    case PAIR (VARYING, VARYING):
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
	{
	  expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
	  return;
	}
      else
	{
	  tree t1, t2, t3, t4;

	  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
	  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
	  t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);

	  /* Avoid expanding redundant multiplication for the common
	     case of squaring a complex number.  */
	  if (ar == br && ai == bi)
	    t4 = t3;
	  else
	    t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);

	  rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
	  ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
	}
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}
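/* Illustrative example: squaring, w = z * z with z fully VARYING and the
   inline expansion taken, emits only three multiplications -- t4 reuses t3
   because ar == br and ai == bi -- so the result is
   (ar*ar - ai*ai) + i(t3 + t3), i.e. the familiar 2*ar*ai imaginary part.  */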
/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, straightforward algorithm.
	a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	    t = br*br + bi*bi
*/

static void
expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
			     tree ar, tree ai, tree br, tree bi,
			     enum tree_code code)
{
  tree rr, ri, div, t1, t2, t3;

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
  rr = gimplify_build2 (gsi, code, inner_type, t3, div);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
  ri = gimplify_build2 (gsi, code, inner_type, t3, div);

  update_complex_assignment (gsi, rr, ri);
}
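/* Illustrative note: this straightforward form computes t = br*br + bi*bi
   once and divides both numerator parts by it.  It is cheap but overflows
   when br*br + bi*bi exceeds the type's range even though the quotient
   itself is representable; the "wide range" variant below (a Smith-style
   algorithm that scales by the ratio of the larger of |br| and |bi|)
   avoids that at the cost of a runtime branch.  */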
/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  */

static void
expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code)
{
  tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
  basic_block bb_cond, bb_true, bb_false, bb_join;
  gimple stmt;

  /* Examine |br| < |bi|, and branch.  */
  t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
  t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
  compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)),
			     LT_EXPR, boolean_type_node, t1, t2);
  STRIP_NOPS (compare);

  bb_cond = bb_true = bb_false = bb_join = NULL;
  rr = ri = tr = ti = NULL;
  if (TREE_CODE (compare) != INTEGER_CST)
    {
      edge e;
      tree cond, tmp;

      tmp = create_tmp_var (boolean_type_node, NULL);
      stmt = gimple_build_assign (tmp, compare);
      if (gimple_in_ssa_p (cfun))
	{
	  tmp = make_ssa_name (tmp, stmt);
	  gimple_assign_set_lhs (stmt, tmp);
	}

      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      cond = fold_build2_loc (gimple_location (stmt),
			      EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
      stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      /* Split the original block, and create the TRUE and FALSE blocks.  */
      e = split_block (gsi_bb (*gsi), stmt);
      bb_cond = e->src;
      bb_join = e->dest;
      bb_true = create_empty_bb (bb_cond);
      bb_false = create_empty_bb (bb_true);

      /* Wire the blocks together.  */
      e->flags = EDGE_TRUE_VALUE;
      redirect_edge_succ (e, bb_true);
      make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
      make_edge (bb_true, bb_join, EDGE_FALLTHRU);
      make_edge (bb_false, bb_join, EDGE_FALLTHRU);
      if (current_loops)
	{
	  add_bb_to_loop (bb_true, bb_cond->loop_father);
	  add_bb_to_loop (bb_false, bb_cond->loop_father);
	}

      /* Update dominance info.  Note that bb_join's data was
	 updated by split_block.  */
      if (dom_info_available_p (CDI_DOMINATORS))
	{
	  set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
	  set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
	}

      rr = create_tmp_reg (inner_type, NULL);
      ri = create_tmp_reg (inner_type, NULL);
    }

  /* In the TRUE branch, we compute
      ratio = br/bi;
      div = (br * ratio) + bi;
      tr = (ar * ratio) + ai;
      ti = (ai * ratio) - ar;
      tr = tr / div;
      ti = ti / div;  */
  if (bb_true || integer_nonzerop (compare))
    {
      if (bb_true)
	{
	  *gsi = gsi_last_bb (bb_true);
	  gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
	}

      ratio = gimplify_build2 (gsi, code, inner_type, br, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_true)
	{
	  stmt = gimple_build_assign (rr, tr);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  stmt = gimple_build_assign (ri, ti);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (gsi, true);
	}
    }

  /* In the FALSE branch, we compute
      ratio = bi/br;
      div = (bi * ratio) + br;
      tr = (ai * ratio) + ar;
      ti = ai - (ar * ratio);
      tr = tr / div;
      ti = ti / div;  */
  if (bb_false || integer_zerop (compare))
    {
      if (bb_false)
	{
	  *gsi = gsi_last_bb (bb_false);
	  gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
	}

      ratio = gimplify_build2 (gsi, code, inner_type, bi, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_false)
	{
	  stmt = gimple_build_assign (rr, tr);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  stmt = gimple_build_assign (ri, ti);
	  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (gsi, true);
	}
    }

  if (bb_join)
    *gsi = gsi_start_bb (bb_join);
  else
    rr = tr, ri = ti;

  update_complex_assignment (gsi, rr, ri);
}
/* Expand complex division to scalars.  */

static void
expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code,
			 complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ai;
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_REAL, VARYING):
    case PAIR (ONLY_IMAG, VARYING):
    case PAIR (VARYING, VARYING):
      switch (flag_complex_method)
	{
	case 0:
	  /* straightforward implementation of complex divide acceptable.  */
	  expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
	  break;

	case 2:
	  if (SCALAR_FLOAT_TYPE_P (inner_type))
	    {
	      expand_complex_libcall (gsi, ar, ai, br, bi, code);
	      break;
	    }
	  /* FALLTHRU */

	case 1:
	  /* wide ranges of inputs must work for complex divide.  */
	  expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
	  break;

	default:
	  gcc_unreachable ();
	}
      return;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}
/* Expand complex negation to scalars:
	-a = (-ar) + i(-ai)
*/

static void
expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai)
{
  tree rr, ri;

  rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, rr, ri);
}
/* Expand complex conjugate to scalars:
	~a = (ar) + i(-ai)
*/

static void
expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
			  tree ar, tree ai)
{
  tree ri;

  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, ar, ri);
}
/* Expand complex comparison (EQ or NE only).  */

static void
expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
			   tree br, tree bi, enum tree_code code)
{
  tree cr, ci, cc, type;
  gimple stmt;

  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
  cc = gimplify_build2 (gsi,
			(code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
			boolean_type_node, cr, ci);

  stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_RETURN:
      type = TREE_TYPE (gimple_return_retval (stmt));
      gimple_return_set_retval (stmt, fold_convert (type, cc));
      break;

    case GIMPLE_ASSIGN:
      type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
      stmt = gsi_stmt (*gsi);
      break;

    case GIMPLE_COND:
      gimple_cond_set_code (stmt, EQ_EXPR);
      gimple_cond_set_lhs (stmt, cc);
      gimple_cond_set_rhs (stmt, boolean_true_node);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand inline asm that sets some complex SSA_NAMEs.  */

static void
expand_complex_asm (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  unsigned int i;

  for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
    {
      tree link = gimple_asm_output_op (stmt, i);
      tree op = TREE_VALUE (link);
      if (TREE_CODE (op) == SSA_NAME
	  && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE)
	{
	  tree type = TREE_TYPE (op);
	  tree inner_type = TREE_TYPE (type);
	  tree r = build1 (REALPART_EXPR, inner_type, op);
	  tree i = build1 (IMAGPART_EXPR, inner_type, op);
	  gimple_seq list = set_component_ssa_name (op, false, r);

	  if (list)
	    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

	  list = set_component_ssa_name (op, true, i);
	  if (list)
	    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
	}
    }
}
/* Process one statement.  If we identify a complex operation, expand it.  */

static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree type, inner_type, lhs;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      expand_complex_asm (gsi);
      return;
    }

  lhs = gimple_get_lhs (stmt);
  if (!lhs && gimple_code (stmt) != GIMPLE_COND)
    return;

  type = TREE_TYPE (gimple_op (stmt, 0));
  code = gimple_expr_code (stmt);

  /* Initial filter for operations we handle.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
    case NEGATE_EXPR:
    case CONJ_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
	return;
      inner_type = TREE_TYPE (type);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
	 subcode, so we need to access the operands using gimple_op.  */
      inner_type = TREE_TYPE (gimple_op (stmt, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)
	return;
      break;

    default:
      {
	tree rhs;

	/* GIMPLE_COND may also fallthru here, but we do not need to
	   do anything with it.  */
	if (gimple_code (stmt) == GIMPLE_COND)
	  return;

	if (TREE_CODE (type) == COMPLEX_TYPE)
	  expand_complex_move (gsi, type);
	else if (is_gimple_assign (stmt)
		 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		     || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		 && TREE_CODE (lhs) == SSA_NAME)
	  {
	    rhs = gimple_assign_rhs1 (stmt);
	    rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
				     gimple_assign_rhs_code (stmt)
				       == IMAGPART_EXPR,
				     false);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    stmt = gsi_stmt (*gsi);
	  }
      }
      return;
    }

  /* Extract the components of the two complex values.  Make sure and
     handle the common case of the same value used twice specially.  */
  if (is_gimple_assign (stmt))
    {
      ac = gimple_assign_rhs1 (stmt);
      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
    }
  /* GIMPLE_CALL can not get here.  */
  else
    {
      ac = gimple_cond_lhs (stmt);
      bc = gimple_cond_rhs (stmt);
    }

  ar = extract_component (gsi, ac, false, true);
  ai = extract_component (gsi, ac, true, true);

  if (ac == bc)
    br = ar, bi = ai;
  else if (bc)
    {
      br = extract_component (gsi, bc, 0, true);
      bi = extract_component (gsi, bc, 1, true);
    }
  else
    br = bi = NULL_TREE;

  if (gimple_in_ssa_p (cfun))
    {
      al = find_lattice_value (ac);
      if (al == UNINITIALIZED)
	al = VARYING;

      if (TREE_CODE_CLASS (code) == tcc_unary)
	bl = UNINITIALIZED;
      else if (ac == bc)
	bl = al;
      else
	{
	  bl = find_lattice_value (bc);
	  if (bl == UNINITIALIZED)
	    bl = VARYING;
	}
    }
  else
    al = bl = VARYING;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case MULT_EXPR:
      expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
      expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case NEGATE_EXPR:
      expand_complex_negation (gsi, inner_type, ar, ai);
      break;

    case CONJ_EXPR:
      expand_complex_conjugate (gsi, inner_type, ar, ai);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      expand_complex_comparison (gsi, ar, ai, br, bi, code);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Entry point for complex operation lowering during optimization.  */

static unsigned int
tree_lower_complex (void)
{
  int old_last_basic_block;
  gimple_stmt_iterator gsi;
  basic_block bb;

  if (!init_dont_simulate_again ())
    return 0;

  complex_lattice_values.create (num_ssa_names);
  complex_lattice_values.safe_grow_cleared (num_ssa_names);

  init_parameter_lattice_values ();
  ssa_propagate (complex_visit_stmt, complex_visit_phi);

  complex_variable_components.create (10);

  complex_ssa_name_components.create (2 * num_ssa_names);
  complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);

  update_parameter_components ();

  /* ??? Ideally we'd traverse the blocks in breadth-first order.  */
  old_last_basic_block = last_basic_block;
  FOR_EACH_BB (bb)
    {
      if (bb->index >= old_last_basic_block)
	continue;

      update_phi_components (bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	expand_complex_operations_1 (&gsi);
    }

  gsi_commit_edge_inserts ();

  complex_variable_components.dispose ();
  complex_ssa_name_components.release ();
  complex_lattice_values.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_complex =
{
  GIMPLE_PASS, /* type */
  "cplxlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_stmts ), /* todo_flags_finish */
};

class pass_lower_complex : public gimple_opt_pass
{
public:
  pass_lower_complex (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_lower_complex (m_ctxt); }
  unsigned int execute () { return tree_lower_complex (); }

}; // class pass_lower_complex

} // anon namespace

gimple_opt_pass *
make_pass_lower_complex (gcc::context *ctxt)
{
  return new pass_lower_complex (ctxt);
}
static bool
gate_no_optimization (void)
{
  /* With errors, normal optimization passes are not run.  If we don't
     lower complex operations at all, rtl expansion will abort.  */
  return !(cfun->curr_properties & PROP_gimple_lcx);
}

namespace {

const pass_data pass_data_lower_complex_O0 =
{
  GIMPLE_PASS, /* type */
  "cplxlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_stmts ), /* todo_flags_finish */
};

class pass_lower_complex_O0 : public gimple_opt_pass
{
public:
  pass_lower_complex_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_no_optimization (); }
  unsigned int execute () { return tree_lower_complex (); }

}; // class pass_lower_complex_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_complex_O0 (gcc::context *ctxt)
{
  return new pass_lower_complex_O0 (ctxt);
}