/* Lower complex number operations to scalar operations.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-flow.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-hasher.h"
/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only imaginary parts.  */
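/* A minimal sketch of the lattice constants, assuming the values implied by
   their uses later in this file: UNINITIALIZED must be zero (the vector is
   grown zero-cleared) and bitwise IOR must model the lattice meet.  */
enum
{
  UNINITIALIZED = 0,
  ONLY_REAL = 1,
  ONLY_IMAG = 2,
  VARYING = 3
};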
/* The type complex_lattice_t holds combinations of the above
   constants.  */
typedef int complex_lattice_t;

#define PAIR(a, b)  ((a) << 2 | (b))

static vec<complex_lattice_t> complex_lattice_values;
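/* Note: with the encoding assumed above, ONLY_REAL | ONLY_IMAG == VARYING,
   so IOR is exactly the lattice meet, and PAIR packs two lattice values into
   one switchable constant, e.g. PAIR (ONLY_REAL, ONLY_IMAG) == (1 << 2 | 2).  */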
/* For each complex variable, a pair of variables for the components exists in
   the hashtable.  */
static int_tree_htab_type complex_variable_components;
/* For each complex SSA_NAME, a pair of ssa names for the components.  */
static vec<tree> complex_ssa_name_components;
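/* Note: both caches are indexed by 2 * id + imag_p -- DECL_UID for the
   variable hashtable and SSA_NAME_VERSION for the vector -- as can be seen
   in get_component_var and get_component_ssa_name below.  */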
/* Lookup UID in the complex_variable_components hashtable and return the
   associated tree.  */

static tree
cvc_lookup (unsigned int uid)
{
  struct int_tree_map *h, in;
  in.uid = uid;
  h = complex_variable_components.find_with_hash (&in, uid);
  return h ? h->to : NULL;
}
/* Insert the pair UID, TO into the complex_variable_components hashtable.  */

static void
cvc_insert (unsigned int uid, tree to)
{
  struct int_tree_map *h;
  struct int_tree_map **loc;

  h = XNEW (struct int_tree_map);
  h->uid = uid;
  h->to = to;
  loc = complex_variable_components.find_slot_with_hash (h, uid, INSERT);
  *loc = h;
}
/* Return true if T is not a zero constant.  In the case of real values,
   we're only interested in +0.0.  */

static bool
some_nonzerop (tree t)
{
  bool zerop = false;

  /* Operations with real or imaginary part of a complex number zero
     cannot be treated the same as operations with a real or imaginary
     operand if we care about the signs of zeros in the result.  */
  if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros)
    zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
  else if (TREE_CODE (t) == FIXED_CST)
    zerop = fixed_zerop (t);
  else if (TREE_CODE (t) == INTEGER_CST)
    zerop = integer_zerop (t);

  return !zerop;
}
/* Compute a lattice value from the components of a complex type REAL
   and IMAG.  */

static complex_lattice_t
find_lattice_value_parts (tree real, tree imag)
{
  int r, i;
  complex_lattice_t ret;

  r = some_nonzerop (real);
  i = some_nonzerop (imag);
  ret = r * ONLY_REAL + i * ONLY_IMAG;

  /* ??? On occasion we could do better than mapping 0+0i to real, but we
     certainly don't want to leave it UNINITIALIZED, which eventually gets
     mapped to VARYING.  */
  if (ret == UNINITIALIZED)
    ret = ONLY_REAL;

  return ret;
}
/* Compute a lattice value from gimple_val T.  */

static complex_lattice_t
find_lattice_value (tree t)
{
  tree real, imag;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      return complex_lattice_values[SSA_NAME_VERSION (t)];

    case COMPLEX_CST:
      real = TREE_REALPART (t);
      imag = TREE_IMAGPART (t);
      break;

    default:
      gcc_unreachable ();
    }

  return find_lattice_value_parts (real, imag);
}
/* Determine if LHS is something for which we're interested in seeing
   simulation results.  */

static bool
is_complex_reg (tree lhs)
{
  return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
}
/* Mark the incoming parameters to the function as VARYING.  */

static void
init_parameter_lattice_values (void)
{
  tree parm, ssa_name;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
    if (is_complex_reg (parm)
        && (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE)
      complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING;
}
/* Initialize simulation state for each statement.  Return false if we
   found no statements we want to simulate, and thus there's nothing
   for the entire pass to do.  */

static bool
init_dont_simulate_again (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple phi;
  bool saw_a_complex_op = false;

  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          prop_set_simulate_again (phi,
                                   is_complex_reg (gimple_phi_result (phi)));
        }

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt;
          tree op0, op1;
          bool sim_again_p;

          stmt = gsi_stmt (gsi);
          op0 = op1 = NULL_TREE;

          /* Most control-altering statements must be initially
             simulated, else we won't cover the entire cfg.  */
          sim_again_p = stmt_ends_bb_p (stmt);

          switch (gimple_code (stmt))
            {
            case GIMPLE_CALL:
              if (gimple_call_lhs (stmt))
                sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
              break;

            case GIMPLE_ASSIGN:
              sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
              if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
                  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
                op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
              else
                op0 = gimple_assign_rhs1 (stmt);
              if (gimple_num_ops (stmt) > 2)
                op1 = gimple_assign_rhs2 (stmt);
              break;

            case GIMPLE_COND:
              op0 = gimple_cond_lhs (stmt);
              op1 = gimple_cond_rhs (stmt);
              break;

            default:
              break;
            }

          if (op0 || op1)
            switch (gimple_expr_code (stmt))
              {
              case EQ_EXPR:
              case NE_EXPR:
              case PLUS_EXPR:
              case MINUS_EXPR:
              case MULT_EXPR:
              case TRUNC_DIV_EXPR:
              case CEIL_DIV_EXPR:
              case FLOOR_DIV_EXPR:
              case ROUND_DIV_EXPR:
              case RDIV_EXPR:
                if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
                    || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
                  saw_a_complex_op = true;
                break;

              case NEGATE_EXPR:
              case CONJ_EXPR:
                if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
                  saw_a_complex_op = true;
                break;

              case REALPART_EXPR:
              case IMAGPART_EXPR:
                /* The total store transformation performed during
                   gimplification creates such uninitialized loads
                   and we need to lower the statement to be able
                   to fix things up.  */
                if (TREE_CODE (op0) == SSA_NAME
                    && ssa_undefined_value_p (op0))
                  saw_a_complex_op = true;
                break;

              default:
                break;
              }

          prop_set_simulate_again (stmt, sim_again_p);
        }
    }

  return saw_a_complex_op;
}
/* Evaluate statement STMT against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
                    tree *result_p)
{
  complex_lattice_t new_l, old_l, op1_l, op2_l;
  unsigned int ver;
  tree lhs;

  lhs = gimple_get_lhs (stmt);
  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
  if (!lhs)
    return SSA_PROP_VARYING;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  *result_p = lhs;
  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  switch (gimple_expr_code (stmt))
    {
    case SSA_NAME:
    case COMPLEX_CST:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    case COMPLEX_EXPR:
      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
                                        gimple_assign_rhs2 (stmt));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* We've set up the lattice values such that IOR neatly
         models addition.  */
      new_l = op1_l | op2_l;
      break;

    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)
        new_l = VARYING;
      /* Don't prematurely promote variables if we've not yet seen
         their inputs.  */
      else if (op1_l == UNINITIALIZED)
        new_l = op2_l;
      else if (op2_l == UNINITIALIZED)
        new_l = op1_l;
      else
        {
          /* At this point both numbers have only one component.  If the
             numbers are of opposite kind, the result is imaginary,
             otherwise the result is real.  The add/subtract translates
             the real/imag from/to 0/1; the ^ performs the comparison.  */
          new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;
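          /* For example, assuming ONLY_REAL == 1 and ONLY_IMAG == 2:
             ONLY_REAL paired with ONLY_IMAG gives (0 ^ 1) + 1 == ONLY_IMAG,
             while ONLY_IMAG paired with ONLY_IMAG gives (1 ^ 1) + 1
             == ONLY_REAL.  */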
          /* Don't allow the lattice value to flip-flop indefinitely.  */
          new_l |= old_l;
        }
      break;

    case NEGATE_EXPR:
    case CONJ_EXPR:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    default:
      new_l = VARYING;
      break;
    }

  /* If nothing changed this round, let the propagator know.  */
  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
/* Evaluate a PHI node against the complex lattice defined above.  */

static enum ssa_prop_result
complex_visit_phi (gimple phi)
{
  complex_lattice_t new_l, old_l;
  unsigned int ver;
  tree lhs;
  int i;

  lhs = gimple_phi_result (phi);

  /* This condition should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  /* We've set up the lattice values such that IOR neatly models PHI meet.  */
  new_l = UNINITIALIZED;
  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));

  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
/* Create one backing variable for a complex component of ORIG.  */

static tree
create_one_component_var (tree type, tree orig, const char *prefix,
                          const char *suffix, enum tree_code code)
{
  tree r = create_tmp_var (type, prefix);

  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
  DECL_ARTIFICIAL (r) = 1;

  if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));

      DECL_NAME (r) = get_identifier (ACONCAT ((name, suffix, NULL)));

      SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
      DECL_HAS_DEBUG_EXPR_P (r) = 1;
      DECL_IGNORED_P (r) = 0;
      TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
    }
  else
    {
      DECL_IGNORED_P (r) = 1;
      TREE_NO_WARNING (r) = 1;
    }

  return r;
}
/* Retrieve a value for a complex component of VAR.  */

static tree
get_component_var (tree var, bool imag_p)
{
  size_t decl_index = DECL_UID (var) * 2 + imag_p;
  tree ret = cvc_lookup (decl_index);

  if (ret == NULL)
    {
      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
                                      imag_p ? "CI" : "CR",
                                      imag_p ? "$imag" : "$real",
                                      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
      cvc_insert (decl_index, ret);
    }

  return ret;
}
/* Retrieve a value for a complex component of SSA_NAME.  */

static tree
get_component_ssa_name (tree ssa_name, bool imag_p)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree ret;

  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    {
      tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
      if (SCALAR_FLOAT_TYPE_P (inner_type))
        return build_real (inner_type, dconst0);
      else
        return build_int_cst (inner_type, 0);
    }

  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  ret = complex_ssa_name_components[ssa_name_index];
  if (ret == NULL)
    {
      if (SSA_NAME_VAR (ssa_name))
        ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
      else
        ret = TREE_TYPE (TREE_TYPE (ssa_name));
      ret = make_ssa_name (ret, NULL);

      /* Copy some properties from the original.  In particular, whether it
         is used in an abnormal phi, and whether it's uninitialized.  */
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
      if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
          && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL)
        {
          SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
          set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);
        }

      complex_ssa_name_components[ssa_name_index] = ret;
    }

  return ret;
}
/* Set a value for a complex component of SSA_NAME, return a
   gimple_seq of stuff that needs doing.  */

static gimple_seq
set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree comp;
  gimple last;
  gimple_seq list;

  /* We know the value must be zero, else there's a bug in our lattice
     analysis.  But the value may well be a variable known to contain
     zero.  We should be safe ignoring it.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    return NULL;

  /* If we've already assigned an SSA_NAME to this component, then this
     means that our walk of the basic blocks found a use before the set.
     This is fine.  Now we should create an initialization for the value
     we created earlier.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  comp = complex_ssa_name_components[ssa_name_index];
  if (comp)
    ;

  /* If we've nothing assigned, and the value we're given is already stable,
     then install that as the value for this SSA_NAME.  This preemptively
     copy-propagates the value, which avoids unnecessary memory allocation.  */
  else if (is_gimple_min_invariant (value)
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }
  else if (TREE_CODE (value) == SSA_NAME
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      /* Replace an anonymous base value with the variable from cvc_lookup.
         This should result in better debug info.  */
      if (SSA_NAME_VAR (ssa_name)
          && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value)))
          && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
        {
          comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
          replace_ssa_name_symbol (value, comp);
        }

      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }

  /* Finally, we need to stabilize the result by installing the value into
     a new ssa name.  */
  else
    comp = get_component_ssa_name (ssa_name, imag_p);

  /* Do all the work to assign VALUE to COMP.  */
  list = NULL;
  value = force_gimple_operand (value, &list, false, NULL);
  last = gimple_build_assign (comp, value);
  gimple_seq_add_stmt (&list, last);
  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);

  return list;
}
/* Extract the real or imaginary part of a complex variable or constant.
   Make sure that it's a proper gimple_val and gimplify it if not.
   Emit any new code before gsi.  */

static tree
extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
                   bool gimple_p)
{
  switch (TREE_CODE (t))
    {
    case COMPLEX_CST:
      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);

    case COMPLEX_EXPR:
      gcc_unreachable ();

    case VAR_DECL:
    case RESULT_DECL:
    case PARM_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case VIEW_CONVERT_EXPR:
    case MEM_REF:
      {
        tree inner_type = TREE_TYPE (TREE_TYPE (t));

        t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
                    inner_type, unshare_expr (t));

        if (gimple_p)
          t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
                                        GSI_SAME_STMT);

        return t;
      }

    case SSA_NAME:
      return get_component_ssa_name (t, imagpart_p);

    default:
      gcc_unreachable ();
    }
}
/* Update the complex components of the ssa name on the lhs of STMT.  */

static void
update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
                           tree i)
{
  tree lhs;
  gimple_seq list;

  lhs = gimple_get_lhs (stmt);

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
}
static void
update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
{
  gimple_seq list;

  list = set_component_ssa_name (lhs, false, r);
  if (list)
    gsi_insert_seq_on_edge (e, list);

  list = set_component_ssa_name (lhs, true, i);
  if (list)
    gsi_insert_seq_on_edge (e, list);
}
/* Update an assignment to a complex variable in place.  */

static void
update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
{
  gimple stmt;

  gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
  stmt = gsi_stmt (*gsi);
  update_stmt (stmt);
  if (maybe_clean_eh_stmt (stmt))
    gimple_purge_dead_eh_edges (gimple_bb (stmt));

  if (gimple_in_ssa_p (cfun))
    update_complex_components (gsi, gsi_stmt (*gsi), r, i);
}
/* Generate code at the entry point of the function to initialize the
   component variables for a complex parameter.  */

static void
update_parameter_components (void)
{
  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
  tree parm;

  for (parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree ssa_name, r, i;

      if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
        continue;

      type = TREE_TYPE (type);
      ssa_name = ssa_default_def (cfun, parm);
      if (!ssa_name)
        continue;

      r = build1 (REALPART_EXPR, type, ssa_name);
      i = build1 (IMAGPART_EXPR, type, ssa_name);
      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
    }
}
/* Generate code to set the component variables of a complex variable
   to match the PHI statements in block BB.  */

static void
update_phi_components (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);

      if (is_complex_reg (gimple_phi_result (phi)))
        {
          tree lr, li;
          gimple pr = NULL, pi = NULL;
          unsigned int i, n;

          lr = get_component_ssa_name (gimple_phi_result (phi), false);
          if (TREE_CODE (lr) == SSA_NAME)
            pr = create_phi_node (lr, bb);

          li = get_component_ssa_name (gimple_phi_result (phi), true);
          if (TREE_CODE (li) == SSA_NAME)
            pi = create_phi_node (li, bb);

          for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
            {
              tree comp, arg = gimple_phi_arg_def (phi, i);
              if (pr)
                {
                  comp = extract_component (NULL, arg, false, false);
                  SET_PHI_ARG_DEF (pr, i, comp);
                }
              if (pi)
                {
                  comp = extract_component (NULL, arg, true, false);
                  SET_PHI_ARG_DEF (pi, i, comp);
                }
            }
        }
    }
}
/* Expand a complex move to scalars.  */

static void
expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
  tree inner_type = TREE_TYPE (type);
  tree r, i, lhs, rhs;
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
        rhs = gimple_assign_rhs1 (stmt);
      else
        rhs = NULL_TREE;
    }
  else if (is_gimple_call (stmt))
    {
      lhs = gimple_call_lhs (stmt);
      rhs = NULL_TREE;
    }
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          /* The value is not assigned on the exception edges, so we need not
             concern ourselves there.  We do need to update on the fallthru
             edge.  Find it.  */
          e = find_fallthru_edge (gsi_bb (*gsi)->succs);

          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components_on_edge (e, lhs, r, i);
        }
      else if (is_gimple_call (stmt)
               || gimple_has_side_effects (stmt)
               || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
        {
          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components (gsi, stmt, r, i);
        }
      else
        {
          if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
            {
              r = extract_component (gsi, rhs, 0, true);
              i = extract_component (gsi, rhs, 1, true);
            }
          else
            {
              r = gimple_assign_rhs1 (stmt);
              i = gimple_assign_rhs2 (stmt);
            }
          update_complex_assignment (gsi, r, i);
        }
    }
  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
    {
      tree x;
      gimple t;

      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      t = gimple_build_assign (x, r);
      gsi_insert_before (gsi, t, GSI_SAME_STMT);

      if (stmt == gsi_stmt (*gsi))
        {
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
          gimple_assign_set_lhs (stmt, x);
          gimple_assign_set_rhs1 (stmt, i);
        }
      else
        {
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
          t = gimple_build_assign (x, i);
          gsi_insert_before (gsi, t, GSI_SAME_STMT);

          stmt = gsi_stmt (*gsi);
          gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
          gimple_return_set_retval (stmt, lhs);
        }

      update_stmt (stmt);
    }
}
/* Expand complex addition to scalars:
	a + b = (ar + br) + i(ai + bi)
	a - b = (ar - br) + i(ai - bi)
*/
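/* For instance (hypothetical SSA names), once both operands are fully
   VARYING an assignment
	c_5 = a_2 + b_3;
   is lowered to roughly
	c$r_6 = a$r + b$r;
	c$i_7 = a$i + b$i;
	c_5 = COMPLEX_EXPR <c$r_6, c$i_7>;
   while a degenerate lattice value for either operand lets one of the two
   scalar operations below be skipped.  */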
static void
expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ar;
      if (code == MINUS_EXPR)
        ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
      else
        ri = bi;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (code == MINUS_EXPR)
        rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
      else
        rr = br;
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (ONLY_REAL, VARYING):
      if (code == MINUS_EXPR)
        goto general;
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = bi;
      break;

    case PAIR (ONLY_IMAG, VARYING):
      if (code == MINUS_EXPR)
        goto general;
      rr = br;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, VARYING):
    general:
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}
/* Expand a complex multiplication or division to a libcall to the c99
   compliant routines.  */

static void
expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
                        tree br, tree bi, enum tree_code code)
{
  enum machine_mode mode;
  enum built_in_function bcode;
  tree fn, type, lhs;
  gimple old_stmt, stmt;

  old_stmt = gsi_stmt (*gsi);
  lhs = gimple_assign_lhs (old_stmt);
  type = TREE_TYPE (lhs);

  mode = TYPE_MODE (type);
  gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

  if (code == MULT_EXPR)
    bcode = ((enum built_in_function)
             (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else if (code == RDIV_EXPR)
    bcode = ((enum built_in_function)
             (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else
    gcc_unreachable ();
  fn = builtin_decl_explicit (bcode);

  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
  gimple_call_set_lhs (stmt, lhs);
  gsi_replace (gsi, stmt, false);

  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    gimple_purge_dead_eh_edges (gsi_bb (*gsi));

  if (gimple_in_ssa_p (cfun))
    {
      type = TREE_TYPE (type);
      update_complex_components (gsi, stmt,
                                 build1 (REALPART_EXPR, type, lhs),
                                 build1 (IMAGPART_EXPR, type, lhs));
      SSA_NAME_DEF_STMT (lhs) = stmt;
    }
}
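/* Note: BUILT_IN_COMPLEX_MUL_MIN/BUILT_IN_COMPLEX_DIV_MIN plus the mode
   offset typically select the libgcc helpers (__mulsc3, __muldc3, __divsc3,
   __divdc3 and friends, one per floating-point mode), which implement the
   C99 Annex G handling of infinities and NaNs; the exact set of routines is
   a property of libgcc rather than of this pass.  */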
/* Expand complex multiplication to scalars:
	a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
*/

static void
expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
                               tree ar, tree ai, tree br, tree bi,
                               complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  if (al < bl)
    {
      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
    }

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      if (TREE_CODE (ai) == REAL_CST
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
        ri = br;
      else
        ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
      break;

    case PAIR (VARYING, VARYING):
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
        {
          expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
          return;
        }
      else
        {
          tree t1, t2, t3, t4;

          t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
          t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
          t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);

          /* Avoid expanding redundant multiplication for the common
             case of squaring a complex number.  */
          if (ar == br && ai == bi)
            t4 = t3;
          else
            t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);

          rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
          ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
        }
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}
/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, straightforward algorithm.
	a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	    t = br*br + bi*bi
*/
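/* Note: this simple form is only reached when flag_complex_method selects
   it (case 0 in expand_complex_division below), which presumably corresponds
   to the -fcx-limited-range family of options, where possible overflow in
   the t = br*br + bi*bi denominator is considered acceptable.  */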
static void
expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
                             tree ar, tree ai, tree br, tree bi,
                             enum tree_code code)
{
  tree rr, ri, div, t1, t2, t3;

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
  rr = gimplify_build2 (gsi, code, inner_type, t3, div);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
  ri = gimplify_build2 (gsi, code, inner_type, t3, div);

  update_complex_assignment (gsi, rr, ri);
}
/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  */
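/* Note: this appears to be the classic Smith (1962) scaling algorithm:
   branch on |br| < |bi| and divide through by whichever component has the
   larger magnitude, so the RATIO formed below never exceeds 1 in magnitude
   and the intermediate products stay in range.  */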
static void
expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code)
{
  tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
  basic_block bb_cond, bb_true, bb_false, bb_join;
  gimple stmt;

  /* Examine |br| < |bi|, and branch.  */
  t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
  t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
  compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)),
                             LT_EXPR, boolean_type_node, t1, t2);
  STRIP_NOPS (compare);

  bb_cond = bb_true = bb_false = bb_join = NULL;
  rr = ri = tr = ti = NULL;
  if (TREE_CODE (compare) != INTEGER_CST)
    {
      edge e;
      gimple stmt;
      tree cond, tmp;

      tmp = create_tmp_var (boolean_type_node, NULL);
      stmt = gimple_build_assign (tmp, compare);
      if (gimple_in_ssa_p (cfun))
        {
          tmp = make_ssa_name (tmp, stmt);
          gimple_assign_set_lhs (stmt, tmp);
        }

      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      cond = fold_build2_loc (gimple_location (stmt),
                              EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
      stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      /* Split the original block, and create the TRUE and FALSE blocks.  */
      e = split_block (gsi_bb (*gsi), stmt);
      bb_cond = e->src;
      bb_join = e->dest;
      bb_true = create_empty_bb (bb_cond);
      bb_false = create_empty_bb (bb_true);

      /* Wire the blocks together.  */
      e->flags = EDGE_TRUE_VALUE;
      redirect_edge_succ (e, bb_true);
      make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
      make_edge (bb_true, bb_join, EDGE_FALLTHRU);
      make_edge (bb_false, bb_join, EDGE_FALLTHRU);
      if (current_loops)
        {
          add_bb_to_loop (bb_true, bb_cond->loop_father);
          add_bb_to_loop (bb_false, bb_cond->loop_father);
        }

      /* Update dominance info.  Note that bb_join's data was
         updated by split_block.  */
      if (dom_info_available_p (CDI_DOMINATORS))
        {
          set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
          set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
        }

      rr = create_tmp_reg (inner_type, NULL);
      ri = create_tmp_reg (inner_type, NULL);
    }

  /* In the TRUE branch, we compute
      ratio = br/bi;
      div = (br * ratio) + bi;
      tr = (ar * ratio) + ai;
      ti = (ai * ratio) - ar;
      tr = tr / div;
      ti = ti / div;  */
  if (bb_true || integer_nonzerop (compare))
    {
      if (bb_true)
        {
          *gsi = gsi_last_bb (bb_true);
          gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
        }

      ratio = gimplify_build2 (gsi, code, inner_type, br, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_true)
        {
          stmt = gimple_build_assign (rr, tr);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          stmt = gimple_build_assign (ri, ti);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          gsi_remove (gsi, true);
        }
    }

  /* In the FALSE branch, we compute
      ratio = d/c;
      divisor = (d * ratio) + c;
      tr = (b * ratio) + a;
      ti = b - (a * ratio);
      tr = tr / div;
      ti = ti / div;  */
  if (bb_false || integer_zerop (compare))
    {
      if (bb_false)
        {
          *gsi = gsi_last_bb (bb_false);
          gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
        }

      ratio = gimplify_build2 (gsi, code, inner_type, bi, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

      if (bb_false)
        {
          stmt = gimple_build_assign (rr, tr);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          stmt = gimple_build_assign (ri, ti);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          gsi_remove (gsi, true);
        }
    }

  if (bb_join)
    *gsi = gsi_start_bb (bb_join);
  else
    rr = tr, ri = ti;

  update_complex_assignment (gsi, rr, ri);
}
/* Expand complex division to scalars.  */

static void
expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ai;
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
      break;

    case PAIR (ONLY_REAL, VARYING):
    case PAIR (ONLY_IMAG, VARYING):
    case PAIR (VARYING, VARYING):
      switch (flag_complex_method)
        {
        case 0:
          /* straightforward implementation of complex divide acceptable.  */
          expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
          break;

        case 2:
          if (SCALAR_FLOAT_TYPE_P (inner_type))
            {
              expand_complex_libcall (gsi, ar, ai, br, bi, code);
              break;
            }
          /* FALLTHRU */

        case 1:
          /* wide ranges of inputs must work for complex divide.  */
          expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
          break;

        default:
          gcc_unreachable ();
        }
      return;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}
/* Expand complex negation to scalars:
	-a = (-ar) + i(-ai)
*/

static void
expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai)
{
  tree rr, ri;

  rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, rr, ri);
}
/* Expand complex conjugate to scalars:
	~a = (ar) + i(-ai)
*/

static void
expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
                          tree ar, tree ai)
{
  tree ri;

  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, ar, ri);
}
/* Expand complex comparison (EQ or NE only).  */

static void
expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
                           tree br, tree bi, enum tree_code code)
{
  tree cr, ci, cc, type;
  gimple stmt;

  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
  cc = gimplify_build2 (gsi,
                        (code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
                        boolean_type_node, cr, ci);

  stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_RETURN:
      type = TREE_TYPE (gimple_return_retval (stmt));
      gimple_return_set_retval (stmt, fold_convert (type, cc));
      break;

    case GIMPLE_ASSIGN:
      type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
      stmt = gsi_stmt (*gsi);
      break;

    case GIMPLE_COND:
      gimple_cond_set_code (stmt, EQ_EXPR);
      gimple_cond_set_lhs (stmt, cc);
      gimple_cond_set_rhs (stmt, boolean_true_node);
      break;

    default:
      gcc_unreachable ();
    }

  update_stmt (stmt);
}
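/* Note: the lowering above turns a == b into (ar == br) && (ai == bi) and
   a != b into (ar != br) || (ai != bi), folding the boolean result back
   into the original return value, assignment or condition.  */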
/* Expand inline asm that sets some complex SSA_NAMEs.  */

static void
expand_complex_asm (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  unsigned int i;

  for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
    {
      tree link = gimple_asm_output_op (stmt, i);
      tree op = TREE_VALUE (link);
      if (TREE_CODE (op) == SSA_NAME
          && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE)
        {
          tree type = TREE_TYPE (op);
          tree inner_type = TREE_TYPE (type);
          tree r = build1 (REALPART_EXPR, inner_type, op);
          tree i = build1 (IMAGPART_EXPR, inner_type, op);
          gimple_seq list = set_component_ssa_name (op, false, r);

          if (list)
            gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

          list = set_component_ssa_name (op, true, i);
          if (list)
            gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
        }
    }
}
/* Process one statement.  If we identify a complex operation, expand it.  */

static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree type, inner_type, lhs;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      expand_complex_asm (gsi);
      return;
    }

  lhs = gimple_get_lhs (stmt);
  if (!lhs && gimple_code (stmt) != GIMPLE_COND)
    return;

  type = TREE_TYPE (gimple_op (stmt, 0));
  code = gimple_expr_code (stmt);

  /* Initial filter for operations we handle.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
    case NEGATE_EXPR:
    case CONJ_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
        return;
      inner_type = TREE_TYPE (type);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
         subcode, so we need to access the operands using gimple_op.  */
      inner_type = TREE_TYPE (gimple_op (stmt, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)
        return;
      break;

    default:
      {
        tree rhs;

        /* GIMPLE_COND may also fallthru here, but we do not need to
           do anything with it.  */
        if (gimple_code (stmt) == GIMPLE_COND)
          return;

        if (TREE_CODE (type) == COMPLEX_TYPE)
          expand_complex_move (gsi, type);
        else if (is_gimple_assign (stmt)
                 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
                     || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
                 && TREE_CODE (lhs) == SSA_NAME)
          {
            rhs = gimple_assign_rhs1 (stmt);
            rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
                                     gimple_assign_rhs_code (stmt)
                                       == IMAGPART_EXPR,
                                     false);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            stmt = gsi_stmt (*gsi);
            update_stmt (stmt);
          }
      }
      return;
    }

  /* Extract the components of the two complex values.  Make sure and
     handle the common case of the same value used twice specially.  */
  if (is_gimple_assign (stmt))
    {
      ac = gimple_assign_rhs1 (stmt);
      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
    }
  /* GIMPLE_CALL can not get here.  */
  else
    {
      ac = gimple_cond_lhs (stmt);
      bc = gimple_cond_rhs (stmt);
    }

  ar = extract_component (gsi, ac, false, true);
  ai = extract_component (gsi, ac, true, true);

  if (ac == bc)
    br = ar, bi = ai;
  else if (bc)
    {
      br = extract_component (gsi, bc, 0, true);
      bi = extract_component (gsi, bc, 1, true);
    }
  else
    br = bi = NULL_TREE;

  if (gimple_in_ssa_p (cfun))
    {
      al = find_lattice_value (ac);
      if (al == UNINITIALIZED)
        al = VARYING;

      if (TREE_CODE_CLASS (code) == tcc_unary)
        bl = UNINITIALIZED;
      else if (ac == bc)
        bl = al;
      else
        {
          bl = find_lattice_value (bc);
          if (bl == UNINITIALIZED)
            bl = VARYING;
        }
    }
  else
    al = bl = VARYING;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case MULT_EXPR:
      expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
      expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case NEGATE_EXPR:
      expand_complex_negation (gsi, inner_type, ar, ai);
      break;

    case CONJ_EXPR:
      expand_complex_conjugate (gsi, inner_type, ar, ai);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      expand_complex_comparison (gsi, ar, ai, br, bi, code);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Entry point for complex operation lowering during optimization.  */

static unsigned int
tree_lower_complex (void)
{
  int old_last_basic_block;
  gimple_stmt_iterator gsi;
  basic_block bb;

  if (!init_dont_simulate_again ())
    return 0;

  complex_lattice_values.create (num_ssa_names);
  complex_lattice_values.safe_grow_cleared (num_ssa_names);

  init_parameter_lattice_values ();
  ssa_propagate (complex_visit_stmt, complex_visit_phi);

  complex_variable_components.create (10);

  complex_ssa_name_components.create (2 * num_ssa_names);
  complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);

  update_parameter_components ();

  /* ??? Ideally we'd traverse the blocks in breadth-first order.  */
  old_last_basic_block = last_basic_block;
  FOR_EACH_BB (bb)
    {
      if (bb->index >= old_last_basic_block)
        continue;

      update_phi_components (bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        expand_complex_operations_1 (&gsi);
    }

  gsi_commit_edge_inserts ();

  complex_variable_components.dispose ();
  complex_ssa_name_components.release ();
  complex_lattice_values.release ();
  return 0;
}
namespace {

const pass_data pass_data_lower_complex =
{
  GIMPLE_PASS, /* type */
  "cplxlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_stmts ), /* todo_flags_finish */
};

class pass_lower_complex : public gimple_opt_pass
{
public:
  pass_lower_complex (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_lower_complex (ctxt_); }
  unsigned int execute () { return tree_lower_complex (); }

}; // class pass_lower_complex

} // anon namespace

gimple_opt_pass *
make_pass_lower_complex (gcc::context *ctxt)
{
  return new pass_lower_complex (ctxt);
}
static bool
gate_no_optimization (void)
{
  /* With errors, normal optimization passes are not run.  If we don't
     lower complex operations at all, rtl expansion will abort.  */
  return !(cfun->curr_properties & PROP_gimple_lcx);
}
namespace {

const pass_data pass_data_lower_complex_O0 =
{
  GIMPLE_PASS, /* type */
  "cplxlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_update_ssa | TODO_verify_stmts ), /* todo_flags_finish */
};

class pass_lower_complex_O0 : public gimple_opt_pass
{
public:
  pass_lower_complex_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_no_optimization (); }
  unsigned int execute () { return tree_lower_complex (); }

}; // class pass_lower_complex_O0

} // anon namespace

gimple_opt_pass *
make_pass_lower_complex_O0 (gcc::context *ctxt)
{
  return new pass_lower_complex_O0 (ctxt);
}