/* Lower complex number operations to scalar operations.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-propagate.h"
#include "tree-hasher.h"
/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only imaginary parts.  */

/* The type complex_lattice_t holds combinations of the above
   constants.  */
typedef int complex_lattice_t;

#define PAIR(a, b)  ((a) << 2 | (b))
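/* PAIR packs the lattice values of two operands into one small integer,
   so the expanders below can switch directly on operand combinations
   such as PAIR (ONLY_REAL, ONLY_IMAG).  */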
class complex_propagate : public ssa_propagation_engine
{
  enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) FINAL OVERRIDE;
  enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
};
static vec<complex_lattice_t> complex_lattice_values;

/* For each complex variable, a pair of variables for the components exists in
   the hashtable.  */
static int_tree_htab_type *complex_variable_components;

/* For each complex SSA_NAME, a pair of ssa names for the components.  */
static vec<tree> complex_ssa_name_components;

/* Vector of PHI triplets (original complex PHI and corresponding real and
   imag PHIs if real and/or imag PHIs contain temporarily
   non-SSA_NAME/non-invariant args that need to be replaced by SSA_NAMEs.  */
static vec<gphi *> phis_to_revisit;

/* BBs that need EH cleanup.  */
static bitmap need_eh_cleanup;
/* Lookup UID in the complex_variable_components hashtable and return the
   associated tree.  */

cvc_lookup (unsigned int uid)
  struct int_tree_map in;
  return complex_variable_components->find_with_hash (in, uid).to;

/* Insert the pair UID, TO into the complex_variable_components hashtable.  */

cvc_insert (unsigned int uid, tree to)
  loc = complex_variable_components->find_slot_with_hash (h, uid, INSERT);
/* Return true if T is not a zero constant.  In the case of real values,
   we're only interested in +0.0.  */

some_nonzerop (tree t)
  /* Operations with real or imaginary part of a complex number zero
     cannot be treated the same as operations with a real or imaginary
     operand if we care about the signs of zeros in the result.  */
  if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros)
    zerop = real_identical (&TREE_REAL_CST (t), &dconst0);
  else if (TREE_CODE (t) == FIXED_CST)
    zerop = fixed_zerop (t);
  else if (TREE_CODE (t) == INTEGER_CST)
    zerop = integer_zerop (t);
/* Compute a lattice value from the components of a complex type REAL
   and IMAG.  */

static complex_lattice_t
find_lattice_value_parts (tree real, tree imag)
  complex_lattice_t ret;

  r = some_nonzerop (real);
  i = some_nonzerop (imag);
  ret = r * ONLY_REAL + i * ONLY_IMAG;
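  /* For example, the constant 3 + 0i gives r = 1, i = 0 and therefore maps
     to ONLY_REAL, while 0 + 2i maps to ONLY_IMAG.  */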
  /* ??? On occasion we could do better than mapping 0+0i to real, but we
     certainly don't want to leave it UNINITIALIZED, which eventually gets
     mapped to VARYING.  */
  if (ret == UNINITIALIZED)
/* Compute a lattice value from gimple_val T.  */

static complex_lattice_t
find_lattice_value (tree t)
  switch (TREE_CODE (t))
      return complex_lattice_values[SSA_NAME_VERSION (t)];

      real = TREE_REALPART (t);
      imag = TREE_IMAGPART (t);

  return find_lattice_value_parts (real, imag);

/* Determine if LHS is something for which we're interested in seeing
   simulation results.  */

is_complex_reg (tree lhs)
  return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
/* Mark the incoming parameters to the function as VARYING.  */

init_parameter_lattice_values (void)
  for (parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
    if (is_complex_reg (parm)
        && (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE)
      complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING;
/* Initialize simulation state for each statement.  Return false if we
   found no statements we want to simulate, and thus there's nothing
   for the entire pass to do.  */

init_dont_simulate_again (void)
  bool saw_a_complex_op = false;

  FOR_EACH_BB_FN (bb, cfun)
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
          gphi *phi = gsi.phi ();
          prop_set_simulate_again (phi,
                                   is_complex_reg (gimple_phi_result (phi)));

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
          stmt = gsi_stmt (gsi);
          op0 = op1 = NULL_TREE;

          /* Most control-altering statements must be initially
             simulated, else we won't cover the entire cfg.  */
          sim_again_p = stmt_ends_bb_p (stmt);

          switch (gimple_code (stmt))
              if (gimple_call_lhs (stmt))
                sim_again_p = is_complex_reg (gimple_call_lhs (stmt));

              sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
              if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
                  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
                op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);

              op0 = gimple_assign_rhs1 (stmt);
              if (gimple_num_ops (stmt) > 2)
                op1 = gimple_assign_rhs2 (stmt);

              op0 = gimple_cond_lhs (stmt);
              op1 = gimple_cond_rhs (stmt);

          switch (gimple_expr_code (stmt))
              if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
                saw_a_complex_op = true;

              if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
                saw_a_complex_op = true;
              /* The total store transformation performed during
                 gimplification creates such uninitialized loads
                 and we need to lower the statement to be able
                 to fix things up.  */
              if (TREE_CODE (op0) == SSA_NAME
                  && ssa_undefined_value_p (op0))
                saw_a_complex_op = true;
          prop_set_simulate_again (stmt, sim_again_p);

  return saw_a_complex_op;
/* Evaluate statement STMT against the complex lattice defined above.  */

complex_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
  complex_lattice_t new_l, old_l, op1_l, op2_l;

  lhs = gimple_get_lhs (stmt);
  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
  if (!lhs || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return SSA_PROP_VARYING;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  switch (gimple_expr_code (stmt))
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));

      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
                                        gimple_assign_rhs2 (stmt));

      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* We've set up the lattice values such that IOR neatly
         models meet.  */
      new_l = op1_l | op2_l;

      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)

      /* Don't prematurely promote variables if we've not yet seen
         their counterpart.  */
      else if (op1_l == UNINITIALIZED)

      else if (op2_l == UNINITIALIZED)

      /* At this point both numbers have only one component.  If the
         numbers are of opposite kind, the result is imaginary,
         otherwise the result is real.  The add/subtract translates
         the real/imag from/to 0/1; the ^ performs the comparison.  */
      new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;
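      /* For instance, ONLY_REAL and ONLY_IMAG operands give
         (0 ^ 1) + ONLY_REAL == ONLY_IMAG, while two ONLY_IMAG operands give
         (1 ^ 1) + ONLY_REAL == ONLY_REAL.  This relies on ONLY_IMAG
         immediately following ONLY_REAL in the lattice encoding.  */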
      /* Don't allow the lattice value to flip-flop indefinitely.  */

      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));

  /* If nothing changed this round, let the propagator know.  */
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
/* Evaluate a PHI node against the complex lattice defined above.  */

complex_propagate::visit_phi (gphi *phi)
  complex_lattice_t new_l, old_l;

  lhs = gimple_phi_result (phi);

  /* This condition should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return SSA_PROP_VARYING;

  /* We've set up the lattice values such that IOR neatly models PHI meet.  */
  new_l = UNINITIALIZED;
  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));

  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
/* Create one backing variable for a complex component of ORIG.  */

create_one_component_var (tree type, tree orig, const char *prefix,
                          const char *suffix, enum tree_code code)
  tree r = create_tmp_var (type, prefix);

  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
  DECL_ARTIFICIAL (r) = 1;

  if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
      const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));
      name = ACONCAT ((name, suffix, NULL));
      DECL_NAME (r) = get_identifier (name);

      SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
      DECL_HAS_DEBUG_EXPR_P (r) = 1;
      DECL_IGNORED_P (r) = 0;
      TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);

      DECL_IGNORED_P (r) = 1;
      TREE_NO_WARNING (r) = 1;
/* Retrieve a value for a complex component of VAR.  */

get_component_var (tree var, bool imag_p)
  size_t decl_index = DECL_UID (var) * 2 + imag_p;
  tree ret = cvc_lookup (decl_index);

      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
                                      imag_p ? "CI" : "CR",
                                      imag_p ? "$imag" : "$real",
                                      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
      cvc_insert (decl_index, ret);
/* Retrieve a value for a complex component of SSA_NAME.  */

get_component_ssa_name (tree ssa_name, bool imag_p)
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
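  /* If the lattice says the requested component is identically zero, there
     is no SSA_NAME to look up; hand back a zero constant of the component
     type instead.  */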
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
      tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
      if (SCALAR_FLOAT_TYPE_P (inner_type))
        return build_real (inner_type, dconst0);
      return build_int_cst (inner_type, 0);

  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  ret = complex_ssa_name_components[ssa_name_index];

      if (SSA_NAME_VAR (ssa_name))
        ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);

        ret = TREE_TYPE (TREE_TYPE (ssa_name));
      ret = make_ssa_name (ret);

      /* Copy some properties from the original.  In particular, whether it
         is used in an abnormal phi, and whether it's uninitialized.  */
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
      if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
          && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL)
          SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
          set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);

      complex_ssa_name_components[ssa_name_index] = ret;
/* Set a value for a complex component of SSA_NAME, return a
   gimple_seq of stuff that needs doing.  */

set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;

  /* We know the value must be zero, else there's a bug in our lattice
     analysis.  But the value may well be a variable known to contain
     zero.  We should be safe ignoring it.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))

  /* If we've already assigned an SSA_NAME to this component, then this
     means that our walk of the basic blocks found a use before the set.
     This is fine.  Now we should create an initialization for the value
     we created earlier.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  comp = complex_ssa_name_components[ssa_name_index];

  /* If we've nothing assigned, and the value we're given is already stable,
     then install that as the value for this SSA_NAME.  This preemptively
     copy-propagates the value, which avoids unnecessary memory allocation.  */
  else if (is_gimple_min_invariant (value)
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
      complex_ssa_name_components[ssa_name_index] = value;

  else if (TREE_CODE (value) == SSA_NAME
           && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
      /* Replace an anonymous base value with the variable from cvc_lookup.
         This should result in better debug info.  */
      if (!SSA_NAME_IS_DEFAULT_DEF (value)
          && SSA_NAME_VAR (ssa_name)
          && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value)))
          && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
          comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
          replace_ssa_name_symbol (value, comp);

      complex_ssa_name_components[ssa_name_index] = value;
  /* Finally, we need to stabilize the result by installing the value into
     a new ssa name.  */
    comp = get_component_ssa_name (ssa_name, imag_p);

  /* Do all the work to assign VALUE to COMP.  */
  value = force_gimple_operand (value, &list, false, NULL);
  last = gimple_build_assign (comp, value);
  gimple_seq_add_stmt (&list, last);
  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);
/* Extract the real or imaginary part of a complex variable or constant.
   Make sure that it's a proper gimple_val and gimplify it if not.
   Emit any new code before gsi.  */

extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
                   bool gimple_p, bool phiarg_p = false)
  switch (TREE_CODE (t))
      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);

        tree inner_type = TREE_TYPE (TREE_TYPE (t));
        t = unshare_expr (t);
        TREE_TYPE (t) = inner_type;
        TREE_OPERAND (t, 1) = TYPE_SIZE (inner_type);

          TREE_OPERAND (t, 2) = size_binop (PLUS_EXPR, TREE_OPERAND (t, 2),
                                            TYPE_SIZE (inner_type));

        t = force_gimple_operand_gsi (gsi, t, true, NULL, true,

    case VIEW_CONVERT_EXPR:
        tree inner_type = TREE_TYPE (TREE_TYPE (t));

        t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
                    inner_type, unshare_expr (t));

        t = force_gimple_operand_gsi (gsi, t, true, NULL, true,

      t = get_component_ssa_name (t, imagpart_p);
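      /* A component SSA_NAME with no defining statement can only be seen
         here for a PHI argument; update_phi_components will create the
         definition when the PHI is revisited.  */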
      if (TREE_CODE (t) == SSA_NAME && SSA_NAME_DEF_STMT (t) == NULL)
        gcc_assert (phiarg_p);
/* Update the complex components of the ssa name on the lhs of STMT.  */

update_complex_components (gimple_stmt_iterator *gsi, gimple *stmt, tree r,
  lhs = gimple_get_lhs (stmt);

  list = set_component_ssa_name (lhs, false, r);
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

  list = set_component_ssa_name (lhs, true, i);
    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
  list = set_component_ssa_name (lhs, false, r);
    gsi_insert_seq_on_edge (e, list);

  list = set_component_ssa_name (lhs, true, i);
    gsi_insert_seq_on_edge (e, list);

/* Update an assignment to a complex variable in place.  */

update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
  gimple *old_stmt = gsi_stmt (*gsi);
  gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
  gimple *stmt = gsi_stmt (*gsi);

  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);

  update_complex_components (gsi, gsi_stmt (*gsi), r, i);
/* Generate code at the entry point of the function to initialize the
   component variables for a complex parameter.  */

update_parameter_components (void)
  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  for (parm = DECL_ARGUMENTS (cfun->decl); parm; parm = DECL_CHAIN (parm))
      tree type = TREE_TYPE (parm);

      if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))

      type = TREE_TYPE (type);
      ssa_name = ssa_default_def (cfun, parm);

      r = build1 (REALPART_EXPR, type, ssa_name);
      i = build1 (IMAGPART_EXPR, type, ssa_name);
      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
/* Generate code to set the component variables of a complex variable
   to match the PHI statements in block BB.  */

update_phi_components (basic_block bb)
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      gphi *phi = gsi.phi ();

      if (is_complex_reg (gimple_phi_result (phi)))
          gphi *p[2] = { NULL, NULL };
          unsigned int i, j, n;
          bool revisit_phi = false;

          for (j = 0; j < 2; j++)
              tree l = get_component_ssa_name (gimple_phi_result (phi), j > 0);
              if (TREE_CODE (l) == SSA_NAME)
                p[j] = create_phi_node (l, bb);

          for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
              tree comp, arg = gimple_phi_arg_def (phi, i);
              for (j = 0; j < 2; j++)
                  comp = extract_component (NULL, arg, j > 0, false, true);
                  if (TREE_CODE (comp) == SSA_NAME
                      && SSA_NAME_DEF_STMT (comp) == NULL)
                      /* For the benefit of any gimple simplification during
                         this pass that might walk SSA_NAME def stmts,
                         don't add SSA_NAMEs without definitions into the
                         PHI arguments, but put a decl in there instead
                         temporarily, and revisit this PHI later on.  */
                      if (SSA_NAME_VAR (comp))
                        comp = SSA_NAME_VAR (comp);

                        comp = create_tmp_reg (TREE_TYPE (comp),

                  SET_PHI_ARG_DEF (p[j], i, comp);

              phis_to_revisit.safe_push (phi);
              phis_to_revisit.safe_push (p[0]);
              phis_to_revisit.safe_push (p[1]);
/* Expand a complex move to scalars.  */

expand_complex_move (gimple_stmt_iterator *gsi, tree type)
  tree inner_type = TREE_TYPE (type);

  gimple *stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
        rhs = gimple_assign_rhs1 (stmt);

  else if (is_gimple_call (stmt))
      lhs = gimple_call_lhs (stmt);

  if (TREE_CODE (lhs) == SSA_NAME)
      if (is_ctrl_altering_stmt (stmt))
          /* The value is not assigned on the exception edges, so we need not
             concern ourselves there.  We do need to update on the fallthru
             edge.  */
          e = find_fallthru_edge (gsi_bb (*gsi)->succs);

          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components_on_edge (e, lhs, r, i);

      else if (is_gimple_call (stmt)
               || gimple_has_side_effects (stmt)
               || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components (gsi, stmt, r, i);

          if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
              r = extract_component (gsi, rhs, 0, true);
              i = extract_component (gsi, rhs, 1, true);

              r = gimple_assign_rhs1 (stmt);
              i = gimple_assign_rhs2 (stmt);

          update_complex_assignment (gsi, r, i);

  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
      loc = gimple_location (stmt);
      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      t = gimple_build_assign (x, r);
      gimple_set_location (t, loc);
      gsi_insert_before (gsi, t, GSI_SAME_STMT);

      if (stmt == gsi_stmt (*gsi))
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
          gimple_assign_set_lhs (stmt, x);
          gimple_assign_set_rhs1 (stmt, i);

          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
          t = gimple_build_assign (x, i);
          gimple_set_location (t, loc);
          gsi_insert_before (gsi, t, GSI_SAME_STMT);

          stmt = gsi_stmt (*gsi);
          gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
          gimple_return_set_retval (as_a <greturn *> (stmt), lhs);
/* Expand complex addition to scalars:
        a + b = (ar + br) + i(ai + bi)
        a - b = (ar - br) + i(ai - bi)
*/

expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         complex_lattice_t al, complex_lattice_t bl)
  switch (PAIR (al, bl))
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);

    case PAIR (ONLY_REAL, ONLY_IMAG):
      if (code == MINUS_EXPR)
        ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (code == MINUS_EXPR)
        rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);

    case PAIR (VARYING, ONLY_IMAG):
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);

    case PAIR (ONLY_REAL, VARYING):
      if (code == MINUS_EXPR)

      rr = gimplify_build2 (gsi, code, inner_type, ar, br);

    case PAIR (ONLY_IMAG, VARYING):
      if (code == MINUS_EXPR)

      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);

    case PAIR (VARYING, VARYING):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);

  update_complex_assignment (gsi, rr, ri);
/* Expand a complex multiplication or division to a libcall to the c99
   compliant routines.  TYPE is the complex type of the operation.
   If INPLACE_P replace the statement at GSI with
   the libcall and return NULL_TREE.  Else insert the call, assign its
   result to an output variable and return that variable.  If INPLACE_P
   is true then the statement being replaced should be an assignment
   statement.  */

expand_complex_libcall (gimple_stmt_iterator *gsi, tree type, tree ar, tree ai,
                        tree br, tree bi, enum tree_code code, bool inplace_p)
  enum built_in_function bcode;

  mode = TYPE_MODE (type);
  gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

  if (code == MULT_EXPR)
    bcode = ((enum built_in_function)
             (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else if (code == RDIV_EXPR)
    bcode = ((enum built_in_function)
             (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
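  /* The complex multiply and divide built-ins (__mulsc3, __muldc3, ...,
     from libgcc) are laid out contiguously, one per complex float mode,
     so the right one can be computed directly from MODE.  */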
  fn = builtin_decl_explicit (bcode);
  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);

      gimple *old_stmt = gsi_stmt (*gsi);
      gimple_call_set_nothrow (stmt, !stmt_could_throw_p (cfun, old_stmt));
      lhs = gimple_assign_lhs (old_stmt);
      gimple_call_set_lhs (stmt, lhs);
      gsi_replace (gsi, stmt, true);

      type = TREE_TYPE (type);
      if (stmt_can_throw_internal (cfun, stmt))
          FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
            if (!(e->flags & EDGE_EH))
                basic_block bb = split_edge (e);
                gimple_stmt_iterator gsi2 = gsi_start_bb (bb);
                update_complex_components (&gsi2, stmt,
                                           build1 (REALPART_EXPR, type, lhs),
                                           build1 (IMAGPART_EXPR, type, lhs));

        update_complex_components (gsi, stmt,
                                   build1 (REALPART_EXPR, type, lhs),
                                   build1 (IMAGPART_EXPR, type, lhs));
      SSA_NAME_DEF_STMT (lhs) = stmt;

  gimple_call_set_nothrow (stmt, true);
  lhs = make_ssa_name (type);
  gimple_call_set_lhs (stmt, lhs);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
/* Perform a complex multiplication on two complex constants A, B represented
   by AR, AI, BR, BI of type TYPE.
   The operation we want is: a * b = (ar*br - ai*bi) + i(ar*bi + br*ai).
   Insert the GIMPLE statements into GSI.  Store the real and imaginary
   components of the result into RR and RI.  */

expand_complex_multiplication_components (gimple_stmt_iterator *gsi,
                                          tree type, tree ar, tree ai,
  tree t1, t2, t3, t4;

  t1 = gimplify_build2 (gsi, MULT_EXPR, type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, type, ai, bi);
  t3 = gimplify_build2 (gsi, MULT_EXPR, type, ar, bi);

  /* Avoid expanding redundant multiplication for the common
     case of squaring a complex number.  */
  if (ar == br && ai == bi)

    t4 = gimplify_build2 (gsi, MULT_EXPR, type, ai, br);

  *rr = gimplify_build2 (gsi, MINUS_EXPR, type, t1, t2);
  *ri = gimplify_build2 (gsi, PLUS_EXPR, type, t3, t4);
/* Expand complex multiplication to scalars:
        a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
*/

expand_complex_multiplication (gimple_stmt_iterator *gsi, tree type,
                               tree ar, tree ai, tree br, tree bi,
                               complex_lattice_t al, complex_lattice_t bl)
  tree inner_type = TREE_TYPE (type);

      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
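  /* The swap above canonicalizes the operands so that the more "varying"
     lattice value comes first; multiplication is commutative, so this
     halves the number of PAIR cases the switch below must handle.  */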
  switch (PAIR (al, bl))
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (TREE_CODE (ai) == REAL_CST
          && real_identical (&TREE_REAL_CST (ai), &dconst1))

        ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);

    case PAIR (VARYING, VARYING):
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
          /* If optimizing for size or not at all just do a libcall.
             Same if there are exception-handling edges or signaling NaNs.  */
          if (optimize == 0 || optimize_bb_for_size_p (gsi_bb (*gsi))
              || stmt_can_throw_internal (cfun, gsi_stmt (*gsi))
              || flag_signaling_nans)
              expand_complex_libcall (gsi, type, ar, ai, br, bi,

          if (!HONOR_NANS (inner_type))
              /* If we are not worrying about NaNs expand to
                 (ar*br - ai*bi) + i(ar*bi + br*ai) directly.  */
              expand_complex_multiplication_components (gsi, inner_type,

              /* Else, expand x = a * b into
                 x = (ar*br - ai*bi) + i(ar*bi + br*ai);
                 if (isunordered (__real__ x, __imag__ x))
                   x = __muldc3 (a, b);  */

              expand_complex_multiplication_components (gsi, inner_type, ar, ai,
                                                        br, bi, &tmpr, &tmpi);

                = gimple_build_cond (UNORDERED_EXPR, tmpr, tmpi,
                                     NULL_TREE, NULL_TREE);

              basic_block orig_bb = gsi_bb (*gsi);
              /* We want to keep track of the original complex multiplication
                 statement as we're going to modify it later in
                 update_complex_assignment.  Make sure that insert_cond_bb leaves
                 that statement in the join block.  */
                = insert_cond_bb (gsi_bb (*gsi), gsi_stmt (*gsi), check,
                                  profile_probability::very_unlikely ());

              gimple_stmt_iterator cond_bb_gsi = gsi_last_bb (cond_bb);
              gsi_insert_after (&cond_bb_gsi, gimple_build_nop (), GSI_NEW_STMT);

                = expand_complex_libcall (&cond_bb_gsi, type, ar, ai, br,
                                          bi, MULT_EXPR, false);
              tree cond_real = gimplify_build1 (&cond_bb_gsi, REALPART_EXPR,
                                                inner_type, libcall_res);
              tree cond_imag = gimplify_build1 (&cond_bb_gsi, IMAGPART_EXPR,
                                                inner_type, libcall_res);

              basic_block join_bb = single_succ_edge (cond_bb)->dest;
              *gsi = gsi_start_nondebug_after_labels_bb (join_bb);

              /* We have a conditional block with some assignments in cond_bb.
                 Wire up the PHIs to wrap up.  */
              rr = make_ssa_name (inner_type);
              ri = make_ssa_name (inner_type);
              edge cond_to_join = single_succ_edge (cond_bb);
              edge orig_to_join = find_edge (orig_bb, join_bb);

              gphi *real_phi = create_phi_node (rr, gsi_bb (*gsi));
              add_phi_arg (real_phi, cond_real, cond_to_join, UNKNOWN_LOCATION);
              add_phi_arg (real_phi, tmpr, orig_to_join, UNKNOWN_LOCATION);

              gphi *imag_phi = create_phi_node (ri, gsi_bb (*gsi));
              add_phi_arg (imag_phi, cond_imag, cond_to_join, UNKNOWN_LOCATION);
              add_phi_arg (imag_phi, tmpi, orig_to_join, UNKNOWN_LOCATION);

          /* If we are not worrying about NaNs expand to
             (ar*br - ai*bi) + i(ar*bi + br*ai) directly.  */
          expand_complex_multiplication_components (gsi, inner_type, ar, ai,

  update_complex_assignment (gsi, rr, ri);
/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, straightforward algorithm.
        a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
            t = br*br + bi*bi  */

expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
                             tree ar, tree ai, tree br, tree bi,
                             enum tree_code code)
  tree rr, ri, div, t1, t2, t3;

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
  rr = gimplify_build2 (gsi, code, inner_type, t3, div);

  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
  ri = gimplify_build2 (gsi, code, inner_type, t3, div);

  update_complex_assignment (gsi, rr, ri);
/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  */

expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code)
  tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
  basic_block bb_cond, bb_true, bb_false, bb_join;

  /* Examine |br| < |bi|, and branch.  */
  t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
  t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
  compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)),
                             LT_EXPR, boolean_type_node, t1, t2);
  STRIP_NOPS (compare);

  bb_cond = bb_true = bb_false = bb_join = NULL;
  rr = ri = tr = ti = NULL;
  if (TREE_CODE (compare) != INTEGER_CST)
      tmp = make_ssa_name (boolean_type_node);
      stmt = gimple_build_assign (tmp, compare);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      cond = fold_build2_loc (gimple_location (stmt),
                              EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
      stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      /* Split the original block, and create the TRUE and FALSE blocks.  */
      e = split_block (gsi_bb (*gsi), stmt);

      bb_true = create_empty_bb (bb_cond);
      bb_false = create_empty_bb (bb_true);
      bb_true->count = bb_false->count
        = bb_cond->count.apply_probability (profile_probability::even ());

      /* Wire the blocks together.  */
      e->flags = EDGE_TRUE_VALUE;
      /* TODO: With value profile we could add an histogram to determine real
         branch outcome.  */
      e->probability = profile_probability::even ();
      redirect_edge_succ (e, bb_true);
      edge e2 = make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
      e2->probability = profile_probability::even ();
      make_single_succ_edge (bb_true, bb_join, EDGE_FALLTHRU);
      make_single_succ_edge (bb_false, bb_join, EDGE_FALLTHRU);
      add_bb_to_loop (bb_true, bb_cond->loop_father);
      add_bb_to_loop (bb_false, bb_cond->loop_father);

      /* Update dominance info.  Note that bb_join's data was
         updated by split_block.  */
      if (dom_info_available_p (CDI_DOMINATORS))
          set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
          set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);

      rr = create_tmp_reg (inner_type);
      ri = create_tmp_reg (inner_type);
  /* In the TRUE branch, we compute
      ratio = br/bi;
      div = (br * ratio) + bi;
      tr = (ar * ratio) + ai;
      ti = (ai * ratio) - ar;
      rr = tr / div;
      ri = ti / div;  */
  if (bb_true || integer_nonzerop (compare))
          *gsi = gsi_last_bb (bb_true);
          gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);

      ratio = gimplify_build2 (gsi, code, inner_type, br, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

          stmt = gimple_build_assign (rr, tr);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          stmt = gimple_build_assign (ri, ti);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          gsi_remove (gsi, true);
  /* In the FALSE branch, we compute
      ratio = d/c;
      divisor = (d * ratio) + c;
      tr = (b * ratio) + a;
      ti = b - (a * ratio);
      rr = tr / div;
      ri = ti / div;  */
  if (bb_false || integer_zerop (compare))
          *gsi = gsi_last_bb (bb_false);
          gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);

      ratio = gimplify_build2 (gsi, code, inner_type, bi, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

          stmt = gimple_build_assign (rr, tr);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          stmt = gimple_build_assign (ri, ti);
          gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
          gsi_remove (gsi, true);

    *gsi = gsi_start_bb (bb_join);

  update_complex_assignment (gsi, rr, ri);
/* Expand complex division to scalars.  */

expand_complex_division (gimple_stmt_iterator *gsi, tree type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
  tree inner_type = TREE_TYPE (type);

  switch (PAIR (al, bl))
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);

    case PAIR (ONLY_REAL, ONLY_IMAG):
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
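      /* Dividing a pure real by a pure imaginary: ar / (bi*i) equals
         -(ar / bi) * i, hence the division followed by a negation.  */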
    case PAIR (ONLY_IMAG, ONLY_REAL):
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, br);

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);

    case PAIR (ONLY_REAL, VARYING):
    case PAIR (ONLY_IMAG, VARYING):
    case PAIR (VARYING, VARYING):
      switch (flag_complex_method)
          /* straightforward implementation of complex divide acceptable.  */
          expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);

          if (SCALAR_FLOAT_TYPE_P (inner_type))
              expand_complex_libcall (gsi, type, ar, ai, br, bi, code, true);

          /* wide ranges of inputs must work for complex divide.  */
          expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);

  update_complex_assignment (gsi, rr, ri);
/* Expand complex negation to scalars:
        -a = (-ar) + i(-ai)
*/

expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
  rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, rr, ri);
/* Expand complex conjugate to scalars:
        ~a = (ar) + i(-ai)
*/

expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);

  update_complex_assignment (gsi, ar, ri);
/* Expand complex comparison (EQ or NE only).  */

expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
                           tree br, tree bi, enum tree_code code)
  tree cr, ci, cc, type;
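  /* Lower the comparison componentwise: a == b becomes
     (ar == br) && (ai == bi), and a != b becomes (ar != br) || (ai != bi).  */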
  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
  cc = gimplify_build2 (gsi,
                        (code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
                        boolean_type_node, cr, ci);

  stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
        greturn *return_stmt = as_a <greturn *> (stmt);
        type = TREE_TYPE (gimple_return_retval (return_stmt));
        gimple_return_set_retval (return_stmt, fold_convert (type, cc));

      type = TREE_TYPE (gimple_assign_lhs (stmt));
      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
      stmt = gsi_stmt (*gsi);

        gcond *cond_stmt = as_a <gcond *> (stmt);
        gimple_cond_set_code (cond_stmt, EQ_EXPR);
        gimple_cond_set_lhs (cond_stmt, cc);
        gimple_cond_set_rhs (cond_stmt, boolean_true_node);

  if (maybe_clean_eh_stmt (stmt))
    bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
/* Expand inline asm that sets some complex SSA_NAMEs.  */

expand_complex_asm (gimple_stmt_iterator *gsi)
  gasm *stmt = as_a <gasm *> (gsi_stmt (*gsi));

  for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
      tree link = gimple_asm_output_op (stmt, i);
      tree op = TREE_VALUE (link);
      if (TREE_CODE (op) == SSA_NAME
          && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE)
          tree type = TREE_TYPE (op);
          tree inner_type = TREE_TYPE (type);
          tree r = build1 (REALPART_EXPR, inner_type, op);
          tree i = build1 (IMAGPART_EXPR, inner_type, op);
          gimple_seq list = set_component_ssa_name (op, false, r);

            gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);

          list = set_component_ssa_name (op, true, i);

            gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
/* Process one statement.  If we identify a complex operation, expand it.  */

expand_complex_operations_1 (gimple_stmt_iterator *gsi)
  gimple *stmt = gsi_stmt (*gsi);
  tree type, inner_type, lhs;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  if (gimple_code (stmt) == GIMPLE_ASM)
      expand_complex_asm (gsi);

  lhs = gimple_get_lhs (stmt);
  if (!lhs && gimple_code (stmt) != GIMPLE_COND)

  type = TREE_TYPE (gimple_op (stmt, 0));
  code = gimple_expr_code (stmt);

  /* Initial filter for operations we handle.  */
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
      inner_type = TREE_TYPE (type);

      /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
         subcode, so we need to access the operands using gimple_op.  */
      inner_type = TREE_TYPE (gimple_op (stmt, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)

      /* GIMPLE_COND may also fallthru here, but we do not need to
         do anything with it.  */
      if (gimple_code (stmt) == GIMPLE_COND)

      if (TREE_CODE (type) == COMPLEX_TYPE)
        expand_complex_move (gsi, type);
      else if (is_gimple_assign (stmt)
               && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
                   || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
               && TREE_CODE (lhs) == SSA_NAME)
          rhs = gimple_assign_rhs1 (stmt);
          rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
                                   gimple_assign_rhs_code (stmt)

          gimple_assign_set_rhs_from_tree (gsi, rhs);
          stmt = gsi_stmt (*gsi);

  /* Extract the components of the two complex values.  Make sure and
     handle the common case of the same value used twice specially.  */
  if (is_gimple_assign (stmt))
      ac = gimple_assign_rhs1 (stmt);
      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;

  /* GIMPLE_CALL cannot get here.  */
      ac = gimple_cond_lhs (stmt);
      bc = gimple_cond_rhs (stmt);

  ar = extract_component (gsi, ac, false, true);
  ai = extract_component (gsi, ac, true, true);

      br = extract_component (gsi, bc, 0, true);
      bi = extract_component (gsi, bc, 1, true);

    br = bi = NULL_TREE;

  al = find_lattice_value (ac);
  if (al == UNINITIALIZED)

  if (TREE_CODE_CLASS (code) == tcc_unary)

      bl = find_lattice_value (bc);
      if (bl == UNINITIALIZED)

      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);

      expand_complex_multiplication (gsi, type, ar, ai, br, bi, al, bl);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      expand_complex_division (gsi, type, ar, ai, br, bi, code, al, bl);

      expand_complex_negation (gsi, inner_type, ar, ai);

      expand_complex_conjugate (gsi, inner_type, ar, ai);

      expand_complex_comparison (gsi, ar, ai, br, bi, code);
/* Entry point for complex operation lowering during optimization.  */

tree_lower_complex (void)
  gimple_stmt_iterator gsi;

  if (!init_dont_simulate_again ())

  complex_lattice_values.create (num_ssa_names);
  complex_lattice_values.safe_grow_cleared (num_ssa_names, true);

  init_parameter_lattice_values ();
  class complex_propagate complex_propagate;
  complex_propagate.ssa_propagate ();

  need_eh_cleanup = BITMAP_ALLOC (NULL);

  complex_variable_components = new int_tree_htab_type (10);

  complex_ssa_name_components.create (2 * num_ssa_names);
  complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names, true);

  update_parameter_components ();

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
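  /* Process the blocks in reverse post-order so that, in most cases, the
     scalar components of a complex value are created at its definition
     before its uses are expanded; uses seen first (e.g. around loops) are
     handled by set_component_ssa_name and the phis_to_revisit machinery.  */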
  for (i = 0; i < n_bbs; i++)
      bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);

      update_phi_components (bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        expand_complex_operations_1 (&gsi);

  if (!phis_to_revisit.is_empty ())
      unsigned int n = phis_to_revisit.length ();
      for (unsigned int j = 0; j < n; j += 3)
        for (unsigned int k = 0; k < 2; k++)
          if (gphi *phi = phis_to_revisit[j + k + 1])
              unsigned int m = gimple_phi_num_args (phi);
              for (unsigned int l = 0; l < m; ++l)
                  tree op = gimple_phi_arg_def (phi, l);
                  if (TREE_CODE (op) == SSA_NAME
                      || is_gimple_min_invariant (op))

                  tree arg = gimple_phi_arg_def (phis_to_revisit[j], l);
                  op = extract_component (NULL, arg, k > 0, false, false);
                  SET_PHI_ARG_DEF (phi, l, op);

      phis_to_revisit.release ();

  gsi_commit_edge_inserts ();

    = gimple_purge_all_dead_eh_edges (need_eh_cleanup) ? TODO_cleanup_cfg : 0;
  BITMAP_FREE (need_eh_cleanup);

  delete complex_variable_components;
  complex_variable_components = NULL;
  complex_ssa_name_components.release ();
  complex_lattice_values.release ();
const pass_data pass_data_lower_complex =
  GIMPLE_PASS, /* type */
  "cplxlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */

class pass_lower_complex : public gimple_opt_pass
  pass_lower_complex (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex, ctxt)

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_lower_complex (m_ctxt); }
  virtual unsigned int execute (function *) { return tree_lower_complex (); }

}; // class pass_lower_complex
make_pass_lower_complex (gcc::context *ctxt)
  return new pass_lower_complex (ctxt);

const pass_data pass_data_lower_complex_O0 =
  GIMPLE_PASS, /* type */
  "cplxlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */

class pass_lower_complex_O0 : public gimple_opt_pass
  pass_lower_complex_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex_O0, ctxt)

  /* opt_pass methods: */
  virtual bool gate (function *fun)
      /* With errors, normal optimization passes are not run.  If we don't
         lower complex operations at all, rtl expansion will abort.  */
      return !(fun->curr_properties & PROP_gimple_lcx);

  virtual unsigned int execute (function *) { return tree_lower_complex (); }

}; // class pass_lower_complex_O0

make_pass_lower_complex_O0 (gcc::context *ctxt)
  return new pass_lower_complex_O0 (ctxt);