/* Lower GIMPLE_SWITCH expressions to something more efficient than
   a series of if-else comparisons.

   Copyright (C) 2006-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This file handles the lowering of GIMPLE_SWITCH to an indexed
   load, or a series of bit-test-and-branch expressions.  */
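
/* As an illustrative sketch (not taken verbatim from any testcase), the
   switch-conversion part of this file turns a switch whose cases only
   select constant values, e.g.

     switch (argc)
       {
       case 1: result = 10; break;
       case 2: result = 20; break;
       case 3: result = 30; break;
       default: result = 0; break;
       }

   into a bounds check plus a load from a generated static array (named
   CSWTCH.* below), roughly

     static const int CSWTCH[] = { 10, 20, 30 };
     unsigned idx = (unsigned) argc - 1;
     result = (idx <= 2) ? CSWTCH[idx] : 0;

   while the cluster and decision-tree code further down lowers the
   remaining switches to jump tables, bit tests, or a balanced tree of
   comparisons.  */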
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "insn-codes.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "optabs-tree.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "fold-const.h"
#include "varasm.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-fold.h"
#include "tree-cfg.h"
#include "cfgloop.h"
#include "alloc-pool.h"
#include "target.h"
#include "tree-into-ssa.h"
#include "omp-general.h"

/* ??? For lang_hooks.types.type_for_mode, but is there a word_mode
   type in the GIMPLE type system that is language-independent?  */
#include "langhooks.h"

#include "tree-switch-conversion.h"

using namespace tree_switch_conversion;
/* Constructor.  */

switch_conversion::switch_conversion (): m_final_bb (NULL), m_other_count (),
  m_constructors (NULL), m_default_values (NULL),
  m_arr_ref_first (NULL), m_arr_ref_last (NULL),
  m_reason (NULL), m_default_case_nonstandard (false), m_cfg_altered (false)
{
}
/* Collect information about the SWTCH statement.  */

void
switch_conversion::collect (gswitch *swtch)
{
  unsigned int branch_num = gimple_switch_num_labels (swtch);
  tree min_case, max_case;
  unsigned int i;
  edge e, e_default, e_first;
  edge_iterator ei;

  m_switch = swtch;

  /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
     is a default label which is the first in the vector.
     Collect the bits we can deduce from the CFG.  */
  m_index_expr = gimple_switch_index (swtch);
  m_switch_bb = gimple_bb (swtch);
  e_default = gimple_switch_default_edge (cfun, swtch);
  m_default_bb = e_default->dest;
  m_default_prob = e_default->probability;
  m_default_count = e_default->count ();
  FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
    if (e != e_default)
      m_other_count += e->count ();

  /* Get upper and lower bounds of case values, and the covered range.  */
  min_case = gimple_switch_label (swtch, 1);
  max_case = gimple_switch_label (swtch, branch_num - 1);

  m_range_min = CASE_LOW (min_case);
  if (CASE_HIGH (max_case) != NULL_TREE)
    m_range_max = CASE_HIGH (max_case);
  else
    m_range_max = CASE_LOW (max_case);

  m_contiguous_range = true;
  tree last = CASE_HIGH (min_case) ? CASE_HIGH (min_case) : m_range_min;
  for (i = 2; i < branch_num; i++)
    {
      tree elt = gimple_switch_label (swtch, i);
      if (wi::to_wide (last) + 1 != wi::to_wide (CASE_LOW (elt)))
	{
	  m_contiguous_range = false;
	  break;
	}
      last = CASE_HIGH (elt) ? CASE_HIGH (elt) : CASE_LOW (elt);
    }

  if (m_contiguous_range)
    e_first = gimple_switch_edge (cfun, swtch, 1);
  else
    e_first = e_default;

  /* See if there is one common successor block for all branch
     targets.  If it exists, record it in FINAL_BB.
     Start with the destination of the first non-default case
     if the range is contiguous and default case otherwise as
     guess or its destination in case it is a forwarder block.  */
  if (! single_pred_p (e_first->dest))
    m_final_bb = e_first->dest;
  else if (single_succ_p (e_first->dest)
	   && ! single_pred_p (single_succ (e_first->dest)))
    m_final_bb = single_succ (e_first->dest);
  /* Require that all switch destinations are either that common
     FINAL_BB or a forwarder to it, except for the default
     case if contiguous range.  */
  if (m_final_bb)
    FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
      {
	if (e->dest == m_final_bb)
	  continue;

	if (single_pred_p (e->dest)
	    && single_succ_p (e->dest)
	    && single_succ (e->dest) == m_final_bb)
	  continue;

	if (e == e_default && m_contiguous_range)
	  {
	    m_default_case_nonstandard = true;
	    continue;
	  }

	m_final_bb = NULL;
	break;
      }

  m_range_size
    = int_const_binop (MINUS_EXPR, m_range_max, m_range_min);

  /* Get a count of the number of case labels.  Single-valued case labels
     simply count as one, but a case range counts double, since it may
     require two compares if it gets lowered as a branching tree.  */
  m_count = 0;
  for (i = 1; i < branch_num; i++)
    {
      tree elt = gimple_switch_label (swtch, i);
      m_count++;
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_equal (CASE_LOW (elt), CASE_HIGH (elt)))
	m_count++;
    }

  /* Get the number of unique non-default targets out of the GIMPLE_SWITCH
     block.  Assume a CFG cleanup would have already removed degenerate
     switch statements, this allows us to just use EDGE_COUNT.  */
  m_uniq = EDGE_COUNT (gimple_bb (swtch)->succs) - 1;
}
/* Checks whether the range covered by the case statements of the switch
   statement isn't too big and whether the number of branches actually
   satisfies the size of the new array.  */

bool
switch_conversion::check_range ()
{
  gcc_assert (m_range_size);
  if (!tree_fits_uhwi_p (m_range_size))
    {
      m_reason = "index range way too large or otherwise unusable";
      return false;
    }

  if (tree_to_uhwi (m_range_size)
      > ((unsigned) m_count * SWITCH_CONVERSION_BRANCH_RATIO))
    {
      m_reason = "the maximum range-branch ratio exceeded";
      return false;
    }

  return true;
}
/* Checks whether all but the final BB basic blocks are empty.  */

bool
switch_conversion::check_all_empty_except_final ()
{
  edge e, e_default = find_edge (m_switch_bb, m_default_bb);
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
    {
      if (e->dest == m_final_bb)
	continue;

      if (!empty_block_p (e->dest))
	{
	  if (m_contiguous_range && e == e_default)
	    {
	      m_default_case_nonstandard = true;
	      continue;
	    }

	  m_reason = "bad case - a non-final BB not empty";
	  return false;
	}
    }

  return true;
}
/* This function checks whether all required values in phi nodes in final_bb
   are constants.  Required values are those that correspond to a basic block
   which is a part of the examined switch statement.  It returns true if the
   phi nodes are OK, otherwise false.  */

bool
switch_conversion::check_final_bb ()
{
  gphi_iterator gsi;

  m_phi_count = 0;
  for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      unsigned int i;

      if (virtual_operand_p (gimple_phi_result (phi)))
	continue;

      m_phi_count++;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
	{
	  basic_block bb = gimple_phi_arg_edge (phi, i)->src;

	  if (bb == m_switch_bb
	      || (single_pred_p (bb)
		  && single_pred (bb) == m_switch_bb
		  && (!m_default_case_nonstandard
		      || empty_block_p (bb))))
	    {
	      tree reloc, val;
	      const char *reason = NULL;

	      val = gimple_phi_arg_def (phi, i);
	      if (!is_gimple_ip_invariant (val))
		reason = "non-invariant value from a case";
	      else
		{
		  reloc = initializer_constant_valid_p (val, TREE_TYPE (val));
		  if ((flag_pic && reloc != null_pointer_node)
		      || (!flag_pic && reloc == NULL_TREE))
		    {
		      if (reloc)
			reason
			  = "value from a case would need runtime relocations";
		      else
			reason
			  = "value from a case is not a valid initializer";
		    }
		}
	      if (reason)
		{
		  /* For contiguous range, we can allow non-constant
		     or one that needs relocation, as long as it is
		     only reachable from the default case.  */
		  if (bb == m_switch_bb)
		    bb = m_final_bb;
		  if (!m_contiguous_range || bb != m_default_bb)
		    {
		      m_reason = reason;
		      return false;
		    }

		  unsigned int branch_num = gimple_switch_num_labels (m_switch);
		  for (unsigned int i = 1; i < branch_num; i++)
		    {
		      if (gimple_switch_label_bb (cfun, m_switch, i) == bb)
			{
			  m_reason = reason;
			  return false;
			}
		    }

		  m_default_case_nonstandard = true;
		}
	    }
	}
    }

  return true;
}
/* The following function allocates default_values, target_{in,out}_names and
   constructors arrays.  The last one is also populated with pointers to
   vectors that will become constructors of new arrays.  */

void
switch_conversion::create_temp_arrays ()
{
  int i;

  m_default_values = XCNEWVEC (tree, m_phi_count * 3);
  /* ??? Macros do not support multi argument templates in their
     argument list.  We create a typedef to work around that problem.  */
  typedef vec<constructor_elt, va_gc> *vec_constructor_elt_gc;
  m_constructors = XCNEWVEC (vec_constructor_elt_gc, m_phi_count);
  m_target_inbound_names = m_default_values + m_phi_count;
  m_target_outbound_names = m_target_inbound_names + m_phi_count;
  for (i = 0; i < m_phi_count; i++)
    vec_alloc (m_constructors[i], tree_to_uhwi (m_range_size) + 1);
}
/* Populate the array of default values in the order of phi nodes.
   DEFAULT_CASE is the CASE_LABEL_EXPR for the default switch branch
   if the range is non-contiguous or the default case has standard
   structure, otherwise it is the first non-default case instead.  */

void
switch_conversion::gather_default_values (tree default_case)
{
  gphi_iterator gsi;
  basic_block bb = label_to_block (cfun, CASE_LABEL (default_case));
  edge e;
  int i = 0;

  gcc_assert (CASE_LOW (default_case) == NULL_TREE
	      || m_default_case_nonstandard);

  if (bb == m_final_bb)
    e = find_edge (m_switch_bb, bb);
  else
    e = single_succ_edge (bb);

  for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (virtual_operand_p (gimple_phi_result (phi)))
	continue;
      tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
      gcc_assert (val);
      m_default_values[i++] = val;
    }
}
/* The following function populates the vectors in the constructors array with
   future contents of the static arrays.  The vectors are populated in the
   order of phi nodes.  */

void
switch_conversion::build_constructors ()
{
  unsigned i, branch_num = gimple_switch_num_labels (m_switch);
  tree pos = m_range_min;
  tree pos_one = build_int_cst (TREE_TYPE (pos), 1);

  for (i = 1; i < branch_num; i++)
    {
      tree cs = gimple_switch_label (m_switch, i);
      basic_block bb = label_to_block (cfun, CASE_LABEL (cs));
      edge e;
      tree high;
      gphi_iterator gsi;
      int j;

      if (bb == m_final_bb)
	e = find_edge (m_switch_bb, bb);
      else
	e = single_succ_edge (bb);
      gcc_assert (e);

      while (tree_int_cst_lt (pos, CASE_LOW (cs)))
	{
	  int k;
	  for (k = 0; k < m_phi_count; k++)
	    {
	      constructor_elt elt;

	      elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
	      elt.value
		= unshare_expr_without_location (m_default_values[k]);
	      m_constructors[k]->quick_push (elt);
	    }

	  pos = int_const_binop (PLUS_EXPR, pos, pos_one);
	}
      gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));

      j = 0;
      if (CASE_HIGH (cs))
	high = CASE_HIGH (cs);
      else
	high = CASE_LOW (cs);
      for (gsi = gsi_start_phis (m_final_bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;
	  tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  tree low = CASE_LOW (cs);
	  tree pos = CASE_LOW (cs);

	  do
	    {
	      constructor_elt elt;

	      elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
	      elt.value = unshare_expr_without_location (val);
	      m_constructors[j]->quick_push (elt);

	      pos = int_const_binop (PLUS_EXPR, pos, pos_one);
	    } while (!tree_int_cst_lt (high, pos)
		     && tree_int_cst_lt (low, pos));
	  j++;
	}
    }
}
/* If all values in the constructor vector are products of a linear function
   a * x + b, then return true.  When true, COEFF_A and COEFF_B are the
   coefficients of the linear function.  Note that equal values are a special
   case of a linear function with a and b equal to zero.  */

bool
switch_conversion::contains_linear_function_p (vec<constructor_elt, va_gc> *vec,
					       wide_int *coeff_a,
					       wide_int *coeff_b)
{
  unsigned int i;
  constructor_elt *elt;

  gcc_assert (vec->length () >= 2);

  /* Let's try to find any linear function a * x + y that can apply to
     given values.  'a' can be calculated as follows:

     a = (y2 - y1) / (x2 - x1) where x2 - x1 = 1 (consecutive case indices)
     a = y2 - y1

     and

     b = y2 - a * x2  */

  tree elt0 = (*vec)[0].value;
  tree elt1 = (*vec)[1].value;

  if (TREE_CODE (elt0) != INTEGER_CST || TREE_CODE (elt1) != INTEGER_CST)
    return false;

  wide_int range_min
    = wide_int::from (wi::to_wide (m_range_min),
		      TYPE_PRECISION (TREE_TYPE (elt0)),
		      TYPE_SIGN (TREE_TYPE (m_range_min)));
  wide_int y1 = wi::to_wide (elt0);
  wide_int y2 = wi::to_wide (elt1);
  wide_int a = y2 - y1;
  wide_int b = y2 - a * (range_min + 1);

  /* Verify that all values fulfill the linear function.  */
  FOR_EACH_VEC_SAFE_ELT (vec, i, elt)
    {
      if (TREE_CODE (elt->value) != INTEGER_CST)
	return false;

      wide_int value = wi::to_wide (elt->value);
      if (a * range_min + b != value)
	return false;

      range_min += 1;
    }

  *coeff_a = a;
  *coeff_b = b;

  return true;
}
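
/* A worked example of the check above (hypothetical numbers, not taken from
   the sources): for constructor values { 10, 12, 14, 16 } starting at
   m_range_min == 3 we get y1 = 10, y2 = 12, hence a = 2 and
   b = y2 - a * (range_min + 1) = 12 - 2 * 4 = 4.  Every element then
   satisfies a * x + b (2 * 3 + 4 == 10, 2 * 4 + 4 == 12, ...), so the array
   load can later be replaced by the cheaper computation 2 * x + 4.  */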
/* Return type which should be used for array elements, either TYPE's
   main variant or, for integral types, some smaller integral type
   that can still hold all the constants.  */

tree
switch_conversion::array_value_type (tree type, int num)
{
  unsigned int i, len = vec_safe_length (m_constructors[num]);
  constructor_elt *elt;
  int sign = 0;
  tree smaller_type;

  /* Types with alignments greater than their size can reach here, e.g. out of
     SRA.  We couldn't use these as an array component type so get back to the
     main variant first, which, for our purposes, is fine for other types as
     well.  */

  type = TYPE_MAIN_VARIANT (type);

  if (!INTEGRAL_TYPE_P (type))
    return type;

  scalar_int_mode type_mode = SCALAR_INT_TYPE_MODE (type);
  scalar_int_mode mode = get_narrowest_mode (type_mode);
  if (GET_MODE_SIZE (type_mode) <= GET_MODE_SIZE (mode))
    return type;

  if (len < (optimize_bb_for_size_p (gimple_bb (m_switch)) ? 2 : 32))
    return type;

  FOR_EACH_VEC_SAFE_ELT (m_constructors[num], i, elt)
    {
      wide_int cst;

      if (TREE_CODE (elt->value) != INTEGER_CST)
	return type;

      cst = wi::to_wide (elt->value);
      while (1)
	{
	  unsigned int prec = GET_MODE_BITSIZE (mode);
	  if (prec > HOST_BITS_PER_WIDE_INT)
	    return type;

	  if (sign >= 0 && cst == wi::zext (cst, prec))
	    {
	      if (sign == 0 && cst == wi::sext (cst, prec))
		break;
	      sign = 1;
	      break;
	    }
	  if (sign <= 0 && cst == wi::sext (cst, prec))
	    {
	      sign = -1;
	      break;
	    }

	  if (sign == 1)
	    sign = 0;

	  if (!GET_MODE_WIDER_MODE (mode).exists (&mode)
	      || GET_MODE_SIZE (mode) >= GET_MODE_SIZE (type_mode))
	    return type;
	}
    }

  if (sign == 0)
    sign = TYPE_UNSIGNED (type) ? 1 : -1;
  smaller_type = lang_hooks.types.type_for_mode (mode, sign >= 0);
  if (GET_MODE_SIZE (type_mode)
      <= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (smaller_type)))
    return type;

  return smaller_type;
}
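
/* For instance (hypothetical values): if every constructor entry of an
   "int"-typed array fits into 0..255 and there are enough entries for the
   shrinking to matter, the loop above settles on the narrowest unsigned
   integer mode that holds all of them, so the emitted CSWTCH array can take
   a quarter of the space it would otherwise need.  */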
/* Create an appropriate array type and declaration and assemble a static
   array variable.  Also create a load statement that initializes
   the variable in question with a value from the static array.  SWTCH is
   the switch statement being converted, NUM is the index to
   arrays of constructors, default values and target SSA names
   for this particular array.  ARR_INDEX_TYPE is the type of the index
   of the new array, PHI is the phi node of the final BB that corresponds
   to the value that will be loaded from the created array.  TIDX
   is an ssa name of a temporary variable holding the index for loads from the
   new array.  */

void
switch_conversion::build_one_array (int num, tree arr_index_type,
				    gphi *phi, tree tidx)
{
  tree name;
  gimple *load;
  gimple_stmt_iterator gsi = gsi_for_stmt (m_switch);
  location_t loc = gimple_location (m_switch);

  gcc_assert (m_default_values[num]);

  name = copy_ssa_name (PHI_RESULT (phi));
  m_target_inbound_names[num] = name;

  vec<constructor_elt, va_gc> *constructor = m_constructors[num];
  wide_int coeff_a, coeff_b;
  bool linear_p = contains_linear_function_p (constructor, &coeff_a, &coeff_b);
  tree type;
  if (linear_p
      && (type = range_check_type (TREE_TYPE ((*constructor)[0].value))))
    {
      if (dump_file && coeff_a.to_uhwi () > 0)
	fprintf (dump_file, "Linear transformation with A = %" PRId64
		 " and B = %" PRId64 "\n", coeff_a.to_shwi (),
		 coeff_b.to_shwi ());

      /* We must use type of constructor values.  */
      gimple_seq seq = NULL;
      tree tmp = gimple_convert (&seq, type, m_index_expr);
      tree tmp2 = gimple_build (&seq, MULT_EXPR, type,
				wide_int_to_tree (type, coeff_a), tmp);
      tree tmp3 = gimple_build (&seq, PLUS_EXPR, type, tmp2,
				wide_int_to_tree (type, coeff_b));
      tree tmp4 = gimple_convert (&seq, TREE_TYPE (name), tmp3);
      gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
      load = gimple_build_assign (name, tmp4);
    }
  else
    {
      tree array_type, ctor, decl, value_type, fetch, default_type;

      default_type = TREE_TYPE (m_default_values[num]);
      value_type = array_value_type (default_type, num);
      array_type = build_array_type (value_type, arr_index_type);
      if (default_type != value_type)
	{
	  unsigned int i;
	  constructor_elt *elt;

	  FOR_EACH_VEC_SAFE_ELT (constructor, i, elt)
	    elt->value = fold_convert (value_type, elt->value);
	}
      ctor = build_constructor (array_type, constructor);
      TREE_CONSTANT (ctor) = true;
      TREE_STATIC (ctor) = true;

      decl = build_decl (loc, VAR_DECL, NULL_TREE, array_type);
      TREE_STATIC (decl) = 1;
      DECL_INITIAL (decl) = ctor;

      DECL_NAME (decl) = create_tmp_var_name ("CSWTCH");
      DECL_ARTIFICIAL (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      TREE_CONSTANT (decl) = 1;
      TREE_READONLY (decl) = 1;
      DECL_IGNORED_P (decl) = 1;
      if (offloading_function_p (cfun->decl))
	DECL_ATTRIBUTES (decl)
	  = tree_cons (get_identifier ("omp declare target"), NULL_TREE,
		       NULL_TREE);
      varpool_node::finalize_decl (decl);

      fetch = build4 (ARRAY_REF, value_type, decl, tidx, NULL_TREE,
		      NULL_TREE);
      if (default_type != value_type)
	{
	  fetch = fold_convert (default_type, fetch);
	  fetch = force_gimple_operand_gsi (&gsi, fetch, true, NULL_TREE,
					    true, GSI_SAME_STMT);
	}
      load = gimple_build_assign (name, fetch);
    }

  gsi_insert_before (&gsi, load, GSI_SAME_STMT);
  update_stmt (load);
  m_arr_ref_last = load;
}
/* Builds static arrays initialized with values gathered from
   the switch statement.  Also creates statements that load values from
   them.  */

void
switch_conversion::build_arrays ()
{
  tree arr_index_type;
  tree tidx, sub, utype;
  gimple *stmt;
  gimple_stmt_iterator gsi;
  gphi_iterator gpi;
  int i;
  location_t loc = gimple_location (m_switch);

  gsi = gsi_for_stmt (m_switch);

  /* Make sure we do not generate arithmetics in a subrange.  */
  utype = TREE_TYPE (m_index_expr);
  if (TREE_TYPE (utype))
    utype = lang_hooks.types.type_for_mode (TYPE_MODE (TREE_TYPE (utype)), 1);
  else
    utype = lang_hooks.types.type_for_mode (TYPE_MODE (utype), 1);

  arr_index_type = build_index_type (m_range_size);
  tidx = make_ssa_name (utype);
  sub = fold_build2_loc (loc, MINUS_EXPR, utype,
			 fold_convert_loc (loc, utype, m_index_expr),
			 fold_convert_loc (loc, utype, m_range_min));
  sub = force_gimple_operand_gsi (&gsi, sub,
				  false, NULL, true, GSI_SAME_STMT);
  stmt = gimple_build_assign (tidx, sub);

  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

  m_arr_ref_first = stmt;

  for (gpi = gsi_start_phis (m_final_bb), i = 0;
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      if (!virtual_operand_p (gimple_phi_result (phi)))
	build_one_array (i++, arr_index_type, phi, tidx);
      else
	{
	  edge e;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
	    {
	      if (e->dest == m_final_bb)
		break;
	      if (!m_default_case_nonstandard
		  || e->dest != m_default_bb)
		{
		  e = single_succ_edge (e->dest);
		  break;
		}
	    }
	  gcc_assert (e && e->dest == m_final_bb);
	  m_target_vop = PHI_ARG_DEF_FROM_EDGE (phi, e);
	}
    }
}
/* Generates and appropriately inserts loads of default values at the position
   given by GSI.  Returns the last inserted statement.  */

gassign *
switch_conversion::gen_def_assigns (gimple_stmt_iterator *gsi)
{
  int i;
  gassign *assign = NULL;

  for (i = 0; i < m_phi_count; i++)
    {
      tree name = copy_ssa_name (m_target_inbound_names[i]);
      m_target_outbound_names[i] = name;
      assign = gimple_build_assign (name, m_default_values[i]);
      gsi_insert_before (gsi, assign, GSI_SAME_STMT);
      update_stmt (assign);
    }
  return assign;
}
/* Deletes the unused bbs and edges that now contain the switch statement and
   its empty branch bbs.  BBD is the now dead BB containing
   the original switch statement, FINAL is the last BB of the converted
   switch statement (in terms of succession).  */

void
switch_conversion::prune_bbs (basic_block bbd, basic_block final,
			      basic_block default_bb)
{
  edge_iterator ei;
  edge e;

  for (ei = ei_start (bbd->succs); (e = ei_safe_edge (ei)); )
    {
      basic_block bb;
      bb = e->dest;
      remove_edge (e);
      if (bb != final && bb != default_bb)
	delete_basic_block (bb);
    }
  delete_basic_block (bbd);
}
/* Add values to phi nodes in final_bb for the two new edges.  E1F is the edge
   from the basic block loading values from an array and E2F from the basic
   block loading default values.  BBF is the last switch basic block (see the
   bbf description in the comment below).  */

void
switch_conversion::fix_phi_nodes (edge e1f, edge e2f, basic_block bbf)
{
  gphi_iterator gsi;
  int i;

  for (gsi = gsi_start_phis (bbf), i = 0;
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree inbound, outbound;
      if (virtual_operand_p (gimple_phi_result (phi)))
	inbound = outbound = m_target_vop;
      else
	{
	  inbound = m_target_inbound_names[i];
	  outbound = m_target_outbound_names[i++];
	}
      add_phi_arg (phi, inbound, e1f, UNKNOWN_LOCATION);
      if (!m_default_case_nonstandard)
	add_phi_arg (phi, outbound, e2f, UNKNOWN_LOCATION);
    }
}
/* Creates a check whether the switch expression value actually falls into the
   range given by all the cases.  If it does not, the temporaries are loaded
   with default values instead.  */
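
/* A rough sketch of the CFG shape this function produces (the names follow
   the local variables below; the exact shape also depends on
   m_default_case_nonstandard):

     bb0:  tidx = index - range_min;
	   if (tidx <= range_size) goto bb1; else goto bb2;
     bb1:  loads from the generated CSWTCH arrays
     bb2:  assignments of the default values
     bbF:  phi nodes merging bb1 and bb2 (the original final block)
     bbD:  the now dead block that held the GIMPLE_SWITCH.  */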
void
switch_conversion::gen_inbound_check ()
{
  tree label_decl1 = create_artificial_label (UNKNOWN_LOCATION);
  tree label_decl2 = create_artificial_label (UNKNOWN_LOCATION);
  tree label_decl3 = create_artificial_label (UNKNOWN_LOCATION);
  glabel *label1, *label2, *label3;
  tree utype, tidx;
  tree bound;

  gcond *cond_stmt;

  gassign *last_assign = NULL;
  gimple_stmt_iterator gsi;
  basic_block bb0, bb1, bb2, bbf, bbd;
  edge e01 = NULL, e02, e21, e1d, e1f, e2f;
  location_t loc = gimple_location (m_switch);

  gcc_assert (m_default_values);

  bb0 = gimple_bb (m_switch);

  tidx = gimple_assign_lhs (m_arr_ref_first);
  utype = TREE_TYPE (tidx);

  /* (end of) block 0 */
  gsi = gsi_for_stmt (m_arr_ref_first);
  gsi_next (&gsi);

  bound = fold_convert_loc (loc, utype, m_range_size);
  cond_stmt = gimple_build_cond (LE_EXPR, tidx, bound, NULL_TREE, NULL_TREE);
  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
  update_stmt (cond_stmt);

  /* block 2 */
  if (!m_default_case_nonstandard)
    {
      label2 = gimple_build_label (label_decl2);
      gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
      last_assign = gen_def_assigns (&gsi);
    }

  /* block 1 */
  label1 = gimple_build_label (label_decl1);
  gsi_insert_before (&gsi, label1, GSI_SAME_STMT);

  /* block F */
  gsi = gsi_start_bb (m_final_bb);
  label3 = gimple_build_label (label_decl3);
  gsi_insert_before (&gsi, label3, GSI_SAME_STMT);

  /* cfg fix */
  e02 = split_block (bb0, cond_stmt);
  bb2 = e02->dest;

  if (m_default_case_nonstandard)
    {
      bb1 = bb2;
      bb2 = m_default_bb;
      e01 = e02;
      e01->flags = EDGE_TRUE_VALUE;
      e02 = make_edge (bb0, bb2, EDGE_FALSE_VALUE);
      edge e_default = find_edge (bb1, bb2);
      for (gphi_iterator gsi = gsi_start_phis (bb2);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  tree arg = PHI_ARG_DEF_FROM_EDGE (phi, e_default);
	  add_phi_arg (phi, arg, e02,
		       gimple_phi_arg_location_from_edge (phi, e_default));
	}
      /* Partially fix the dominator tree, if it is available.  */
      if (dom_info_available_p (CDI_DOMINATORS))
	redirect_immediate_dominators (CDI_DOMINATORS, bb1, bb0);
    }
  else
    {
      e21 = split_block (bb2, last_assign);
      bb1 = e21->dest;
      remove_edge (e21);
    }

  e1d = split_block (bb1, m_arr_ref_last);
  bbd = e1d->dest;
  remove_edge (e1d);

  /* Flags and profiles of the edge for in-range values.  */
  if (!m_default_case_nonstandard)
    e01 = make_edge (bb0, bb1, EDGE_TRUE_VALUE);
  e01->probability = m_default_prob.invert ();

  /* Flags and profiles of the edge taking care of out-of-range values.  */
  e02->flags &= ~EDGE_FALLTHRU;
  e02->flags |= EDGE_FALSE_VALUE;
  e02->probability = m_default_prob;

  bbf = m_final_bb;

  e1f = make_edge (bb1, bbf, EDGE_FALLTHRU);
  e1f->probability = profile_probability::always ();

  if (m_default_case_nonstandard)
    e2f = NULL;
  else
    {
      e2f = make_edge (bb2, bbf, EDGE_FALLTHRU);
      e2f->probability = profile_probability::always ();
    }

  /* frequencies of the new BBs */
  bb1->count = e01->count ();
  bb2->count = e02->count ();
  if (!m_default_case_nonstandard)
    bbf->count = e1f->count () + e2f->count ();

  /* Tidy blocks that have become unreachable.  */
  prune_bbs (bbd, m_final_bb,
	     m_default_case_nonstandard ? m_default_bb : NULL);

  /* Fixup the PHI nodes in bbF.  */
  fix_phi_nodes (e1f, e2f, bbf);

  /* Fix the dominator tree, if it is available.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      vec<basic_block> bbs_to_fix_dom;

      set_immediate_dominator (CDI_DOMINATORS, bb1, bb0);
      if (!m_default_case_nonstandard)
	set_immediate_dominator (CDI_DOMINATORS, bb2, bb0);
      if (! get_immediate_dominator (CDI_DOMINATORS, bbf))
	/* If bbD was the immediate dominator ...  */
	set_immediate_dominator (CDI_DOMINATORS, bbf, bb0);

      bbs_to_fix_dom.create (3 + (bb2 != bbf));
      bbs_to_fix_dom.quick_push (bb0);
      bbs_to_fix_dom.quick_push (bb1);
      if (bb2 != bbf)
	bbs_to_fix_dom.quick_push (bb2);
      bbs_to_fix_dom.quick_push (bbf);

      iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
      bbs_to_fix_dom.release ();
    }
}
/* The following function is invoked on every switch statement (the current
   one is given in SWTCH) and runs the individual phases of switch
   conversion on it one after another until one fails or the conversion
   is completed.  On success, NULL is in m_reason, otherwise it points
   to a string with the reason why the conversion failed.  */

void
switch_conversion::expand (gswitch *swtch)
{
  /* Group case labels so that we get the right results from the heuristics
     that decide on the code generation approach for this switch.  */
  m_cfg_altered |= group_case_labels_stmt (swtch);

  /* If this switch is now a degenerate case with only a default label,
     there is nothing left for us to do.  */
  if (gimple_switch_num_labels (swtch) < 2)
    {
      m_reason = "switch is a degenerate case";
      return;
    }

  collect (swtch);

  /* No error markers should reach here (they should be filtered out
     during gimplification).  */
  gcc_checking_assert (TREE_TYPE (m_index_expr) != error_mark_node);

  /* A switch on a constant should have been optimized in tree-cfg-cleanup.  */
  gcc_checking_assert (!TREE_CONSTANT (m_index_expr));

  /* Prefer bit test if possible.  */
  if (tree_fits_uhwi_p (m_range_size)
      && bit_test_cluster::can_be_handled (tree_to_uhwi (m_range_size), m_uniq)
      && bit_test_cluster::is_beneficial (m_count, m_uniq))
    {
      m_reason = "expanding as bit test is preferable";
      return;
    }

  if (m_uniq <= 2)
    {
      /* This will be expanded as a decision tree.  */
      m_reason = "expanding as jumps is preferable";
      return;
    }

  /* If there is no common successor, we cannot do the transformation.  */
  if (!m_final_bb)
    {
      m_reason = "no common successor to all case label target blocks found";
      return;
    }

  /* Check the case label values are within reasonable range:  */
  if (!check_range ())
    {
      gcc_assert (m_reason);
      return;
    }

  /* For all the cases, see whether they are empty, the assignments they
     represent constant and so on...  */
  if (!check_all_empty_except_final ())
    {
      gcc_assert (m_reason);
      return;
    }
  if (!check_final_bb ())
    {
      gcc_assert (m_reason);
      return;
    }

  /* At this point all checks have passed and we can proceed with the
     transformation.  */

  create_temp_arrays ();
  gather_default_values (m_default_case_nonstandard
			 ? gimple_switch_label (swtch, 1)
			 : gimple_switch_default_label (swtch));
  build_constructors ();

  build_arrays (); /* Build the static arrays and assignments.  */
  gen_inbound_check ();	/* Build the bounds check.  */

  m_cfg_altered = true;
}
/* Destructor.  */

switch_conversion::~switch_conversion ()
{
  XDELETEVEC (m_constructors);
  XDELETEVEC (m_default_values);
}
/* Constructor.  */

group_cluster::group_cluster (vec<cluster *> &clusters,
			      unsigned start, unsigned end)
{
  gcc_checking_assert (end - start + 1 >= 1);
  m_prob = profile_probability::never ();
  m_cases.create (end - start + 1);
  for (unsigned i = start; i <= end; i++)
    {
      m_cases.quick_push (static_cast<simple_cluster *> (clusters[i]));
      m_prob += clusters[i]->m_prob;
    }
  m_subtree_prob = m_prob;
}
/* Destructor.  */

group_cluster::~group_cluster ()
{
  for (unsigned i = 0; i < m_cases.length (); i++)
    delete m_cases[i];

  m_cases.release ();
}
/* Dump content of a cluster.  */

void
group_cluster::dump (FILE *f, bool details)
{
  unsigned total_values = 0;
  for (unsigned i = 0; i < m_cases.length (); i++)
    total_values += m_cases[i]->get_range (m_cases[i]->get_low (),
					   m_cases[i]->get_high ());

  unsigned comparison_count = 0;
  for (unsigned i = 0; i < m_cases.length (); i++)
    {
      simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
      comparison_count += sc->m_range_p ? 2 : 1;
    }

  unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
  fprintf (f, "%s", get_type () == JUMP_TABLE ? "JT" : "BT");

  if (details)
    fprintf (f, "(values:%d comparisons:%d range:" HOST_WIDE_INT_PRINT_DEC
	     " density: %.2f%%)", total_values, comparison_count, range,
	     100.0f * comparison_count / range);

  fprintf (f, ":");
  PRINT_CASE (f, get_low ());
  fprintf (f, "-");
  PRINT_CASE (f, get_high ());
  fprintf (f, " ");
}
/* Emit GIMPLE code to handle the cluster.  */

void
jump_table_cluster::emit (tree index_expr, tree,
			  tree default_label_expr, basic_block default_bb)
{
  unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
  unsigned HOST_WIDE_INT nondefault_range = 0;

  /* For jump table we just emit a new gswitch statement that will
     be later lowered to jump table.  */
  auto_vec<tree> labels;
  labels.create (m_cases.length ());

  make_edge (m_case_bb, default_bb, 0);
  for (unsigned i = 0; i < m_cases.length (); i++)
    {
      labels.quick_push (unshare_expr (m_cases[i]->m_case_label_expr));
      make_edge (m_case_bb, m_cases[i]->m_case_bb, 0);
    }

  gswitch *s = gimple_build_switch (index_expr,
				    unshare_expr (default_label_expr), labels);
  gimple_stmt_iterator gsi = gsi_start_bb (m_case_bb);
  gsi_insert_after (&gsi, s, GSI_NEW_STMT);

  /* Set up even probabilities for all cases.  */
  for (unsigned i = 0; i < m_cases.length (); i++)
    {
      simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
      edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
      unsigned HOST_WIDE_INT case_range
	= sc->get_range (sc->get_low (), sc->get_high ());
      nondefault_range += case_range;

      /* case_edge->aux is number of values in a jump-table that are covered
	 by the case_edge.  */
      case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + case_range);
    }

  edge default_edge = gimple_switch_default_edge (cfun, s);
  default_edge->probability = profile_probability::never ();

  for (unsigned i = 0; i < m_cases.length (); i++)
    {
      simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
      edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
      case_edge->probability
	= profile_probability::always ().apply_scale ((intptr_t)case_edge->aux,
						      range);
    }

  /* Number of non-default values determines probability of the default edge.  */
  default_edge->probability
    += profile_probability::always ().apply_scale (nondefault_range,
						   range).invert ();

  switch_decision_tree::reset_out_edges_aux (s);
}
/* Find jump tables of given CLUSTERS, where all members of the vector
   are of type simple_cluster.  New clusters are returned.  */

vec<cluster *>
jump_table_cluster::find_jump_tables (vec<cluster *> &clusters)
{
  if (!is_enabled ())
    return clusters.copy ();

  unsigned l = clusters.length ();
  auto_vec<min_cluster_item> min;
  min.reserve (l + 1);

  min.quick_push (min_cluster_item (0, 0, 0));

  for (unsigned i = 1; i <= l; i++)
    {
      /* Set minimal # of clusters with i-th item to infinite.  */
      min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));

      for (unsigned j = 0; j < i; j++)
	{
	  unsigned HOST_WIDE_INT s = min[j].m_non_jt_cases;
	  if (i - j < case_values_threshold ())
	    s += i - j;

	  /* Prefer clusters with smaller number of numbers covered.  */
	  if ((min[j].m_count + 1 < min[i].m_count
	       || (min[j].m_count + 1 == min[i].m_count
		   && s < min[i].m_non_jt_cases))
	      && can_be_handled (clusters, j, i - 1))
	    min[i] = min_cluster_item (min[j].m_count + 1, j, s);
	}

      gcc_checking_assert (min[i].m_count != INT_MAX);
    }

  /* No result.  */
  if (min[l].m_count == l)
    return clusters.copy ();

  vec<cluster *> output;
  output.create (4);

  /* Find and build the clusters.  */
  for (unsigned int end = l;;)
    {
      int start = min[end].m_start;

      /* Do not allow clusters with small number of cases.  */
      if (is_beneficial (clusters, start, end - 1))
	output.safe_push (new jump_table_cluster (clusters, start, end - 1));
      else
	for (int i = end - 1; i >= start; i--)
	  output.safe_push (clusters[i]);

      end = start;

      if (start <= 0)
	break;
    }

  output.reverse ();
  return output;
}
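
/* To illustrate the dynamic programming above with made-up numbers: for the
   case values { 1, 2, 3, 4, 100 } the minimum-cluster computation would
   typically keep { 1, 2, 3, 4 } as one jump-table candidate and leave 100
   as a simple cluster, because including 100 would blow the table range up
   from 4 to 100 and fail the density check in can_be_handled below.  */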
/* Return true when cluster starting at START and ending at END (inclusive)
   can build a jump-table.  */

bool
jump_table_cluster::can_be_handled (const vec<cluster *> &clusters,
				    unsigned start, unsigned end)
{
  /* If the switch is relatively small such that the cost of one
     indirect jump on the target is higher than the cost of a
     decision tree, go with the decision tree.

     If range of values is much bigger than number of values,
     or if it is too large to represent in a HOST_WIDE_INT,
     make a sequence of conditional branches instead of a dispatch.

     The definition of "much bigger" depends on whether we are
     optimizing for size or for speed.

     For algorithm correctness, jump table for a single case must return
     true.  We bail out in is_beneficial if it's called just for
     a single case.  */
  if (start == end)
    return true;

  unsigned HOST_WIDE_INT max_ratio
    = (optimize_insn_for_size_p ()
       ? PARAM_VALUE (PARAM_JUMP_TABLE_MAX_GROWTH_RATIO_FOR_SIZE)
       : PARAM_VALUE (PARAM_JUMP_TABLE_MAX_GROWTH_RATIO_FOR_SPEED));
  unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
					    clusters[end]->get_high ());
  /* Check overflow.  */
  if (range == 0)
    return false;

  unsigned HOST_WIDE_INT comparison_count = 0;
  for (unsigned i = start; i <= end; i++)
    {
      simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
      comparison_count += sc->m_range_p ? 2 : 1;
    }

  unsigned HOST_WIDE_INT lhs = 100 * range;
  if (lhs < range)
    return false;

  return lhs <= max_ratio * comparison_count;
}
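
/* Numeric sketch of the final test (the numbers are illustrative only, not
   the parameter defaults): with a case range of 50 and 10 needed
   comparisons, lhs is 100 * 50 = 5000, so a jump table is accepted as long
   as max_ratio * 10 >= 5000, i.e. the configured growth-ratio parameter is
   at least 500.  */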
/* Return true if cluster starting at START and ending at END (inclusive)
   is a profitable transformation.  */

bool
jump_table_cluster::is_beneficial (const vec<cluster *> &,
				   unsigned start, unsigned end)
{
  /* Single case bail out.  */
  if (start == end)
    return false;

  return end - start + 1 >= case_values_threshold ();
}
/* Find bit tests of given CLUSTERS, where all members of the vector
   are of type simple_cluster.  New clusters are returned.  */

vec<cluster *>
bit_test_cluster::find_bit_tests (vec<cluster *> &clusters)
{
  unsigned l = clusters.length ();
  auto_vec<min_cluster_item> min;
  min.reserve (l + 1);

  min.quick_push (min_cluster_item (0, 0, 0));

  for (unsigned i = 1; i <= l; i++)
    {
      /* Set minimal # of clusters with i-th item to infinite.  */
      min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));

      for (unsigned j = 0; j < i; j++)
	{
	  if (min[j].m_count + 1 < min[i].m_count
	      && can_be_handled (clusters, j, i - 1))
	    min[i] = min_cluster_item (min[j].m_count + 1, j, INT_MAX);
	}

      gcc_checking_assert (min[i].m_count != INT_MAX);
    }

  /* No result.  */
  if (min[l].m_count == l)
    return clusters.copy ();

  vec<cluster *> output;
  output.create (4);

  /* Find and build the clusters.  */
  for (unsigned end = l;;)
    {
      int start = min[end].m_start;

      if (is_beneficial (clusters, start, end - 1))
	{
	  bool entire = start == 0 && end == clusters.length ();
	  output.safe_push (new bit_test_cluster (clusters, start, end - 1,
						  entire));
	}
      else
	for (int i = end - 1; i >= start; i--)
	  output.safe_push (clusters[i]);

      end = start;

      if (start <= 0)
	break;
    }

  output.reverse ();
  return output;
}
/* Return true when RANGE of case values with UNIQ labels
   can build a bit test.  */

bool
bit_test_cluster::can_be_handled (unsigned HOST_WIDE_INT range,
				  unsigned int uniq)
{
  /* Check overflow.  */
  if (range == 0)
    return false;

  if (range >= GET_MODE_BITSIZE (word_mode))
    return false;

  return uniq <= m_max_case_bit_tests;
}
/* Return true when cluster starting at START and ending at END (inclusive)
   can build a bit test.  */

bool
bit_test_cluster::can_be_handled (const vec<cluster *> &clusters,
				  unsigned start, unsigned end)
{
  /* For algorithm correctness, bit test for a single case must return
     true.  We bail out in is_beneficial if it's called just for
     a single case.  */
  if (start == end)
    return true;

  unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
					    clusters[end]->get_high ());
  auto_bitmap dest_bbs;

  for (unsigned i = start; i <= end; i++)
    {
      simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
      bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
    }

  return can_be_handled (range, bitmap_count_bits (dest_bbs));
}
/* Return true when COUNT of cases of UNIQ labels is beneficial for bit test
   transformation.  */

bool
bit_test_cluster::is_beneficial (unsigned count, unsigned uniq)
{
  return (((uniq == 1 && count >= 3)
	   || (uniq == 2 && count >= 5)
	   || (uniq == 3 && count >= 6)));
}
/* Return true if cluster starting at START and ending at END (inclusive)
   is a profitable transformation.  */

bool
bit_test_cluster::is_beneficial (const vec<cluster *> &clusters,
				 unsigned start, unsigned end)
{
  /* Single case bail out.  */
  if (start == end)
    return false;

  auto_bitmap dest_bbs;

  for (unsigned i = start; i <= end; i++)
    {
      simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
      bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
    }

  unsigned uniq = bitmap_count_bits (dest_bbs);
  unsigned count = end - start + 1;
  return is_beneficial (count, uniq);
}
/* Comparison function for qsort to order bit tests by decreasing
   probability of execution.  */

int
case_bit_test::cmp (const void *p1, const void *p2)
{
  const case_bit_test *const d1 = (const case_bit_test *) p1;
  const case_bit_test *const d2 = (const case_bit_test *) p2;

  if (d2->bits != d1->bits)
    return d2->bits - d1->bits;

  /* Stabilize the sort.  */
  return (LABEL_DECL_UID (CASE_LABEL (d2->label))
	  - LABEL_DECL_UID (CASE_LABEL (d1->label)));
}
/* Expand a switch statement by a short sequence of bit-wise
   comparisons.  "switch(x)" is effectively converted into
   "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
   integer constants.

   INDEX_EXPR is the value being switched on.

   MINVAL is the lowest case value in the case nodes,
   and RANGE is the highest value minus MINVAL.  MINVAL and RANGE
   are not guaranteed to be of the same type as INDEX_EXPR
   (the gimplifier doesn't change the type of case label values,
   and MINVAL and RANGE are derived from those values).
   MAXVAL is MINVAL + RANGE.

   There *MUST* be max_case_bit_tests or less unique case
   node targets.  */
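
/* A hand-worked example of the transformation (the values are made up): for

     switch (x) { case 0: case 1: case 2: case 7: case 8: goto A;
		  default: goto def; }

   the code below emits, in essence,

     idx = (unsigned) x - 0;
     if (idx > 8) goto def;
     csui = (word) 1 << idx;
     if (csui & 0x187) goto A;	/- 0x187 == bits 0, 1, 2, 7 and 8 -/
     goto def;

   where a single AND against a mask replaces five equality comparisons.  */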
void
bit_test_cluster::emit (tree index_expr, tree index_type,
			tree, basic_block default_bb)
{
  case_bit_test test[m_max_case_bit_tests] = { {} };
  unsigned int i, j, k;
  unsigned int count;

  tree unsigned_index_type = range_check_type (index_type);

  gimple_stmt_iterator gsi;
  gassign *shift_stmt;

  tree idx, tmp, csui;
  tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
  tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
  tree word_mode_one = fold_convert (word_type_node, integer_one_node);
  int prec = TYPE_PRECISION (word_type_node);
  wide_int wone = wi::one (prec);

  tree minval = get_low ();
  tree maxval = get_high ();
  tree range = int_const_binop (MINUS_EXPR, maxval, minval);
  unsigned HOST_WIDE_INT bt_range = get_range (minval, maxval);

  /* Go through all case labels, and collect the case labels, profile
     counts, and other information we need to build the branch tests.  */
  count = 0;
  for (i = 0; i < m_cases.length (); i++)
    {
      unsigned int lo, hi;
      simple_cluster *n = static_cast<simple_cluster *> (m_cases[i]);
      for (k = 0; k < count; k++)
	if (n->m_case_bb == test[k].target_bb)
	  break;

      if (k == count)
	{
	  gcc_checking_assert (count < m_max_case_bit_tests);
	  test[k].mask = wi::zero (prec);
	  test[k].target_bb = n->m_case_bb;
	  test[k].label = n->m_case_label_expr;
	  test[k].bits = 0;
	  count++;
	}

      test[k].bits += n->get_range (n->get_low (), n->get_high ());

      lo = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_low (), minval));
      if (n->get_high () == NULL_TREE)
	hi = lo;
      else
	hi = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_high (),
					    minval));

      for (j = lo; j <= hi; j++)
	test[k].mask |= wi::lshift (wone, j);
    }

  qsort (test, count, sizeof (*test), case_bit_test::cmp);

  /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
     the minval subtractions, but it might make the mask constants more
     expensive.  So, compare the costs.  */
  if (compare_tree_int (minval, 0) > 0
      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
    {
      int cost_diff;
      HOST_WIDE_INT m = tree_to_uhwi (minval);
      rtx reg = gen_raw_REG (word_mode, 10000);
      bool speed_p = optimize_insn_for_speed_p ();
      cost_diff = set_rtx_cost (gen_rtx_PLUS (word_mode, reg,
					      GEN_INT (-m)), speed_p);
      for (i = 0; i < count; i++)
	{
	  rtx r = immed_wide_int_const (test[i].mask, word_mode);
	  cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
				     word_mode, speed_p);
	  r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
	  cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
				     word_mode, speed_p);
	}
      if (cost_diff > 0)
	{
	  for (i = 0; i < count; i++)
	    test[i].mask = wi::lshift (test[i].mask, m);
	  minval = build_zero_cst (TREE_TYPE (minval));
	  range = maxval;
	}
    }

  /* Now build the test-and-branch code.  */

  gsi = gsi_last_bb (m_case_bb);

  /* idx = (unsigned)x - minval.  */
  idx = fold_convert (unsigned_index_type, index_expr);
  idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
		     fold_convert (unsigned_index_type, minval));
  idx = force_gimple_operand_gsi (&gsi, idx,
				  /*simple=*/true, NULL_TREE,
				  /*before=*/true, GSI_SAME_STMT);

  if (m_handles_entire_switch)
    {
      /* if (idx > range) goto default */
      range
	= force_gimple_operand_gsi (&gsi,
				    fold_convert (unsigned_index_type, range),
				    /*simple=*/true, NULL_TREE,
				    /*before=*/true, GSI_SAME_STMT);
      tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
      basic_block new_bb
	= hoist_edge_and_branch_if_true (&gsi, tmp, default_bb,
					 profile_probability::unlikely ());
      gsi = gsi_last_bb (new_bb);
    }

  /* csui = (1 << (word_mode) idx) */
  csui = make_ssa_name (word_type_node);
  tmp = fold_build2 (LSHIFT_EXPR, word_type_node, word_mode_one,
		     fold_convert (word_type_node, idx));
  tmp = force_gimple_operand_gsi (&gsi, tmp,
				  /*simple=*/false, NULL_TREE,
				  /*before=*/true, GSI_SAME_STMT);
  shift_stmt = gimple_build_assign (csui, tmp);
  gsi_insert_before (&gsi, shift_stmt, GSI_SAME_STMT);
  update_stmt (shift_stmt);

  profile_probability prob = profile_probability::always ();

  /* for each unique set of cases:
       if (const & csui) goto target  */
  for (k = 0; k < count; k++)
    {
      prob = profile_probability::always ().apply_scale (test[k].bits,
							 bt_range);
      bt_range -= test[k].bits;
      tmp = wide_int_to_tree (word_type_node, test[k].mask);
      tmp = fold_build2 (BIT_AND_EXPR, word_type_node, csui, tmp);
      tmp = force_gimple_operand_gsi (&gsi, tmp,
				      /*simple=*/true, NULL_TREE,
				      /*before=*/true, GSI_SAME_STMT);
      tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, word_mode_zero);
      basic_block new_bb
	= hoist_edge_and_branch_if_true (&gsi, tmp, test[k].target_bb, prob);
      gsi = gsi_last_bb (new_bb);
    }

  /* We should have removed all edges now.  */
  gcc_assert (EDGE_COUNT (gsi_bb (gsi)->succs) == 0);

  /* If nothing matched, go to the default label.  */
  edge e = make_edge (gsi_bb (gsi), default_bb, EDGE_FALLTHRU);
  e->probability = profile_probability::always ();
}
/* Split the basic block at the statement pointed to by GSIP, and insert
   a branch to the target basic block of E_TRUE conditional on tree
   expression COND.

   It is assumed that there is already an edge from the to-be-split
   basic block to E_TRUE->dest block.  This edge is removed, and the
   profile information on the edge is re-used for the new conditional
   jump.

   The CFG is updated.  The dominator tree will not be valid after
   this transformation, but the immediate dominators are updated if
   UPDATE_DOMINATORS is true.

   Returns the newly created basic block.  */
basic_block
bit_test_cluster::hoist_edge_and_branch_if_true (gimple_stmt_iterator *gsip,
						 tree cond, basic_block case_bb,
						 profile_probability prob)
{
  tree tmp;
  gcond *cond_stmt;
  edge e_false;
  basic_block new_bb, split_bb = gsi_bb (*gsip);

  edge e_true = make_edge (split_bb, case_bb, EDGE_TRUE_VALUE);
  e_true->probability = prob;
  gcc_assert (e_true->src == split_bb);

  tmp = force_gimple_operand_gsi (gsip, cond, /*simple=*/true, NULL,
				  /*before=*/true, GSI_SAME_STMT);
  cond_stmt = gimple_build_cond_from_tree (tmp, NULL_TREE, NULL_TREE);
  gsi_insert_before (gsip, cond_stmt, GSI_SAME_STMT);

  e_false = split_block (split_bb, cond_stmt);
  new_bb = e_false->dest;
  redirect_edge_pred (e_true, split_bb);

  e_false->flags &= ~EDGE_FALLTHRU;
  e_false->flags |= EDGE_FALSE_VALUE;
  e_false->probability = e_true->probability.invert ();
  new_bb->count = e_false->count ();

  return new_bb;
}
/* Compute the number of case labels that correspond to each outgoing edge of
   switch statement.  Record this information in the aux field of the edge.  */

void
switch_decision_tree::compute_cases_per_edge ()
{
  reset_out_edges_aux (m_switch);
  int ncases = gimple_switch_num_labels (m_switch);
  for (int i = ncases - 1; i >= 1; --i)
    {
      edge case_edge = gimple_switch_edge (cfun, m_switch, i);
      case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + 1);
    }
}
/* Analyze switch statement and return true when the statement is expanded
   as decision tree.  */

bool
switch_decision_tree::analyze_switch_statement ()
{
  unsigned l = gimple_switch_num_labels (m_switch);
  basic_block bb = gimple_bb (m_switch);
  auto_vec<cluster *> clusters;
  clusters.create (l - 1);

  basic_block default_bb = gimple_switch_default_bb (cfun, m_switch);
  m_case_bbs.reserve (l);
  m_case_bbs.quick_push (default_bb);

  compute_cases_per_edge ();

  for (unsigned i = 1; i < l; i++)
    {
      tree elt = gimple_switch_label (m_switch, i);
      tree lab = CASE_LABEL (elt);
      basic_block case_bb = label_to_block (cfun, lab);
      edge case_edge = find_edge (bb, case_bb);
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);

      profile_probability p
	= case_edge->probability.apply_scale (1, (intptr_t) (case_edge->aux));
      clusters.quick_push (new simple_cluster (low, high, elt, case_edge->dest,
					       p));
      m_case_bbs.quick_push (case_edge->dest);
    }

  reset_out_edges_aux (m_switch);

  /* Find jump table clusters.  */
  vec<cluster *> output = jump_table_cluster::find_jump_tables (clusters);

  /* Find bit test clusters.  */
  vec<cluster *> output2;
  auto_vec<cluster *> tmp;
  output2.create (1);
  tmp.create (1);

  for (unsigned i = 0; i < output.length (); i++)
    {
      cluster *c = output[i];
      if (c->get_type () != SIMPLE_CASE)
	{
	  if (!tmp.is_empty ())
	    {
	      vec<cluster *> n = bit_test_cluster::find_bit_tests (tmp);
	      output2.safe_splice (n);
	      n.release ();
	      tmp.truncate (0);
	    }
	  output2.safe_push (c);
	}
      else
	tmp.safe_push (c);
    }

  /* We still can have a temporary vector to test.  */
  if (!tmp.is_empty ())
    {
      vec<cluster *> n = bit_test_cluster::find_bit_tests (tmp);
      output2.safe_splice (n);
      n.release ();
    }

  if (dump_file)
    {
      fprintf (dump_file, ";; GIMPLE switch case clusters: ");
      for (unsigned i = 0; i < output2.length (); i++)
	output2[i]->dump (dump_file, dump_flags & TDF_DETAILS);
      fprintf (dump_file, "\n");
    }

  output.release ();

  bool expanded = try_switch_expansion (output2);

  for (unsigned i = 0; i < output2.length (); i++)
    delete output2[i];

  output2.release ();

  return expanded;
}
/* Attempt to expand CLUSTERS as a decision tree.  Return true when
   expanded.  */

bool
switch_decision_tree::try_switch_expansion (vec<cluster *> &clusters)
{
  tree index_expr = gimple_switch_index (m_switch);
  tree index_type = TREE_TYPE (index_expr);
  basic_block bb = gimple_bb (m_switch);

  if (gimple_switch_num_labels (m_switch) == 1
      || range_check_type (index_type) == NULL_TREE)
    return false;

  /* Find the default case target label.  */
  edge default_edge = gimple_switch_default_edge (cfun, m_switch);
  m_default_bb = default_edge->dest;

  /* Do the insertion of a case label into m_case_list.  The labels are
     fed to us in descending order from the sorted vector of case labels used
     in the tree part of the middle end.  So the list we construct is
     sorted in ascending order.  */

  for (int i = clusters.length () - 1; i >= 0; i--)
    {
      case_tree_node *r = m_case_list;
      m_case_list = m_case_node_pool.allocate ();
      m_case_list->m_right = r;
      m_case_list->m_c = clusters[i];
    }

  record_phi_operand_mapping ();

  /* Split basic block that contains the gswitch statement.  */
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  edge e;
  if (gsi_end_p (gsi))
    e = split_block_after_labels (bb);
  else
    {
      gsi_prev (&gsi);
      e = split_block (bb, gsi_stmt (gsi));
    }
  bb = split_edge (e);

  /* Create new basic blocks for non-case clusters where specific expansion
     needs to happen.  */
  for (unsigned i = 0; i < clusters.length (); i++)
    if (clusters[i]->get_type () != SIMPLE_CASE)
      {
	clusters[i]->m_case_bb = create_empty_bb (bb);
	clusters[i]->m_case_bb->loop_father = bb->loop_father;
      }

  /* Do not do extra work for a single cluster.  */
  if (clusters.length () == 1
      && clusters[0]->get_type () != SIMPLE_CASE)
    {
      cluster *c = clusters[0];
      c->emit (index_expr, index_type,
	       gimple_switch_default_label (m_switch), m_default_bb);
      redirect_edge_succ (single_succ_edge (bb), c->m_case_bb);
    }
  else
    {
      emit (bb, index_expr, default_edge->probability, index_type);

      /* Emit cluster-specific switch handling.  */
      for (unsigned i = 0; i < clusters.length (); i++)
	if (clusters[i]->get_type () != SIMPLE_CASE)
	  clusters[i]->emit (index_expr, index_type,
			     gimple_switch_default_label (m_switch),
			     m_default_bb);
    }

  fix_phi_operands_for_edges ();

  return true;
}
/* Before switch transformation, record all SSA_NAMEs defined in switch BB
   and used in a label basic block.  */

void
switch_decision_tree::record_phi_operand_mapping ()
{
  basic_block switch_bb = gimple_bb (m_switch);
  /* Record all PHI nodes that have to be fixed after conversion.  */
  for (unsigned i = 0; i < m_case_bbs.length (); i++)
    {
      gphi_iterator gsi;
      basic_block bb = m_case_bbs[i];
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  for (unsigned i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      basic_block phi_src_bb = gimple_phi_arg_edge (phi, i)->src;
	      if (phi_src_bb == switch_bb)
		{
		  tree def = gimple_phi_arg_def (phi, i);
		  tree result = gimple_phi_result (phi);
		  m_phi_mapping.put (result, def);
		  break;
		}
	    }
	}
    }
}
/* Append new operands to PHI statements that were introduced due to
   addition of new edges to case labels.  */

void
switch_decision_tree::fix_phi_operands_for_edges ()
{
  gphi_iterator gsi;

  for (unsigned i = 0; i < m_case_bbs.length (); i++)
    {
      basic_block bb = m_case_bbs[i];
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  for (unsigned j = 0; j < gimple_phi_num_args (phi); j++)
	    {
	      tree def = gimple_phi_arg_def (phi, j);
	      if (def == NULL_TREE)
		{
		  edge e = gimple_phi_arg_edge (phi, j);
		  tree *definition
		    = m_phi_mapping.get (gimple_phi_result (phi));
		  gcc_assert (definition);
		  add_phi_arg (phi, *definition, e, UNKNOWN_LOCATION);
		}
	    }
	}
    }
}
/* Generate a decision tree, switching on INDEX_EXPR and jumping to
   one of the labels in CASE_LIST or to the DEFAULT_LABEL.

   We generate a binary decision tree to select the appropriate target
   code.  */

void
switch_decision_tree::emit (basic_block bb, tree index_expr,
			    profile_probability default_prob, tree index_type)
{
  balance_case_nodes (&m_case_list, NULL);

  if (dump_file)
    dump_function_to_file (current_function_decl, dump_file, dump_flags);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
      fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
      gcc_assert (m_case_list != NULL);
      dump_case_nodes (dump_file, m_case_list, indent_step, 0);
    }

  bb = emit_case_nodes (bb, index_expr, m_case_list, default_prob, index_type,
			gimple_location (m_switch));

  if (bb)
    emit_jump (bb, m_default_bb);

  /* Remove all edges and keep just the edge that will reach default_bb.  */
  bb = gimple_bb (m_switch);
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_remove (&gsi, true);

  delete_basic_block (bb);
}
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
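
/* Small illustration (made-up case values): the ordered list
   1 -> 2 -> 3 -> 4 -> 5 is split around the node that bisects the total
   probability, say 3, giving the tree

		3
	       / \
	   {1,2} {4,5}

   and the two halves are then balanced the same way, so selecting a case
   needs O(log n) comparisons instead of a linear scan of the list.  */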
void
switch_decision_tree::balance_case_nodes (case_tree_node **head,
					  case_tree_node *parent)
{
  case_tree_node *np;

  np = *head;
  if (np)
    {
      int i = 0;
      int ranges = 0;
      case_tree_node **npp;
      case_tree_node *left;
      profile_probability prob = profile_probability::never ();

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->m_c->get_low (), np->m_c->get_high ()))
	    ranges++;

	  i++;
	  prob += np->m_c->m_prob;
	  np = np->m_right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  profile_probability pivot_prob = prob.apply_scale (1, 2);

	  /* Find the place in the list that bisects the list's total cost,
	     where ranges count as 2.  */
	  while (1)
	    {
	      /* Skip nodes while their probability does not reach
		 that amount.  */
	      prob -= (*npp)->m_c->m_prob;
	      if ((prob.initialized_p () && prob < pivot_prob)
		  || ! (*npp)->m_right)
		break;
	      npp = &(*npp)->m_right;
	    }

	  np = *npp;
	  *npp = 0;
	  *head = np;
	  np->m_parent = parent;
	  np->m_left = left == np ? NULL : left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->m_left, np);
	  balance_case_nodes (&np->m_right, np);
	  np->m_c->m_subtree_prob = np->m_c->m_prob;
	  if (np->m_left)
	    np->m_c->m_subtree_prob += np->m_left->m_c->m_subtree_prob;
	  if (np->m_right)
	    np->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->m_parent = parent;
	  np->m_c->m_subtree_prob = np->m_c->m_prob;
	  for (; np->m_right; np = np->m_right)
	    {
	      np->m_right->m_parent = np;
	      (*head)->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
	    }
	}
    }
}
/* Dump ROOT, a list or tree of case nodes, to file.  */

void
switch_decision_tree::dump_case_nodes (FILE *f, case_tree_node *root,
				       int indent_step, int indent_level)
{
  if (root == 0)
    return;

  indent_level++;

  dump_case_nodes (f, root->m_left, indent_step, indent_level);

  fputs (";; ", f);
  fprintf (f, "%*s", indent_step * indent_level, "");
  root->m_c->dump (f);
  root->m_c->m_prob.dump (f);
  fputs (" subtree: ", f);
  root->m_c->m_subtree_prob.dump (f);
  fputs ("\n", f);

  dump_case_nodes (f, root->m_right, indent_step, indent_level);
}
/* Add an unconditional jump to CASE_BB that happens in basic block BB.  */

void
switch_decision_tree::emit_jump (basic_block bb, basic_block case_bb)
{
  edge e = single_succ_edge (bb);
  redirect_edge_succ (e, case_bb);
}
2084 /* Generate code to compare OP0 with OP1 so that the condition codes are
2085 set and to jump to LABEL_BB if the condition is true.
2086 COMPARISON is the GIMPLE comparison (EQ, NE, GT, etc.).
2087 PROB is the probability of jumping to LABEL_BB. */
basic_block
switch_decision_tree::emit_cmp_and_jump_insns (basic_block bb, tree op0,
					       tree op1, tree_code comparison,
					       basic_block label_bb,
					       profile_probability prob,
					       location_t loc)
{
  // TODO: this is called once with lhs != index.
  op1 = fold_convert (TREE_TYPE (op0), op1);

  gcond *cond = gimple_build_cond (comparison, op0, op1, NULL_TREE, NULL_TREE);
  gimple_set_location (cond, loc);
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_after (&gsi, cond, GSI_NEW_STMT);

  gcc_assert (single_succ_p (bb));

  /* Make a new basic block where the false branch will take place.  */
  edge false_edge = split_block (bb, cond);
  false_edge->flags = EDGE_FALSE_VALUE;
  false_edge->probability = prob.invert ();

  edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
  true_edge->probability = prob;

  return false_edge->dest;
}
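
/* For illustration only (a sketch of the CFG shape built above; block names
   are hypothetical): BB now ends in the new GIMPLE_COND, split_block supplies
   the fall-through block and make_edge the taken branch, giving

       BB:  if (op0 <comparison> op1)
	 | true (prob)            | false (prob.invert ())
	 v                        v
       LABEL_BB               FALLTHRU_BB

   and FALLTHRU_BB, the returned block, is where the caller keeps emitting
   the remaining tests.  */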

/* Generate code to jump to LABEL_BB if OP0 and OP1 are equal.
   PROB is the probability of jumping to LABEL_BB.
   BB is a basic block where the new condition will be placed.  */
basic_block
switch_decision_tree::do_jump_if_equal (basic_block bb, tree op0, tree op1,
					basic_block label_bb,
					profile_probability prob,
					location_t loc)
{
  op1 = fold_convert (TREE_TYPE (op0), op1);

  gcond *cond = gimple_build_cond (EQ_EXPR, op0, op1, NULL_TREE, NULL_TREE);
  gimple_set_location (cond, loc);
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_before (&gsi, cond, GSI_SAME_STMT);

  gcc_assert (single_succ_p (bb));

  /* Make a new basic block where the false branch will take place.  */
  edge false_edge = split_block (bb, cond);
  false_edge->flags = EDGE_FALSE_VALUE;
  false_edge->probability = prob.invert ();

  edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
  true_edge->probability = prob;

  return false_edge->dest;
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The generated decision tree follows the form of the case-node
   binary tree NODE, whose nodes represent test conditions.
   DEFAULT_PROB is the probability of cases leading to the default BB.
   INDEX_TYPE is the type of the index of the switch.  */
basic_block
switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
				       case_tree_node *node,
				       profile_probability default_prob,
				       tree index_type, location_t loc)
{
  profile_probability p;

  /* If node is null, we are done.  */
  if (node == NULL)
    return bb;

  /* Single value case.  */
  if (node->m_c->is_single_value_p ())
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */
      p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
      bb = do_jump_if_equal (bb, index, node->m_c->get_low (),
			     node->m_c->m_case_bb, p, loc);
      /* Since this case is taken at this point, reduce its weight from
	 subtree_weight.  */
      node->m_c->m_subtree_prob -= p;

      if (node->m_left != NULL && node->m_right != NULL)
	{
	  /* 1) the node has both children

	     If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */

	  if (!node->m_left->has_child ()
	      && node->m_left->m_c->is_single_value_p ()
	      && !node->m_right->has_child ()
	      && node->m_right->m_c->is_single_value_p ())
	    {
	      p = (node->m_right->m_c->m_prob
		   / (node->m_c->m_subtree_prob + default_prob));
	      bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
				     node->m_right->m_c->m_case_bb, p, loc);

	      p = (node->m_left->m_c->m_prob
		   / (node->m_c->m_subtree_prob + default_prob));
	      bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
				     node->m_left->m_c->m_case_bb, p, loc);
	    }
	  else
	    {
	      /* Branch to a label where we will handle it later.  */
	      basic_block test_bb = split_edge (single_succ_edge (bb));
	      redirect_edge_succ (single_pred_edge (test_bb),
				  single_succ_edge (bb)->dest);

	      p = ((node->m_right->m_c->m_subtree_prob
		    + default_prob.apply_scale (1, 2))
		   / (node->m_c->m_subtree_prob + default_prob));
	      bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
					    GT_EXPR, test_bb, p, loc);
	      default_prob = default_prob.apply_scale (1, 2);

	      /* Handle the left-hand subtree.  */
	      bb = emit_case_nodes (bb, index, node->m_left,
				    default_prob, index_type, loc);

	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      if (bb && m_default_bb)
		emit_jump (bb, m_default_bb);

	      bb = emit_case_nodes (test_bb, index, node->m_right,
				    default_prob, index_type, loc);
	    }
	}
      else if (node->m_left == NULL && node->m_right != NULL)
	{
	  /* 2) the node has only a right child.  */

	  /* Here we have a right child but no left, so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */

	  if (node->m_right->has_child ()
	      || !node->m_right->m_c->is_single_value_p ())
	    {
	      p = (default_prob.apply_scale (1, 2)
		   / (node->m_c->m_subtree_prob + default_prob));
	      bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
					    LT_EXPR, m_default_bb, p, loc);
	      default_prob = default_prob.apply_scale (1, 2);

	      bb = emit_case_nodes (bb, index, node->m_right, default_prob,
				    index_type, loc);
	    }
	  else
	    {
	      /* We cannot process node->right normally
		 since we haven't ruled out the numbers less than
		 this node's value.  So handle node->right explicitly.  */
	      p = (node->m_right->m_c->m_subtree_prob
		   / (node->m_c->m_subtree_prob + default_prob));
	      bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
				     node->m_right->m_c->m_case_bb, p, loc);
	    }
	}
      else if (node->m_left != NULL && node->m_right == NULL)
	{
	  /* 3) just one subtree, on the left.  Similar case as previous.  */

	  if (node->m_left->has_child ()
	      || !node->m_left->m_c->is_single_value_p ())
	    {
	      p = (default_prob.apply_scale (1, 2)
		   / (node->m_c->m_subtree_prob + default_prob));
	      bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
					    GT_EXPR, m_default_bb, p, loc);
	      default_prob = default_prob.apply_scale (1, 2);

	      bb = emit_case_nodes (bb, index, node->m_left, default_prob,
				    index_type, loc);
	    }
	  else
	    {
	      /* We cannot process node->left normally
		 since we haven't ruled out the numbers less than
		 this node's value.  So handle node->left explicitly.  */
	      p = (node->m_left->m_c->m_subtree_prob
		   / (node->m_c->m_subtree_prob + default_prob));
	      bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
				     node->m_left->m_c->m_case_bb, p, loc);
	    }
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */
      if (node->has_child () || node->m_c->get_type () != SIMPLE_CASE)
	{
	  /* Branch to a label where we will handle it later.  */
	  basic_block test_bb = split_edge (single_succ_edge (bb));
	  redirect_edge_succ (single_pred_edge (test_bb),
			      single_succ_edge (bb)->dest);

	  profile_probability right_prob = profile_probability::never ();
	  if (node->m_right)
	    right_prob = node->m_right->m_c->m_subtree_prob;
	  p = ((right_prob + default_prob.apply_scale (1, 2))
	       / (node->m_c->m_subtree_prob + default_prob));

	  bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
					GT_EXPR, test_bb, p, loc);
	  default_prob = default_prob.apply_scale (1, 2);

	  /* Value belongs to this node or to the left-hand subtree.  */
	  p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
	  bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
					GE_EXPR, node->m_c->m_case_bb, p, loc);

	  /* Handle the left-hand subtree.  */
	  bb = emit_case_nodes (bb, index, node->m_left,
				default_prob, index_type, loc);

	  /* If the left-hand subtree fell through,
	     don't let it fall into the right-hand subtree.  */
	  if (bb && m_default_bb)
	    emit_jump (bb, m_default_bb);

	  bb = emit_case_nodes (test_bb, index, node->m_right,
				default_prob, index_type, loc);
	}
      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  tree lhs, rhs;
	  generate_range_test (bb, index, node->m_c->get_low (),
			       node->m_c->get_high (), &lhs, &rhs);
	  p = default_prob / (node->m_c->m_subtree_prob + default_prob);

	  bb = emit_cmp_and_jump_insns (bb, lhs, rhs, GT_EXPR,
					m_default_bb, p, loc);

	  emit_jump (bb, node->m_c->m_case_bb);
	  return NULL;
	}
    }

  return bb;
}
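
/* For illustration only (a sketch; labels are hypothetical): for the balanced
   tree over single-value cases 1..5 rooted at 3, as in the sketch after
   balance_case_nodes, the tests emitted for index x are roughly

       if (x == 3) goto case_3;
       if (x > 3) goto test_right;
       if (x == 1) goto case_1;
       if (x == 2) goto case_2;
       goto default;
     test_right:
       if (x == 4) goto case_4;
       if (x == 5) goto case_5;
       goto default;

   i.e. an equality test for the node itself, one ordered comparison to pick
   a subtree, and recursion into each half; the probabilities computed above
   only annotate the emitted edges.  */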

/* The main function of the pass scans statements for switches and invokes
   process_switch on them.  */

namespace {

const pass_data pass_data_convert_switch =
{
  GIMPLE_PASS, /* type */
  "switchconv", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SWITCH_CONVERSION, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_convert_switch : public gimple_opt_pass
{
public:
  pass_convert_switch (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_convert_switch, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_tree_switch_conversion != 0; }
  virtual unsigned int execute (function *);

}; // class pass_convert_switch

unsigned int
pass_convert_switch::execute (function *fun)
{
  basic_block bb;
  bool cfg_altered = false;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	{
	  if (dump_file)
	    {
	      expanded_location loc = expand_location (gimple_location (stmt));

	      fprintf (dump_file, "beginning to process the following "
		       "SWITCH statement (%s:%d) : ------- \n",
		       loc.file, loc.line);
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      putc ('\n', dump_file);
	    }

	  switch_conversion sconv;
	  sconv.expand (as_a <gswitch *> (stmt));
	  cfg_altered |= sconv.m_cfg_altered;
	  if (!sconv.m_reason)
	    {
	      if (dump_file)
		{
		  fputs ("Switch converted\n", dump_file);
		  fputs ("--------------------------------\n", dump_file);
		}

	      /* Make no effort to update the post-dominator tree.
		 It is actually not that hard for the transformations
		 we have performed, but it is not supported
		 by iterate_fix_dominators.  */
	      free_dominance_info (CDI_POST_DOMINATORS);
	    }
	  else
	    {
	      if (dump_file)
		{
		  fputs ("Bailing out - ", dump_file);
		  fputs (sconv.m_reason, dump_file);
		  fputs ("\n--------------------------------\n", dump_file);
		}
	    }
	}
    }

  return cfg_altered ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_convert_switch (gcc::context *ctxt)
{
  return new pass_convert_switch (ctxt);
}
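
/* For illustration only (a sketch of a successful "switchconv" run; the
   array name and the exact GIMPLE are illustrative): a dense switch such as

       switch (i) { case 0: return 10; case 1: return 20;
		    case 2: return 30; default: return 0; }

   is rewritten into an indexed load from a generated constant array, roughly

       static const int CSWTCH[3] = { 10, 20, 30 };
       return (unsigned) i <= 2 ? CSWTCH[i] : 0;

   leaving only the range check to branch to the original default code.  */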

/* The main function of the pass scans statements for switches and invokes
   process_switch on them.  */

namespace {

template <bool O0> class pass_lower_switch: public gimple_opt_pass
{
public:
  pass_lower_switch (gcc::context *ctxt) : gimple_opt_pass (data, ctxt) {}

  static const pass_data data;
  opt_pass *
  clone ()
  {
    return new pass_lower_switch<O0> (m_ctxt);
  }

  virtual bool
  gate (function *)
  {
    return !O0 || !optimize;
  }

  virtual unsigned int execute (function *fun);
}; // class pass_lower_switch

template <bool O0>
const pass_data pass_lower_switch<O0>::data = {
  GIMPLE_PASS, /* type */
  O0 ? "switchlower_O0" : "switchlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SWITCH_LOWERING, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa | TODO_cleanup_cfg, /* todo_flags_finish */
};

template <bool O0>
unsigned int
pass_lower_switch<O0>::execute (function *fun)
{
  basic_block bb;
  bool expanded = false;

  auto_vec<gimple *> switch_statements;
  switch_statements.create (1);

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple *stmt = last_stmt (bb);
      gswitch *swtch;
      if (stmt && (swtch = dyn_cast<gswitch *> (stmt)))
	{
	  if (!O0)
	    group_case_labels_stmt (swtch);

	  switch_statements.safe_push (swtch);
	}
    }

  for (unsigned i = 0; i < switch_statements.length (); i++)
    {
      gimple *stmt = switch_statements[i];
      if (dump_file)
	{
	  expanded_location loc = expand_location (gimple_location (stmt));

	  fprintf (dump_file, "beginning to process the following "
		   "SWITCH statement (%s:%d) : ------- \n",
		   loc.file, loc.line);
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  putc ('\n', dump_file);
	}

      gswitch *swtch = dyn_cast<gswitch *> (stmt);
      if (swtch)
	{
	  switch_decision_tree dt (swtch);
	  expanded |= dt.analyze_switch_statement ();
	}
    }

  if (expanded)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
      mark_virtual_operands_for_renaming (cfun);
    }

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_switch_O0 (gcc::context *ctxt)
{
  return new pass_lower_switch<true> (ctxt);
}

gimple_opt_pass *
make_pass_lower_switch (gcc::context *ctxt)
{
  return new pass_lower_switch<false> (ctxt);
}
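
/* To summarize the two instantiations above (for illustration): the
   pass_lower_switch<true> instance is registered as "switchlower_O0", gates
   on !optimize and keeps the case labels as written, while
   pass_lower_switch<false> ("switchlower") gates unconditionally and first
   merges adjacent labels via group_case_labels_stmt; both then expand any
   remaining GIMPLE_SWITCH through
   switch_decision_tree::analyze_switch_statement.  */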