1 /* Lower GIMPLE_SWITCH expressions to something more efficient than
3 Copyright (C) 2006-2018 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /* This file handles the lowering of GIMPLE_SWITCH to an indexed
23 load, or a series of bit-test-and-branch expressions. */
27 #include "coretypes.h"
29 #include "insn-codes.h"
34 #include "tree-pass.h"
36 #include "optabs-tree.h"
38 #include "gimple-pretty-print.h"
40 #include "fold-const.h"
42 #include "stor-layout.h"
45 #include "gimple-iterator.h"
46 #include "gimplify-me.h"
49 #include "alloc-pool.h"
51 #include "tree-into-ssa.h"
52 #include "omp-general.h"
54 /* ??? For lang_hooks.types.type_for_mode, but is there a word_mode
55 type in the GIMPLE type system that is language-independent? */
56 #include "langhooks.h"
58 #include "tree-switch-conversion.h"
60 using namespace tree_switch_conversion
;
/* Constructor: initializes the pass state -- pointer members start NULL,
   counters empty, and both flags (m_default_case_nonstandard, m_cfg_altered)
   false.  NOTE(review): this extract is corrupted; the embedded upstream line
   numbers skip values (67 -> ...) and the constructor's body braces are not
   visible here.  Restore from upstream gcc/tree-switch-conversion.c before
   compiling.  */
64 switch_conversion::switch_conversion (): m_final_bb (NULL
), m_other_count (),
65 m_constructors (NULL
), m_default_values (NULL
),
66 m_arr_ref_first (NULL
), m_arr_ref_last (NULL
),
67 m_reason (NULL
), m_default_case_nonstandard (false), m_cfg_altered (false)
/* NOTE(review): corrupted extract -- the embedded upstream numbering is
   non-contiguous (e.g. braces, the condition head before the dangling "&&" at
   upstream line 171, and the left-hand side of the assignment at upstream
   line 160 are missing).  Do not edit without comparing against upstream.
   What is visible: this walks the gswitch's cases and successor edges and
   fills in m_index_expr, m_switch_bb, m_default_bb/prob/count, m_other_count,
   m_range_min/max, m_contiguous_range, m_final_bb, m_default_case_nonstandard
   and m_uniq.  */
71 /* Collection information about SWTCH statement. */
74 switch_conversion::collect (gswitch
*swtch
)
76 unsigned int branch_num
= gimple_switch_num_labels (swtch
);
77 tree min_case
, max_case
;
79 edge e
, e_default
, e_first
;
84 /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
85 is a default label which is the first in the vector.
86 Collect the bits we can deduce from the CFG. */
87 m_index_expr
= gimple_switch_index (swtch
);
88 m_switch_bb
= gimple_bb (swtch
);
89 e_default
= gimple_switch_default_edge (cfun
, swtch
);
90 m_default_bb
= e_default
->dest
;
91 m_default_prob
= e_default
->probability
;
92 m_default_count
= e_default
->count ();
93 FOR_EACH_EDGE (e
, ei
, m_switch_bb
->succs
)
95 m_other_count
+= e
->count ();
97 /* Get upper and lower bounds of case values, and the covered range. */
98 min_case
= gimple_switch_label (swtch
, 1);
99 max_case
= gimple_switch_label (swtch
, branch_num
- 1);
101 m_range_min
= CASE_LOW (min_case
);
102 if (CASE_HIGH (max_case
) != NULL_TREE
)
103 m_range_max
= CASE_HIGH (max_case
);
105 m_range_max
= CASE_LOW (max_case
);
107 m_contiguous_range
= true;
108 tree last
= CASE_HIGH (min_case
) ? CASE_HIGH (min_case
) : m_range_min
;
109 for (i
= 2; i
< branch_num
; i
++)
111 tree elt
= gimple_switch_label (swtch
, i
);
112 if (wi::to_wide (last
) + 1 != wi::to_wide (CASE_LOW (elt
)))
114 m_contiguous_range
= false;
117 last
= CASE_HIGH (elt
) ? CASE_HIGH (elt
) : CASE_LOW (elt
);
120 if (m_contiguous_range
)
121 e_first
= gimple_switch_edge (cfun
, swtch
, 1);
125 /* See if there is one common successor block for all branch
126 targets. If it exists, record it in FINAL_BB.
127 Start with the destination of the first non-default case
128 if the range is contiguous and default case otherwise as
129 guess or its destination in case it is a forwarder block. */
130 if (! single_pred_p (e_first
->dest
))
131 m_final_bb
= e_first
->dest
;
132 else if (single_succ_p (e_first
->dest
)
133 && ! single_pred_p (single_succ (e_first
->dest
)))
134 m_final_bb
= single_succ (e_first
->dest
);
135 /* Require that all switch destinations are either that common
136 FINAL_BB or a forwarder to it, except for the default
137 case if contiguous range. */
139 FOR_EACH_EDGE (e
, ei
, m_switch_bb
->succs
)
141 if (e
->dest
== m_final_bb
)
144 if (single_pred_p (e
->dest
)
145 && single_succ_p (e
->dest
)
146 && single_succ (e
->dest
) == m_final_bb
)
149 if (e
== e_default
&& m_contiguous_range
)
151 m_default_case_nonstandard
= true;
/* The assignment below has lost its left-hand side in this extract
   (upstream line 160); presumably it computes the case-range size from
   m_range_max - m_range_min -- confirm against upstream.  */
160 = int_const_binop (MINUS_EXPR
, m_range_max
, m_range_min
);
162 /* Get a count of the number of case labels. Single-valued case labels
163 simply count as one, but a case range counts double, since it may
164 require two compares if it gets lowered as a branching tree. */
166 for (i
= 1; i
< branch_num
; i
++)
168 tree elt
= gimple_switch_label (swtch
, i
);
/* Dangling "&&" below: the head of this condition (upstream lines 169-170)
   is missing from the extract.  */
171 && ! tree_int_cst_equal (CASE_LOW (elt
), CASE_HIGH (elt
)))
175 /* Get the number of unique non-default targets out of the GIMPLE_SWITCH
176 block. Assume a CFG cleanup would have already removed degenerate
177 switch statements, this allows us to just use EDGE_COUNT. */
178 m_uniq
= EDGE_COUNT (gimple_bb (swtch
)->succs
) - 1;
/* NOTE(review): corrupted extract with dropped lines -- the return statements
   that presumably follow each m_reason assignment (and the success return)
   are not visible; verify against upstream.  Visible logic: fail if
   m_range_size does not fit an unsigned HWI, or if the covered range exceeds
   m_count * SWITCH_CONVERSION_BRANCH_RATIO (range/branch density gate).  */
181 /* Checks whether the range given by individual case statements of the switch
182 switch statement isn't too big and whether the number of branches actually
183 satisfies the size of the new array. */
186 switch_conversion::check_range ()
188 gcc_assert (m_range_size
);
189 if (!tree_fits_uhwi_p (m_range_size
))
191 m_reason
= "index range way too large or otherwise unusable";
195 if (tree_to_uhwi (m_range_size
)
196 > ((unsigned) m_count
* SWITCH_CONVERSION_BRANCH_RATIO
))
198 m_reason
= "the maximum range-branch ratio exceeded";
/* NOTE(review): corrupted extract -- braces, the edge_iterator declaration
   and the return statements are missing (embedded numbering skips lines);
   compare with upstream before editing.  Visible logic: every switch
   successor other than m_final_bb must be an empty block; a non-empty
   default destination is tolerated for contiguous ranges by setting
   m_default_case_nonstandard instead of failing.  */
205 /* Checks whether all but the final BB basic blocks are empty. */
208 switch_conversion::check_all_empty_except_final ()
210 edge e
, e_default
= find_edge (m_switch_bb
, m_default_bb
);
213 FOR_EACH_EDGE (e
, ei
, m_switch_bb
->succs
)
215 if (e
->dest
== m_final_bb
)
218 if (!empty_block_p (e
->dest
))
220 if (m_contiguous_range
&& e
== e_default
)
222 m_default_case_nonstandard
= true;
226 m_reason
= "bad case - a non-final BB not empty";
/* NOTE(review): corrupted extract -- declarations (gsi, val, reloc, i),
   braces, `continue`/`return` statements and the left-hand sides of the
   assignments at upstream lines 279/282 (presumably `reason = ...`) are
   missing; verify against upstream before editing.  Visible logic: for every
   real (non-virtual) PHI in m_final_bb, arguments flowing from the switch
   block (or its forwarders) must be GIMPLE invariants that are valid static
   initializers (PIC-compatible when flag_pic); a violating value is only
   tolerated when the range is contiguous and it is reachable solely via the
   default case, which forces m_default_case_nonstandard.  */
234 /* This function checks whether all required values in phi nodes in final_bb
235 are constants. Required values are those that correspond to a basic block
236 which is a part of the examined switch statement. It returns true if the
237 phi nodes are OK, otherwise false. */
240 switch_conversion::check_final_bb ()
245 for (gsi
= gsi_start_phis (m_final_bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
247 gphi
*phi
= gsi
.phi ();
250 if (virtual_operand_p (gimple_phi_result (phi
)))
255 for (i
= 0; i
< gimple_phi_num_args (phi
); i
++)
257 basic_block bb
= gimple_phi_arg_edge (phi
, i
)->src
;
259 if (bb
== m_switch_bb
260 || (single_pred_p (bb
)
261 && single_pred (bb
) == m_switch_bb
262 && (!m_default_case_nonstandard
263 || empty_block_p (bb
))))
266 const char *reason
= NULL
;
268 val
= gimple_phi_arg_def (phi
, i
);
269 if (!is_gimple_ip_invariant (val
))
270 reason
= "non-invariant value from a case";
273 reloc
= initializer_constant_valid_p (val
, TREE_TYPE (val
));
274 if ((flag_pic
&& reloc
!= null_pointer_node
)
275 || (!flag_pic
&& reloc
== NULL_TREE
))
/* The two assignments below lost their left-hand sides in this extract;
   presumably both assign to `reason` -- confirm against upstream.  */
279 = "value from a case would need runtime relocations";
282 = "value from a case is not a valid initializer";
287 /* For contiguous range, we can allow non-constant
288 or one that needs relocation, as long as it is
289 only reachable from the default case. */
290 if (bb
== m_switch_bb
)
292 if (!m_contiguous_range
|| bb
!= m_default_bb
)
298 unsigned int branch_num
= gimple_switch_num_labels (m_switch
);
299 for (unsigned int i
= 1; i
< branch_num
; i
++)
301 if (gimple_switch_label_bb (cfun
, m_switch
, i
) == bb
)
307 m_default_case_nonstandard
= true;
/* NOTE(review): corrupted extract -- the loop variable declaration and
   braces are missing.  Visible logic: one XCNEWVEC allocation of
   3 * m_phi_count trees is sliced into three parallel arrays
   (m_default_values, m_target_inbound_names, m_target_outbound_names), and
   each constructor vector is pre-sized to the full case range
   (m_range_size + 1 elements).  */
316 /* The following function allocates default_values, target_{in,out}_names and
317 constructors arrays. The last one is also populated with pointers to
318 vectors that will become constructors of new arrays. */
321 switch_conversion::create_temp_arrays ()
325 m_default_values
= XCNEWVEC (tree
, m_phi_count
* 3);
326 /* ??? Macros do not support multi argument templates in their
327 argument list. We create a typedef to work around that problem. */
328 typedef vec
<constructor_elt
, va_gc
> *vec_constructor_elt_gc
;
329 m_constructors
= XCNEWVEC (vec_constructor_elt_gc
, m_phi_count
);
330 m_target_inbound_names
= m_default_values
+ m_phi_count
;
331 m_target_outbound_names
= m_target_inbound_names
+ m_phi_count
;
332 for (i
= 0; i
< m_phi_count
; i
++)
333 vec_alloc (m_constructors
[i
], tree_to_uhwi (m_range_size
) + 1);
/* NOTE(review): corrupted extract -- declarations of `e`, `gsi`, `i`, an
   `else` keyword before upstream line 355 and a `continue` after the
   virtual-operand check are missing; verify against upstream.  Visible
   logic: choose the edge from the switch block (direct or via the single
   forwarder) and record each real PHI's incoming value along it into
   m_default_values[] in PHI order.  */
336 /* Populate the array of default values in the order of phi nodes.
337 DEFAULT_CASE is the CASE_LABEL_EXPR for the default switch branch
338 if the range is non-contiguous or the default case has standard
339 structure, otherwise it is the first non-default case instead. */
342 switch_conversion::gather_default_values (tree default_case
)
345 basic_block bb
= label_to_block (cfun
, CASE_LABEL (default_case
));
349 gcc_assert (CASE_LOW (default_case
) == NULL_TREE
350 || m_default_case_nonstandard
);
352 if (bb
== m_final_bb
)
353 e
= find_edge (m_switch_bb
, bb
);
355 e
= single_succ_edge (bb
);
357 for (gsi
= gsi_start_phis (m_final_bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
359 gphi
*phi
= gsi
.phi ();
360 if (virtual_operand_p (gimple_phi_result (phi
)))
362 tree val
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
364 m_default_values
[i
++] = val
;
/* NOTE(review): corrupted extract -- the `do` that matches the trailing
   `} while (...)` at upstream line 435, the declarations of `e`, `elt`,
   `high`, `j`, `k`, and several braces/else keywords are missing; verify
   against upstream before editing.  Visible logic: walk cases in order;
   for every gap before a case's CASE_LOW, push copies of the default
   values (gap-filling loop over m_phi_count), then for the case's own
   [low, high] span push the PHI values coming from that case's edge,
   advancing `pos` by one each iteration.  */
368 /* The following function populates the vectors in the constructors array with
369 future contents of the static arrays. The vectors are populated in the
370 order of phi nodes. */
373 switch_conversion::build_constructors ()
375 unsigned i
, branch_num
= gimple_switch_num_labels (m_switch
);
376 tree pos
= m_range_min
;
377 tree pos_one
= build_int_cst (TREE_TYPE (pos
), 1);
379 for (i
= 1; i
< branch_num
; i
++)
381 tree cs
= gimple_switch_label (m_switch
, i
);
382 basic_block bb
= label_to_block (cfun
, CASE_LABEL (cs
));
388 if (bb
== m_final_bb
)
389 e
= find_edge (m_switch_bb
, bb
);
391 e
= single_succ_edge (bb
);
394 while (tree_int_cst_lt (pos
, CASE_LOW (cs
)))
397 for (k
= 0; k
< m_phi_count
; k
++)
401 elt
.index
= int_const_binop (MINUS_EXPR
, pos
, m_range_min
);
/* Assignment below lost its left-hand side (presumably `elt.value`) in
   this extract.  */
403 = unshare_expr_without_location (m_default_values
[k
]);
404 m_constructors
[k
]->quick_push (elt
);
407 pos
= int_const_binop (PLUS_EXPR
, pos
, pos_one
);
409 gcc_assert (tree_int_cst_equal (pos
, CASE_LOW (cs
)));
413 high
= CASE_HIGH (cs
);
415 high
= CASE_LOW (cs
);
416 for (gsi
= gsi_start_phis (m_final_bb
);
417 !gsi_end_p (gsi
); gsi_next (&gsi
))
419 gphi
*phi
= gsi
.phi ();
420 if (virtual_operand_p (gimple_phi_result (phi
)))
422 tree val
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
423 tree low
= CASE_LOW (cs
);
430 elt
.index
= int_const_binop (MINUS_EXPR
, pos
, m_range_min
);
431 elt
.value
= unshare_expr_without_location (val
);
432 m_constructors
[j
]->quick_push (elt
);
434 pos
= int_const_binop (PLUS_EXPR
, pos
, pos_one
);
435 } while (!tree_int_cst_lt (high
, pos
)
436 && tree_int_cst_lt (low
, pos
));
/* NOTE(review): corrupted extract -- the comment is truncated mid-sentence,
   the declaration of `i`, the first-iteration branch (the `if` matching the
   `else if`) and the return statements are missing; verify against upstream.
   Visible contract (from the surviving comment): returns the common value
   if all constructor elements compare equal via operand_equal_p with
   OEP_ONLY_CONST, otherwise NULL_TREE.  */
442 /* If all values in the constructor vector are the same, return the value.
443 Otherwise return NULL_TREE. Not supposed to be called for empty
447 switch_conversion::contains_same_values_p (vec
<constructor_elt
, va_gc
> *vec
)
450 tree prev
= NULL_TREE
;
451 constructor_elt
*elt
;
453 FOR_EACH_VEC_SAFE_ELT (vec
, i
, elt
)
457 else if (!operand_equal_p (elt
->value
, prev
, OEP_ONLY_CONST
))
/* NOTE(review): corrupted extract -- the `sign` declaration, several return
   statements, the `smaller_type` declaration and loop braces are missing;
   verify against upstream before editing.  Visible logic: starting from the
   narrowest scalar-int mode, check whether every INTEGER_CST element fits
   (zext for unsigned candidates, sext for signed) and widen the candidate
   mode until everything fits; bail out to TYPE_MAIN_VARIANT when the type
   is non-integral, the array is too short to bother (size-vs-speed
   threshold 2 vs 32), or no strictly smaller mode works.  */
463 /* Return type which should be used for array elements, either TYPE's
464 main variant or, for integral types, some smaller integral type
465 that can still hold all the constants. */
468 switch_conversion::array_value_type (tree type
, int num
)
470 unsigned int i
, len
= vec_safe_length (m_constructors
[num
]);
471 constructor_elt
*elt
;
475 /* Types with alignments greater than their size can reach here, e.g. out of
476 SRA. We couldn't use these as an array component type so get back to the
477 main variant first, which, for our purposes, is fine for other types as
480 type
= TYPE_MAIN_VARIANT (type
);
482 if (!INTEGRAL_TYPE_P (type
))
485 scalar_int_mode type_mode
= SCALAR_INT_TYPE_MODE (type
);
486 scalar_int_mode mode
= get_narrowest_mode (type_mode
);
487 if (GET_MODE_SIZE (type_mode
) <= GET_MODE_SIZE (mode
))
490 if (len
< (optimize_bb_for_size_p (gimple_bb (m_switch
)) ? 2 : 32))
493 FOR_EACH_VEC_SAFE_ELT (m_constructors
[num
], i
, elt
)
497 if (TREE_CODE (elt
->value
) != INTEGER_CST
)
500 cst
= wi::to_wide (elt
->value
);
503 unsigned int prec
= GET_MODE_BITSIZE (mode
);
504 if (prec
> HOST_BITS_PER_WIDE_INT
)
507 if (sign
>= 0 && cst
== wi::zext (cst
, prec
))
509 if (sign
== 0 && cst
== wi::sext (cst
, prec
))
514 if (sign
<= 0 && cst
== wi::sext (cst
, prec
))
523 if (!GET_MODE_WIDER_MODE (mode
).exists (&mode
)
524 || GET_MODE_SIZE (mode
) >= GET_MODE_SIZE (type_mode
))
530 sign
= TYPE_UNSIGNED (type
) ? 1 : -1;
531 smaller_type
= lang_hooks
.types
.type_for_mode (mode
, sign
>= 0);
532 if (GET_MODE_SIZE (type_mode
)
533 <= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (smaller_type
)))
/* NOTE(review): corrupted extract -- declarations of `name`, `cst`, `load`,
   an `else` around the constant-vs-array branch, and several brace/argument
   lines (e.g. the tail arguments of build4 and tree_cons) are missing;
   verify against upstream.  Visible logic: if all constructor values are
   identical, emit a plain constant assignment; otherwise build a static,
   read-only CSWTCH array (narrowed via array_value_type), finalize it in
   the varpool, and emit an ARRAY_REF load indexed by TIDX before the switch.
   NOTE(review): DECL_IGNORED_P (decl) is set twice (embedded lines 592 and
   595) -- redundant but harmless; confirm against upstream.  */
539 /* Create an appropriate array type and declaration and assemble a static
540 array variable. Also create a load statement that initializes
541 the variable in question with a value from the static array. SWTCH is
542 the switch statement being converted, NUM is the index to
543 arrays of constructors, default values and target SSA names
544 for this particular array. ARR_INDEX_TYPE is the type of the index
545 of the new array, PHI is the phi node of the final BB that corresponds
546 to the value that will be loaded from the created array. TIDX
547 is an ssa name of a temporary variable holding the index for loads from the
551 switch_conversion::build_one_array (int num
, tree arr_index_type
,
552 gphi
*phi
, tree tidx
)
556 gimple_stmt_iterator gsi
= gsi_for_stmt (m_switch
);
557 location_t loc
= gimple_location (m_switch
);
559 gcc_assert (m_default_values
[num
]);
561 name
= copy_ssa_name (PHI_RESULT (phi
));
562 m_target_inbound_names
[num
] = name
;
564 cst
= contains_same_values_p (m_constructors
[num
]);
566 load
= gimple_build_assign (name
, cst
);
569 tree array_type
, ctor
, decl
, value_type
, fetch
, default_type
;
571 default_type
= TREE_TYPE (m_default_values
[num
]);
572 value_type
= array_value_type (default_type
, num
);
573 array_type
= build_array_type (value_type
, arr_index_type
);
574 if (default_type
!= value_type
)
577 constructor_elt
*elt
;
579 FOR_EACH_VEC_SAFE_ELT (m_constructors
[num
], i
, elt
)
580 elt
->value
= fold_convert (value_type
, elt
->value
);
582 ctor
= build_constructor (array_type
, m_constructors
[num
]);
583 TREE_CONSTANT (ctor
) = true;
584 TREE_STATIC (ctor
) = true;
586 decl
= build_decl (loc
, VAR_DECL
, NULL_TREE
, array_type
);
587 TREE_STATIC (decl
) = 1;
588 DECL_INITIAL (decl
) = ctor
;
590 DECL_NAME (decl
) = create_tmp_var_name ("CSWTCH");
591 DECL_ARTIFICIAL (decl
) = 1;
592 DECL_IGNORED_P (decl
) = 1;
593 TREE_CONSTANT (decl
) = 1;
594 TREE_READONLY (decl
) = 1;
595 DECL_IGNORED_P (decl
) = 1;
596 if (offloading_function_p (cfun
->decl
))
597 DECL_ATTRIBUTES (decl
)
598 = tree_cons (get_identifier ("omp declare target"), NULL_TREE
,
600 varpool_node::finalize_decl (decl
);
602 fetch
= build4 (ARRAY_REF
, value_type
, decl
, tidx
, NULL_TREE
,
604 if (default_type
!= value_type
)
606 fetch
= fold_convert (default_type
, fetch
);
607 fetch
= force_gimple_operand_gsi (&gsi
, fetch
, true, NULL_TREE
,
608 true, GSI_SAME_STMT
);
610 load
= gimple_build_assign (name
, fetch
);
613 gsi_insert_before (&gsi
, load
, GSI_SAME_STMT
);
615 m_arr_ref_last
= load
;
/* NOTE(review): corrupted extract -- declarations of `arr_index_type`,
   `stmt`, `gpi`, `phi`, `e`, `ei`, `i`, an `else` before upstream line 640
   and several braces are missing; verify against upstream.  Visible logic:
   compute tidx = (utype) index - (utype) range_min in an unsigned type (to
   avoid subrange arithmetic), record the first/last generated statements in
   m_arr_ref_first/m_arr_ref_last, build one array per real PHI, and capture
   the virtual PHI operand (m_target_vop) from the edge into m_final_bb.  */
618 /* Builds and initializes static arrays initialized with values gathered from
619 the switch statement. Also creates statements that load values from
623 switch_conversion::build_arrays ()
626 tree tidx
, sub
, utype
;
628 gimple_stmt_iterator gsi
;
631 location_t loc
= gimple_location (m_switch
);
633 gsi
= gsi_for_stmt (m_switch
);
635 /* Make sure we do not generate arithmetics in a subrange. */
636 utype
= TREE_TYPE (m_index_expr
);
637 if (TREE_TYPE (utype
))
638 utype
= lang_hooks
.types
.type_for_mode (TYPE_MODE (TREE_TYPE (utype
)), 1);
640 utype
= lang_hooks
.types
.type_for_mode (TYPE_MODE (utype
), 1);
642 arr_index_type
= build_index_type (m_range_size
);
643 tidx
= make_ssa_name (utype
);
644 sub
= fold_build2_loc (loc
, MINUS_EXPR
, utype
,
645 fold_convert_loc (loc
, utype
, m_index_expr
),
646 fold_convert_loc (loc
, utype
, m_range_min
));
647 sub
= force_gimple_operand_gsi (&gsi
, sub
,
648 false, NULL
, true, GSI_SAME_STMT
);
649 stmt
= gimple_build_assign (tidx
, sub
);
651 gsi_insert_before (&gsi
, stmt
, GSI_SAME_STMT
);
653 m_arr_ref_first
= stmt
;
655 for (gpi
= gsi_start_phis (m_final_bb
), i
= 0;
656 !gsi_end_p (gpi
); gsi_next (&gpi
))
658 gphi
*phi
= gpi
.phi ();
659 if (!virtual_operand_p (gimple_phi_result (phi
)))
660 build_one_array (i
++, arr_index_type
, phi
, tidx
);
665 FOR_EACH_EDGE (e
, ei
, m_switch_bb
->succs
)
667 if (e
->dest
== m_final_bb
)
669 if (!m_default_case_nonstandard
670 || e
->dest
!= m_default_bb
)
672 e
= single_succ_edge (e
->dest
);
676 gcc_assert (e
&& e
->dest
== m_final_bb
);
677 m_target_vop
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
/* NOTE(review): corrupted extract -- the declaration of `i`, loop braces
   and the final `return assign;` are not visible; verify against upstream.
   Visible logic: for each PHI slot, copy the inbound SSA name into a fresh
   outbound name and emit `name = default_value` before *GSI, returning
   (presumably) the last assignment emitted.  */
682 /* Generates and appropriately inserts loads of default values at the position
683 given by GSI. Returns the last inserted statement. */
686 switch_conversion::gen_def_assigns (gimple_stmt_iterator
*gsi
)
689 gassign
*assign
= NULL
;
691 for (i
= 0; i
< m_phi_count
; i
++)
693 tree name
= copy_ssa_name (m_target_inbound_names
[i
]);
694 m_target_outbound_names
[i
] = name
;
695 assign
= gimple_build_assign (name
, m_default_values
[i
]);
696 gsi_insert_before (gsi
, assign
, GSI_SAME_STMT
);
697 update_stmt (assign
);
/* NOTE(review): corrupted extract -- the declarations of `ei`, `e`, `bb`,
   the loop body that derives `bb` from `e` and advances the iterator, and
   several braces are missing; verify against upstream.  Visible logic:
   delete every successor block of BBD except FINAL and DEFAULT_BB, then
   delete BBD itself.  */
702 /* Deletes the unused bbs and edges that now contain the switch statement and
703 its empty branch bbs. BBD is the now dead BB containing
704 the original switch statement, FINAL is the last BB of the converted
705 switch statement (in terms of succession). */
708 switch_conversion::prune_bbs (basic_block bbd
, basic_block final
,
709 basic_block default_bb
)
714 for (ei
= ei_start (bbd
->succs
); (e
= ei_safe_edge (ei
)); )
719 if (bb
!= final
&& bb
!= default_bb
)
720 delete_basic_block (bb
);
722 delete_basic_block (bbd
);
/* NOTE(review): corrupted extract -- the declarations of `gsi` and `i`, the
   `else` between the virtual and real PHI branches, and braces are missing;
   verify against upstream.  Visible logic: for each PHI in BBF, add the
   inbound (array-load) name on E1F and, unless the default case is
   nonstandard, the outbound (default-value) name on E2F; virtual PHIs get
   m_target_vop on both.  */
725 /* Add values to phi nodes in final_bb for the two new edges. E1F is the edge
726 from the basic block loading values from an array and E2F from the basic
727 block loading default values. BBF is the last switch basic block (see the
728 bbf description in the comment below). */
731 switch_conversion::fix_phi_nodes (edge e1f
, edge e2f
, basic_block bbf
)
736 for (gsi
= gsi_start_phis (bbf
), i
= 0;
737 !gsi_end_p (gsi
); gsi_next (&gsi
))
739 gphi
*phi
= gsi
.phi ();
740 tree inbound
, outbound
;
741 if (virtual_operand_p (gimple_phi_result (phi
)))
742 inbound
= outbound
= m_target_vop
;
745 inbound
= m_target_inbound_names
[i
];
746 outbound
= m_target_outbound_names
[i
++];
748 add_phi_arg (phi
, inbound
, e1f
, UNKNOWN_LOCATION
);
749 if (!m_default_case_nonstandard
)
750 add_phi_arg (phi
, outbound
, e2f
, UNKNOWN_LOCATION
);
/* NOTE(review): heavily corrupted extract -- the assignments that establish
   bb1, bb2, bbf and bbd (upstream lines ~811-817, 836-843, 853-855, 860-862),
   the declarations of `tidx`, `utype`, `bound`, `cond_stmt`, and many braces
   are missing.  The visible skeleton: emit `if (tidx <= range_size)` before
   the first array load, split the switch block into an in-range path (array
   loads) and an out-of-range path (default-value assignments via
   gen_def_assigns, skipped when the default case is nonstandard), wire the
   new TRUE/FALSE edges with probabilities derived from m_default_prob,
   prune the now-dead switch block, patch the final-bb PHIs, and repair the
   dominator tree.  Restore from upstream before editing -- the control-flow
   here depends on the missing assignments.  */
754 /* Creates a check whether the switch expression value actually falls into the
755 range given by all the cases. If it does not, the temporaries are loaded
756 with default values instead. */
759 switch_conversion::gen_inbound_check ()
761 tree label_decl1
= create_artificial_label (UNKNOWN_LOCATION
);
762 tree label_decl2
= create_artificial_label (UNKNOWN_LOCATION
);
763 tree label_decl3
= create_artificial_label (UNKNOWN_LOCATION
);
764 glabel
*label1
, *label2
, *label3
;
770 gassign
*last_assign
= NULL
;
771 gimple_stmt_iterator gsi
;
772 basic_block bb0
, bb1
, bb2
, bbf
, bbd
;
773 edge e01
= NULL
, e02
, e21
, e1d
, e1f
, e2f
;
774 location_t loc
= gimple_location (m_switch
);
776 gcc_assert (m_default_values
);
778 bb0
= gimple_bb (m_switch
);
780 tidx
= gimple_assign_lhs (m_arr_ref_first
);
781 utype
= TREE_TYPE (tidx
);
783 /* (end of) block 0 */
784 gsi
= gsi_for_stmt (m_arr_ref_first
);
787 bound
= fold_convert_loc (loc
, utype
, m_range_size
);
788 cond_stmt
= gimple_build_cond (LE_EXPR
, tidx
, bound
, NULL_TREE
, NULL_TREE
);
789 gsi_insert_before (&gsi
, cond_stmt
, GSI_SAME_STMT
);
790 update_stmt (cond_stmt
);
793 if (!m_default_case_nonstandard
)
795 label2
= gimple_build_label (label_decl2
);
796 gsi_insert_before (&gsi
, label2
, GSI_SAME_STMT
);
797 last_assign
= gen_def_assigns (&gsi
);
801 label1
= gimple_build_label (label_decl1
);
802 gsi_insert_before (&gsi
, label1
, GSI_SAME_STMT
);
805 gsi
= gsi_start_bb (m_final_bb
);
806 label3
= gimple_build_label (label_decl3
);
807 gsi_insert_before (&gsi
, label3
, GSI_SAME_STMT
);
810 e02
= split_block (bb0
, cond_stmt
);
813 if (m_default_case_nonstandard
)
818 e01
->flags
= EDGE_TRUE_VALUE
;
819 e02
= make_edge (bb0
, bb2
, EDGE_FALSE_VALUE
);
820 edge e_default
= find_edge (bb1
, bb2
);
821 for (gphi_iterator gsi
= gsi_start_phis (bb2
);
822 !gsi_end_p (gsi
); gsi_next (&gsi
))
824 gphi
*phi
= gsi
.phi ();
825 tree arg
= PHI_ARG_DEF_FROM_EDGE (phi
, e_default
);
826 add_phi_arg (phi
, arg
, e02
,
827 gimple_phi_arg_location_from_edge (phi
, e_default
));
829 /* Partially fix the dominator tree, if it is available. */
830 if (dom_info_available_p (CDI_DOMINATORS
))
831 redirect_immediate_dominators (CDI_DOMINATORS
, bb1
, bb0
);
835 e21
= split_block (bb2
, last_assign
);
840 e1d
= split_block (bb1
, m_arr_ref_last
);
844 /* Flags and profiles of the edge for in-range values. */
845 if (!m_default_case_nonstandard
)
846 e01
= make_edge (bb0
, bb1
, EDGE_TRUE_VALUE
);
847 e01
->probability
= m_default_prob
.invert ();
849 /* Flags and profiles of the edge taking care of out-of-range values. */
850 e02
->flags
&= ~EDGE_FALLTHRU
;
851 e02
->flags
|= EDGE_FALSE_VALUE
;
852 e02
->probability
= m_default_prob
;
856 e1f
= make_edge (bb1
, bbf
, EDGE_FALLTHRU
);
857 e1f
->probability
= profile_probability::always ();
859 if (m_default_case_nonstandard
)
863 e2f
= make_edge (bb2
, bbf
, EDGE_FALLTHRU
);
864 e2f
->probability
= profile_probability::always ();
867 /* frequencies of the new BBs */
868 bb1
->count
= e01
->count ();
869 bb2
->count
= e02
->count ();
870 if (!m_default_case_nonstandard
)
871 bbf
->count
= e1f
->count () + e2f
->count ();
873 /* Tidy blocks that have become unreachable. */
874 prune_bbs (bbd
, m_final_bb
,
875 m_default_case_nonstandard
? m_default_bb
: NULL
);
877 /* Fixup the PHI nodes in bbF. */
878 fix_phi_nodes (e1f
, e2f
, bbf
);
880 /* Fix the dominator tree, if it is available. */
881 if (dom_info_available_p (CDI_DOMINATORS
))
883 vec
<basic_block
> bbs_to_fix_dom
;
885 set_immediate_dominator (CDI_DOMINATORS
, bb1
, bb0
);
886 if (!m_default_case_nonstandard
)
887 set_immediate_dominator (CDI_DOMINATORS
, bb2
, bb0
);
888 if (! get_immediate_dominator (CDI_DOMINATORS
, bbf
))
889 /* If bbD was the immediate dominator ... */
890 set_immediate_dominator (CDI_DOMINATORS
, bbf
, bb0
);
892 bbs_to_fix_dom
.create (3 + (bb2
!= bbf
));
893 bbs_to_fix_dom
.quick_push (bb0
);
894 bbs_to_fix_dom
.quick_push (bb1
);
896 bbs_to_fix_dom
.quick_push (bb2
);
897 bbs_to_fix_dom
.quick_push (bbf
);
899 iterate_fix_dominators (CDI_DOMINATORS
, bbs_to_fix_dom
, true);
900 bbs_to_fix_dom
.release ();
/* NOTE(review): corrupted extract -- the `collect (swtch)` call, the
   condition heads guarding several m_reason assignments (e.g. before
   upstream lines 938-939 and 943-946), the early `return`s after each
   failed check, and closing braces are missing; verify against upstream.
   Visible driver sequence: group case labels, sanity-check the index
   expression, prefer bit-test / decision-tree expansion when their
   heuristics say so (recording m_reason and bailing), then run the
   check_* phases and, on success, build the arrays and the inbound range
   check, marking m_cfg_altered.  */
904 /* The following function is invoked on every switch statement (the current
905 one is given in SWTCH) and runs the individual phases of switch
906 conversion on it one after another until one fails or the conversion
907 is completed. On success, NULL is in m_reason, otherwise points
908 to a string with the reason why the conversion failed. */
911 switch_conversion::expand (gswitch
*swtch
)
913 /* Group case labels so that we get the right results from the heuristics
914 that decide on the code generation approach for this switch. */
915 m_cfg_altered
|= group_case_labels_stmt (swtch
);
916 gcc_assert (gimple_switch_num_labels (swtch
) >= 2);
920 /* No error markers should reach here (they should be filtered out
921 during gimplification). */
922 gcc_checking_assert (TREE_TYPE (m_index_expr
) != error_mark_node
);
924 /* A switch on a constant should have been optimized in tree-cfg-cleanup. */
925 gcc_checking_assert (!TREE_CONSTANT (m_index_expr
));
927 /* Prefer bit test if possible. */
928 if (tree_fits_uhwi_p (m_range_size
)
929 && bit_test_cluster::can_be_handled (tree_to_uhwi (m_range_size
), m_uniq
)
930 && bit_test_cluster::is_beneficial (m_count
, m_uniq
))
932 m_reason
= "expanding as bit test is preferable";
938 /* This will be expanded as a decision tree . */
939 m_reason
= "expanding as jumps is preferable";
943 /* If there is no common successor, we cannot do the transformation. */
946 m_reason
= "no common successor to all case label target blocks found";
950 /* Check the case label values are within reasonable range: */
953 gcc_assert (m_reason
);
957 /* For all the cases, see whether they are empty, the assignments they
958 represent constant and so on... */
959 if (!check_all_empty_except_final ())
961 gcc_assert (m_reason
);
964 if (!check_final_bb ())
966 gcc_assert (m_reason
);
970 /* At this point all checks have passed and we can proceed with the
973 create_temp_arrays ();
974 gather_default_values (m_default_case_nonstandard
975 ? gimple_switch_label (swtch
, 1)
976 : gimple_switch_default_label (swtch
));
977 build_constructors ();
979 build_arrays (); /* Build the static arrays and assignments. */
980 gen_inbound_check (); /* Build the bounds check. */
982 m_cfg_altered
= true;
/* Destructor: frees the two XCNEWVEC allocations made by
   create_temp_arrays (m_constructors and the m_default_values block that
   also backs the inbound/outbound name arrays).  NOTE(review): braces are
   missing from this extract.  */
987 switch_conversion::~switch_conversion ()
989 XDELETEVEC (m_constructors
);
990 XDELETEVEC (m_default_values
);
/* Constructor: collects CLUSTERS[start..end] (inclusive) into m_cases as
   simple_cluster pointers, accumulating their probabilities into m_prob and
   mirroring the total into m_subtree_prob.  NOTE(review): braces are missing
   from this corrupted extract.  */
995 group_cluster::group_cluster (vec
<cluster
*> &clusters
,
996 unsigned start
, unsigned end
)
998 gcc_checking_assert (end
- start
+ 1 >= 1);
999 m_prob
= profile_probability::never ();
1000 m_cases
.create (end
- start
+ 1);
1001 for (unsigned i
= start
; i
<= end
; i
++)
1003 m_cases
.quick_push (static_cast<simple_cluster
*> (clusters
[i
]));
1004 m_prob
+= clusters
[i
]->m_prob
;
1006 m_subtree_prob
= m_prob
;
/* Destructor: iterates over m_cases.  NOTE(review): the loop body is
   missing from this extract (presumably it deletes each owned
   simple_cluster, and a release of m_cases may follow) -- confirm against
   upstream before relying on ownership semantics.  */
1011 group_cluster::~group_cluster ()
1013 for (unsigned i
= 0; i
< m_cases
.length (); i
++)
/* NOTE(review): corrupted extract -- the guard that presumably tests
   `details` before the verbose fprintf, and surrounding braces, are
   missing.  Visible logic: totals the value count and comparison count
   (ranges cost 2, single values 1) across m_cases, prints "JT"/"BT"
   depending on the cluster type, then the values/comparisons/range/density
   summary and the low/high bounds via PRINT_CASE.  */
1019 /* Dump content of a cluster. */
1022 group_cluster::dump (FILE *f
, bool details
)
1024 unsigned total_values
= 0;
1025 for (unsigned i
= 0; i
< m_cases
.length (); i
++)
1026 total_values
+= m_cases
[i
]->get_range (m_cases
[i
]->get_low (),
1027 m_cases
[i
]->get_high ());
1029 unsigned comparison_count
= 0;
1030 for (unsigned i
= 0; i
< m_cases
.length (); i
++)
1032 simple_cluster
*sc
= static_cast<simple_cluster
*> (m_cases
[i
]);
1033 comparison_count
+= sc
->m_range_p
? 2 : 1;
1036 unsigned HOST_WIDE_INT range
= get_range (get_low (), get_high ());
1037 fprintf (f
, "%s", get_type () == JUMP_TABLE
? "JT" : "BT");
1040 fprintf (f
, "(values:%d comparisons:%d range:" HOST_WIDE_INT_PRINT_DEC
1041 " density: %.2f%%)", total_values
, comparison_count
, range
,
1042 100.0f
* comparison_count
/ range
);
1045 PRINT_CASE (f
, get_low ());
1047 PRINT_CASE (f
, get_high ());
/* NOTE(review): corrupted extract -- the second argument of both
   apply_scale calls (presumably the range denominators), braces, and some
   intervening statements are missing; verify against upstream.  Visible
   logic: rebuild a fresh gswitch over this cluster's cases, wire edges
   from m_case_bb to every target plus the default, accumulate per-edge
   covered-value counts in edge->aux, convert those counts to edge
   probabilities (default edge gets the leftover mass), and finally clear
   the aux fields via switch_decision_tree::reset_out_edges_aux.  */
1051 /* Emit GIMPLE code to handle the cluster. */
1054 jump_table_cluster::emit (tree index_expr
, tree
,
1055 tree default_label_expr
, basic_block default_bb
)
1057 unsigned HOST_WIDE_INT range
= get_range (get_low (), get_high ());
1058 unsigned HOST_WIDE_INT nondefault_range
= 0;
1060 /* For jump table we just emit a new gswitch statement that will
1061 be latter lowered to jump table. */
1062 auto_vec
<tree
> labels
;
1063 labels
.create (m_cases
.length ());
1065 make_edge (m_case_bb
, default_bb
, 0);
1066 for (unsigned i
= 0; i
< m_cases
.length (); i
++)
1068 labels
.quick_push (unshare_expr (m_cases
[i
]->m_case_label_expr
));
1069 make_edge (m_case_bb
, m_cases
[i
]->m_case_bb
, 0);
1072 gswitch
*s
= gimple_build_switch (index_expr
,
1073 unshare_expr (default_label_expr
), labels
);
1074 gimple_stmt_iterator gsi
= gsi_start_bb (m_case_bb
);
1075 gsi_insert_after (&gsi
, s
, GSI_NEW_STMT
);
1077 /* Set up even probabilities for all cases. */
1078 for (unsigned i
= 0; i
< m_cases
.length (); i
++)
1080 simple_cluster
*sc
= static_cast<simple_cluster
*> (m_cases
[i
]);
1081 edge case_edge
= find_edge (m_case_bb
, sc
->m_case_bb
);
1082 unsigned HOST_WIDE_INT case_range
1083 = sc
->get_range (sc
->get_low (), sc
->get_high ());
1084 nondefault_range
+= case_range
;
1086 /* case_edge->aux is number of values in a jump-table that are covered
1087 by the case_edge. */
1088 case_edge
->aux
= (void *) ((intptr_t) (case_edge
->aux
) + case_range
);
1091 edge default_edge
= gimple_switch_default_edge (cfun
, s
);
1092 default_edge
->probability
= profile_probability::never ();
1094 for (unsigned i
= 0; i
< m_cases
.length (); i
++)
1096 simple_cluster
*sc
= static_cast<simple_cluster
*> (m_cases
[i
]);
1097 edge case_edge
= find_edge (m_case_bb
, sc
->m_case_bb
);
1098 case_edge
->probability
1099 = profile_probability::always ().apply_scale ((intptr_t)case_edge
->aux
,
1103 /* Number of non-default values is probability of default edge. */
1104 default_edge
->probability
1105 += profile_probability::always ().apply_scale (nondefault_range
,
1108 switch_decision_tree::reset_out_edges_aux (s
);
/* NOTE(review): corrupted extract -- the early-out condition before the
   first `return clusters.copy ()`, the header of the reconstruction loop
   (upstream ~1156-1157, which presumably iterates `end` down to 0 via
   min[end].m_start), the output reversal and final return are missing;
   verify against upstream.  Visible logic: classic O(l^2) dynamic program
   over cluster prefixes -- min[i] records the fewest resulting clusters
   for the first i items (tie-broken by fewer non-jump-table cases `s`),
   considering a jump table over [j, i-1] only when it meets
   case_values_threshold and can_be_handled; afterwards the chosen
   partition is materialized, emitting a jump_table_cluster where
   is_beneficial holds and the raw clusters otherwise.  */
1111 /* Find jump tables of given CLUSTERS, where all members of the vector
1112 are of type simple_cluster. New clusters are returned. */
1115 jump_table_cluster::find_jump_tables (vec
<cluster
*> &clusters
)
1118 return clusters
.copy ();
1120 unsigned l
= clusters
.length ();
1121 auto_vec
<min_cluster_item
> min
;
1122 min
.reserve (l
+ 1);
1124 min
.quick_push (min_cluster_item (0, 0, 0));
1126 for (unsigned i
= 1; i
<= l
; i
++)
1128 /* Set minimal # of clusters with i-th item to infinite. */
1129 min
.quick_push (min_cluster_item (INT_MAX
, INT_MAX
, INT_MAX
));
1131 for (unsigned j
= 0; j
< i
; j
++)
1133 unsigned HOST_WIDE_INT s
= min
[j
].m_non_jt_cases
;
1134 if (i
- j
< case_values_threshold ())
1137 /* Prefer clusters with smaller number of numbers covered. */
1138 if ((min
[j
].m_count
+ 1 < min
[i
].m_count
1139 || (min
[j
].m_count
+ 1 == min
[i
].m_count
1140 && s
< min
[i
].m_non_jt_cases
))
1141 && can_be_handled (clusters
, j
, i
- 1))
1142 min
[i
] = min_cluster_item (min
[j
].m_count
+ 1, j
, s
);
1145 gcc_checking_assert (min
[i
].m_count
!= INT_MAX
);
1149 if (min
[l
].m_count
== INT_MAX
)
1150 return clusters
.copy ();
1152 vec
<cluster
*> output
;
1155 /* Find and build the clusters. */
1158 int start
= min
[end
].m_start
;
1160 /* Do not allow clusters with small number of cases. */
1161 if (is_beneficial (clusters
, start
, end
- 1))
1162 output
.safe_push (new jump_table_cluster (clusters
, start
, end
- 1));
1164 for (int i
= end
- 1; i
>= start
; i
--)
1165 output
.safe_push (clusters
[i
]);
/* NOTE(review): corrupted extract -- the `return false;` after the
   flag_jump_tables test, the single-case early return the comment refers
   to, the overflow check after upstream line 1207, and braces are missing;
   verify against upstream.  Visible heuristic: a jump table over
   clusters[start..end] is acceptable when the covered value range does not
   exceed max_ratio (size vs. speed variant) times the comparison count a
   decision tree would need (ranges cost 2 comparisons, singles 1).  */
1177 /* Return true when cluster starting at START and ending at END (inclusive)
1178 can build a jump-table. */
1181 jump_table_cluster::can_be_handled (const vec
<cluster
*> &clusters
,
1182 unsigned start
, unsigned end
)
1184 /* If the switch is relatively small such that the cost of one
1185 indirect jump on the target are higher than the cost of a
1186 decision tree, go with the decision tree.
1188 If range of values is much bigger than number of values,
1189 or if it is too large to represent in a HOST_WIDE_INT,
1190 make a sequence of conditional branches instead of a dispatch.
1192 The definition of "much bigger" depends on whether we are
1193 optimizing for size or for speed. */
1194 if (!flag_jump_tables
)
1197 /* For algorithm correctness, jump table for a single case must return
1198 true. We bail out in is_beneficial if it's called just for
1203 unsigned HOST_WIDE_INT max_ratio
1204 = optimize_insn_for_size_p () ? max_ratio_for_size
: max_ratio_for_speed
;
1205 unsigned HOST_WIDE_INT range
= get_range (clusters
[start
]->get_low (),
1206 clusters
[end
]->get_high ());
1207 /* Check overflow. */
1211 unsigned HOST_WIDE_INT comparison_count
= 0;
1212 for (unsigned i
= start
; i
<= end
; i
++)
1214 simple_cluster
*sc
= static_cast<simple_cluster
*> (clusters
[i
]);
1215 comparison_count
+= sc
->m_range_p
? 2 : 1;
1218 return range
<= max_ratio
* comparison_count
;
1221 /* Return true if cluster starting at START and ending at END (inclusive)
1222 is profitable transformation. */
1225 jump_table_cluster::is_beneficial (const vec
<cluster
*> &,
1226 unsigned start
, unsigned end
)
1228 /* Single case bail out. */
1232 return end
- start
+ 1 >= case_values_threshold ();
1235 /* Definition of jump_table_cluster constants. */
1237 const unsigned HOST_WIDE_INT
jump_table_cluster::max_ratio_for_size
;
1238 const unsigned HOST_WIDE_INT
jump_table_cluster::max_ratio_for_speed
;
1240 /* Find bit tests of given CLUSTERS, where all members of the vector
1241 are of type simple_cluster. New clusters are returned. */
1244 bit_test_cluster::find_bit_tests (vec
<cluster
*> &clusters
)
1246 vec
<cluster
*> output
;
1249 unsigned l
= clusters
.length ();
1250 auto_vec
<min_cluster_item
> min
;
1251 min
.reserve (l
+ 1);
1253 min
.quick_push (min_cluster_item (0, 0, 0));
1255 for (unsigned i
= 1; i
<= l
; i
++)
1257 /* Set minimal # of clusters with i-th item to infinite. */
1258 min
.quick_push (min_cluster_item (INT_MAX
, INT_MAX
, INT_MAX
));
1260 for (unsigned j
= 0; j
< i
; j
++)
1262 if (min
[j
].m_count
+ 1 < min
[i
].m_count
1263 && can_be_handled (clusters
, j
, i
- 1))
1264 min
[i
] = min_cluster_item (min
[j
].m_count
+ 1, j
, INT_MAX
);
1267 gcc_checking_assert (min
[i
].m_count
!= INT_MAX
);
1271 if (min
[l
].m_count
== INT_MAX
)
1272 return clusters
.copy ();
1274 /* Find and build the clusters. */
1275 for (unsigned end
= l
;;)
1277 int start
= min
[end
].m_start
;
1279 if (is_beneficial (clusters
, start
, end
- 1))
1281 bool entire
= start
== 0 && end
== clusters
.length ();
1282 output
.safe_push (new bit_test_cluster (clusters
, start
, end
- 1,
1286 for (int i
= end
- 1; i
>= start
; i
--)
1287 output
.safe_push (clusters
[i
]);
1299 /* Return true when RANGE of case values with UNIQ labels
1300 can build a bit test. */
1303 bit_test_cluster::can_be_handled (unsigned HOST_WIDE_INT range
,
1306 /* Check overflow. */
1310 if (range
>= GET_MODE_BITSIZE (word_mode
))
1316 /* Return true when cluster starting at START and ending at END (inclusive)
1317 can build a bit test. */
1320 bit_test_cluster::can_be_handled (const vec
<cluster
*> &clusters
,
1321 unsigned start
, unsigned end
)
1323 /* For algorithm correctness, bit test for a single case must return
1324 true. We bail out in is_beneficial if it's called just for
1329 unsigned HOST_WIDE_INT range
= get_range (clusters
[start
]->get_low (),
1330 clusters
[end
]->get_high ());
1331 auto_bitmap dest_bbs
;
1333 for (unsigned i
= start
; i
<= end
; i
++)
1335 simple_cluster
*sc
= static_cast<simple_cluster
*> (clusters
[i
]);
1336 bitmap_set_bit (dest_bbs
, sc
->m_case_bb
->index
);
1339 return can_be_handled (range
, bitmap_count_bits (dest_bbs
));
1342 /* Return true when COUNT of cases of UNIQ labels is beneficial for bit test
1346 bit_test_cluster::is_beneficial (unsigned count
, unsigned uniq
)
1348 return (((uniq
== 1 && count
>= 3)
1349 || (uniq
== 2 && count
>= 5)
1350 || (uniq
== 3 && count
>= 6)));
1353 /* Return true if cluster starting at START and ending at END (inclusive)
1354 is profitable transformation. */
1357 bit_test_cluster::is_beneficial (const vec
<cluster
*> &clusters
,
1358 unsigned start
, unsigned end
)
1360 /* Single case bail out. */
1364 auto_bitmap dest_bbs
;
1366 for (unsigned i
= start
; i
<= end
; i
++)
1368 simple_cluster
*sc
= static_cast<simple_cluster
*> (clusters
[i
]);
1369 bitmap_set_bit (dest_bbs
, sc
->m_case_bb
->index
);
1372 unsigned uniq
= bitmap_count_bits (dest_bbs
);
1373 unsigned count
= end
- start
+ 1;
1374 return is_beneficial (count
, uniq
);
1377 /* Comparison function for qsort to order bit tests by decreasing
1378 probability of execution. */
1381 case_bit_test::cmp (const void *p1
, const void *p2
)
1383 const struct case_bit_test
*const d1
= (const struct case_bit_test
*) p1
;
1384 const struct case_bit_test
*const d2
= (const struct case_bit_test
*) p2
;
1386 if (d2
->bits
!= d1
->bits
)
1387 return d2
->bits
- d1
->bits
;
1389 /* Stabilize the sort. */
1390 return (LABEL_DECL_UID (CASE_LABEL (d2
->label
))
1391 - LABEL_DECL_UID (CASE_LABEL (d1
->label
)));
1394 /* Expand a switch statement by a short sequence of bit-wise
1395 comparisons. "switch(x)" is effectively converted into
1396 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
1399 INDEX_EXPR is the value being switched on.
1401 MINVAL is the lowest case value of in the case nodes,
1402 and RANGE is highest value minus MINVAL. MINVAL and RANGE
1403 are not guaranteed to be of the same type as INDEX_EXPR
1404 (the gimplifier doesn't change the type of case label values,
1405 and MINVAL and RANGE are derived from those values).
1406 MAXVAL is MINVAL + RANGE.
1408 There *MUST* be max_case_bit_tests or less unique case
1412 bit_test_cluster::emit (tree index_expr
, tree index_type
,
1413 tree
, basic_block default_bb
)
1415 struct case_bit_test test
[m_max_case_bit_tests
] = { {} };
1416 unsigned int i
, j
, k
;
1419 tree unsigned_index_type
= unsigned_type_for (index_type
);
1421 gimple_stmt_iterator gsi
;
1422 gassign
*shift_stmt
;
1424 tree idx
, tmp
, csui
;
1425 tree word_type_node
= lang_hooks
.types
.type_for_mode (word_mode
, 1);
1426 tree word_mode_zero
= fold_convert (word_type_node
, integer_zero_node
);
1427 tree word_mode_one
= fold_convert (word_type_node
, integer_one_node
);
1428 int prec
= TYPE_PRECISION (word_type_node
);
1429 wide_int wone
= wi::one (prec
);
1431 tree minval
= get_low ();
1432 tree maxval
= get_high ();
1433 tree range
= int_const_binop (MINUS_EXPR
, maxval
, minval
);
1434 unsigned HOST_WIDE_INT bt_range
= get_range (minval
, maxval
);
1436 /* Go through all case labels, and collect the case labels, profile
1437 counts, and other information we need to build the branch tests. */
1439 for (i
= 0; i
< m_cases
.length (); i
++)
1441 unsigned int lo
, hi
;
1442 simple_cluster
*n
= static_cast<simple_cluster
*> (m_cases
[i
]);
1443 for (k
= 0; k
< count
; k
++)
1444 if (n
->m_case_bb
== test
[k
].target_bb
)
1449 gcc_checking_assert (count
< m_max_case_bit_tests
);
1450 test
[k
].mask
= wi::zero (prec
);
1451 test
[k
].target_bb
= n
->m_case_bb
;
1452 test
[k
].label
= n
->m_case_label_expr
;
1457 test
[k
].bits
+= n
->get_range (n
->get_low (), n
->get_high ());
1459 lo
= tree_to_uhwi (int_const_binop (MINUS_EXPR
, n
->get_low (), minval
));
1460 if (n
->get_high () == NULL_TREE
)
1463 hi
= tree_to_uhwi (int_const_binop (MINUS_EXPR
, n
->get_high (),
1466 for (j
= lo
; j
<= hi
; j
++)
1467 test
[k
].mask
|= wi::lshift (wone
, j
);
1470 qsort (test
, count
, sizeof (*test
), case_bit_test::cmp
);
1472 /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
1473 the minval subtractions, but it might make the mask constants more
1474 expensive. So, compare the costs. */
1475 if (compare_tree_int (minval
, 0) > 0
1476 && compare_tree_int (maxval
, GET_MODE_BITSIZE (word_mode
)) < 0)
1479 HOST_WIDE_INT m
= tree_to_uhwi (minval
);
1480 rtx reg
= gen_raw_REG (word_mode
, 10000);
1481 bool speed_p
= optimize_insn_for_speed_p ();
1482 cost_diff
= set_rtx_cost (gen_rtx_PLUS (word_mode
, reg
,
1483 GEN_INT (-m
)), speed_p
);
1484 for (i
= 0; i
< count
; i
++)
1486 rtx r
= immed_wide_int_const (test
[i
].mask
, word_mode
);
1487 cost_diff
+= set_src_cost (gen_rtx_AND (word_mode
, reg
, r
),
1488 word_mode
, speed_p
);
1489 r
= immed_wide_int_const (wi::lshift (test
[i
].mask
, m
), word_mode
);
1490 cost_diff
-= set_src_cost (gen_rtx_AND (word_mode
, reg
, r
),
1491 word_mode
, speed_p
);
1495 for (i
= 0; i
< count
; i
++)
1496 test
[i
].mask
= wi::lshift (test
[i
].mask
, m
);
1497 minval
= build_zero_cst (TREE_TYPE (minval
));
1502 /* Now build the test-and-branch code. */
1504 gsi
= gsi_last_bb (m_case_bb
);
1506 /* idx = (unsigned)x - minval. */
1507 idx
= fold_convert (unsigned_index_type
, index_expr
);
1508 idx
= fold_build2 (MINUS_EXPR
, unsigned_index_type
, idx
,
1509 fold_convert (unsigned_index_type
, minval
));
1510 idx
= force_gimple_operand_gsi (&gsi
, idx
,
1511 /*simple=*/true, NULL_TREE
,
1512 /*before=*/true, GSI_SAME_STMT
);
1514 if (m_handles_entire_switch
)
1516 /* if (idx > range) goto default */
1518 = force_gimple_operand_gsi (&gsi
,
1519 fold_convert (unsigned_index_type
, range
),
1520 /*simple=*/true, NULL_TREE
,
1521 /*before=*/true, GSI_SAME_STMT
);
1522 tmp
= fold_build2 (GT_EXPR
, boolean_type_node
, idx
, range
);
1524 = hoist_edge_and_branch_if_true (&gsi
, tmp
, default_bb
,
1525 profile_probability::unlikely ());
1526 gsi
= gsi_last_bb (new_bb
);
1529 /* csui = (1 << (word_mode) idx) */
1530 csui
= make_ssa_name (word_type_node
);
1531 tmp
= fold_build2 (LSHIFT_EXPR
, word_type_node
, word_mode_one
,
1532 fold_convert (word_type_node
, idx
));
1533 tmp
= force_gimple_operand_gsi (&gsi
, tmp
,
1534 /*simple=*/false, NULL_TREE
,
1535 /*before=*/true, GSI_SAME_STMT
);
1536 shift_stmt
= gimple_build_assign (csui
, tmp
);
1537 gsi_insert_before (&gsi
, shift_stmt
, GSI_SAME_STMT
);
1538 update_stmt (shift_stmt
);
1540 profile_probability prob
= profile_probability::always ();
1542 /* for each unique set of cases:
1543 if (const & csui) goto target */
1544 for (k
= 0; k
< count
; k
++)
1546 prob
= profile_probability::always ().apply_scale (test
[k
].bits
,
1548 bt_range
-= test
[k
].bits
;
1549 tmp
= wide_int_to_tree (word_type_node
, test
[k
].mask
);
1550 tmp
= fold_build2 (BIT_AND_EXPR
, word_type_node
, csui
, tmp
);
1551 tmp
= force_gimple_operand_gsi (&gsi
, tmp
,
1552 /*simple=*/true, NULL_TREE
,
1553 /*before=*/true, GSI_SAME_STMT
);
1554 tmp
= fold_build2 (NE_EXPR
, boolean_type_node
, tmp
, word_mode_zero
);
1556 = hoist_edge_and_branch_if_true (&gsi
, tmp
, test
[k
].target_bb
, prob
);
1557 gsi
= gsi_last_bb (new_bb
);
1560 /* We should have removed all edges now. */
1561 gcc_assert (EDGE_COUNT (gsi_bb (gsi
)->succs
) == 0);
1563 /* If nothing matched, go to the default label. */
1564 edge e
= make_edge (gsi_bb (gsi
), default_bb
, EDGE_FALLTHRU
);
1565 e
->probability
= profile_probability::always ();
1568 /* Split the basic block at the statement pointed to by GSIP, and insert
1569 a branch to the target basic block of E_TRUE conditional on tree
1572 It is assumed that there is already an edge from the to-be-split
1573 basic block to E_TRUE->dest block. This edge is removed, and the
1574 profile information on the edge is re-used for the new conditional
1577 The CFG is updated. The dominator tree will not be valid after
1578 this transformation, but the immediate dominators are updated if
1579 UPDATE_DOMINATORS is true.
1581 Returns the newly created basic block. */
1584 bit_test_cluster::hoist_edge_and_branch_if_true (gimple_stmt_iterator
*gsip
,
1585 tree cond
, basic_block case_bb
,
1586 profile_probability prob
)
1591 basic_block new_bb
, split_bb
= gsi_bb (*gsip
);
1593 edge e_true
= make_edge (split_bb
, case_bb
, EDGE_TRUE_VALUE
);
1594 e_true
->probability
= prob
;
1595 gcc_assert (e_true
->src
== split_bb
);
1597 tmp
= force_gimple_operand_gsi (gsip
, cond
, /*simple=*/true, NULL
,
1598 /*before=*/true, GSI_SAME_STMT
);
1599 cond_stmt
= gimple_build_cond_from_tree (tmp
, NULL_TREE
, NULL_TREE
);
1600 gsi_insert_before (gsip
, cond_stmt
, GSI_SAME_STMT
);
1602 e_false
= split_block (split_bb
, cond_stmt
);
1603 new_bb
= e_false
->dest
;
1604 redirect_edge_pred (e_true
, split_bb
);
1606 e_false
->flags
&= ~EDGE_FALLTHRU
;
1607 e_false
->flags
|= EDGE_FALSE_VALUE
;
1608 e_false
->probability
= e_true
->probability
.invert ();
1609 new_bb
->count
= e_false
->count ();
1614 /* Compute the number of case labels that correspond to each outgoing edge of
1615 switch statement. Record this information in the aux field of the edge. */
1618 switch_decision_tree::compute_cases_per_edge ()
1620 reset_out_edges_aux (m_switch
);
1621 int ncases
= gimple_switch_num_labels (m_switch
);
1622 for (int i
= ncases
- 1; i
>= 1; --i
)
1624 edge case_edge
= gimple_switch_edge (cfun
, m_switch
, i
);
1625 case_edge
->aux
= (void *) ((intptr_t) (case_edge
->aux
) + 1);
1629 /* Analyze switch statement and return true when the statement is expanded
1630 as decision tree. */
1633 switch_decision_tree::analyze_switch_statement ()
1635 unsigned l
= gimple_switch_num_labels (m_switch
);
1636 basic_block bb
= gimple_bb (m_switch
);
1637 auto_vec
<cluster
*> clusters
;
1638 clusters
.create (l
- 1);
1640 basic_block default_bb
= gimple_switch_default_bb (cfun
, m_switch
);
1641 m_case_bbs
.reserve (l
);
1642 m_case_bbs
.quick_push (default_bb
);
1644 compute_cases_per_edge ();
1646 for (unsigned i
= 1; i
< l
; i
++)
1648 tree elt
= gimple_switch_label (m_switch
, i
);
1649 tree lab
= CASE_LABEL (elt
);
1650 basic_block case_bb
= label_to_block (cfun
, lab
);
1651 edge case_edge
= find_edge (bb
, case_bb
);
1652 tree low
= CASE_LOW (elt
);
1653 tree high
= CASE_HIGH (elt
);
1655 profile_probability p
1656 = case_edge
->probability
.apply_scale (1, (intptr_t) (case_edge
->aux
));
1657 clusters
.quick_push (new simple_cluster (low
, high
, elt
, case_edge
->dest
,
1659 m_case_bbs
.quick_push (case_edge
->dest
);
1662 reset_out_edges_aux (m_switch
);
1664 /* Find jump table clusters. */
1665 vec
<cluster
*> output
= jump_table_cluster::find_jump_tables (clusters
);
1667 /* Find bit test clusters. */
1668 vec
<cluster
*> output2
;
1669 auto_vec
<cluster
*> tmp
;
1673 for (unsigned i
= 0; i
< output
.length (); i
++)
1675 cluster
*c
= output
[i
];
1676 if (c
->get_type () != SIMPLE_CASE
)
1678 if (!tmp
.is_empty ())
1680 vec
<cluster
*> n
= bit_test_cluster::find_bit_tests (tmp
);
1681 output2
.safe_splice (n
);
1685 output2
.safe_push (c
);
1691 /* We still can have a temporary vector to test. */
1692 if (!tmp
.is_empty ())
1694 vec
<cluster
*> n
= bit_test_cluster::find_bit_tests (tmp
);
1695 output2
.safe_splice (n
);
1701 fprintf (dump_file
, ";; GIMPLE switch case clusters: ");
1702 for (unsigned i
= 0; i
< output2
.length (); i
++)
1703 output2
[i
]->dump (dump_file
, dump_flags
& TDF_DETAILS
);
1704 fprintf (dump_file
, "\n");
1709 bool expanded
= try_switch_expansion (output2
);
1711 for (unsigned i
= 0; i
< output2
.length (); i
++)
1719 /* Attempt to expand CLUSTERS as a decision tree. Return true when
1723 switch_decision_tree::try_switch_expansion (vec
<cluster
*> &clusters
)
1725 tree index_expr
= gimple_switch_index (m_switch
);
1726 tree index_type
= TREE_TYPE (index_expr
);
1727 basic_block bb
= gimple_bb (m_switch
);
1729 if (gimple_switch_num_labels (m_switch
) == 1)
1732 /* Find the default case target label. */
1733 edge default_edge
= gimple_switch_default_edge (cfun
, m_switch
);
1734 m_default_bb
= default_edge
->dest
;
1736 /* Do the insertion of a case label into m_case_list. The labels are
1737 fed to us in descending order from the sorted vector of case labels used
1738 in the tree part of the middle end. So the list we construct is
1739 sorted in ascending order. */
1741 for (int i
= clusters
.length () - 1; i
>= 0; i
--)
1743 case_tree_node
*r
= m_case_list
;
1744 m_case_list
= m_case_node_pool
.allocate ();
1745 m_case_list
->m_right
= r
;
1746 m_case_list
->m_c
= clusters
[i
];
1749 record_phi_operand_mapping ();
1751 /* Split basic block that contains the gswitch statement. */
1752 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
1754 if (gsi_end_p (gsi
))
1755 e
= split_block_after_labels (bb
);
1759 e
= split_block (bb
, gsi_stmt (gsi
));
1761 bb
= split_edge (e
);
1763 /* Create new basic blocks for non-case clusters where specific expansion
1765 for (unsigned i
= 0; i
< clusters
.length (); i
++)
1766 if (clusters
[i
]->get_type () != SIMPLE_CASE
)
1768 clusters
[i
]->m_case_bb
= create_empty_bb (bb
);
1769 clusters
[i
]->m_case_bb
->loop_father
= bb
->loop_father
;
1772 /* Do not do an extra work for a single cluster. */
1773 if (clusters
.length () == 1
1774 && clusters
[0]->get_type () != SIMPLE_CASE
)
1776 cluster
*c
= clusters
[0];
1777 c
->emit (index_expr
, index_type
,
1778 gimple_switch_default_label (m_switch
), m_default_bb
);
1779 redirect_edge_succ (single_succ_edge (bb
), c
->m_case_bb
);
1783 emit (bb
, index_expr
, default_edge
->probability
, index_type
);
1785 /* Emit cluster-specific switch handling. */
1786 for (unsigned i
= 0; i
< clusters
.length (); i
++)
1787 if (clusters
[i
]->get_type () != SIMPLE_CASE
)
1788 clusters
[i
]->emit (index_expr
, index_type
,
1789 gimple_switch_default_label (m_switch
),
1793 fix_phi_operands_for_edges ();
1798 /* Before switch transformation, record all SSA_NAMEs defined in switch BB
1799 and used in a label basic block. */
1802 switch_decision_tree::record_phi_operand_mapping ()
1804 basic_block switch_bb
= gimple_bb (m_switch
);
1805 /* Record all PHI nodes that have to be fixed after conversion. */
1806 for (unsigned i
= 0; i
< m_case_bbs
.length (); i
++)
1809 basic_block bb
= m_case_bbs
[i
];
1810 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1812 gphi
*phi
= gsi
.phi ();
1814 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); i
++)
1816 basic_block phi_src_bb
= gimple_phi_arg_edge (phi
, i
)->src
;
1817 if (phi_src_bb
== switch_bb
)
1819 tree def
= gimple_phi_arg_def (phi
, i
);
1820 tree result
= gimple_phi_result (phi
);
1821 m_phi_mapping
.put (result
, def
);
1829 /* Append new operands to PHI statements that were introduced due to
1830 addition of new edges to case labels. */
1833 switch_decision_tree::fix_phi_operands_for_edges ()
1837 for (unsigned i
= 0; i
< m_case_bbs
.length (); i
++)
1839 basic_block bb
= m_case_bbs
[i
];
1840 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1842 gphi
*phi
= gsi
.phi ();
1843 for (unsigned j
= 0; j
< gimple_phi_num_args (phi
); j
++)
1845 tree def
= gimple_phi_arg_def (phi
, j
);
1846 if (def
== NULL_TREE
)
1848 edge e
= gimple_phi_arg_edge (phi
, j
);
1850 = m_phi_mapping
.get (gimple_phi_result (phi
));
1851 gcc_assert (definition
);
1852 add_phi_arg (phi
, *definition
, e
, UNKNOWN_LOCATION
);
1859 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1860 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1862 We generate a binary decision tree to select the appropriate target
1866 switch_decision_tree::emit (basic_block bb
, tree index_expr
,
1867 profile_probability default_prob
, tree index_type
)
1869 balance_case_nodes (&m_case_list
, NULL
);
1872 dump_function_to_file (current_function_decl
, dump_file
, dump_flags
);
1873 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1875 int indent_step
= ceil_log2 (TYPE_PRECISION (index_type
)) + 2;
1876 fprintf (dump_file
, ";; Expanding GIMPLE switch as decision tree:\n");
1877 gcc_assert (m_case_list
!= NULL
);
1878 dump_case_nodes (dump_file
, m_case_list
, indent_step
, 0);
1881 bb
= emit_case_nodes (bb
, index_expr
, m_case_list
, default_prob
, index_type
);
1884 emit_jump (bb
, m_default_bb
);
1886 /* Remove all edges and do just an edge that will reach default_bb. */
1887 bb
= gimple_bb (m_switch
);
1888 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
1889 gsi_remove (&gsi
, true);
1891 delete_basic_block (bb
);
1894 /* Take an ordered list of case nodes
1895 and transform them into a near optimal binary tree,
1896 on the assumption that any target code selection value is as
1897 likely as any other.
1899 The transformation is performed by splitting the ordered
1900 list into two equal sections plus a pivot. The parts are
1901 then attached to the pivot as left and right branches. Each
1902 branch is then transformed recursively. */
1905 switch_decision_tree::balance_case_nodes (case_tree_node
**head
,
1906 case_tree_node
*parent
)
1915 case_tree_node
**npp
;
1916 case_tree_node
*left
;
1918 /* Count the number of entries on branch. Also count the ranges. */
1922 if (!tree_int_cst_equal (np
->m_c
->get_low (), np
->m_c
->get_high ()))
1931 /* Split this list if it is long enough for that to help. */
1935 /* If there are just three nodes, split at the middle one. */
1937 npp
= &(*npp
)->m_right
;
1940 /* Find the place in the list that bisects the list's total cost,
1941 where ranges count as 2.
1942 Here I gets half the total cost. */
1943 i
= (i
+ ranges
+ 1) / 2;
1946 /* Skip nodes while their cost does not reach that amount. */
1947 if (!tree_int_cst_equal ((*npp
)->m_c
->get_low (),
1948 (*npp
)->m_c
->get_high ()))
1953 npp
= &(*npp
)->m_right
;
1958 np
->m_parent
= parent
;
1961 /* Optimize each of the two split parts. */
1962 balance_case_nodes (&np
->m_left
, np
);
1963 balance_case_nodes (&np
->m_right
, np
);
1964 np
->m_c
->m_subtree_prob
= np
->m_c
->m_prob
;
1965 np
->m_c
->m_subtree_prob
+= np
->m_left
->m_c
->m_subtree_prob
;
1966 np
->m_c
->m_subtree_prob
+= np
->m_right
->m_c
->m_subtree_prob
;
1970 /* Else leave this branch as one level,
1971 but fill in `parent' fields. */
1973 np
->m_parent
= parent
;
1974 np
->m_c
->m_subtree_prob
= np
->m_c
->m_prob
;
1975 for (; np
->m_right
; np
= np
->m_right
)
1977 np
->m_right
->m_parent
= np
;
1978 (*head
)->m_c
->m_subtree_prob
+= np
->m_right
->m_c
->m_subtree_prob
;
1984 /* Dump ROOT, a list or tree of case nodes, to file. */
1987 switch_decision_tree::dump_case_nodes (FILE *f
, case_tree_node
*root
,
1988 int indent_step
, int indent_level
)
1994 dump_case_nodes (f
, root
->m_left
, indent_step
, indent_level
);
1997 fprintf (f
, "%*s", indent_step
* indent_level
, "");
1998 root
->m_c
->dump (f
);
1999 root
->m_c
->m_prob
.dump (f
);
2000 fputs (" subtree: ", f
);
2001 root
->m_c
->m_subtree_prob
.dump (f
);
2004 dump_case_nodes (f
, root
->m_right
, indent_step
, indent_level
);
2008 /* Add an unconditional jump to CASE_BB that happens in basic block BB. */
2011 switch_decision_tree::emit_jump (basic_block bb
, basic_block case_bb
)
2013 edge e
= single_succ_edge (bb
);
2014 redirect_edge_succ (e
, case_bb
);
2017 /* Generate code to compare OP0 with OP1 so that the condition codes are
2018 set and to jump to LABEL_BB if the condition is true.
2019 COMPARISON is the GIMPLE comparison (EQ, NE, GT, etc.).
2020 PROB is the probability of jumping to LABEL_BB. */
2023 switch_decision_tree::emit_cmp_and_jump_insns (basic_block bb
, tree op0
,
2024 tree op1
, tree_code comparison
,
2025 basic_block label_bb
,
2026 profile_probability prob
)
2028 // TODO: it's once called with lhs != index.
2029 op1
= fold_convert (TREE_TYPE (op0
), op1
);
2031 gcond
*cond
= gimple_build_cond (comparison
, op0
, op1
, NULL_TREE
, NULL_TREE
);
2032 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
2033 gsi_insert_after (&gsi
, cond
, GSI_NEW_STMT
);
2035 gcc_assert (single_succ_p (bb
));
2037 /* Make a new basic block where false branch will take place. */
2038 edge false_edge
= split_block (bb
, cond
);
2039 false_edge
->flags
= EDGE_FALSE_VALUE
;
2040 false_edge
->probability
= prob
.invert ();
2042 edge true_edge
= make_edge (bb
, label_bb
, EDGE_TRUE_VALUE
);
2043 true_edge
->probability
= prob
;
2045 return false_edge
->dest
;
2048 /* Generate code to jump to LABEL if OP0 and OP1 are equal.
2049 PROB is the probability of jumping to LABEL_BB.
2050 BB is a basic block where the new condition will be placed. */
2053 switch_decision_tree::do_jump_if_equal (basic_block bb
, tree op0
, tree op1
,
2054 basic_block label_bb
,
2055 profile_probability prob
)
2057 op1
= fold_convert (TREE_TYPE (op0
), op1
);
2059 gcond
*cond
= gimple_build_cond (EQ_EXPR
, op0
, op1
, NULL_TREE
, NULL_TREE
);
2060 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
2061 gsi_insert_before (&gsi
, cond
, GSI_SAME_STMT
);
2063 gcc_assert (single_succ_p (bb
));
2065 /* Make a new basic block where false branch will take place. */
2066 edge false_edge
= split_block (bb
, cond
);
2067 false_edge
->flags
= EDGE_FALSE_VALUE
;
2068 false_edge
->probability
= prob
.invert ();
2070 edge true_edge
= make_edge (bb
, label_bb
, EDGE_TRUE_VALUE
);
2071 true_edge
->probability
= prob
;
2073 return false_edge
->dest
;
2076 /* Emit step-by-step code to select a case for the value of INDEX.
2077 The thus generated decision tree follows the form of the
2078 case-node binary tree NODE, whose nodes represent test conditions.
2079 DEFAULT_PROB is probability of cases leading to default BB.
2080 INDEX_TYPE is the type of the index of the switch. */
2083 switch_decision_tree::emit_case_nodes (basic_block bb
, tree index
,
2084 case_tree_node
*node
,
2085 profile_probability default_prob
,
2088 profile_probability p
;
2090 /* If node is null, we are done. */
2094 /* Single value case. */
2095 if (node
->m_c
->is_single_value_p ())
2097 /* Node is single valued. First see if the index expression matches
2098 this node and then check our children, if any. */
2099 p
= node
->m_c
->m_prob
/ (node
->m_c
->m_subtree_prob
+ default_prob
);
2100 bb
= do_jump_if_equal (bb
, index
, node
->m_c
->get_low (),
2101 node
->m_c
->m_case_bb
, p
);
2102 /* Since this case is taken at this point, reduce its weight from
2104 node
->m_c
->m_subtree_prob
-= p
;
2106 if (node
->m_left
!= NULL
&& node
->m_right
!= NULL
)
2108 /* 1) the node has both children
2110 If both children are single-valued cases with no
2111 children, finish up all the work. This way, we can save
2112 one ordered comparison. */
2114 if (!node
->m_left
->has_child ()
2115 && node
->m_left
->m_c
->is_single_value_p ()
2116 && !node
->m_right
->has_child ()
2117 && node
->m_right
->m_c
->is_single_value_p ())
2119 p
= (node
->m_right
->m_c
->m_prob
2120 / (node
->m_c
->m_subtree_prob
+ default_prob
));
2121 bb
= do_jump_if_equal (bb
, index
, node
->m_right
->m_c
->get_low (),
2122 node
->m_right
->m_c
->m_case_bb
, p
);
2124 p
= (node
->m_left
->m_c
->m_prob
2125 / (node
->m_c
->m_subtree_prob
+ default_prob
));
2126 bb
= do_jump_if_equal (bb
, index
, node
->m_left
->m_c
->get_low (),
2127 node
->m_left
->m_c
->m_case_bb
, p
);
2131 /* Branch to a label where we will handle it later. */
2132 basic_block test_bb
= split_edge (single_succ_edge (bb
));
2133 redirect_edge_succ (single_pred_edge (test_bb
),
2134 single_succ_edge (bb
)->dest
);
2136 p
= ((node
->m_right
->m_c
->m_subtree_prob
2137 + default_prob
.apply_scale (1, 2))
2138 / (node
->m_c
->m_subtree_prob
+ default_prob
));
2139 bb
= emit_cmp_and_jump_insns (bb
, index
, node
->m_c
->get_high (),
2140 GT_EXPR
, test_bb
, p
);
2141 default_prob
= default_prob
.apply_scale (1, 2);
2143 /* Handle the left-hand subtree. */
2144 bb
= emit_case_nodes (bb
, index
, node
->m_left
,
2145 default_prob
, index_type
);
2147 /* If the left-hand subtree fell through,
2148 don't let it fall into the right-hand subtree. */
2149 if (bb
&& m_default_bb
)
2150 emit_jump (bb
, m_default_bb
);
2152 bb
= emit_case_nodes (test_bb
, index
, node
->m_right
,
2153 default_prob
, index_type
);
2156 else if (node
->m_left
== NULL
&& node
->m_right
!= NULL
)
2158 /* 2) the node has only right child. */
2160 /* Here we have a right child but no left so we issue a conditional
2161 branch to default and process the right child.
2163 Omit the conditional branch to default if the right child
2164 does not have any children and is single valued; it would
2165 cost too much space to save so little time. */
2167 if (node
->m_right
->has_child ()
2168 || !node
->m_right
->m_c
->is_single_value_p ())
2170 p
= (default_prob
.apply_scale (1, 2)
2171 / (node
->m_c
->m_subtree_prob
+ default_prob
));
2172 bb
= emit_cmp_and_jump_insns (bb
, index
, node
->m_c
->get_low (),
2173 LT_EXPR
, m_default_bb
, p
);
2174 default_prob
= default_prob
.apply_scale (1, 2);
2176 bb
= emit_case_nodes (bb
, index
, node
->m_right
, default_prob
,
2181 /* We cannot process node->right normally
2182 since we haven't ruled out the numbers less than
2183 this node's value. So handle node->right explicitly. */
2184 p
= (node
->m_right
->m_c
->m_subtree_prob
2185 / (node
->m_c
->m_subtree_prob
+ default_prob
));
2186 bb
= do_jump_if_equal (bb
, index
, node
->m_right
->m_c
->get_low (),
2187 node
->m_right
->m_c
->m_case_bb
, p
);
2190 else if (node
->m_left
!= NULL
&& node
->m_right
== NULL
)
2192 /* 3) just one subtree, on the left. Similar case as previous. */
2194 if (node
->m_left
->has_child ()
2195 || !node
->m_left
->m_c
->is_single_value_p ())
2197 p
= (default_prob
.apply_scale (1, 2)
2198 / (node
->m_c
->m_subtree_prob
+ default_prob
));
2199 bb
= emit_cmp_and_jump_insns (bb
, index
, node
->m_c
->get_high (),
2200 GT_EXPR
, m_default_bb
, p
);
2201 default_prob
= default_prob
.apply_scale (1, 2);
2203 bb
= emit_case_nodes (bb
, index
, node
->m_left
, default_prob
,
2208 /* We cannot process node->left normally
2209 since we haven't ruled out the numbers less than
2210 this node's value. So handle node->left explicitly. */
2211 p
= (node
->m_left
->m_c
->m_subtree_prob
2212 / (node
->m_c
->m_subtree_prob
+ default_prob
));
2213 bb
= do_jump_if_equal (bb
, index
, node
->m_left
->m_c
->get_low (),
2214 node
->m_left
->m_c
->m_case_bb
, p
);
2220 /* Node is a range. These cases are very similar to those for a single
2221 value, except that we do not start by testing whether this node
2222 is the one to branch to. */
2223 if (node
->has_child () || node
->m_c
->get_type () != SIMPLE_CASE
)
2225 /* Branch to a label where we will handle it later. */
2226 basic_block test_bb
= split_edge (single_succ_edge (bb
));
2227 redirect_edge_succ (single_pred_edge (test_bb
),
2228 single_succ_edge (bb
)->dest
);
2231 profile_probability right_prob
= profile_probability::never ();
2233 right_prob
= node
->m_right
->m_c
->m_subtree_prob
;
2234 p
= ((right_prob
+ default_prob
.apply_scale (1, 2))
2235 / (node
->m_c
->m_subtree_prob
+ default_prob
));
2237 bb
= emit_cmp_and_jump_insns (bb
, index
, node
->m_c
->get_high (),
2238 GT_EXPR
, test_bb
, p
);
2239 default_prob
= default_prob
.apply_scale (1, 2);
2241 /* Value belongs to this node or to the left-hand subtree. */
2242 p
= node
->m_c
->m_prob
/ (node
->m_c
->m_subtree_prob
+ default_prob
);
2243 bb
= emit_cmp_and_jump_insns (bb
, index
, node
->m_c
->get_low (),
2244 GE_EXPR
, node
->m_c
->m_case_bb
, p
);
2246 /* Handle the left-hand subtree. */
2247 bb
= emit_case_nodes (bb
, index
, node
->m_left
,
2248 default_prob
, index_type
);
2250 /* If the left-hand subtree fell through,
2251 don't let it fall into the right-hand subtree. */
2252 if (bb
&& m_default_bb
)
2253 emit_jump (bb
, m_default_bb
);
2255 bb
= emit_case_nodes (test_bb
, index
, node
->m_right
,
2256 default_prob
, index_type
);
2260 /* Node has no children so we check low and high bounds to remove
2261 redundant tests. Only one of the bounds can exist,
2262 since otherwise this node is bounded--a case tested already. */
2264 generate_range_test (bb
, index
, node
->m_c
->get_low (),
2265 node
->m_c
->get_high (), &lhs
, &rhs
);
2266 p
= default_prob
/ (node
->m_c
->m_subtree_prob
+ default_prob
);
2268 bb
= emit_cmp_and_jump_insns (bb
, lhs
, rhs
, GT_EXPR
,
2271 emit_jump (bb
, node
->m_c
->m_case_bb
);
2279 /* The main function of the pass scans statements for switches and invokes
2280 process_switch on them. */
2284 const pass_data pass_data_convert_switch
=
2286 GIMPLE_PASS
, /* type */
2287 "switchconv", /* name */
2288 OPTGROUP_NONE
, /* optinfo_flags */
2289 TV_TREE_SWITCH_CONVERSION
, /* tv_id */
2290 ( PROP_cfg
| PROP_ssa
), /* properties_required */
2291 0, /* properties_provided */
2292 0, /* properties_destroyed */
2293 0, /* todo_flags_start */
2294 TODO_update_ssa
, /* todo_flags_finish */
2297 class pass_convert_switch
: public gimple_opt_pass
2300 pass_convert_switch (gcc::context
*ctxt
)
2301 : gimple_opt_pass (pass_data_convert_switch
, ctxt
)
2304 /* opt_pass methods: */
2305 virtual bool gate (function
*) { return flag_tree_switch_conversion
!= 0; }
2306 virtual unsigned int execute (function
*);
2308 }; // class pass_convert_switch
2311 pass_convert_switch::execute (function
*fun
)
2314 bool cfg_altered
= false;
2316 FOR_EACH_BB_FN (bb
, fun
)
2318 gimple
*stmt
= last_stmt (bb
);
2319 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
2323 expanded_location loc
= expand_location (gimple_location (stmt
));
2325 fprintf (dump_file
, "beginning to process the following "
2326 "SWITCH statement (%s:%d) : ------- \n",
2327 loc
.file
, loc
.line
);
2328 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
2329 putc ('\n', dump_file
);
2332 switch_conversion sconv
;
2333 sconv
.expand (as_a
<gswitch
*> (stmt
));
2334 cfg_altered
|= sconv
.m_cfg_altered
;
2335 if (!sconv
.m_reason
)
2339 fputs ("Switch converted\n", dump_file
);
2340 fputs ("--------------------------------\n", dump_file
);
2343 /* Make no effort to update the post-dominator tree.
2344 It is actually not that hard for the transformations
2345 we have performed, but it is not supported
2346 by iterate_fix_dominators. */
2347 free_dominance_info (CDI_POST_DOMINATORS
);
2353 fputs ("Bailing out - ", dump_file
);
2354 fputs (sconv
.m_reason
, dump_file
);
2355 fputs ("\n--------------------------------\n", dump_file
);
2361 return cfg_altered
? TODO_cleanup_cfg
: 0;;
2367 make_pass_convert_switch (gcc::context
*ctxt
)
2369 return new pass_convert_switch (ctxt
);
/* The main function of the pass scans statements for switches and lowers
   them into a decision tree.  */
2377 template <bool O0
> class pass_lower_switch
: public gimple_opt_pass
2380 pass_lower_switch (gcc::context
*ctxt
) : gimple_opt_pass (data
, ctxt
) {}
2382 static const pass_data data
;
2386 return new pass_lower_switch
<O0
> (m_ctxt
);
2392 return !O0
|| !optimize
;
2395 virtual unsigned int execute (function
*fun
);
2396 }; // class pass_lower_switch
2399 const pass_data pass_lower_switch
<O0
>::data
= {
2400 GIMPLE_PASS
, /* type */
2401 O0
? "switchlower_O0" : "switchlower", /* name */
2402 OPTGROUP_NONE
, /* optinfo_flags */
2403 TV_TREE_SWITCH_LOWERING
, /* tv_id */
2404 ( PROP_cfg
| PROP_ssa
), /* properties_required */
2405 0, /* properties_provided */
2406 0, /* properties_destroyed */
2407 0, /* todo_flags_start */
2408 TODO_update_ssa
| TODO_cleanup_cfg
, /* todo_flags_finish */
2413 pass_lower_switch
<O0
>::execute (function
*fun
)
2416 bool expanded
= false;
2418 auto_vec
<gimple
*> switch_statements
;
2419 switch_statements
.create (1);
2421 FOR_EACH_BB_FN (bb
, fun
)
2423 gimple
*stmt
= last_stmt (bb
);
2424 if (stmt
&& gimple_code (stmt
) == GIMPLE_SWITCH
)
2425 switch_statements
.safe_push (stmt
);
2428 for (unsigned i
= 0; i
< switch_statements
.length (); i
++)
2430 gimple
*stmt
= switch_statements
[i
];
2433 expanded_location loc
= expand_location (gimple_location (stmt
));
2435 fprintf (dump_file
, "beginning to process the following "
2436 "SWITCH statement (%s:%d) : ------- \n",
2437 loc
.file
, loc
.line
);
2438 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
2439 putc ('\n', dump_file
);
2442 gswitch
*swtch
= dyn_cast
<gswitch
*> (stmt
);
2445 switch_decision_tree
dt (swtch
);
2446 expanded
|= dt
.analyze_switch_statement ();
2452 free_dominance_info (CDI_DOMINATORS
);
2453 free_dominance_info (CDI_POST_DOMINATORS
);
2454 mark_virtual_operands_for_renaming (cfun
);
2463 make_pass_lower_switch_O0 (gcc::context
*ctxt
)
2465 return new pass_lower_switch
<true> (ctxt
);
2468 make_pass_lower_switch (gcc::context
*ctxt
)
2470 return new pass_lower_switch
<false> (ctxt
);