gcc/tree-switch-conversion.c
1 /* Lower GIMPLE_SWITCH expressions to something more efficient than
2 a jump table.
3 Copyright (C) 2006-2020 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /* This file handles the lowering of GIMPLE_SWITCH to an indexed
23 load, or a series of bit-test-and-branch expressions. */
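/* As a rough illustration only (not the pass's literal output), a switch
   whose cases feed constants into a common successor, such as

     switch (i)
       {
       case 0: x = 10; break;
       case 1: x = 13; break;
       case 2: x = 27; break;
       default: x = 0; break;
       }

   is in effect replaced by a load from a generated static array plus a
   bounds check, along the lines of

     static const int CSWTCH[] = { 10, 13, 27 };
     x = (unsigned) i <= 2 ? CSWTCH[i] : 0;

   The pass emits GIMPLE rather than C, so the snippet above is only meant
   to convey the shape of the transformation.  */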
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "insn-codes.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "cfghooks.h"
34 #include "tree-pass.h"
35 #include "ssa.h"
36 #include "optabs-tree.h"
37 #include "cgraph.h"
38 #include "gimple-pretty-print.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "cfganal.h"
43 #include "gimplify.h"
44 #include "gimple-iterator.h"
45 #include "gimplify-me.h"
46 #include "gimple-fold.h"
47 #include "tree-cfg.h"
48 #include "cfgloop.h"
49 #include "alloc-pool.h"
50 #include "target.h"
51 #include "tree-into-ssa.h"
52 #include "omp-general.h"
54 /* ??? For lang_hooks.types.type_for_mode, but is there a word_mode
55 type in the GIMPLE type system that is language-independent? */
56 #include "langhooks.h"
58 #include "tree-switch-conversion.h"
60 using namespace tree_switch_conversion;
62 /* Constructor. */
64 switch_conversion::switch_conversion (): m_final_bb (NULL),
65 m_constructors (NULL), m_default_values (NULL),
66 m_arr_ref_first (NULL), m_arr_ref_last (NULL),
67 m_reason (NULL), m_default_case_nonstandard (false), m_cfg_altered (false)
71 /* Collect information about the SWTCH statement. */
73 void
74 switch_conversion::collect (gswitch *swtch)
76 unsigned int branch_num = gimple_switch_num_labels (swtch);
77 tree min_case, max_case;
78 unsigned int i;
79 edge e, e_default, e_first;
80 edge_iterator ei;
82 m_switch = swtch;
84 /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
85 is a default label which is the first in the vector.
86 Collect the bits we can deduce from the CFG. */
87 m_index_expr = gimple_switch_index (swtch);
88 m_switch_bb = gimple_bb (swtch);
89 e_default = gimple_switch_default_edge (cfun, swtch);
90 m_default_bb = e_default->dest;
91 m_default_prob = e_default->probability;
93 /* Get upper and lower bounds of case values, and the covered range. */
94 min_case = gimple_switch_label (swtch, 1);
95 max_case = gimple_switch_label (swtch, branch_num - 1);
97 m_range_min = CASE_LOW (min_case);
98 if (CASE_HIGH (max_case) != NULL_TREE)
99 m_range_max = CASE_HIGH (max_case);
100 else
101 m_range_max = CASE_LOW (max_case);
103 m_contiguous_range = true;
104 tree last = CASE_HIGH (min_case) ? CASE_HIGH (min_case) : m_range_min;
105 for (i = 2; i < branch_num; i++)
107 tree elt = gimple_switch_label (swtch, i);
108 if (wi::to_wide (last) + 1 != wi::to_wide (CASE_LOW (elt)))
110 m_contiguous_range = false;
111 break;
113 last = CASE_HIGH (elt) ? CASE_HIGH (elt) : CASE_LOW (elt);
116 if (m_contiguous_range)
117 e_first = gimple_switch_edge (cfun, swtch, 1);
118 else
119 e_first = e_default;
121 /* See if there is one common successor block for all branch
122 targets. If it exists, record it in FINAL_BB.
123 Start with the destination of the first non-default case if the
124 range is contiguous, and with the default case otherwise, as the
125 guess, or with its destination in case it is a forwarder block. */
126 if (! single_pred_p (e_first->dest))
127 m_final_bb = e_first->dest;
128 else if (single_succ_p (e_first->dest)
129 && ! single_pred_p (single_succ (e_first->dest)))
130 m_final_bb = single_succ (e_first->dest);
131 /* Require that all switch destinations are either that common
132 FINAL_BB or a forwarder to it, except for the default
133 case if contiguous range. */
134 if (m_final_bb)
135 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
137 if (e->dest == m_final_bb)
138 continue;
140 if (single_pred_p (e->dest)
141 && single_succ_p (e->dest)
142 && single_succ (e->dest) == m_final_bb)
143 continue;
145 if (e == e_default && m_contiguous_range)
147 m_default_case_nonstandard = true;
148 continue;
151 m_final_bb = NULL;
152 break;
155 m_range_size
156 = int_const_binop (MINUS_EXPR, m_range_max, m_range_min);
158 /* Get a count of the number of case labels. Single-valued case labels
159 simply count as one, but a case range counts double, since it may
160 require two compares if it gets lowered as a branching tree. */
161 m_count = 0;
162 for (i = 1; i < branch_num; i++)
164 tree elt = gimple_switch_label (swtch, i);
165 m_count++;
166 if (CASE_HIGH (elt)
167 && ! tree_int_cst_equal (CASE_LOW (elt), CASE_HIGH (elt)))
168 m_count++;
171 /* Get the number of unique non-default targets out of the GIMPLE_SWITCH
172 block. Assume a CFG cleanup would have already removed degenerate
173 switch statements; this allows us to just use EDGE_COUNT. */
174 m_uniq = EDGE_COUNT (gimple_bb (swtch)->succs) - 1;
177 /* Checks whether the range given by the individual case statements of the
178 switch statement isn't too big and whether the number of branches actually
179 satisfies the size of the new array. */
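/* As an illustrative data point: with the default value of
   param_switch_conversion_branch_ratio (8 at the time of writing), a switch
   with three case labels spread over the values 0 .. 100 is rejected here,
   because the range (100) exceeds 3 * 8.  */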
181 bool
182 switch_conversion::check_range ()
184 gcc_assert (m_range_size);
185 if (!tree_fits_uhwi_p (m_range_size))
187 m_reason = "index range way too large or otherwise unusable";
188 return false;
191 if (tree_to_uhwi (m_range_size)
192 > ((unsigned) m_count * param_switch_conversion_branch_ratio))
194 m_reason = "the maximum range-branch ratio exceeded";
195 return false;
198 return true;
201 /* Checks whether all but the final BB basic blocks are empty. */
203 bool
204 switch_conversion::check_all_empty_except_final ()
206 edge e, e_default = find_edge (m_switch_bb, m_default_bb);
207 edge_iterator ei;
209 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
211 if (e->dest == m_final_bb)
212 continue;
214 if (!empty_block_p (e->dest))
216 if (m_contiguous_range && e == e_default)
218 m_default_case_nonstandard = true;
219 continue;
222 m_reason = "bad case - a non-final BB not empty";
223 return false;
227 return true;
230 /* This function checks whether all required values in phi nodes in final_bb
231 are constants. Required values are those that correspond to a basic block
232 which is a part of the examined switch statement. It returns true if the
233 phi nodes are OK, otherwise false. */
235 bool
236 switch_conversion::check_final_bb ()
238 gphi_iterator gsi;
240 m_phi_count = 0;
241 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
243 gphi *phi = gsi.phi ();
244 unsigned int i;
246 if (virtual_operand_p (gimple_phi_result (phi)))
247 continue;
249 m_phi_count++;
251 for (i = 0; i < gimple_phi_num_args (phi); i++)
253 basic_block bb = gimple_phi_arg_edge (phi, i)->src;
255 if (bb == m_switch_bb
256 || (single_pred_p (bb)
257 && single_pred (bb) == m_switch_bb
258 && (!m_default_case_nonstandard
259 || empty_block_p (bb))))
261 tree reloc, val;
262 const char *reason = NULL;
264 val = gimple_phi_arg_def (phi, i);
265 if (!is_gimple_ip_invariant (val))
266 reason = "non-invariant value from a case";
267 else
269 reloc = initializer_constant_valid_p (val, TREE_TYPE (val));
270 if ((flag_pic && reloc != null_pointer_node)
271 || (!flag_pic && reloc == NULL_TREE))
273 if (reloc)
274 reason
275 = "value from a case would need runtime relocations";
276 else
277 reason
278 = "value from a case is not a valid initializer";
281 if (reason)
283 /* For a contiguous range, we can allow a non-constant value
284 or one that needs relocation, as long as it is
285 only reachable from the default case. */
286 if (bb == m_switch_bb)
287 bb = m_final_bb;
288 if (!m_contiguous_range || bb != m_default_bb)
290 m_reason = reason;
291 return false;
294 unsigned int branch_num = gimple_switch_num_labels (m_switch);
295 for (unsigned int i = 1; i < branch_num; i++)
297 if (gimple_switch_label_bb (cfun, m_switch, i) == bb)
299 m_reason = reason;
300 return false;
303 m_default_case_nonstandard = true;
309 return true;
312 /* The following function allocates default_values, target_{in,out}_names and
313 constructors arrays. The last one is also populated with pointers to
314 vectors that will become constructors of new arrays. */
316 void
317 switch_conversion::create_temp_arrays ()
319 int i;
321 m_default_values = XCNEWVEC (tree, m_phi_count * 3);
322 /* ??? Macros do not support multi argument templates in their
323 argument list. We create a typedef to work around that problem. */
324 typedef vec<constructor_elt, va_gc> *vec_constructor_elt_gc;
325 m_constructors = XCNEWVEC (vec_constructor_elt_gc, m_phi_count);
326 m_target_inbound_names = m_default_values + m_phi_count;
327 m_target_outbound_names = m_target_inbound_names + m_phi_count;
328 for (i = 0; i < m_phi_count; i++)
329 vec_alloc (m_constructors[i], tree_to_uhwi (m_range_size) + 1);
332 /* Populate the array of default values in the order of phi nodes.
333 DEFAULT_CASE is the CASE_LABEL_EXPR for the default switch branch
334 if the range is non-contiguous or the default case has standard
335 structure, otherwise it is the first non-default case instead. */
337 void
338 switch_conversion::gather_default_values (tree default_case)
340 gphi_iterator gsi;
341 basic_block bb = label_to_block (cfun, CASE_LABEL (default_case));
342 edge e;
343 int i = 0;
345 gcc_assert (CASE_LOW (default_case) == NULL_TREE
346 || m_default_case_nonstandard);
348 if (bb == m_final_bb)
349 e = find_edge (m_switch_bb, bb);
350 else
351 e = single_succ_edge (bb);
353 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
355 gphi *phi = gsi.phi ();
356 if (virtual_operand_p (gimple_phi_result (phi)))
357 continue;
358 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
359 gcc_assert (val);
360 m_default_values[i++] = val;
364 /* The following function populates the vectors in the constructors array with
365 future contents of the static arrays. The vectors are populated in the
366 order of phi nodes. */
368 void
369 switch_conversion::build_constructors ()
371 unsigned i, branch_num = gimple_switch_num_labels (m_switch);
372 tree pos = m_range_min;
373 tree pos_one = build_int_cst (TREE_TYPE (pos), 1);
375 for (i = 1; i < branch_num; i++)
377 tree cs = gimple_switch_label (m_switch, i);
378 basic_block bb = label_to_block (cfun, CASE_LABEL (cs));
379 edge e;
380 tree high;
381 gphi_iterator gsi;
382 int j;
384 if (bb == m_final_bb)
385 e = find_edge (m_switch_bb, bb);
386 else
387 e = single_succ_edge (bb);
388 gcc_assert (e);
390 while (tree_int_cst_lt (pos, CASE_LOW (cs)))
392 int k;
393 for (k = 0; k < m_phi_count; k++)
395 constructor_elt elt;
397 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
398 elt.value
399 = unshare_expr_without_location (m_default_values[k]);
400 m_constructors[k]->quick_push (elt);
403 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
405 gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));
407 j = 0;
408 if (CASE_HIGH (cs))
409 high = CASE_HIGH (cs);
410 else
411 high = CASE_LOW (cs);
412 for (gsi = gsi_start_phis (m_final_bb);
413 !gsi_end_p (gsi); gsi_next (&gsi))
415 gphi *phi = gsi.phi ();
416 if (virtual_operand_p (gimple_phi_result (phi)))
417 continue;
418 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
419 tree low = CASE_LOW (cs);
420 pos = CASE_LOW (cs);
424 constructor_elt elt;
426 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
427 elt.value = unshare_expr_without_location (val);
428 m_constructors[j]->quick_push (elt);
430 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
431 } while (!tree_int_cst_lt (high, pos)
432 && tree_int_cst_lt (low, pos));
433 j++;
438 /* If all values in the constructor vector are products of a linear function
439 a * x + b, then return true. When true, COEFF_A and COEFF_B are the
440 coefficients of the linear function. Note that equal values are a special
441 case of a linear function with a equal to zero. */
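/* Worked example (illustrative): if the case values 0, 1, 2, 3 map to the
   constants 10, 12, 14, 16, the first two entries give a = 12 - 10 = 2 and
   b = 12 - 2 * 1 = 10, and 2 * x + 10 then reproduces every entry, so the
   function returns true.  A vector of identical values yields a = 0 with b
   equal to that common value.  */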
443 bool
444 switch_conversion::contains_linear_function_p (vec<constructor_elt, va_gc> *vec,
445 wide_int *coeff_a,
446 wide_int *coeff_b)
448 unsigned int i;
449 constructor_elt *elt;
451 gcc_assert (vec->length () >= 2);
453 /* Let's try to find any linear function a * x + b that can apply to
454 given values. 'a' can be calculated as follows:
456 a = (y2 - y1) / (x2 - x1) where x2 - x1 = 1 (consecutive case indices)
457 a = y2 - y1
461 b = y2 - a * x2
465 tree elt0 = (*vec)[0].value;
466 tree elt1 = (*vec)[1].value;
468 if (TREE_CODE (elt0) != INTEGER_CST || TREE_CODE (elt1) != INTEGER_CST)
469 return false;
471 wide_int range_min
472 = wide_int::from (wi::to_wide (m_range_min),
473 TYPE_PRECISION (TREE_TYPE (elt0)),
474 TYPE_SIGN (TREE_TYPE (m_range_min)));
475 wide_int y1 = wi::to_wide (elt0);
476 wide_int y2 = wi::to_wide (elt1);
477 wide_int a = y2 - y1;
478 wide_int b = y2 - a * (range_min + 1);
480 /* Verify that all values fulfill the linear function. */
481 FOR_EACH_VEC_SAFE_ELT (vec, i, elt)
483 if (TREE_CODE (elt->value) != INTEGER_CST)
484 return false;
486 wide_int value = wi::to_wide (elt->value);
487 if (a * range_min + b != value)
488 return false;
490 ++range_min;
493 *coeff_a = a;
494 *coeff_b = b;
496 return true;
499 /* Return type which should be used for array elements, either TYPE's
500 main variant or, for integral types, some smaller integral type
501 that can still hold all the constants. */
503 tree
504 switch_conversion::array_value_type (tree type, int num)
506 unsigned int i, len = vec_safe_length (m_constructors[num]);
507 constructor_elt *elt;
508 int sign = 0;
509 tree smaller_type;
511 /* Types with alignments greater than their size can reach here, e.g. out of
512 SRA. We couldn't use these as an array component type so get back to the
513 main variant first, which, for our purposes, is fine for other types as
514 well. */
516 type = TYPE_MAIN_VARIANT (type);
518 if (!INTEGRAL_TYPE_P (type))
519 return type;
521 scalar_int_mode type_mode = SCALAR_INT_TYPE_MODE (type);
522 scalar_int_mode mode = get_narrowest_mode (type_mode);
523 if (GET_MODE_SIZE (type_mode) <= GET_MODE_SIZE (mode))
524 return type;
526 if (len < (optimize_bb_for_size_p (gimple_bb (m_switch)) ? 2 : 32))
527 return type;
529 FOR_EACH_VEC_SAFE_ELT (m_constructors[num], i, elt)
531 wide_int cst;
533 if (TREE_CODE (elt->value) != INTEGER_CST)
534 return type;
536 cst = wi::to_wide (elt->value);
537 while (1)
539 unsigned int prec = GET_MODE_BITSIZE (mode);
540 if (prec > HOST_BITS_PER_WIDE_INT)
541 return type;
543 if (sign >= 0 && cst == wi::zext (cst, prec))
545 if (sign == 0 && cst == wi::sext (cst, prec))
546 break;
547 sign = 1;
548 break;
550 if (sign <= 0 && cst == wi::sext (cst, prec))
552 sign = -1;
553 break;
556 if (sign == 1)
557 sign = 0;
559 if (!GET_MODE_WIDER_MODE (mode).exists (&mode)
560 || GET_MODE_SIZE (mode) >= GET_MODE_SIZE (type_mode))
561 return type;
565 if (sign == 0)
566 sign = TYPE_UNSIGNED (type) ? 1 : -1;
567 smaller_type = lang_hooks.types.type_for_mode (mode, sign >= 0);
568 if (GET_MODE_SIZE (type_mode)
569 <= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (smaller_type)))
570 return type;
572 return smaller_type;
575 /* Create an appropriate array type and declaration and assemble a static
576 array variable. Also create a load statement that initializes
577 the variable in question with a value from the static array. SWTCH is
578 the switch statement being converted, NUM is the index to
579 arrays of constructors, default values and target SSA names
580 for this particular array. ARR_INDEX_TYPE is the type of the index
581 of the new array, PHI is the phi node of the final BB that corresponds
582 to the value that will be loaded from the created array. TIDX
583 is an ssa name of a temporary variable holding the index for loads from the
584 new array. */
586 void
587 switch_conversion::build_one_array (int num, tree arr_index_type,
588 gphi *phi, tree tidx)
590 tree name;
591 gimple *load;
592 gimple_stmt_iterator gsi = gsi_for_stmt (m_switch);
593 location_t loc = gimple_location (m_switch);
595 gcc_assert (m_default_values[num]);
597 name = copy_ssa_name (PHI_RESULT (phi));
598 m_target_inbound_names[num] = name;
600 vec<constructor_elt, va_gc> *constructor = m_constructors[num];
601 wide_int coeff_a, coeff_b;
602 bool linear_p = contains_linear_function_p (constructor, &coeff_a, &coeff_b);
603 tree type;
604 if (linear_p
605 && (type = range_check_type (TREE_TYPE ((*constructor)[0].value))))
607 if (dump_file && coeff_a.to_uhwi () > 0)
608 fprintf (dump_file, "Linear transformation with A = %" PRId64
609 " and B = %" PRId64 "\n", coeff_a.to_shwi (),
610 coeff_b.to_shwi ());
612 /* We must use type of constructor values. */
613 gimple_seq seq = NULL;
614 tree tmp = gimple_convert (&seq, type, m_index_expr);
615 tree tmp2 = gimple_build (&seq, MULT_EXPR, type,
616 wide_int_to_tree (type, coeff_a), tmp);
617 tree tmp3 = gimple_build (&seq, PLUS_EXPR, type, tmp2,
618 wide_int_to_tree (type, coeff_b));
619 tree tmp4 = gimple_convert (&seq, TREE_TYPE (name), tmp3);
620 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
621 load = gimple_build_assign (name, tmp4);
623 else
625 tree array_type, ctor, decl, value_type, fetch, default_type;
627 default_type = TREE_TYPE (m_default_values[num]);
628 value_type = array_value_type (default_type, num);
629 array_type = build_array_type (value_type, arr_index_type);
630 if (default_type != value_type)
632 unsigned int i;
633 constructor_elt *elt;
635 FOR_EACH_VEC_SAFE_ELT (constructor, i, elt)
636 elt->value = fold_convert (value_type, elt->value);
638 ctor = build_constructor (array_type, constructor);
639 TREE_CONSTANT (ctor) = true;
640 TREE_STATIC (ctor) = true;
642 decl = build_decl (loc, VAR_DECL, NULL_TREE, array_type);
643 TREE_STATIC (decl) = 1;
644 DECL_INITIAL (decl) = ctor;
646 DECL_NAME (decl) = create_tmp_var_name ("CSWTCH");
647 DECL_ARTIFICIAL (decl) = 1;
648 DECL_IGNORED_P (decl) = 1;
649 TREE_CONSTANT (decl) = 1;
650 TREE_READONLY (decl) = 1;
651 DECL_IGNORED_P (decl) = 1;
652 if (offloading_function_p (cfun->decl))
653 DECL_ATTRIBUTES (decl)
654 = tree_cons (get_identifier ("omp declare target"), NULL_TREE,
655 NULL_TREE);
656 varpool_node::finalize_decl (decl);
658 fetch = build4 (ARRAY_REF, value_type, decl, tidx, NULL_TREE,
659 NULL_TREE);
660 if (default_type != value_type)
662 fetch = fold_convert (default_type, fetch);
663 fetch = force_gimple_operand_gsi (&gsi, fetch, true, NULL_TREE,
664 true, GSI_SAME_STMT);
666 load = gimple_build_assign (name, fetch);
669 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
670 update_stmt (load);
671 m_arr_ref_last = load;
674 /* Builds static arrays initialized with values gathered from
675 the switch statement. Also creates statements that load values from
676 them. */
678 void
679 switch_conversion::build_arrays ()
681 tree arr_index_type;
682 tree tidx, sub, utype;
683 gimple *stmt;
684 gimple_stmt_iterator gsi;
685 gphi_iterator gpi;
686 int i;
687 location_t loc = gimple_location (m_switch);
689 gsi = gsi_for_stmt (m_switch);
691 /* Make sure we do not generate arithmetic in a subrange. */
692 utype = TREE_TYPE (m_index_expr);
693 if (TREE_TYPE (utype))
694 utype = lang_hooks.types.type_for_mode (TYPE_MODE (TREE_TYPE (utype)), 1);
695 else
696 utype = lang_hooks.types.type_for_mode (TYPE_MODE (utype), 1);
698 arr_index_type = build_index_type (m_range_size);
699 tidx = make_ssa_name (utype);
700 sub = fold_build2_loc (loc, MINUS_EXPR, utype,
701 fold_convert_loc (loc, utype, m_index_expr),
702 fold_convert_loc (loc, utype, m_range_min));
703 sub = force_gimple_operand_gsi (&gsi, sub,
704 false, NULL, true, GSI_SAME_STMT);
705 stmt = gimple_build_assign (tidx, sub);
707 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
708 update_stmt (stmt);
709 m_arr_ref_first = stmt;
711 for (gpi = gsi_start_phis (m_final_bb), i = 0;
712 !gsi_end_p (gpi); gsi_next (&gpi))
714 gphi *phi = gpi.phi ();
715 if (!virtual_operand_p (gimple_phi_result (phi)))
716 build_one_array (i++, arr_index_type, phi, tidx);
717 else
719 edge e;
720 edge_iterator ei;
721 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
723 if (e->dest == m_final_bb)
724 break;
725 if (!m_default_case_nonstandard
726 || e->dest != m_default_bb)
728 e = single_succ_edge (e->dest);
729 break;
732 gcc_assert (e && e->dest == m_final_bb);
733 m_target_vop = PHI_ARG_DEF_FROM_EDGE (phi, e);
738 /* Generates and appropriately inserts loads of default values at the position
739 given by GSI. Returns the last inserted statement. */
741 gassign *
742 switch_conversion::gen_def_assigns (gimple_stmt_iterator *gsi)
744 int i;
745 gassign *assign = NULL;
747 for (i = 0; i < m_phi_count; i++)
749 tree name = copy_ssa_name (m_target_inbound_names[i]);
750 m_target_outbound_names[i] = name;
751 assign = gimple_build_assign (name, m_default_values[i]);
752 gsi_insert_before (gsi, assign, GSI_SAME_STMT);
753 update_stmt (assign);
755 return assign;
758 /* Deletes the unused bbs and edges that now contain the switch statement and
759 its empty branch bbs. BBD is the now dead BB containing
760 the original switch statement, FINAL is the last BB of the converted
761 switch statement (in terms of succession). */
763 void
764 switch_conversion::prune_bbs (basic_block bbd, basic_block final,
765 basic_block default_bb)
767 edge_iterator ei;
768 edge e;
770 for (ei = ei_start (bbd->succs); (e = ei_safe_edge (ei)); )
772 basic_block bb;
773 bb = e->dest;
774 remove_edge (e);
775 if (bb != final && bb != default_bb)
776 delete_basic_block (bb);
778 delete_basic_block (bbd);
781 /* Add values to phi nodes in final_bb for the two new edges. E1F is the edge
782 from the basic block loading values from an array and E2F from the basic
783 block loading default values. BBF is the last switch basic block (see the
784 bbf description in the comment below). */
786 void
787 switch_conversion::fix_phi_nodes (edge e1f, edge e2f, basic_block bbf)
789 gphi_iterator gsi;
790 int i;
792 for (gsi = gsi_start_phis (bbf), i = 0;
793 !gsi_end_p (gsi); gsi_next (&gsi))
795 gphi *phi = gsi.phi ();
796 tree inbound, outbound;
797 if (virtual_operand_p (gimple_phi_result (phi)))
798 inbound = outbound = m_target_vop;
799 else
801 inbound = m_target_inbound_names[i];
802 outbound = m_target_outbound_names[i++];
804 add_phi_arg (phi, inbound, e1f, UNKNOWN_LOCATION);
805 if (!m_default_case_nonstandard)
806 add_phi_arg (phi, outbound, e2f, UNKNOWN_LOCATION);
810 /* Creates a check whether the switch expression value actually falls into the
811 range given by all the cases. If it does not, the temporaries are loaded
812 with default values instead. */
814 void
815 switch_conversion::gen_inbound_check ()
817 tree label_decl1 = create_artificial_label (UNKNOWN_LOCATION);
818 tree label_decl2 = create_artificial_label (UNKNOWN_LOCATION);
819 tree label_decl3 = create_artificial_label (UNKNOWN_LOCATION);
820 glabel *label1, *label2, *label3;
821 tree utype, tidx;
822 tree bound;
824 gcond *cond_stmt;
826 gassign *last_assign = NULL;
827 gimple_stmt_iterator gsi;
828 basic_block bb0, bb1, bb2, bbf, bbd;
829 edge e01 = NULL, e02, e21, e1d, e1f, e2f;
830 location_t loc = gimple_location (m_switch);
832 gcc_assert (m_default_values);
834 bb0 = gimple_bb (m_switch);
836 tidx = gimple_assign_lhs (m_arr_ref_first);
837 utype = TREE_TYPE (tidx);
839 /* (end of) block 0 */
840 gsi = gsi_for_stmt (m_arr_ref_first);
841 gsi_next (&gsi);
843 bound = fold_convert_loc (loc, utype, m_range_size);
844 cond_stmt = gimple_build_cond (LE_EXPR, tidx, bound, NULL_TREE, NULL_TREE);
845 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
846 update_stmt (cond_stmt);
848 /* block 2 */
849 if (!m_default_case_nonstandard)
851 label2 = gimple_build_label (label_decl2);
852 gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
853 last_assign = gen_def_assigns (&gsi);
856 /* block 1 */
857 label1 = gimple_build_label (label_decl1);
858 gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
860 /* block F */
861 gsi = gsi_start_bb (m_final_bb);
862 label3 = gimple_build_label (label_decl3);
863 gsi_insert_before (&gsi, label3, GSI_SAME_STMT);
865 /* cfg fix */
866 e02 = split_block (bb0, cond_stmt);
867 bb2 = e02->dest;
869 if (m_default_case_nonstandard)
871 bb1 = bb2;
872 bb2 = m_default_bb;
873 e01 = e02;
874 e01->flags = EDGE_TRUE_VALUE;
875 e02 = make_edge (bb0, bb2, EDGE_FALSE_VALUE);
876 edge e_default = find_edge (bb1, bb2);
877 for (gphi_iterator gsi = gsi_start_phis (bb2);
878 !gsi_end_p (gsi); gsi_next (&gsi))
880 gphi *phi = gsi.phi ();
881 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, e_default);
882 add_phi_arg (phi, arg, e02,
883 gimple_phi_arg_location_from_edge (phi, e_default));
885 /* Partially fix the dominator tree, if it is available. */
886 if (dom_info_available_p (CDI_DOMINATORS))
887 redirect_immediate_dominators (CDI_DOMINATORS, bb1, bb0);
889 else
891 e21 = split_block (bb2, last_assign);
892 bb1 = e21->dest;
893 remove_edge (e21);
896 e1d = split_block (bb1, m_arr_ref_last);
897 bbd = e1d->dest;
898 remove_edge (e1d);
900 /* Flags and profiles of the edge for in-range values. */
901 if (!m_default_case_nonstandard)
902 e01 = make_edge (bb0, bb1, EDGE_TRUE_VALUE);
903 e01->probability = m_default_prob.invert ();
905 /* Flags and profiles of the edge taking care of out-of-range values. */
906 e02->flags &= ~EDGE_FALLTHRU;
907 e02->flags |= EDGE_FALSE_VALUE;
908 e02->probability = m_default_prob;
910 bbf = m_final_bb;
912 e1f = make_edge (bb1, bbf, EDGE_FALLTHRU);
913 e1f->probability = profile_probability::always ();
915 if (m_default_case_nonstandard)
916 e2f = NULL;
917 else
919 e2f = make_edge (bb2, bbf, EDGE_FALLTHRU);
920 e2f->probability = profile_probability::always ();
923 /* frequencies of the new BBs */
924 bb1->count = e01->count ();
925 bb2->count = e02->count ();
926 if (!m_default_case_nonstandard)
927 bbf->count = e1f->count () + e2f->count ();
929 /* Tidy blocks that have become unreachable. */
930 prune_bbs (bbd, m_final_bb,
931 m_default_case_nonstandard ? m_default_bb : NULL);
933 /* Fixup the PHI nodes in bbF. */
934 fix_phi_nodes (e1f, e2f, bbf);
936 /* Fix the dominator tree, if it is available. */
937 if (dom_info_available_p (CDI_DOMINATORS))
939 vec<basic_block> bbs_to_fix_dom;
941 set_immediate_dominator (CDI_DOMINATORS, bb1, bb0);
942 if (!m_default_case_nonstandard)
943 set_immediate_dominator (CDI_DOMINATORS, bb2, bb0);
944 if (! get_immediate_dominator (CDI_DOMINATORS, bbf))
945 /* If bbD was the immediate dominator ... */
946 set_immediate_dominator (CDI_DOMINATORS, bbf, bb0);
948 bbs_to_fix_dom.create (3 + (bb2 != bbf));
949 bbs_to_fix_dom.quick_push (bb0);
950 bbs_to_fix_dom.quick_push (bb1);
951 if (bb2 != bbf)
952 bbs_to_fix_dom.quick_push (bb2);
953 bbs_to_fix_dom.quick_push (bbf);
955 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
956 bbs_to_fix_dom.release ();
960 /* The following function is invoked on every switch statement (the current
961 one is given in SWTCH) and runs the individual phases of switch
962 conversion on it one after another until one fails or the conversion
963 is completed. On success, m_reason is NULL; otherwise it points
964 to a string with the reason why the conversion failed. */
966 void
967 switch_conversion::expand (gswitch *swtch)
969 /* Group case labels so that we get the right results from the heuristics
970 that decide on the code generation approach for this switch. */
971 m_cfg_altered |= group_case_labels_stmt (swtch);
973 /* If this switch is now a degenerate case with only a default label,
974 there is nothing left for us to do. */
975 if (gimple_switch_num_labels (swtch) < 2)
977 m_reason = "switch is a degenerate case";
978 return;
981 collect (swtch);
983 /* No error markers should reach here (they should be filtered out
984 during gimplification). */
985 gcc_checking_assert (TREE_TYPE (m_index_expr) != error_mark_node);
987 /* Prefer bit test if possible. */
988 if (tree_fits_uhwi_p (m_range_size)
989 && bit_test_cluster::can_be_handled (tree_to_uhwi (m_range_size), m_uniq)
990 && bit_test_cluster::is_beneficial (m_count, m_uniq))
992 m_reason = "expanding as bit test is preferable";
993 return;
996 if (m_uniq <= 2)
998 /* This will be expanded as a decision tree. */
999 m_reason = "expanding as jumps is preferable";
1000 return;
1003 /* If there is no common successor, we cannot do the transformation. */
1004 if (!m_final_bb)
1006 m_reason = "no common successor to all case label target blocks found";
1007 return;
1010 /* Check the case label values are within reasonable range: */
1011 if (!check_range ())
1013 gcc_assert (m_reason);
1014 return;
1017 /* For all the cases, see whether they are empty, whether the assignments
1018 they represent are constant, and so on... */
1019 if (!check_all_empty_except_final ())
1021 gcc_assert (m_reason);
1022 return;
1024 if (!check_final_bb ())
1026 gcc_assert (m_reason);
1027 return;
1030 /* At this point all checks have passed and we can proceed with the
1031 transformation. */
1033 create_temp_arrays ();
1034 gather_default_values (m_default_case_nonstandard
1035 ? gimple_switch_label (swtch, 1)
1036 : gimple_switch_default_label (swtch));
1037 build_constructors ();
1039 build_arrays (); /* Build the static arrays and assignments. */
1040 gen_inbound_check (); /* Build the bounds check. */
1042 m_cfg_altered = true;
1045 /* Destructor. */
1047 switch_conversion::~switch_conversion ()
1049 XDELETEVEC (m_constructors);
1050 XDELETEVEC (m_default_values);
1053 /* Constructor. */
1055 group_cluster::group_cluster (vec<cluster *> &clusters,
1056 unsigned start, unsigned end)
1058 gcc_checking_assert (end - start + 1 >= 1);
1059 m_prob = profile_probability::never ();
1060 m_cases.create (end - start + 1);
1061 for (unsigned i = start; i <= end; i++)
1063 m_cases.quick_push (static_cast<simple_cluster *> (clusters[i]));
1064 m_prob += clusters[i]->m_prob;
1066 m_subtree_prob = m_prob;
1069 /* Destructor. */
1071 group_cluster::~group_cluster ()
1073 for (unsigned i = 0; i < m_cases.length (); i++)
1074 delete m_cases[i];
1076 m_cases.release ();
1079 /* Dump content of a cluster. */
1081 void
1082 group_cluster::dump (FILE *f, bool details)
1084 unsigned total_values = 0;
1085 for (unsigned i = 0; i < m_cases.length (); i++)
1086 total_values += m_cases[i]->get_range (m_cases[i]->get_low (),
1087 m_cases[i]->get_high ());
1089 unsigned comparison_count = 0;
1090 for (unsigned i = 0; i < m_cases.length (); i++)
1092 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1093 comparison_count += sc->m_range_p ? 2 : 1;
1096 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1097 fprintf (f, "%s", get_type () == JUMP_TABLE ? "JT" : "BT");
1099 if (details)
1100 fprintf (f, "(values:%d comparisons:%d range:" HOST_WIDE_INT_PRINT_DEC
1101 " density: %.2f%%)", total_values, comparison_count, range,
1102 100.0f * comparison_count / range);
1104 fprintf (f, ":");
1105 PRINT_CASE (f, get_low ());
1106 fprintf (f, "-");
1107 PRINT_CASE (f, get_high ());
1108 fprintf (f, " ");
1111 /* Emit GIMPLE code to handle the cluster. */
1113 void
1114 jump_table_cluster::emit (tree index_expr, tree,
1115 tree default_label_expr, basic_block default_bb)
1117 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1118 unsigned HOST_WIDE_INT nondefault_range = 0;
1120 /* For a jump table we just emit a new gswitch statement that will
1121 later be lowered to a jump table. */
1122 auto_vec <tree> labels;
1123 labels.create (m_cases.length ());
1125 make_edge (m_case_bb, default_bb, 0);
1126 for (unsigned i = 0; i < m_cases.length (); i++)
1128 labels.quick_push (unshare_expr (m_cases[i]->m_case_label_expr));
1129 make_edge (m_case_bb, m_cases[i]->m_case_bb, 0);
1132 gswitch *s = gimple_build_switch (index_expr,
1133 unshare_expr (default_label_expr), labels);
1134 gimple_stmt_iterator gsi = gsi_start_bb (m_case_bb);
1135 gsi_insert_after (&gsi, s, GSI_NEW_STMT);
1137 /* Set up even probabilities for all cases. */
1138 for (unsigned i = 0; i < m_cases.length (); i++)
1140 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1141 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1142 unsigned HOST_WIDE_INT case_range
1143 = sc->get_range (sc->get_low (), sc->get_high ());
1144 nondefault_range += case_range;
1146 /* case_edge->aux is the number of values in the jump-table that are covered
1147 by the case_edge. */
1148 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + case_range);
1151 edge default_edge = gimple_switch_default_edge (cfun, s);
1152 default_edge->probability = profile_probability::never ();
1154 for (unsigned i = 0; i < m_cases.length (); i++)
1156 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1157 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1158 case_edge->probability
1159 = profile_probability::always ().apply_scale ((intptr_t)case_edge->aux,
1160 range);
1163 /* The probability of the default edge is the share of the range not covered by the non-default cases. */
1164 default_edge->probability
1165 += profile_probability::always ().apply_scale (nondefault_range,
1166 range).invert ();
1168 switch_decision_tree::reset_out_edges_aux (s);
1171 /* Find jump tables of given CLUSTERS, where all members of the vector
1172 are of type simple_cluster. New clusters are returned. */
1174 vec<cluster *>
1175 jump_table_cluster::find_jump_tables (vec<cluster *> &clusters)
1177 if (!is_enabled ())
1178 return clusters.copy ();
1180 unsigned l = clusters.length ();
1181 auto_vec<min_cluster_item> min;
1182 min.reserve (l + 1);
1184 min.quick_push (min_cluster_item (0, 0, 0));
1186 for (unsigned i = 1; i <= l; i++)
1188 /* Set minimal # of clusters with i-th item to infinite. */
1189 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1191 for (unsigned j = 0; j < i; j++)
1193 unsigned HOST_WIDE_INT s = min[j].m_non_jt_cases;
1194 if (i - j < case_values_threshold ())
1195 s += i - j;
1197 /* Prefer clusters that cover a smaller number of values. */
1198 if ((min[j].m_count + 1 < min[i].m_count
1199 || (min[j].m_count + 1 == min[i].m_count
1200 && s < min[i].m_non_jt_cases))
1201 && can_be_handled (clusters, j, i - 1))
1202 min[i] = min_cluster_item (min[j].m_count + 1, j, s);
1205 gcc_checking_assert (min[i].m_count != INT_MAX);
1208 /* No result. */
1209 if (min[l].m_count == l)
1210 return clusters.copy ();
1212 vec<cluster *> output;
1213 output.create (4);
1215 /* Find and build the clusters. */
1216 for (unsigned int end = l;;)
1218 int start = min[end].m_start;
1220 /* Do not allow clusters with small number of cases. */
1221 if (is_beneficial (clusters, start, end - 1))
1222 output.safe_push (new jump_table_cluster (clusters, start, end - 1));
1223 else
1224 for (int i = end - 1; i >= start; i--)
1225 output.safe_push (clusters[i]);
1227 end = start;
1229 if (start <= 0)
1230 break;
1233 output.reverse ();
1234 return output;
1237 /* Return true when cluster starting at START and ending at END (inclusive)
1238 can build a jump-table. */
1240 bool
1241 jump_table_cluster::can_be_handled (const vec<cluster *> &clusters,
1242 unsigned start, unsigned end)
1244 /* If the switch is relatively small such that the cost of one
1245 indirect jump on the target are higher than the cost of a
1246 decision tree, go with the decision tree.
1248 If range of values is much bigger than number of values,
1249 or if it is too large to represent in a HOST_WIDE_INT,
1250 make a sequence of conditional branches instead of a dispatch.
1252 The definition of "much bigger" depends on whether we are
1253 optimizing for size or for speed.
1255 For algorithm correctness, jump table for a single case must return
1256 true. We bail out in is_beneficial if it's called just for
1257 a single case. */
1258 if (start == end)
1259 return true;
1261 unsigned HOST_WIDE_INT max_ratio
1262 = (optimize_insn_for_size_p ()
1263 ? param_jump_table_max_growth_ratio_for_size
1264 : param_jump_table_max_growth_ratio_for_speed);
1265 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1266 clusters[end]->get_high ());
1267 /* Check overflow. */
1268 if (range == 0)
1269 return false;
1271 if (range > HOST_WIDE_INT_M1U / 100)
1272 return false;
1274 unsigned HOST_WIDE_INT lhs = 100 * range;
1275 if (lhs < range)
1276 return false;
1278 /* First make a quick guess, as each cluster
1279 can add at most 2 to the comparison_count. */
1280 if (lhs > 2 * max_ratio * (end - start + 1))
1281 return false;
1283 unsigned HOST_WIDE_INT comparison_count = 0;
1284 for (unsigned i = start; i <= end; i++)
1286 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1287 comparison_count += sc->m_range_p ? 2 : 1;
1290 return lhs <= max_ratio * comparison_count;
1293 /* Return true if cluster starting at START and ending at END (inclusive)
1294 is a profitable transformation. */
1296 bool
1297 jump_table_cluster::is_beneficial (const vec<cluster *> &,
1298 unsigned start, unsigned end)
1300 /* Single case bail out. */
1301 if (start == end)
1302 return false;
1304 return end - start + 1 >= case_values_threshold ();
1307 /* Find bit tests of given CLUSTERS, where all members of the vector
1308 are of type simple_cluster. New clusters are returned. */
1310 vec<cluster *>
1311 bit_test_cluster::find_bit_tests (vec<cluster *> &clusters)
1313 unsigned l = clusters.length ();
1314 auto_vec<min_cluster_item> min;
1315 min.reserve (l + 1);
1317 min.quick_push (min_cluster_item (0, 0, 0));
1319 for (unsigned i = 1; i <= l; i++)
1321 /* Set minimal # of clusters with i-th item to infinite. */
1322 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1324 for (unsigned j = 0; j < i; j++)
1326 if (min[j].m_count + 1 < min[i].m_count
1327 && can_be_handled (clusters, j, i - 1))
1328 min[i] = min_cluster_item (min[j].m_count + 1, j, INT_MAX);
1331 gcc_checking_assert (min[i].m_count != INT_MAX);
1334 /* No result. */
1335 if (min[l].m_count == l)
1336 return clusters.copy ();
1338 vec<cluster *> output;
1339 output.create (4);
1341 /* Find and build the clusters. */
1342 for (unsigned end = l;;)
1344 int start = min[end].m_start;
1346 if (is_beneficial (clusters, start, end - 1))
1348 bool entire = start == 0 && end == clusters.length ();
1349 output.safe_push (new bit_test_cluster (clusters, start, end - 1,
1350 entire));
1352 else
1353 for (int i = end - 1; i >= start; i--)
1354 output.safe_push (clusters[i]);
1356 end = start;
1358 if (start <= 0)
1359 break;
1362 output.reverse ();
1363 return output;
1366 /* Return true when RANGE of case values with UNIQ labels
1367 can build a bit test. */
1369 bool
1370 bit_test_cluster::can_be_handled (unsigned HOST_WIDE_INT range,
1371 unsigned int uniq)
1373 /* Check overflow. */
1374 if (range == 0)
1375 return false;
1377 if (range >= GET_MODE_BITSIZE (word_mode))
1378 return false;
1380 return uniq <= m_max_case_bit_tests;
1383 /* Return true when cluster starting at START and ending at END (inclusive)
1384 can build a bit test. */
1386 bool
1387 bit_test_cluster::can_be_handled (const vec<cluster *> &clusters,
1388 unsigned start, unsigned end)
1390 auto_vec<int, m_max_case_bit_tests> dest_bbs;
1391 /* For algorithm correctness, bit test for a single case must return
1392 true. We bail out in is_beneficial if it's called just for
1393 a single case. */
1394 if (start == end)
1395 return true;
1397 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1398 clusters[end]->get_high ());
1400 /* Make a guess first. */
1401 if (!can_be_handled (range, m_max_case_bit_tests))
1402 return false;
1404 for (unsigned i = start; i <= end; i++)
1406 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1407 /* m_max_case_bit_tests is a very small integer, thus the operation
1408 is constant. */
1409 if (!dest_bbs.contains (sc->m_case_bb->index))
1411 if (dest_bbs.length () >= m_max_case_bit_tests)
1412 return false;
1413 dest_bbs.quick_push (sc->m_case_bb->index);
1417 return true;
1420 /* Return true when COUNT cases with UNIQ distinct target labels are
1421 beneficial for the bit test transformation. */
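/* Concretely, given the thresholds below: five cases that all branch to just
   two distinct targets qualify (uniq == 2, count == 5), whereas four such
   cases do not.  */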
1423 bool
1424 bit_test_cluster::is_beneficial (unsigned count, unsigned uniq)
1426 return (((uniq == 1 && count >= 3)
1427 || (uniq == 2 && count >= 5)
1428 || (uniq == 3 && count >= 6)));
1431 /* Return true if cluster starting at START and ending at END (inclusive)
1432 is a profitable transformation. */
1434 bool
1435 bit_test_cluster::is_beneficial (const vec<cluster *> &clusters,
1436 unsigned start, unsigned end)
1438 /* Single case bail out. */
1439 if (start == end)
1440 return false;
1442 auto_bitmap dest_bbs;
1444 for (unsigned i = start; i <= end; i++)
1446 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1447 bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
1450 unsigned uniq = bitmap_count_bits (dest_bbs);
1451 unsigned count = end - start + 1;
1452 return is_beneficial (count, uniq);
1455 /* Comparison function for qsort to order bit tests by decreasing
1456 probability of execution. */
1459 case_bit_test::cmp (const void *p1, const void *p2)
1461 const case_bit_test *const d1 = (const case_bit_test *) p1;
1462 const case_bit_test *const d2 = (const case_bit_test *) p2;
1464 if (d2->bits != d1->bits)
1465 return d2->bits - d1->bits;
1467 /* Stabilize the sort. */
1468 return (LABEL_DECL_UID (CASE_LABEL (d2->label))
1469 - LABEL_DECL_UID (CASE_LABEL (d1->label)));
1472 /* Expand a switch statement by a short sequence of bit-wise
1473 comparisons. "switch(x)" is effectively converted into
1474 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
1475 integer constants.
1477 INDEX_EXPR is the value being switched on.
1479 MINVAL is the lowest case value in the case nodes,
1480 and RANGE is the highest value minus MINVAL. MINVAL and RANGE
1481 are not guaranteed to be of the same type as INDEX_EXPR
1482 (the gimplifier doesn't change the type of case label values,
1483 and MINVAL and RANGE are derived from those values).
1484 MAXVAL is MINVAL + RANGE.
1486 There *MUST* be max_case_bit_tests or less unique case
1487 node targets. */
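/* Illustrative sketch only (the real code works on GIMPLE and word_mode
   constants): for

     switch (x) { case 1: case 3: case 5: foo (); break;
                  case 2: case 4: bar (); break; }

   MINVAL is 1 and RANGE is 4, and the emitted tests behave like

     idx = (unsigned) x - 1;
     if (idx > 4) goto default_label;
     csui = (word) 1 << idx;
     if (csui & 0x15) goto foo_label;    bits 0, 2, 4 = cases 1, 3, 5
     if (csui & 0x0a) goto bar_label;    bits 1, 3    = cases 2, 4
     goto default_label;

   default_label, foo_label and bar_label are placeholder names for the
   corresponding case targets.  */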
1489 void
1490 bit_test_cluster::emit (tree index_expr, tree index_type,
1491 tree, basic_block default_bb)
1493 case_bit_test test[m_max_case_bit_tests] = { {} };
1494 unsigned int i, j, k;
1495 unsigned int count;
1497 tree unsigned_index_type = range_check_type (index_type);
1499 gimple_stmt_iterator gsi;
1500 gassign *shift_stmt;
1502 tree idx, tmp, csui;
1503 tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
1504 tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
1505 tree word_mode_one = fold_convert (word_type_node, integer_one_node);
1506 int prec = TYPE_PRECISION (word_type_node);
1507 wide_int wone = wi::one (prec);
1509 tree minval = get_low ();
1510 tree maxval = get_high ();
1511 tree range = int_const_binop (MINUS_EXPR, maxval, minval);
1512 unsigned HOST_WIDE_INT bt_range = get_range (minval, maxval);
1514 /* Go through all case labels, and collect the case labels, profile
1515 counts, and other information we need to build the branch tests. */
1516 count = 0;
1517 for (i = 0; i < m_cases.length (); i++)
1519 unsigned int lo, hi;
1520 simple_cluster *n = static_cast<simple_cluster *> (m_cases[i]);
1521 for (k = 0; k < count; k++)
1522 if (n->m_case_bb == test[k].target_bb)
1523 break;
1525 if (k == count)
1527 gcc_checking_assert (count < m_max_case_bit_tests);
1528 test[k].mask = wi::zero (prec);
1529 test[k].target_bb = n->m_case_bb;
1530 test[k].label = n->m_case_label_expr;
1531 test[k].bits = 0;
1532 count++;
1535 test[k].bits += n->get_range (n->get_low (), n->get_high ());
1537 lo = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_low (), minval));
1538 if (n->get_high () == NULL_TREE)
1539 hi = lo;
1540 else
1541 hi = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_high (),
1542 minval));
1544 for (j = lo; j <= hi; j++)
1545 test[k].mask |= wi::lshift (wone, j);
1548 qsort (test, count, sizeof (*test), case_bit_test::cmp);
1550 /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
1551 the minval subtractions, but it might make the mask constants more
1552 expensive. So, compare the costs. */
1553 if (compare_tree_int (minval, 0) > 0
1554 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
1556 int cost_diff;
1557 HOST_WIDE_INT m = tree_to_uhwi (minval);
1558 rtx reg = gen_raw_REG (word_mode, 10000);
1559 bool speed_p = optimize_insn_for_speed_p ();
1560 cost_diff = set_src_cost (gen_rtx_PLUS (word_mode, reg,
1561 GEN_INT (-m)),
1562 word_mode, speed_p);
1563 for (i = 0; i < count; i++)
1565 rtx r = immed_wide_int_const (test[i].mask, word_mode);
1566 cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
1567 word_mode, speed_p);
1568 r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
1569 cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
1570 word_mode, speed_p);
1572 if (cost_diff > 0)
1574 for (i = 0; i < count; i++)
1575 test[i].mask = wi::lshift (test[i].mask, m);
1576 minval = build_zero_cst (TREE_TYPE (minval));
1577 range = maxval;
1581 /* Now build the test-and-branch code. */
1583 gsi = gsi_last_bb (m_case_bb);
1585 /* idx = (unsigned)x - minval. */
1586 idx = fold_convert (unsigned_index_type, index_expr);
1587 idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
1588 fold_convert (unsigned_index_type, minval));
1589 idx = force_gimple_operand_gsi (&gsi, idx,
1590 /*simple=*/true, NULL_TREE,
1591 /*before=*/true, GSI_SAME_STMT);
1593 if (m_handles_entire_switch)
1595 /* if (idx > range) goto default */
1596 range
1597 = force_gimple_operand_gsi (&gsi,
1598 fold_convert (unsigned_index_type, range),
1599 /*simple=*/true, NULL_TREE,
1600 /*before=*/true, GSI_SAME_STMT);
1601 tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
1602 basic_block new_bb
1603 = hoist_edge_and_branch_if_true (&gsi, tmp, default_bb,
1604 profile_probability::unlikely ());
1605 gsi = gsi_last_bb (new_bb);
1608 /* csui = (1 << (word_mode) idx) */
1609 csui = make_ssa_name (word_type_node);
1610 tmp = fold_build2 (LSHIFT_EXPR, word_type_node, word_mode_one,
1611 fold_convert (word_type_node, idx));
1612 tmp = force_gimple_operand_gsi (&gsi, tmp,
1613 /*simple=*/false, NULL_TREE,
1614 /*before=*/true, GSI_SAME_STMT);
1615 shift_stmt = gimple_build_assign (csui, tmp);
1616 gsi_insert_before (&gsi, shift_stmt, GSI_SAME_STMT);
1617 update_stmt (shift_stmt);
1619 profile_probability prob = profile_probability::always ();
1621 /* for each unique set of cases:
1622 if (const & csui) goto target */
1623 for (k = 0; k < count; k++)
1625 prob = profile_probability::always ().apply_scale (test[k].bits,
1626 bt_range);
1627 bt_range -= test[k].bits;
1628 tmp = wide_int_to_tree (word_type_node, test[k].mask);
1629 tmp = fold_build2 (BIT_AND_EXPR, word_type_node, csui, tmp);
1630 tmp = force_gimple_operand_gsi (&gsi, tmp,
1631 /*simple=*/true, NULL_TREE,
1632 /*before=*/true, GSI_SAME_STMT);
1633 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, word_mode_zero);
1634 basic_block new_bb
1635 = hoist_edge_and_branch_if_true (&gsi, tmp, test[k].target_bb, prob);
1636 gsi = gsi_last_bb (new_bb);
1639 /* We should have removed all edges now. */
1640 gcc_assert (EDGE_COUNT (gsi_bb (gsi)->succs) == 0);
1642 /* If nothing matched, go to the default label. */
1643 edge e = make_edge (gsi_bb (gsi), default_bb, EDGE_FALLTHRU);
1644 e->probability = profile_probability::always ();
1647 /* Split the basic block at the statement pointed to by GSIP, and insert
1648 a branch to the target basic block of E_TRUE conditional on tree
1649 expression COND.
1651 It is assumed that there is already an edge from the to-be-split
1652 basic block to E_TRUE->dest block. This edge is removed, and the
1653 profile information on the edge is re-used for the new conditional
1654 jump.
1656 The CFG is updated. The dominator tree will not be valid after
1657 this transformation, but the immediate dominators are updated if
1658 UPDATE_DOMINATORS is true.
1660 Returns the newly created basic block. */
1662 basic_block
1663 bit_test_cluster::hoist_edge_and_branch_if_true (gimple_stmt_iterator *gsip,
1664 tree cond, basic_block case_bb,
1665 profile_probability prob)
1667 tree tmp;
1668 gcond *cond_stmt;
1669 edge e_false;
1670 basic_block new_bb, split_bb = gsi_bb (*gsip);
1672 edge e_true = make_edge (split_bb, case_bb, EDGE_TRUE_VALUE);
1673 e_true->probability = prob;
1674 gcc_assert (e_true->src == split_bb);
1676 tmp = force_gimple_operand_gsi (gsip, cond, /*simple=*/true, NULL,
1677 /*before=*/true, GSI_SAME_STMT);
1678 cond_stmt = gimple_build_cond_from_tree (tmp, NULL_TREE, NULL_TREE);
1679 gsi_insert_before (gsip, cond_stmt, GSI_SAME_STMT);
1681 e_false = split_block (split_bb, cond_stmt);
1682 new_bb = e_false->dest;
1683 redirect_edge_pred (e_true, split_bb);
1685 e_false->flags &= ~EDGE_FALLTHRU;
1686 e_false->flags |= EDGE_FALSE_VALUE;
1687 e_false->probability = e_true->probability.invert ();
1688 new_bb->count = e_false->count ();
1690 return new_bb;
1693 /* Compute the number of case labels that correspond to each outgoing edge of
1694 the switch statement. Record this information in the aux field of the edge. */
1696 void
1697 switch_decision_tree::compute_cases_per_edge ()
1699 reset_out_edges_aux (m_switch);
1700 int ncases = gimple_switch_num_labels (m_switch);
1701 for (int i = ncases - 1; i >= 1; --i)
1703 edge case_edge = gimple_switch_edge (cfun, m_switch, i);
1704 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + 1);
1708 /* Analyze the switch statement and return true when the statement is expanded
1709 as a decision tree. */
1711 bool
1712 switch_decision_tree::analyze_switch_statement ()
1714 unsigned l = gimple_switch_num_labels (m_switch);
1715 basic_block bb = gimple_bb (m_switch);
1716 auto_vec<cluster *> clusters;
1717 clusters.create (l - 1);
1719 basic_block default_bb = gimple_switch_default_bb (cfun, m_switch);
1720 m_case_bbs.reserve (l);
1721 m_case_bbs.quick_push (default_bb);
1723 compute_cases_per_edge ();
1725 for (unsigned i = 1; i < l; i++)
1727 tree elt = gimple_switch_label (m_switch, i);
1728 tree lab = CASE_LABEL (elt);
1729 basic_block case_bb = label_to_block (cfun, lab);
1730 edge case_edge = find_edge (bb, case_bb);
1731 tree low = CASE_LOW (elt);
1732 tree high = CASE_HIGH (elt);
1734 profile_probability p
1735 = case_edge->probability.apply_scale (1, (intptr_t) (case_edge->aux));
1736 clusters.quick_push (new simple_cluster (low, high, elt, case_edge->dest,
1737 p));
1738 m_case_bbs.quick_push (case_edge->dest);
1741 reset_out_edges_aux (m_switch);
1743 /* Find jump table clusters. */
1744 vec<cluster *> output = jump_table_cluster::find_jump_tables (clusters);
1746 /* Find bit test clusters. */
1747 vec<cluster *> output2;
1748 auto_vec<cluster *> tmp;
1749 output2.create (1);
1750 tmp.create (1);
1752 for (unsigned i = 0; i < output.length (); i++)
1754 cluster *c = output[i];
1755 if (c->get_type () != SIMPLE_CASE)
1757 if (!tmp.is_empty ())
1759 vec<cluster *> n = bit_test_cluster::find_bit_tests (tmp);
1760 output2.safe_splice (n);
1761 n.release ();
1762 tmp.truncate (0);
1764 output2.safe_push (c);
1766 else
1767 tmp.safe_push (c);
1770 /* We can still have a temporary vector to test. */
1771 if (!tmp.is_empty ())
1773 vec<cluster *> n = bit_test_cluster::find_bit_tests (tmp);
1774 output2.safe_splice (n);
1775 n.release ();
1778 if (dump_file)
1780 fprintf (dump_file, ";; GIMPLE switch case clusters: ");
1781 for (unsigned i = 0; i < output2.length (); i++)
1782 output2[i]->dump (dump_file, dump_flags & TDF_DETAILS);
1783 fprintf (dump_file, "\n");
1786 output.release ();
1788 bool expanded = try_switch_expansion (output2);
1790 for (unsigned i = 0; i < output2.length (); i++)
1791 delete output2[i];
1793 output2.release ();
1795 return expanded;
1798 /* Attempt to expand CLUSTERS as a decision tree. Return true when
1799 expanded. */
1801 bool
1802 switch_decision_tree::try_switch_expansion (vec<cluster *> &clusters)
1804 tree index_expr = gimple_switch_index (m_switch);
1805 tree index_type = TREE_TYPE (index_expr);
1806 basic_block bb = gimple_bb (m_switch);
1808 if (gimple_switch_num_labels (m_switch) == 1
1809 || range_check_type (index_type) == NULL_TREE)
1810 return false;
1812 /* Find the default case target label. */
1813 edge default_edge = gimple_switch_default_edge (cfun, m_switch);
1814 m_default_bb = default_edge->dest;
1816 /* Do the insertion of a case label into m_case_list. The labels are
1817 fed to us in descending order from the sorted vector of case labels used
1818 in the tree part of the middle end. So the list we construct is
1819 sorted in ascending order. */
1821 for (int i = clusters.length () - 1; i >= 0; i--)
1823 case_tree_node *r = m_case_list;
1824 m_case_list = m_case_node_pool.allocate ();
1825 m_case_list->m_right = r;
1826 m_case_list->m_c = clusters[i];
1829 record_phi_operand_mapping ();
1831 /* Split basic block that contains the gswitch statement. */
1832 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1833 edge e;
1834 if (gsi_end_p (gsi))
1835 e = split_block_after_labels (bb);
1836 else
1838 gsi_prev (&gsi);
1839 e = split_block (bb, gsi_stmt (gsi));
1841 bb = split_edge (e);
1843 /* Create new basic blocks for non-case clusters where specific expansion
1844 needs to happen. */
1845 for (unsigned i = 0; i < clusters.length (); i++)
1846 if (clusters[i]->get_type () != SIMPLE_CASE)
1848 clusters[i]->m_case_bb = create_empty_bb (bb);
1849 clusters[i]->m_case_bb->count = bb->count;
1850 clusters[i]->m_case_bb->loop_father = bb->loop_father;
1853 /* Do not do extra work for a single cluster. */
1854 if (clusters.length () == 1
1855 && clusters[0]->get_type () != SIMPLE_CASE)
1857 cluster *c = clusters[0];
1858 c->emit (index_expr, index_type,
1859 gimple_switch_default_label (m_switch), m_default_bb);
1860 redirect_edge_succ (single_succ_edge (bb), c->m_case_bb);
1862 else
1864 emit (bb, index_expr, default_edge->probability, index_type);
1866 /* Emit cluster-specific switch handling. */
1867 for (unsigned i = 0; i < clusters.length (); i++)
1868 if (clusters[i]->get_type () != SIMPLE_CASE)
1869 clusters[i]->emit (index_expr, index_type,
1870 gimple_switch_default_label (m_switch),
1871 m_default_bb);
1874 fix_phi_operands_for_edges ();
1876 return true;
1879 /* Before switch transformation, record all SSA_NAMEs defined in switch BB
1880 and used in a label basic block. */
1882 void
1883 switch_decision_tree::record_phi_operand_mapping ()
1885 basic_block switch_bb = gimple_bb (m_switch);
1886 /* Record all PHI nodes that have to be fixed after conversion. */
1887 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1889 gphi_iterator gsi;
1890 basic_block bb = m_case_bbs[i];
1891 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1893 gphi *phi = gsi.phi ();
1895 for (unsigned i = 0; i < gimple_phi_num_args (phi); i++)
1897 basic_block phi_src_bb = gimple_phi_arg_edge (phi, i)->src;
1898 if (phi_src_bb == switch_bb)
1900 tree def = gimple_phi_arg_def (phi, i);
1901 tree result = gimple_phi_result (phi);
1902 m_phi_mapping.put (result, def);
1903 break;
1910 /* Append new operands to PHI statements that were introduced due to
1911 addition of new edges to case labels. */
1913 void
1914 switch_decision_tree::fix_phi_operands_for_edges ()
1916 gphi_iterator gsi;
1918 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1920 basic_block bb = m_case_bbs[i];
1921 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1923 gphi *phi = gsi.phi ();
1924 for (unsigned j = 0; j < gimple_phi_num_args (phi); j++)
1926 tree def = gimple_phi_arg_def (phi, j);
1927 if (def == NULL_TREE)
1929 edge e = gimple_phi_arg_edge (phi, j);
1930 tree *definition
1931 = m_phi_mapping.get (gimple_phi_result (phi));
1932 gcc_assert (definition);
1933 add_phi_arg (phi, *definition, e, UNKNOWN_LOCATION);
1940 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1941 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1943 We generate a binary decision tree to select the appropriate target
1944 code. */
1946 void
1947 switch_decision_tree::emit (basic_block bb, tree index_expr,
1948 profile_probability default_prob, tree index_type)
1950 balance_case_nodes (&m_case_list, NULL);
1952 if (dump_file)
1953 dump_function_to_file (current_function_decl, dump_file, dump_flags);
1954 if (dump_file && (dump_flags & TDF_DETAILS))
1956 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1957 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1958 gcc_assert (m_case_list != NULL);
1959 dump_case_nodes (dump_file, m_case_list, indent_step, 0);
1962 bb = emit_case_nodes (bb, index_expr, m_case_list, default_prob, index_type,
1963 gimple_location (m_switch));
1965 if (bb)
1966 emit_jump (bb, m_default_bb);
1968 /* Remove all edges and keep just an edge that will reach default_bb. */
1969 bb = gimple_bb (m_switch);
1970 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1971 gsi_remove (&gsi, true);
1973 delete_basic_block (bb);
1976 /* Take an ordered list of case nodes
1977 and transform them into a near optimal binary tree,
1978 on the assumption that any target code selection value is as
1979 likely as any other.
1981 The transformation is performed by splitting the ordered
1982 list into two equal sections plus a pivot. The parts are
1983 then attached to the pivot as left and right branches. Each
1984 branch is then transformed recursively. */
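/* E.g., assuming equal probabilities, the ordered list 1 -> 3 -> 5 -> 7 -> 9
   is split at 5: 5 becomes the root and the sub-lists {1, 3} and {7, 9} are
   balanced recursively as its left and right branches.  Note that the split
   point bisects the accumulated probability, not simply the node count.  */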
1986 void
1987 switch_decision_tree::balance_case_nodes (case_tree_node **head,
1988 case_tree_node *parent)
1990 case_tree_node *np;
1992 np = *head;
1993 if (np)
1995 int i = 0;
1996 int ranges = 0;
1997 case_tree_node **npp;
1998 case_tree_node *left;
1999 profile_probability prob = profile_probability::never ();
2001 /* Count the number of entries on the branch. Also count the ranges. */
2003 while (np)
2005 if (!tree_int_cst_equal (np->m_c->get_low (), np->m_c->get_high ()))
2006 ranges++;
2008 i++;
2009 prob += np->m_c->m_prob;
2010 np = np->m_right;
2013 if (i > 2)
2015 /* Split this list if it is long enough for that to help. */
2016 npp = head;
2017 left = *npp;
2018 profile_probability pivot_prob = prob.apply_scale (1, 2);
2020 /* Find the place in the list that bisects the list's total
2021 probability. */
2022 while (1)
2024 /* Skip nodes while their probability does not reach
2025 that amount. */
2026 prob -= (*npp)->m_c->m_prob;
2027 if ((prob.initialized_p () && prob < pivot_prob)
2028 || ! (*npp)->m_right)
2029 break;
2030 npp = &(*npp)->m_right;
2033 np = *npp;
2034 *npp = 0;
2035 *head = np;
2036 np->m_parent = parent;
2037 np->m_left = left == np ? NULL : left;
2039 /* Optimize each of the two split parts. */
2040 balance_case_nodes (&np->m_left, np);
2041 balance_case_nodes (&np->m_right, np);
2042 np->m_c->m_subtree_prob = np->m_c->m_prob;
2043 if (np->m_left)
2044 np->m_c->m_subtree_prob += np->m_left->m_c->m_subtree_prob;
2045 if (np->m_right)
2046 np->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
2048 else
2050 /* Else leave this branch as one level,
2051 but fill in `parent' fields. */
2052 np = *head;
2053 np->m_parent = parent;
2054 np->m_c->m_subtree_prob = np->m_c->m_prob;
2055 for (; np->m_right; np = np->m_right)
2057 np->m_right->m_parent = np;
2058 (*head)->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
2064 /* Dump ROOT, a list or tree of case nodes, to file. */
2066 void
2067 switch_decision_tree::dump_case_nodes (FILE *f, case_tree_node *root,
2068 int indent_step, int indent_level)
2070 if (root == 0)
2071 return;
2072 indent_level++;
2074 dump_case_nodes (f, root->m_left, indent_step, indent_level);
2076 fputs (";; ", f);
2077 fprintf (f, "%*s", indent_step * indent_level, "");
2078 root->m_c->dump (f);
2079 root->m_c->m_prob.dump (f);
2080 fputs (" subtree: ", f);
2081 root->m_c->m_subtree_prob.dump (f);
2082 fputs (")\n", f);
2084 dump_case_nodes (f, root->m_right, indent_step, indent_level);
2088 /* Add an unconditional jump from basic block BB to CASE_BB. */
2090 void
2091 switch_decision_tree::emit_jump (basic_block bb, basic_block case_bb)
2093 edge e = single_succ_edge (bb);
2094 redirect_edge_succ (e, case_bb);
2097 /* Generate code to compare OP0 with OP1 and to jump to LABEL_BB if the
2098 comparison is true.
2099 COMPARISON is the GIMPLE comparison (EQ, NE, GT, etc.).
2100 PROB is the probability of jumping to LABEL_BB. */
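/* The emitted GIMPLE is roughly

     if (op0 <COMPARISON> op1) goto label_bb; else goto <new block>;

   where the new fall-through block is returned so that further tests can be
   chained onto it.  */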
2102 basic_block
2103 switch_decision_tree::emit_cmp_and_jump_insns (basic_block bb, tree op0,
2104 tree op1, tree_code comparison,
2105 basic_block label_bb,
2106 profile_probability prob,
2107 location_t loc)
2109 // TODO: this is sometimes called with lhs != index.
2110 op1 = fold_convert (TREE_TYPE (op0), op1);
2112 gcond *cond = gimple_build_cond (comparison, op0, op1, NULL_TREE, NULL_TREE);
2113 gimple_set_location (cond, loc);
2114 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2115 gsi_insert_after (&gsi, cond, GSI_NEW_STMT);
2117 gcc_assert (single_succ_p (bb));
2119 /* Make a new basic block where the false branch will take place. */
2120 edge false_edge = split_block (bb, cond);
2121 false_edge->flags = EDGE_FALSE_VALUE;
2122 false_edge->probability = prob.invert ();
2124 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2125 true_edge->probability = prob;
2127 return false_edge->dest;
2130 /* Generate code to jump to LABEL_BB if OP0 and OP1 are equal.
2131 PROB is the probability of jumping to LABEL_BB.
2132 BB is a basic block where the new condition will be placed. */
2134 basic_block
2135 switch_decision_tree::do_jump_if_equal (basic_block bb, tree op0, tree op1,
2136 basic_block label_bb,
2137 profile_probability prob,
2138 location_t loc)
2140 op1 = fold_convert (TREE_TYPE (op0), op1);
2142 gcond *cond = gimple_build_cond (EQ_EXPR, op0, op1, NULL_TREE, NULL_TREE);
2143 gimple_set_location (cond, loc);
2144 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2145 gsi_insert_before (&gsi, cond, GSI_SAME_STMT);
2147 gcc_assert (single_succ_p (bb));
2149 /* Make a new basic block where the false branch will take place. */
2150 edge false_edge = split_block (bb, cond);
2151 false_edge->flags = EDGE_FALSE_VALUE;
2152 false_edge->probability = prob.invert ();
2154 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2155 true_edge->probability = prob;
2157 return false_edge->dest;
2160 /* Emit step-by-step code to select a case for the value of INDEX.
2161 The thus generated decision tree follows the form of the
2162 case-node binary tree NODE, whose nodes represent test conditions.
2163 DEFAULT_PROB is the probability of cases leading to the default BB.
2164 INDEX_TYPE is the type of the index of the switch. */
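/* Returns the basic block that control falls through to when none of the
   tests in the subtree rooted at NODE matched, or NULL if there is no such
   fall-through block.  */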
2166 basic_block
2167 switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
2168 case_tree_node *node,
2169 profile_probability default_prob,
2170 tree index_type, location_t loc)
2172 profile_probability p;
2174 /* If node is null, we are done. */
2175 if (node == NULL)
2176 return bb;
2178 /* Single value case. */
2179 if (node->m_c->is_single_value_p ())
2181 /* Node is single valued. First see if the index expression matches
2182 this node and then check our children, if any. */
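/* P is the conditional probability of taking this case given that control
   reached this node: the case's own probability divided by everything still
   reachable from here (the whole subtree plus the remaining default
   probability).  */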
2183 p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
2184 bb = do_jump_if_equal (bb, index, node->m_c->get_low (),
2185 node->m_c->m_case_bb, p, loc);
2186 /* Since this case is taken at this point, subtract its probability
2187 from the subtree probability. */
2188 node->m_c->m_subtree_prob -= p;
2190 if (node->m_left != NULL && node->m_right != NULL)
2192 /* 1) the node has both children
2194 If both children are single-valued cases with no
2195 children, finish up all the work. This way, we can save
2196 one ordered comparison. */
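/* I.e. emit two equality tests (right value, then left value) and fall
   through towards the default handling, instead of an ordering test plus
   two recursive expansions.  */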
2198 if (!node->m_left->has_child ()
2199 && node->m_left->m_c->is_single_value_p ()
2200 && !node->m_right->has_child ()
2201 && node->m_right->m_c->is_single_value_p ())
2203 p = (node->m_right->m_c->m_prob
2204 / (node->m_c->m_subtree_prob + default_prob));
2205 bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
2206 node->m_right->m_c->m_case_bb, p, loc);
2208 p = (node->m_left->m_c->m_prob
2209 / (node->m_c->m_subtree_prob + default_prob));
2210 bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
2211 node->m_left->m_c->m_case_bb, p, loc);
2213 else
2215 /* Branch to a label where we will handle it later. */
2216 basic_block test_bb = split_edge (single_succ_edge (bb));
2217 redirect_edge_succ (single_pred_edge (test_bb),
2218 single_succ_edge (bb)->dest);
2220 p = ((node->m_right->m_c->m_subtree_prob
2221 + default_prob.apply_scale (1, 2))
2222 / (node->m_c->m_subtree_prob + default_prob));
2223 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2224 GT_EXPR, test_bb, p, loc);
2225 default_prob = default_prob.apply_scale (1, 2);
2227 /* Handle the left-hand subtree. */
2228 bb = emit_case_nodes (bb, index, node->m_left,
2229 default_prob, index_type, loc);
2231 /* If the left-hand subtree fell through,
2232 don't let it fall into the right-hand subtree. */
2233 if (bb && m_default_bb)
2234 emit_jump (bb, m_default_bb);
2236 bb = emit_case_nodes (test_bb, index, node->m_right,
2237 default_prob, index_type, loc);
2240 else if (node->m_left == NULL && node->m_right != NULL)
2242 /* 2) the node has only right child. */
2244 /* Here we have a right child but no left so we issue a conditional
2245 branch to default and process the right child.
2247 Omit the conditional branch to default if the right child
2248 does not have any children and is single valued; it would
2249 cost too much space to save so little time. */
2251 if (node->m_right->has_child ()
2252 || !node->m_right->m_c->is_single_value_p ())
2254 p = (default_prob.apply_scale (1, 2)
2255 / (node->m_c->m_subtree_prob + default_prob));
2256 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
2257 LT_EXPR, m_default_bb, p, loc);
2258 default_prob = default_prob.apply_scale (1, 2);
2260 bb = emit_case_nodes (bb, index, node->m_right, default_prob,
2261 index_type, loc);
2263 else
2265 /* We cannot process node->right normally
2266 since we haven't ruled out the numbers less than
2267 this node's value. So handle node->right explicitly. */
2268 p = (node->m_right->m_c->m_subtree_prob
2269 / (node->m_c->m_subtree_prob + default_prob));
2270 bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
2271 node->m_right->m_c->m_case_bb, p, loc);
2274 else if (node->m_left != NULL && node->m_right == NULL)
2276 /* 3) just one subtree, on the left. Similar case as previous. */
2278 if (node->m_left->has_child ()
2279 || !node->m_left->m_c->is_single_value_p ())
2281 p = (default_prob.apply_scale (1, 2)
2282 / (node->m_c->m_subtree_prob + default_prob));
2283 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2284 GT_EXPR, m_default_bb, p, loc);
2285 default_prob = default_prob.apply_scale (1, 2);
2287 bb = emit_case_nodes (bb, index, node->m_left, default_prob,
2288 index_type, loc);
2290 else
2292 /* We cannot process node->left normally
2293 since we haven't ruled out the numbers less than
2294 this node's value. So handle node->left explicitly. */
2295 p = (node->m_left->m_c->m_subtree_prob
2296 / (node->m_c->m_subtree_prob + default_prob));
2297 bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
2298 node->m_left->m_c->m_case_bb, p, loc);
2302 else
2304 /* Node is a range. These cases are very similar to those for a single
2305 value, except that we do not start by testing whether this node
2306 is the one to branch to. */
2307 if (node->has_child () || node->m_c->get_type () != SIMPLE_CASE)
2309 /* Branch to a label where we will handle it later. */
2310 basic_block test_bb = split_edge (single_succ_edge (bb));
2311 redirect_edge_succ (single_pred_edge (test_bb),
2312 single_succ_edge (bb)->dest);
2315 profile_probability right_prob = profile_probability::never ();
2316 if (node->m_right)
2317 right_prob = node->m_right->m_c->m_subtree_prob;
2318 p = ((right_prob + default_prob.apply_scale (1, 2))
2319 / (node->m_c->m_subtree_prob + default_prob));
2321 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2322 GT_EXPR, test_bb, p, loc);
2323 default_prob = default_prob.apply_scale (1, 2);
2325 /* Value belongs to this node or to the left-hand subtree. */
2326 p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
2327 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
2328 GE_EXPR, node->m_c->m_case_bb, p, loc);
2330 /* Handle the left-hand subtree. */
2331 bb = emit_case_nodes (bb, index, node->m_left,
2332 default_prob, index_type, loc);
2334 /* If the left-hand subtree fell through,
2335 don't let it fall into the right-hand subtree. */
2336 if (bb && m_default_bb)
2337 emit_jump (bb, m_default_bb);
2339 bb = emit_case_nodes (test_bb, index, node->m_right,
2340 default_prob, index_type, loc);
2342 else
2344 /* Node has no children so we check low and high bounds to remove
2345 redundant tests. Only one of the bounds can exist,
2346 since otherwise this node is bounded--a case tested already. */
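/* generate_range_test folds the two bound checks into a single unsigned
   comparison, roughly

     if ((unsigned) (index - low) > (unsigned) (high - low)) goto default;
     else goto case_bb;

   (types shown informally).  */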
2347 tree lhs, rhs;
2348 generate_range_test (bb, index, node->m_c->get_low (),
2349 node->m_c->get_high (), &lhs, &rhs);
2350 p = default_prob / (node->m_c->m_subtree_prob + default_prob);
2352 bb = emit_cmp_and_jump_insns (bb, lhs, rhs, GT_EXPR,
2353 m_default_bb, p, loc);
2355 emit_jump (bb, node->m_c->m_case_bb);
2356 return NULL;
2360 return bb;
2363 /* The main function of the pass scans statements for switches and invokes
2364 switch conversion (switch_conversion::expand) on them. */
2366 namespace {
2368 const pass_data pass_data_convert_switch =
2370 GIMPLE_PASS, /* type */
2371 "switchconv", /* name */
2372 OPTGROUP_NONE, /* optinfo_flags */
2373 TV_TREE_SWITCH_CONVERSION, /* tv_id */
2374 ( PROP_cfg | PROP_ssa ), /* properties_required */
2375 0, /* properties_provided */
2376 0, /* properties_destroyed */
2377 0, /* todo_flags_start */
2378 TODO_update_ssa, /* todo_flags_finish */
2381 class pass_convert_switch : public gimple_opt_pass
2383 public:
2384 pass_convert_switch (gcc::context *ctxt)
2385 : gimple_opt_pass (pass_data_convert_switch, ctxt)
2388 /* opt_pass methods: */
2389 virtual bool gate (function *) { return flag_tree_switch_conversion != 0; }
2390 virtual unsigned int execute (function *);
2392 }; // class pass_convert_switch
2394 unsigned int
2395 pass_convert_switch::execute (function *fun)
2397 basic_block bb;
2398 bool cfg_altered = false;
2400 FOR_EACH_BB_FN (bb, fun)
2402 gimple *stmt = last_stmt (bb);
2403 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
2405 if (dump_file)
2407 expanded_location loc = expand_location (gimple_location (stmt));
2409 fprintf (dump_file, "beginning to process the following "
2410 "SWITCH statement (%s:%d) : ------- \n",
2411 loc.file, loc.line);
2412 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2413 putc ('\n', dump_file);
2416 switch_conversion sconv;
2417 sconv.expand (as_a <gswitch *> (stmt));
2418 cfg_altered |= sconv.m_cfg_altered;
2419 if (!sconv.m_reason)
2421 if (dump_file)
2423 fputs ("Switch converted\n", dump_file);
2424 fputs ("--------------------------------\n", dump_file);
2427 /* Make no effort to update the post-dominator tree.
2428 It is actually not that hard for the transformations
2429 we have performed, but it is not supported
2430 by iterate_fix_dominators. */
2431 free_dominance_info (CDI_POST_DOMINATORS);
2433 else
2435 if (dump_file)
2437 fputs ("Bailing out - ", dump_file);
2438 fputs (sconv.m_reason, dump_file);
2439 fputs ("\n--------------------------------\n", dump_file);
2445 return cfg_altered ? TODO_cleanup_cfg : 0;
2448 } // anon namespace
2450 gimple_opt_pass *
2451 make_pass_convert_switch (gcc::context *ctxt)
2453 return new pass_convert_switch (ctxt);
2456 /* The main function of the pass scans statements for switches and lowers
2457 them to jump tables, bit tests or a decision tree via switch_decision_tree. */
2459 namespace {
2461 template <bool O0> class pass_lower_switch: public gimple_opt_pass
2463 public:
2464 pass_lower_switch (gcc::context *ctxt) : gimple_opt_pass (data, ctxt) {}
2466 static const pass_data data;
2467 opt_pass *
2468 clone ()
2470 return new pass_lower_switch<O0> (m_ctxt);
2473 virtual bool
2474 gate (function *)
2476 return !O0 || !optimize;
2479 virtual unsigned int execute (function *fun);
2480 }; // class pass_lower_switch
2482 template <bool O0>
2483 const pass_data pass_lower_switch<O0>::data = {
2484 GIMPLE_PASS, /* type */
2485 O0 ? "switchlower_O0" : "switchlower", /* name */
2486 OPTGROUP_NONE, /* optinfo_flags */
2487 TV_TREE_SWITCH_LOWERING, /* tv_id */
2488 ( PROP_cfg | PROP_ssa ), /* properties_required */
2489 0, /* properties_provided */
2490 0, /* properties_destroyed */
2491 0, /* todo_flags_start */
2492 TODO_update_ssa | TODO_cleanup_cfg, /* todo_flags_finish */
2495 template <bool O0>
2496 unsigned int
2497 pass_lower_switch<O0>::execute (function *fun)
2499 basic_block bb;
2500 bool expanded = false;
2502 auto_vec<gimple *> switch_statements;
2503 switch_statements.create (1);
2505 FOR_EACH_BB_FN (bb, fun)
2507 gimple *stmt = last_stmt (bb);
2508 gswitch *swtch;
2509 if (stmt && (swtch = dyn_cast<gswitch *> (stmt)))
2511 if (!O0)
2512 group_case_labels_stmt (swtch);
2513 switch_statements.safe_push (swtch);
2517 for (unsigned i = 0; i < switch_statements.length (); i++)
2519 gimple *stmt = switch_statements[i];
2520 if (dump_file)
2522 expanded_location loc = expand_location (gimple_location (stmt));
2524 fprintf (dump_file, "beginning to process the following "
2525 "SWITCH statement (%s:%d) : ------- \n",
2526 loc.file, loc.line);
2527 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2528 putc ('\n', dump_file);
2531 gswitch *swtch = dyn_cast<gswitch *> (stmt);
2532 if (swtch)
2534 switch_decision_tree dt (swtch);
2535 expanded |= dt.analyze_switch_statement ();
2539 if (expanded)
2541 free_dominance_info (CDI_DOMINATORS);
2542 free_dominance_info (CDI_POST_DOMINATORS);
2543 mark_virtual_operands_for_renaming (cfun);
2546 return 0;
2549 } // anon namespace
2551 gimple_opt_pass *
2552 make_pass_lower_switch_O0 (gcc::context *ctxt)
2554 return new pass_lower_switch<true> (ctxt);
2556 gimple_opt_pass *
2557 make_pass_lower_switch (gcc::context *ctxt)
2559 return new pass_lower_switch<false> (ctxt);