gcc/tree-switch-conversion.c
1 /* Lower GIMPLE_SWITCH expressions to something more efficient than
2 a jump table.
3 Copyright (C) 2006-2021 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /* This file handles the lowering of GIMPLE_SWITCH to an indexed
23 load, or a series of bit-test-and-branch expressions. */
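/* As an illustrative sketch (simplified relative to what the pass really
   emits), a switch whose cases merely select constant results,

     int f (int i)
     {
       switch (i)
         {
         case 0: return 10;
         case 1: return 13;
         case 2: return 27;
         default: return 0;
         }
     }

   can be lowered to a bounds check plus a load from a static array
   (named CSWTCH.* by this pass):

     static const int CSWTCH[3] = { 10, 13, 27 };

     int f (int i)
     {
       if ((unsigned) i <= 2)
         return CSWTCH[i];
       return 0;
     }

   The bit-test lowering instead groups case values that share a target
   and tests them all at once with a mask, roughly
   if ((1 << (i - minval)) & mask) goto target.  */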
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "insn-codes.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "cfghooks.h"
34 #include "tree-pass.h"
35 #include "ssa.h"
36 #include "optabs-tree.h"
37 #include "cgraph.h"
38 #include "gimple-pretty-print.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "cfganal.h"
43 #include "gimplify.h"
44 #include "gimple-iterator.h"
45 #include "gimplify-me.h"
46 #include "gimple-fold.h"
47 #include "tree-cfg.h"
48 #include "cfgloop.h"
49 #include "alloc-pool.h"
50 #include "target.h"
51 #include "tree-into-ssa.h"
52 #include "omp-general.h"
54 /* ??? For lang_hooks.types.type_for_mode, but is there a word_mode
55 type in the GIMPLE type system that is language-independent? */
56 #include "langhooks.h"
58 #include "tree-switch-conversion.h"
60 using namespace tree_switch_conversion;
62 /* Constructor. */
64 switch_conversion::switch_conversion (): m_final_bb (NULL),
65 m_constructors (NULL), m_default_values (NULL),
66 m_arr_ref_first (NULL), m_arr_ref_last (NULL),
67 m_reason (NULL), m_default_case_nonstandard (false), m_cfg_altered (false)
71 /* Collect information about the SWTCH statement. */
73 void
74 switch_conversion::collect (gswitch *swtch)
76 unsigned int branch_num = gimple_switch_num_labels (swtch);
77 tree min_case, max_case;
78 unsigned int i;
79 edge e, e_default, e_first;
80 edge_iterator ei;
82 m_switch = swtch;
84 /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
85 is a default label which is the first in the vector.
86 Collect the bits we can deduce from the CFG. */
87 m_index_expr = gimple_switch_index (swtch);
88 m_switch_bb = gimple_bb (swtch);
89 e_default = gimple_switch_default_edge (cfun, swtch);
90 m_default_bb = e_default->dest;
91 m_default_prob = e_default->probability;
93 /* Get upper and lower bounds of case values, and the covered range. */
94 min_case = gimple_switch_label (swtch, 1);
95 max_case = gimple_switch_label (swtch, branch_num - 1);
97 m_range_min = CASE_LOW (min_case);
98 if (CASE_HIGH (max_case) != NULL_TREE)
99 m_range_max = CASE_HIGH (max_case);
100 else
101 m_range_max = CASE_LOW (max_case);
103 m_contiguous_range = true;
104 tree last = CASE_HIGH (min_case) ? CASE_HIGH (min_case) : m_range_min;
105 for (i = 2; i < branch_num; i++)
107 tree elt = gimple_switch_label (swtch, i);
108 if (wi::to_wide (last) + 1 != wi::to_wide (CASE_LOW (elt)))
110 m_contiguous_range = false;
111 break;
113 last = CASE_HIGH (elt) ? CASE_HIGH (elt) : CASE_LOW (elt);
116 if (m_contiguous_range)
117 e_first = gimple_switch_edge (cfun, swtch, 1);
118 else
119 e_first = e_default;
121 /* See if there is one common successor block for all branch
122 targets. If it exists, record it in FINAL_BB.
123 Start with the destination of the first non-default case
124 if the range is contiguous, and with the default case otherwise, as
125 a guess; or use its destination in case it is a forwarder block. */
126 if (! single_pred_p (e_first->dest))
127 m_final_bb = e_first->dest;
128 else if (single_succ_p (e_first->dest)
129 && ! single_pred_p (single_succ (e_first->dest)))
130 m_final_bb = single_succ (e_first->dest);
131 /* Require that all switch destinations are either that common
132 FINAL_BB or a forwarder to it, except for the default
133 case if contiguous range. */
134 if (m_final_bb)
135 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
137 if (e->dest == m_final_bb)
138 continue;
140 if (single_pred_p (e->dest)
141 && single_succ_p (e->dest)
142 && single_succ (e->dest) == m_final_bb)
143 continue;
145 if (e == e_default && m_contiguous_range)
147 m_default_case_nonstandard = true;
148 continue;
151 m_final_bb = NULL;
152 break;
155 m_range_size
156 = int_const_binop (MINUS_EXPR, m_range_max, m_range_min);
158 /* Get a count of the number of case labels. Single-valued case labels
159 simply count as one, but a case range counts double, since it may
160 require two compares if it gets lowered as a branching tree. */
161 m_count = 0;
162 for (i = 1; i < branch_num; i++)
164 tree elt = gimple_switch_label (swtch, i);
165 m_count++;
166 if (CASE_HIGH (elt)
167 && ! tree_int_cst_equal (CASE_LOW (elt), CASE_HIGH (elt)))
168 m_count++;
171 /* Get the number of unique non-default targets out of the GIMPLE_SWITCH
172 block. Assuming a CFG cleanup has already removed degenerate
173 switch statements, this allows us to just use EDGE_COUNT. */
174 m_uniq = EDGE_COUNT (gimple_bb (swtch)->succs) - 1;
177 /* Checks whether the range given by the individual case statements of the
178 switch statement isn't too big and whether the number of branches actually
179 satisfies the size of the new array. */
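/* A worked example (illustrative, assuming the branch-ratio parameter is
   at its usual default of 8): a switch with m_count == 4 may cover a
   range of at most 4 * 8 == 32 values, so cases 0, 10, 20 and 30
   (range 30) still pass this check, while cases 0, 10, 20 and 40
   (range 40) are rejected.  */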
181 bool
182 switch_conversion::check_range ()
184 gcc_assert (m_range_size);
185 if (!tree_fits_uhwi_p (m_range_size))
187 m_reason = "index range way too large or otherwise unusable";
188 return false;
191 if (tree_to_uhwi (m_range_size)
192 > ((unsigned) m_count * param_switch_conversion_branch_ratio))
194 m_reason = "the maximum range-branch ratio exceeded";
195 return false;
198 return true;
201 /* Checks whether all but the final BB basic blocks are empty. */
203 bool
204 switch_conversion::check_all_empty_except_final ()
206 edge e, e_default = find_edge (m_switch_bb, m_default_bb);
207 edge_iterator ei;
209 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
211 if (e->dest == m_final_bb)
212 continue;
214 if (!empty_block_p (e->dest))
216 if (m_contiguous_range && e == e_default)
218 m_default_case_nonstandard = true;
219 continue;
222 m_reason = "bad case - a non-final BB not empty";
223 return false;
227 return true;
230 /* This function checks whether all required values in phi nodes in final_bb
231 are constants. Required values are those that correspond to a basic block
232 which is a part of the examined switch statement. It returns true if the
233 phi nodes are OK, otherwise false. */
235 bool
236 switch_conversion::check_final_bb ()
238 gphi_iterator gsi;
240 m_phi_count = 0;
241 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
243 gphi *phi = gsi.phi ();
244 unsigned int i;
246 if (virtual_operand_p (gimple_phi_result (phi)))
247 continue;
249 m_phi_count++;
251 for (i = 0; i < gimple_phi_num_args (phi); i++)
253 basic_block bb = gimple_phi_arg_edge (phi, i)->src;
255 if (bb == m_switch_bb
256 || (single_pred_p (bb)
257 && single_pred (bb) == m_switch_bb
258 && (!m_default_case_nonstandard
259 || empty_block_p (bb))))
261 tree reloc, val;
262 const char *reason = NULL;
264 val = gimple_phi_arg_def (phi, i);
265 if (!is_gimple_ip_invariant (val))
266 reason = "non-invariant value from a case";
267 else
269 reloc = initializer_constant_valid_p (val, TREE_TYPE (val));
270 if ((flag_pic && reloc != null_pointer_node)
271 || (!flag_pic && reloc == NULL_TREE))
273 if (reloc)
274 reason
275 = "value from a case would need runtime relocations";
276 else
277 reason
278 = "value from a case is not a valid initializer";
281 if (reason)
283 /* For a contiguous range, we can allow a non-constant value
284 or one that needs relocation, as long as it is
285 only reachable from the default case. */
286 if (bb == m_switch_bb)
287 bb = m_final_bb;
288 if (!m_contiguous_range || bb != m_default_bb)
290 m_reason = reason;
291 return false;
294 unsigned int branch_num = gimple_switch_num_labels (m_switch);
295 for (unsigned int i = 1; i < branch_num; i++)
297 if (gimple_switch_label_bb (cfun, m_switch, i) == bb)
299 m_reason = reason;
300 return false;
303 m_default_case_nonstandard = true;
309 return true;
312 /* The following function allocates default_values, target_{in,out}_names and
313 constructors arrays. The last one is also populated with pointers to
314 vectors that will become constructors of new arrays. */
316 void
317 switch_conversion::create_temp_arrays ()
319 int i;
321 m_default_values = XCNEWVEC (tree, m_phi_count * 3);
322 /* ??? Macros do not support multi argument templates in their
323 argument list. We create a typedef to work around that problem. */
324 typedef vec<constructor_elt, va_gc> *vec_constructor_elt_gc;
325 m_constructors = XCNEWVEC (vec_constructor_elt_gc, m_phi_count);
326 m_target_inbound_names = m_default_values + m_phi_count;
327 m_target_outbound_names = m_target_inbound_names + m_phi_count;
328 for (i = 0; i < m_phi_count; i++)
329 vec_alloc (m_constructors[i], tree_to_uhwi (m_range_size) + 1);
332 /* Populate the array of default values in the order of phi nodes.
333 DEFAULT_CASE is the CASE_LABEL_EXPR for the default switch branch
334 if the range is non-contiguous or the default case has standard
335 structure, otherwise it is the first non-default case instead. */
337 void
338 switch_conversion::gather_default_values (tree default_case)
340 gphi_iterator gsi;
341 basic_block bb = label_to_block (cfun, CASE_LABEL (default_case));
342 edge e;
343 int i = 0;
345 gcc_assert (CASE_LOW (default_case) == NULL_TREE
346 || m_default_case_nonstandard);
348 if (bb == m_final_bb)
349 e = find_edge (m_switch_bb, bb);
350 else
351 e = single_succ_edge (bb);
353 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
355 gphi *phi = gsi.phi ();
356 if (virtual_operand_p (gimple_phi_result (phi)))
357 continue;
358 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
359 gcc_assert (val);
360 m_default_values[i++] = val;
364 /* The following function populates the vectors in the constructors array with
365 future contents of the static arrays. The vectors are populated in the
366 order of phi nodes. */
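/* For instance (illustrative): with case values 0 -> 10, 2 -> 20, a
   default value of 0 and a single phi in the final BB, the constructor
   vector becomes { 10, 0, 20 } -- the hole at index 1 is padded with the
   default value so that every index of the case range has an entry.  */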
368 void
369 switch_conversion::build_constructors ()
371 unsigned i, branch_num = gimple_switch_num_labels (m_switch);
372 tree pos = m_range_min;
373 tree pos_one = build_int_cst (TREE_TYPE (pos), 1);
375 for (i = 1; i < branch_num; i++)
377 tree cs = gimple_switch_label (m_switch, i);
378 basic_block bb = label_to_block (cfun, CASE_LABEL (cs));
379 edge e;
380 tree high;
381 gphi_iterator gsi;
382 int j;
384 if (bb == m_final_bb)
385 e = find_edge (m_switch_bb, bb);
386 else
387 e = single_succ_edge (bb);
388 gcc_assert (e);
390 while (tree_int_cst_lt (pos, CASE_LOW (cs)))
392 int k;
393 for (k = 0; k < m_phi_count; k++)
395 constructor_elt elt;
397 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
398 elt.value
399 = unshare_expr_without_location (m_default_values[k]);
400 m_constructors[k]->quick_push (elt);
403 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
405 gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));
407 j = 0;
408 if (CASE_HIGH (cs))
409 high = CASE_HIGH (cs);
410 else
411 high = CASE_LOW (cs);
412 for (gsi = gsi_start_phis (m_final_bb);
413 !gsi_end_p (gsi); gsi_next (&gsi))
415 gphi *phi = gsi.phi ();
416 if (virtual_operand_p (gimple_phi_result (phi)))
417 continue;
418 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
419 tree low = CASE_LOW (cs);
420 pos = CASE_LOW (cs);
422 do
424 constructor_elt elt;
426 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
427 elt.value = unshare_expr_without_location (val);
428 m_constructors[j]->quick_push (elt);
430 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
431 } while (!tree_int_cst_lt (high, pos)
432 && tree_int_cst_lt (low, pos));
433 j++;
438 /* If all values in the constructor vector are products of a linear function
439 a * x + b, then return true. When true, COEFF_A and COEFF_B are the
440 coefficients of the linear function. Note that equal values are a special
441 case of a linear function with a equal to zero. */
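/* A small worked example (illustrative): for case values 0 .. 3 mapped to
   { 10, 13, 16, 19 } we get a = 13 - 10 = 3 and b = 13 - 3 * 1 = 10, and
   every entry indeed equals 3 * x + 10, so the array load can be replaced
   by that computation.  */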
443 bool
444 switch_conversion::contains_linear_function_p (vec<constructor_elt, va_gc> *vec,
445 wide_int *coeff_a,
446 wide_int *coeff_b)
448 unsigned int i;
449 constructor_elt *elt;
451 gcc_assert (vec->length () >= 2);
453 /* Let's try to find any linear function a * x + b that can apply to
454 given values. 'a' can be calculated as follows:
456 a = (y2 - y1) / (x2 - x1) where x2 - x1 = 1 (consecutive case indices)
457 a = y2 - y1
461 b = y2 - a * x2
465 tree elt0 = (*vec)[0].value;
466 tree elt1 = (*vec)[1].value;
468 if (TREE_CODE (elt0) != INTEGER_CST || TREE_CODE (elt1) != INTEGER_CST)
469 return false;
471 wide_int range_min
472 = wide_int::from (wi::to_wide (m_range_min),
473 TYPE_PRECISION (TREE_TYPE (elt0)),
474 TYPE_SIGN (TREE_TYPE (m_range_min)));
475 wide_int y1 = wi::to_wide (elt0);
476 wide_int y2 = wi::to_wide (elt1);
477 wide_int a = y2 - y1;
478 wide_int b = y2 - a * (range_min + 1);
480 /* Verify that all values fulfill the linear function. */
481 FOR_EACH_VEC_SAFE_ELT (vec, i, elt)
483 if (TREE_CODE (elt->value) != INTEGER_CST)
484 return false;
486 wide_int value = wi::to_wide (elt->value);
487 if (a * range_min + b != value)
488 return false;
490 ++range_min;
493 *coeff_a = a;
494 *coeff_b = b;
496 return true;
499 /* Return type which should be used for array elements, either TYPE's
500 main variant or, for integral types, some smaller integral type
501 that can still hold all the constants. */
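/* For example (illustrative): if the element type is int but every
   constant in the constructor fits into 0 .. 255, the array can be
   emitted with an 8-bit element type instead, shrinking CSWTCH.* to a
   quarter of its size; the loaded value is converted back to the
   original type afterwards.  */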
503 tree
504 switch_conversion::array_value_type (tree type, int num)
506 unsigned int i, len = vec_safe_length (m_constructors[num]);
507 constructor_elt *elt;
508 int sign = 0;
509 tree smaller_type;
511 /* Types with alignments greater than their size can reach here, e.g. out of
512 SRA. We couldn't use these as an array component type so get back to the
513 main variant first, which, for our purposes, is fine for other types as
514 well. */
516 type = TYPE_MAIN_VARIANT (type);
518 if (!INTEGRAL_TYPE_P (type))
519 return type;
521 scalar_int_mode type_mode = SCALAR_INT_TYPE_MODE (type);
522 scalar_int_mode mode = get_narrowest_mode (type_mode);
523 if (GET_MODE_SIZE (type_mode) <= GET_MODE_SIZE (mode))
524 return type;
526 if (len < (optimize_bb_for_size_p (gimple_bb (m_switch)) ? 2 : 32))
527 return type;
529 FOR_EACH_VEC_SAFE_ELT (m_constructors[num], i, elt)
531 wide_int cst;
533 if (TREE_CODE (elt->value) != INTEGER_CST)
534 return type;
536 cst = wi::to_wide (elt->value);
537 while (1)
539 unsigned int prec = GET_MODE_BITSIZE (mode);
540 if (prec > HOST_BITS_PER_WIDE_INT)
541 return type;
543 if (sign >= 0 && cst == wi::zext (cst, prec))
545 if (sign == 0 && cst == wi::sext (cst, prec))
546 break;
547 sign = 1;
548 break;
550 if (sign <= 0 && cst == wi::sext (cst, prec))
552 sign = -1;
553 break;
556 if (sign == 1)
557 sign = 0;
559 if (!GET_MODE_WIDER_MODE (mode).exists (&mode)
560 || GET_MODE_SIZE (mode) >= GET_MODE_SIZE (type_mode))
561 return type;
565 if (sign == 0)
566 sign = TYPE_UNSIGNED (type) ? 1 : -1;
567 smaller_type = lang_hooks.types.type_for_mode (mode, sign >= 0);
568 if (GET_MODE_SIZE (type_mode)
569 <= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (smaller_type)))
570 return type;
572 return smaller_type;
575 /* Create an appropriate array type and declaration and assemble a static
576 array variable. Also create a load statement that initializes
577 the variable in question with a value from the static array. SWTCH is
578 the switch statement being converted, NUM is the index to
579 arrays of constructors, default values and target SSA names
580 for this particular array. ARR_INDEX_TYPE is the type of the index
581 of the new array, PHI is the phi node of the final BB that corresponds
582 to the value that will be loaded from the created array. TIDX
583 is an ssa name of a temporary variable holding the index for loads from the
584 new array. */
586 void
587 switch_conversion::build_one_array (int num, tree arr_index_type,
588 gphi *phi, tree tidx)
590 tree name;
591 gimple *load;
592 gimple_stmt_iterator gsi = gsi_for_stmt (m_switch);
593 location_t loc = gimple_location (m_switch);
595 gcc_assert (m_default_values[num]);
597 name = copy_ssa_name (PHI_RESULT (phi));
598 m_target_inbound_names[num] = name;
600 vec<constructor_elt, va_gc> *constructor = m_constructors[num];
601 wide_int coeff_a, coeff_b;
602 bool linear_p = contains_linear_function_p (constructor, &coeff_a, &coeff_b);
603 tree type;
604 if (linear_p
605 && (type = range_check_type (TREE_TYPE ((*constructor)[0].value))))
607 if (dump_file && coeff_a.to_uhwi () > 0)
608 fprintf (dump_file, "Linear transformation with A = %" PRId64
609 " and B = %" PRId64 "\n", coeff_a.to_shwi (),
610 coeff_b.to_shwi ());
612 /* We must use the type of the constructor values. */
613 gimple_seq seq = NULL;
614 tree tmp = gimple_convert (&seq, type, m_index_expr);
615 tree tmp2 = gimple_build (&seq, MULT_EXPR, type,
616 wide_int_to_tree (type, coeff_a), tmp);
617 tree tmp3 = gimple_build (&seq, PLUS_EXPR, type, tmp2,
618 wide_int_to_tree (type, coeff_b));
619 tree tmp4 = gimple_convert (&seq, TREE_TYPE (name), tmp3);
620 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
621 load = gimple_build_assign (name, tmp4);
623 else
625 tree array_type, ctor, decl, value_type, fetch, default_type;
627 default_type = TREE_TYPE (m_default_values[num]);
628 value_type = array_value_type (default_type, num);
629 array_type = build_array_type (value_type, arr_index_type);
630 if (default_type != value_type)
632 unsigned int i;
633 constructor_elt *elt;
635 FOR_EACH_VEC_SAFE_ELT (constructor, i, elt)
636 elt->value = fold_convert (value_type, elt->value);
638 ctor = build_constructor (array_type, constructor);
639 TREE_CONSTANT (ctor) = true;
640 TREE_STATIC (ctor) = true;
642 decl = build_decl (loc, VAR_DECL, NULL_TREE, array_type);
643 TREE_STATIC (decl) = 1;
644 DECL_INITIAL (decl) = ctor;
646 DECL_NAME (decl) = create_tmp_var_name ("CSWTCH");
647 DECL_ARTIFICIAL (decl) = 1;
648 DECL_IGNORED_P (decl) = 1;
649 TREE_CONSTANT (decl) = 1;
650 TREE_READONLY (decl) = 1;
651 DECL_IGNORED_P (decl) = 1;
652 if (offloading_function_p (cfun->decl))
653 DECL_ATTRIBUTES (decl)
654 = tree_cons (get_identifier ("omp declare target"), NULL_TREE,
655 NULL_TREE);
656 varpool_node::finalize_decl (decl);
658 fetch = build4 (ARRAY_REF, value_type, decl, tidx, NULL_TREE,
659 NULL_TREE);
660 if (default_type != value_type)
662 fetch = fold_convert (default_type, fetch);
663 fetch = force_gimple_operand_gsi (&gsi, fetch, true, NULL_TREE,
664 true, GSI_SAME_STMT);
666 load = gimple_build_assign (name, fetch);
669 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
670 update_stmt (load);
671 m_arr_ref_last = load;
674 /* Builds and initializes static arrays initialized with values gathered from
675 the switch statement. Also creates statements that load values from
676 them. */
678 void
679 switch_conversion::build_arrays ()
681 tree arr_index_type;
682 tree tidx, sub, utype;
683 gimple *stmt;
684 gimple_stmt_iterator gsi;
685 gphi_iterator gpi;
686 int i;
687 location_t loc = gimple_location (m_switch);
689 gsi = gsi_for_stmt (m_switch);
691 /* Make sure we do not generate arithmetic in a subrange. */
692 utype = TREE_TYPE (m_index_expr);
693 if (TREE_TYPE (utype))
694 utype = lang_hooks.types.type_for_mode (TYPE_MODE (TREE_TYPE (utype)), 1);
695 else
696 utype = lang_hooks.types.type_for_mode (TYPE_MODE (utype), 1);
698 arr_index_type = build_index_type (m_range_size);
699 tidx = make_ssa_name (utype);
700 sub = fold_build2_loc (loc, MINUS_EXPR, utype,
701 fold_convert_loc (loc, utype, m_index_expr),
702 fold_convert_loc (loc, utype, m_range_min));
703 sub = force_gimple_operand_gsi (&gsi, sub,
704 false, NULL, true, GSI_SAME_STMT);
705 stmt = gimple_build_assign (tidx, sub);
707 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
708 update_stmt (stmt);
709 m_arr_ref_first = stmt;
711 for (gpi = gsi_start_phis (m_final_bb), i = 0;
712 !gsi_end_p (gpi); gsi_next (&gpi))
714 gphi *phi = gpi.phi ();
715 if (!virtual_operand_p (gimple_phi_result (phi)))
716 build_one_array (i++, arr_index_type, phi, tidx);
717 else
719 edge e;
720 edge_iterator ei;
721 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
723 if (e->dest == m_final_bb)
724 break;
725 if (!m_default_case_nonstandard
726 || e->dest != m_default_bb)
728 e = single_succ_edge (e->dest);
729 break;
732 gcc_assert (e && e->dest == m_final_bb);
733 m_target_vop = PHI_ARG_DEF_FROM_EDGE (phi, e);
738 /* Generates and appropriately inserts loads of default values at the position
739 given by GSI. Returns the last inserted statement. */
741 gassign *
742 switch_conversion::gen_def_assigns (gimple_stmt_iterator *gsi)
744 int i;
745 gassign *assign = NULL;
747 for (i = 0; i < m_phi_count; i++)
749 tree name = copy_ssa_name (m_target_inbound_names[i]);
750 m_target_outbound_names[i] = name;
751 assign = gimple_build_assign (name, m_default_values[i]);
752 gsi_insert_before (gsi, assign, GSI_SAME_STMT);
753 update_stmt (assign);
755 return assign;
758 /* Deletes the unused bbs and edges that now contain the switch statement and
759 its empty branch bbs. BBD is the now dead BB containing
760 the original switch statement, FINAL is the last BB of the converted
761 switch statement (in terms of succession). */
763 void
764 switch_conversion::prune_bbs (basic_block bbd, basic_block final,
765 basic_block default_bb)
767 edge_iterator ei;
768 edge e;
770 for (ei = ei_start (bbd->succs); (e = ei_safe_edge (ei)); )
772 basic_block bb;
773 bb = e->dest;
774 remove_edge (e);
775 if (bb != final && bb != default_bb)
776 delete_basic_block (bb);
778 delete_basic_block (bbd);
781 /* Add values to phi nodes in final_bb for the two new edges. E1F is the edge
782 from the basic block loading values from an array and E2F from the basic
783 block loading default values. BBF is the last switch basic block (see the
784 bbf description in the comment below). */
786 void
787 switch_conversion::fix_phi_nodes (edge e1f, edge e2f, basic_block bbf)
789 gphi_iterator gsi;
790 int i;
792 for (gsi = gsi_start_phis (bbf), i = 0;
793 !gsi_end_p (gsi); gsi_next (&gsi))
795 gphi *phi = gsi.phi ();
796 tree inbound, outbound;
797 if (virtual_operand_p (gimple_phi_result (phi)))
798 inbound = outbound = m_target_vop;
799 else
801 inbound = m_target_inbound_names[i];
802 outbound = m_target_outbound_names[i++];
804 add_phi_arg (phi, inbound, e1f, UNKNOWN_LOCATION);
805 if (!m_default_case_nonstandard)
806 add_phi_arg (phi, outbound, e2f, UNKNOWN_LOCATION);
810 /* Creates a check whether the switch expression value actually falls into the
811 range given by all the cases. If it does not, the temporaries are loaded
812 with default values instead. */
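/* A sketch of the shape that is produced (illustrative, for a standard
   default case):

     bb0: tidx = (unsigned) index - range_min;
          if (tidx <= range_size) goto bb1; else goto bb2;
     bb1: name = CSWTCH.*[tidx];    one load per phi, see build_arrays
     bb2: name' = default value;    one assignment per phi
     bbF: phi nodes merge the values coming from bb1 and bb2.  */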
814 void
815 switch_conversion::gen_inbound_check ()
817 tree label_decl1 = create_artificial_label (UNKNOWN_LOCATION);
818 tree label_decl2 = create_artificial_label (UNKNOWN_LOCATION);
819 tree label_decl3 = create_artificial_label (UNKNOWN_LOCATION);
820 glabel *label1, *label2, *label3;
821 tree utype, tidx;
822 tree bound;
824 gcond *cond_stmt;
826 gassign *last_assign = NULL;
827 gimple_stmt_iterator gsi;
828 basic_block bb0, bb1, bb2, bbf, bbd;
829 edge e01 = NULL, e02, e21, e1d, e1f, e2f;
830 location_t loc = gimple_location (m_switch);
832 gcc_assert (m_default_values);
834 bb0 = gimple_bb (m_switch);
836 tidx = gimple_assign_lhs (m_arr_ref_first);
837 utype = TREE_TYPE (tidx);
839 /* (end of) block 0 */
840 gsi = gsi_for_stmt (m_arr_ref_first);
841 gsi_next (&gsi);
843 bound = fold_convert_loc (loc, utype, m_range_size);
844 cond_stmt = gimple_build_cond (LE_EXPR, tidx, bound, NULL_TREE, NULL_TREE);
845 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
846 update_stmt (cond_stmt);
848 /* block 2 */
849 if (!m_default_case_nonstandard)
851 label2 = gimple_build_label (label_decl2);
852 gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
853 last_assign = gen_def_assigns (&gsi);
856 /* block 1 */
857 label1 = gimple_build_label (label_decl1);
858 gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
860 /* block F */
861 gsi = gsi_start_bb (m_final_bb);
862 label3 = gimple_build_label (label_decl3);
863 gsi_insert_before (&gsi, label3, GSI_SAME_STMT);
865 /* cfg fix */
866 e02 = split_block (bb0, cond_stmt);
867 bb2 = e02->dest;
869 if (m_default_case_nonstandard)
871 bb1 = bb2;
872 bb2 = m_default_bb;
873 e01 = e02;
874 e01->flags = EDGE_TRUE_VALUE;
875 e02 = make_edge (bb0, bb2, EDGE_FALSE_VALUE);
876 edge e_default = find_edge (bb1, bb2);
877 for (gphi_iterator gsi = gsi_start_phis (bb2);
878 !gsi_end_p (gsi); gsi_next (&gsi))
880 gphi *phi = gsi.phi ();
881 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, e_default);
882 add_phi_arg (phi, arg, e02,
883 gimple_phi_arg_location_from_edge (phi, e_default));
885 /* Partially fix the dominator tree, if it is available. */
886 if (dom_info_available_p (CDI_DOMINATORS))
887 redirect_immediate_dominators (CDI_DOMINATORS, bb1, bb0);
889 else
891 e21 = split_block (bb2, last_assign);
892 bb1 = e21->dest;
893 remove_edge (e21);
896 e1d = split_block (bb1, m_arr_ref_last);
897 bbd = e1d->dest;
898 remove_edge (e1d);
900 /* Flags and profiles of the edge for in-range values. */
901 if (!m_default_case_nonstandard)
902 e01 = make_edge (bb0, bb1, EDGE_TRUE_VALUE);
903 e01->probability = m_default_prob.invert ();
905 /* Flags and profiles of the edge taking care of out-of-range values. */
906 e02->flags &= ~EDGE_FALLTHRU;
907 e02->flags |= EDGE_FALSE_VALUE;
908 e02->probability = m_default_prob;
910 bbf = m_final_bb;
912 e1f = make_edge (bb1, bbf, EDGE_FALLTHRU);
913 e1f->probability = profile_probability::always ();
915 if (m_default_case_nonstandard)
916 e2f = NULL;
917 else
919 e2f = make_edge (bb2, bbf, EDGE_FALLTHRU);
920 e2f->probability = profile_probability::always ();
923 /* frequencies of the new BBs */
924 bb1->count = e01->count ();
925 bb2->count = e02->count ();
926 if (!m_default_case_nonstandard)
927 bbf->count = e1f->count () + e2f->count ();
929 /* Tidy blocks that have become unreachable. */
930 prune_bbs (bbd, m_final_bb,
931 m_default_case_nonstandard ? m_default_bb : NULL);
933 /* Fixup the PHI nodes in bbF. */
934 fix_phi_nodes (e1f, e2f, bbf);
936 /* Fix the dominator tree, if it is available. */
937 if (dom_info_available_p (CDI_DOMINATORS))
939 vec<basic_block> bbs_to_fix_dom;
941 set_immediate_dominator (CDI_DOMINATORS, bb1, bb0);
942 if (!m_default_case_nonstandard)
943 set_immediate_dominator (CDI_DOMINATORS, bb2, bb0);
944 if (! get_immediate_dominator (CDI_DOMINATORS, bbf))
945 /* If bbD was the immediate dominator ... */
946 set_immediate_dominator (CDI_DOMINATORS, bbf, bb0);
948 bbs_to_fix_dom.create (3 + (bb2 != bbf));
949 bbs_to_fix_dom.quick_push (bb0);
950 bbs_to_fix_dom.quick_push (bb1);
951 if (bb2 != bbf)
952 bbs_to_fix_dom.quick_push (bb2);
953 bbs_to_fix_dom.quick_push (bbf);
955 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
956 bbs_to_fix_dom.release ();
960 /* The following function is invoked on every switch statement (the current
961 one is given in SWTCH) and runs the individual phases of switch
962 conversion on it one after another until one fails or the conversion
963 is completed. On success, m_reason is NULL, otherwise it points
964 to a string with the reason why the conversion failed. */
966 void
967 switch_conversion::expand (gswitch *swtch)
969 /* Group case labels so that we get the right results from the heuristics
970 that decide on the code generation approach for this switch. */
971 m_cfg_altered |= group_case_labels_stmt (swtch);
973 /* If this switch is now a degenerate case with only a default label,
974 there is nothing left for us to do. */
975 if (gimple_switch_num_labels (swtch) < 2)
977 m_reason = "switch is a degenerate case";
978 return;
981 collect (swtch);
983 /* No error markers should reach here (they should be filtered out
984 during gimplification). */
985 gcc_checking_assert (TREE_TYPE (m_index_expr) != error_mark_node);
987 /* Prefer bit test if possible. */
988 if (tree_fits_uhwi_p (m_range_size)
989 && bit_test_cluster::can_be_handled (tree_to_uhwi (m_range_size), m_uniq)
990 && bit_test_cluster::is_beneficial (m_count, m_uniq))
992 m_reason = "expanding as bit test is preferable";
993 return;
996 if (m_uniq <= 2)
998 /* This will be expanded as a decision tree. */
999 m_reason = "expanding as jumps is preferable";
1000 return;
1003 /* If there is no common successor, we cannot do the transformation. */
1004 if (!m_final_bb)
1006 m_reason = "no common successor to all case label target blocks found";
1007 return;
1010 /* Check that the case label values are within a reasonable range. */
1011 if (!check_range ())
1013 gcc_assert (m_reason);
1014 return;
1017 /* For all the cases, see whether they are empty, whether the assignments
1018 they represent are constant and so on... */
1019 if (!check_all_empty_except_final ())
1021 gcc_assert (m_reason);
1022 return;
1024 if (!check_final_bb ())
1026 gcc_assert (m_reason);
1027 return;
1030 /* At this point all checks have passed and we can proceed with the
1031 transformation. */
1033 create_temp_arrays ();
1034 gather_default_values (m_default_case_nonstandard
1035 ? gimple_switch_label (swtch, 1)
1036 : gimple_switch_default_label (swtch));
1037 build_constructors ();
1039 build_arrays (); /* Build the static arrays and assignments. */
1040 gen_inbound_check (); /* Build the bounds check. */
1042 m_cfg_altered = true;
1045 /* Destructor. */
1047 switch_conversion::~switch_conversion ()
1049 XDELETEVEC (m_constructors);
1050 XDELETEVEC (m_default_values);
1053 /* Constructor. */
1055 group_cluster::group_cluster (vec<cluster *> &clusters,
1056 unsigned start, unsigned end)
1058 gcc_checking_assert (end - start + 1 >= 1);
1059 m_prob = profile_probability::never ();
1060 m_cases.create (end - start + 1);
1061 for (unsigned i = start; i <= end; i++)
1063 m_cases.quick_push (static_cast<simple_cluster *> (clusters[i]));
1064 m_prob += clusters[i]->m_prob;
1066 m_subtree_prob = m_prob;
1069 /* Destructor. */
1071 group_cluster::~group_cluster ()
1073 for (unsigned i = 0; i < m_cases.length (); i++)
1074 delete m_cases[i];
1076 m_cases.release ();
1079 /* Dump content of a cluster. */
1081 void
1082 group_cluster::dump (FILE *f, bool details)
1084 unsigned total_values = 0;
1085 for (unsigned i = 0; i < m_cases.length (); i++)
1086 total_values += m_cases[i]->get_range (m_cases[i]->get_low (),
1087 m_cases[i]->get_high ());
1089 unsigned comparison_count = 0;
1090 for (unsigned i = 0; i < m_cases.length (); i++)
1092 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1093 comparison_count += sc->m_range_p ? 2 : 1;
1096 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1097 fprintf (f, "%s", get_type () == JUMP_TABLE ? "JT" : "BT");
1099 if (details)
1100 fprintf (f, "(values:%d comparisons:%d range:" HOST_WIDE_INT_PRINT_DEC
1101 " density: %.2f%%)", total_values, comparison_count, range,
1102 100.0f * comparison_count / range);
1104 fprintf (f, ":");
1105 PRINT_CASE (f, get_low ());
1106 fprintf (f, "-");
1107 PRINT_CASE (f, get_high ());
1108 fprintf (f, " ");
1111 /* Emit GIMPLE code to handle the cluster. */
1113 void
1114 jump_table_cluster::emit (tree index_expr, tree,
1115 tree default_label_expr, basic_block default_bb)
1117 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1118 unsigned HOST_WIDE_INT nondefault_range = 0;
1120 /* For a jump table we just emit a new gswitch statement that will
1121 be later lowered to a jump table. */
1122 auto_vec <tree> labels;
1123 labels.create (m_cases.length ());
1125 make_edge (m_case_bb, default_bb, 0);
1126 for (unsigned i = 0; i < m_cases.length (); i++)
1128 labels.quick_push (unshare_expr (m_cases[i]->m_case_label_expr));
1129 make_edge (m_case_bb, m_cases[i]->m_case_bb, 0);
1132 gswitch *s = gimple_build_switch (index_expr,
1133 unshare_expr (default_label_expr), labels);
1134 gimple_stmt_iterator gsi = gsi_start_bb (m_case_bb);
1135 gsi_insert_after (&gsi, s, GSI_NEW_STMT);
1137 /* Set up even probabilities for all cases. */
1138 for (unsigned i = 0; i < m_cases.length (); i++)
1140 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1141 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1142 unsigned HOST_WIDE_INT case_range
1143 = sc->get_range (sc->get_low (), sc->get_high ());
1144 nondefault_range += case_range;
1146 /* case_edge->aux is number of values in a jump-table that are covered
1147 by the case_edge. */
1148 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + case_range);
1151 edge default_edge = gimple_switch_default_edge (cfun, s);
1152 default_edge->probability = profile_probability::never ();
1154 for (unsigned i = 0; i < m_cases.length (); i++)
1156 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1157 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1158 case_edge->probability
1159 = profile_probability::always ().apply_scale ((intptr_t)case_edge->aux,
1160 range);
1163 /* The part of the range not covered by the cases determines the probability of the default edge. */
1164 default_edge->probability
1165 += profile_probability::always ().apply_scale (nondefault_range,
1166 range).invert ();
1168 switch_decision_tree::reset_out_edges_aux (s);
1171 /* Find jump tables of given CLUSTERS, where all members of the vector
1172 are of type simple_cluster. New clusters are returned. */
1174 vec<cluster *>
1175 jump_table_cluster::find_jump_tables (vec<cluster *> &clusters)
1177 if (!is_enabled ())
1178 return clusters.copy ();
1180 unsigned l = clusters.length ();
1181 auto_vec<min_cluster_item> min;
1182 min.reserve (l + 1);
1184 min.quick_push (min_cluster_item (0, 0, 0));
1186 for (unsigned i = 1; i <= l; i++)
1188 /* Set minimal # of clusters with i-th item to infinite. */
1189 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1191 for (unsigned j = 0; j < i; j++)
1193 unsigned HOST_WIDE_INT s = min[j].m_non_jt_cases;
1194 if (i - j < case_values_threshold ())
1195 s += i - j;
1197 /* Prefer clusters with a smaller number of values covered. */
1198 if ((min[j].m_count + 1 < min[i].m_count
1199 || (min[j].m_count + 1 == min[i].m_count
1200 && s < min[i].m_non_jt_cases))
1201 && can_be_handled (clusters, j, i - 1))
1202 min[i] = min_cluster_item (min[j].m_count + 1, j, s);
1205 gcc_checking_assert (min[i].m_count != INT_MAX);
1208 /* No result. */
1209 if (min[l].m_count == l)
1210 return clusters.copy ();
1212 vec<cluster *> output;
1213 output.create (4);
1215 /* Find and build the clusters. */
1216 for (unsigned int end = l;;)
1218 int start = min[end].m_start;
1220 /* Do not allow clusters with small number of cases. */
1221 if (is_beneficial (clusters, start, end - 1))
1222 output.safe_push (new jump_table_cluster (clusters, start, end - 1));
1223 else
1224 for (int i = end - 1; i >= start; i--)
1225 output.safe_push (clusters[i]);
1227 end = start;
1229 if (start <= 0)
1230 break;
1233 output.reverse ();
1234 return output;
1237 /* Return true when cluster starting at START and ending at END (inclusive)
1238 can build a jump-table. */
1240 bool
1241 jump_table_cluster::can_be_handled (const vec<cluster *> &clusters,
1242 unsigned start, unsigned end)
1244 /* If the switch is relatively small such that the cost of one
1245 indirect jump on the target is higher than the cost of a
1246 decision tree, go with the decision tree.
1248 If range of values is much bigger than number of values,
1249 or if it is too large to represent in a HOST_WIDE_INT,
1250 make a sequence of conditional branches instead of a dispatch.
1252 The definition of "much bigger" depends on whether we are
1253 optimizing for size or for speed.
1255 For algorithm correctness, jump table for a single case must return
1256 true. We bail out in is_beneficial if it's called just for
1257 a single case. */
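/* Numeric example (illustrative): a candidate covering the values 0 .. 99
   (range 100) with ten single-valued cases gives 100 * 100 = 10000 on the
   left-hand side and max_ratio * 10 on the right, so the jump table is
   accepted only when max_ratio is at least 1000, i.e. when the range may
   be ten times the number of comparisons (the ratio parameters are
   expressed as percentages).  */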
1258 if (start == end)
1259 return true;
1261 unsigned HOST_WIDE_INT max_ratio
1262 = (optimize_insn_for_size_p ()
1263 ? param_jump_table_max_growth_ratio_for_size
1264 : param_jump_table_max_growth_ratio_for_speed);
1265 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1266 clusters[end]->get_high ());
1267 /* Check overflow. */
1268 if (range == 0)
1269 return false;
1271 if (range > HOST_WIDE_INT_M1U / 100)
1272 return false;
1274 unsigned HOST_WIDE_INT lhs = 100 * range;
1275 if (lhs < range)
1276 return false;
1278 /* First make a quick guess, as each cluster
1279 can add at most 2 to the comparison_count. */
1280 if (lhs > 2 * max_ratio * (end - start + 1))
1281 return false;
1283 unsigned HOST_WIDE_INT comparison_count = 0;
1284 for (unsigned i = start; i <= end; i++)
1286 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1287 comparison_count += sc->m_range_p ? 2 : 1;
1290 return lhs <= max_ratio * comparison_count;
1293 /* Return true if cluster starting at START and ending at END (inclusive)
1294 is a profitable transformation. */
1296 bool
1297 jump_table_cluster::is_beneficial (const vec<cluster *> &,
1298 unsigned start, unsigned end)
1300 /* Single case bail out. */
1301 if (start == end)
1302 return false;
1304 return end - start + 1 >= case_values_threshold ();
1307 /* Find bit tests of given CLUSTERS, where all members of the vector
1308 are of type simple_cluster. New clusters are returned. */
1310 vec<cluster *>
1311 bit_test_cluster::find_bit_tests (vec<cluster *> &clusters)
1313 if (!is_enabled ())
1314 return clusters.copy ();
1316 unsigned l = clusters.length ();
1317 auto_vec<min_cluster_item> min;
1318 min.reserve (l + 1);
1320 min.quick_push (min_cluster_item (0, 0, 0));
1322 for (unsigned i = 1; i <= l; i++)
1324 /* Set minimal # of clusters with i-th item to infinite. */
1325 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1327 for (unsigned j = 0; j < i; j++)
1329 if (min[j].m_count + 1 < min[i].m_count
1330 && can_be_handled (clusters, j, i - 1))
1331 min[i] = min_cluster_item (min[j].m_count + 1, j, INT_MAX);
1334 gcc_checking_assert (min[i].m_count != INT_MAX);
1337 /* No result. */
1338 if (min[l].m_count == l)
1339 return clusters.copy ();
1341 vec<cluster *> output;
1342 output.create (4);
1344 /* Find and build the clusters. */
1345 for (unsigned end = l;;)
1347 int start = min[end].m_start;
1349 if (is_beneficial (clusters, start, end - 1))
1351 bool entire = start == 0 && end == clusters.length ();
1352 output.safe_push (new bit_test_cluster (clusters, start, end - 1,
1353 entire));
1355 else
1356 for (int i = end - 1; i >= start; i--)
1357 output.safe_push (clusters[i]);
1359 end = start;
1361 if (start <= 0)
1362 break;
1365 output.reverse ();
1366 return output;
1369 /* Return true when RANGE of case values with UNIQ labels
1370 can build a bit test. */
1372 bool
1373 bit_test_cluster::can_be_handled (unsigned HOST_WIDE_INT range,
1374 unsigned int uniq)
1376 /* Check overflow. */
1377 if (range == 0)
1378 return false;
1380 if (range >= GET_MODE_BITSIZE (word_mode))
1381 return false;
1383 return uniq <= m_max_case_bit_tests;
1386 /* Return true when cluster starting at START and ending at END (inclusive)
1387 can build a bit test. */
1389 bool
1390 bit_test_cluster::can_be_handled (const vec<cluster *> &clusters,
1391 unsigned start, unsigned end)
1393 auto_vec<int, m_max_case_bit_tests> dest_bbs;
1394 /* For algorithm correctness, bit test for a single case must return
1395 true. We bail out in is_beneficial if it's called just for
1396 a single case. */
1397 if (start == end)
1398 return true;
1400 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1401 clusters[end]->get_high ());
1403 /* Make a guess first. */
1404 if (!can_be_handled (range, m_max_case_bit_tests))
1405 return false;
1407 for (unsigned i = start; i <= end; i++)
1409 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1410 /* m_max_case_bit_tests is a very small integer, thus the operation
1411 is constant time. */
1412 if (!dest_bbs.contains (sc->m_case_bb->index))
1414 if (dest_bbs.length () >= m_max_case_bit_tests)
1415 return false;
1416 dest_bbs.quick_push (sc->m_case_bb->index);
1420 return true;
1423 /* Return true when COUNT of cases of UNIQ labels is beneficial for bit test
1424 transformation. */
1426 bool
1427 bit_test_cluster::is_beneficial (unsigned count, unsigned uniq)
1429 return (((uniq == 1 && count >= 3)
1430 || (uniq == 2 && count >= 5)
1431 || (uniq == 3 && count >= 6)));
1434 /* Return true if cluster starting at START and ending at END (inclusive)
1435 is a profitable transformation. */
1437 bool
1438 bit_test_cluster::is_beneficial (const vec<cluster *> &clusters,
1439 unsigned start, unsigned end)
1441 /* Single case bail out. */
1442 if (start == end)
1443 return false;
1445 auto_bitmap dest_bbs;
1447 for (unsigned i = start; i <= end; i++)
1449 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1450 bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
1453 unsigned uniq = bitmap_count_bits (dest_bbs);
1454 unsigned count = end - start + 1;
1455 return is_beneficial (count, uniq);
1458 /* Comparison function for qsort to order bit tests by decreasing
1459 probability of execution. */
1461 int
1462 case_bit_test::cmp (const void *p1, const void *p2)
1464 const case_bit_test *const d1 = (const case_bit_test *) p1;
1465 const case_bit_test *const d2 = (const case_bit_test *) p2;
1467 if (d2->bits != d1->bits)
1468 return d2->bits - d1->bits;
1470 /* Stabilize the sort. */
1471 return (LABEL_DECL_UID (CASE_LABEL (d2->label))
1472 - LABEL_DECL_UID (CASE_LABEL (d1->label)));
1475 /* Expand a switch statement by a short sequence of bit-wise
1476 comparisons. "switch(x)" is effectively converted into
1477 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
1478 integer constants.
1480 INDEX_EXPR is the value being switched on.
1482 MINVAL is the lowest case value in the case nodes,
1483 and RANGE is highest value minus MINVAL. MINVAL and RANGE
1484 are not guaranteed to be of the same type as INDEX_EXPR
1485 (the gimplifier doesn't change the type of case label values,
1486 and MINVAL and RANGE are derived from those values).
1487 MAXVAL is MINVAL + RANGE.
1489 There *MUST* be max_case_bit_tests or fewer unique case
1490 node targets. */
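/* Worked example (illustrative): cases 1, 4 and 6 branching to the same
   target, with MINVAL == 1, become the relative values 0, 3 and 5, so the
   mask is (1 << 0) | (1 << 3) | (1 << 5) == 0x29 and the emitted test is
   roughly

     if (((word) 1 << (x - 1)) & 0x29) goto target;  */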
1492 void
1493 bit_test_cluster::emit (tree index_expr, tree index_type,
1494 tree, basic_block default_bb)
1496 case_bit_test test[m_max_case_bit_tests] = { {} };
1497 unsigned int i, j, k;
1498 unsigned int count;
1500 tree unsigned_index_type = range_check_type (index_type);
1502 gimple_stmt_iterator gsi;
1503 gassign *shift_stmt;
1505 tree idx, tmp, csui;
1506 tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
1507 tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
1508 tree word_mode_one = fold_convert (word_type_node, integer_one_node);
1509 int prec = TYPE_PRECISION (word_type_node);
1510 wide_int wone = wi::one (prec);
1512 tree minval = get_low ();
1513 tree maxval = get_high ();
1514 unsigned HOST_WIDE_INT bt_range = get_range (minval, maxval);
1516 /* Go through all case labels, and collect the case labels, profile
1517 counts, and other information we need to build the branch tests. */
1518 count = 0;
1519 for (i = 0; i < m_cases.length (); i++)
1521 unsigned int lo, hi;
1522 simple_cluster *n = static_cast<simple_cluster *> (m_cases[i]);
1523 for (k = 0; k < count; k++)
1524 if (n->m_case_bb == test[k].target_bb)
1525 break;
1527 if (k == count)
1529 gcc_checking_assert (count < m_max_case_bit_tests);
1530 test[k].mask = wi::zero (prec);
1531 test[k].target_bb = n->m_case_bb;
1532 test[k].label = n->m_case_label_expr;
1533 test[k].bits = 0;
1534 count++;
1537 test[k].bits += n->get_range (n->get_low (), n->get_high ());
1539 lo = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_low (), minval));
1540 if (n->get_high () == NULL_TREE)
1541 hi = lo;
1542 else
1543 hi = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_high (),
1544 minval));
1546 for (j = lo; j <= hi; j++)
1547 test[k].mask |= wi::lshift (wone, j);
1550 qsort (test, count, sizeof (*test), case_bit_test::cmp);
1552 /* If every possible relative value of the index expression is a valid shift
1553 amount, then we can merge the entry test into the bit test. */
1554 wide_int min, max;
1555 bool entry_test_needed;
1556 if (TREE_CODE (index_expr) == SSA_NAME
1557 && get_range_info (index_expr, &min, &max) == VR_RANGE
1558 && wi::leu_p (max - min, prec - 1))
1560 tree index_type = TREE_TYPE (index_expr);
1561 minval = fold_convert (index_type, minval);
1562 wide_int iminval = wi::to_wide (minval);
1563 if (wi::lt_p (min, iminval, TYPE_SIGN (index_type)))
1565 minval = wide_int_to_tree (index_type, min);
1566 for (i = 0; i < count; i++)
1567 test[i].mask = wi::lshift (test[i].mask, iminval - min);
1569 else if (wi::gt_p (min, iminval, TYPE_SIGN (index_type)))
1571 minval = wide_int_to_tree (index_type, min);
1572 for (i = 0; i < count; i++)
1573 test[i].mask = wi::lrshift (test[i].mask, min - iminval);
1575 maxval = wide_int_to_tree (index_type, max);
1576 entry_test_needed = false;
1578 else
1579 entry_test_needed = true;
1581 /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
1582 the minval subtractions, but it might make the mask constants more
1583 expensive. So, compare the costs. */
1584 if (compare_tree_int (minval, 0) > 0 && compare_tree_int (maxval, prec) < 0)
1586 int cost_diff;
1587 HOST_WIDE_INT m = tree_to_uhwi (minval);
1588 rtx reg = gen_raw_REG (word_mode, 10000);
1589 bool speed_p = optimize_insn_for_speed_p ();
1590 cost_diff = set_src_cost (gen_rtx_PLUS (word_mode, reg,
1591 GEN_INT (-m)),
1592 word_mode, speed_p);
1593 for (i = 0; i < count; i++)
1595 rtx r = immed_wide_int_const (test[i].mask, word_mode);
1596 cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
1597 word_mode, speed_p);
1598 r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
1599 cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
1600 word_mode, speed_p);
1602 if (cost_diff > 0)
1604 for (i = 0; i < count; i++)
1605 test[i].mask = wi::lshift (test[i].mask, m);
1606 minval = build_zero_cst (TREE_TYPE (minval));
1610 /* Now build the test-and-branch code. */
1612 gsi = gsi_last_bb (m_case_bb);
1614 /* idx = (unsigned)x - minval. */
1615 idx = fold_convert (unsigned_index_type, index_expr);
1616 idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
1617 fold_convert (unsigned_index_type, minval));
1618 idx = force_gimple_operand_gsi (&gsi, idx,
1619 /*simple=*/true, NULL_TREE,
1620 /*before=*/true, GSI_SAME_STMT);
1622 if (m_handles_entire_switch && entry_test_needed)
1624 tree range = int_const_binop (MINUS_EXPR, maxval, minval);
1625 /* if (idx > range) goto default */
1626 range
1627 = force_gimple_operand_gsi (&gsi,
1628 fold_convert (unsigned_index_type, range),
1629 /*simple=*/true, NULL_TREE,
1630 /*before=*/true, GSI_SAME_STMT);
1631 tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
1632 basic_block new_bb
1633 = hoist_edge_and_branch_if_true (&gsi, tmp, default_bb,
1634 profile_probability::unlikely ());
1635 gsi = gsi_last_bb (new_bb);
1638 tmp = fold_build2 (LSHIFT_EXPR, word_type_node, word_mode_one,
1639 fold_convert (word_type_node, idx));
1641 /* csui = (1 << (word_mode) idx) */
1642 if (count > 1)
1644 csui = make_ssa_name (word_type_node);
1645 tmp = force_gimple_operand_gsi (&gsi, tmp,
1646 /*simple=*/false, NULL_TREE,
1647 /*before=*/true, GSI_SAME_STMT);
1648 shift_stmt = gimple_build_assign (csui, tmp);
1649 gsi_insert_before (&gsi, shift_stmt, GSI_SAME_STMT);
1650 update_stmt (shift_stmt);
1652 else
1653 csui = tmp;
1655 profile_probability prob = profile_probability::always ();
1657 /* for each unique set of cases:
1658 if (const & csui) goto target */
1659 for (k = 0; k < count; k++)
1661 prob = profile_probability::always ().apply_scale (test[k].bits,
1662 bt_range);
1663 bt_range -= test[k].bits;
1664 tmp = wide_int_to_tree (word_type_node, test[k].mask);
1665 tmp = fold_build2 (BIT_AND_EXPR, word_type_node, csui, tmp);
1666 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, word_mode_zero);
1667 tmp = force_gimple_operand_gsi (&gsi, tmp,
1668 /*simple=*/true, NULL_TREE,
1669 /*before=*/true, GSI_SAME_STMT);
1670 basic_block new_bb
1671 = hoist_edge_and_branch_if_true (&gsi, tmp, test[k].target_bb, prob);
1672 gsi = gsi_last_bb (new_bb);
1675 /* We should have removed all edges now. */
1676 gcc_assert (EDGE_COUNT (gsi_bb (gsi)->succs) == 0);
1678 /* If nothing matched, go to the default label. */
1679 edge e = make_edge (gsi_bb (gsi), default_bb, EDGE_FALLTHRU);
1680 e->probability = profile_probability::always ();
1683 /* Split the basic block at the statement pointed to by GSIP, and insert
1684 a branch to the target basic block of E_TRUE conditional on tree
1685 expression COND.
1687 It is assumed that there is already an edge from the to-be-split
1688 basic block to E_TRUE->dest block. This edge is removed, and the
1689 profile information on the edge is re-used for the new conditional
1690 jump.
1692 The CFG is updated. The dominator tree will not be valid after
1693 this transformation, but the immediate dominators are updated if
1694 UPDATE_DOMINATORS is true.
1696 Returns the newly created basic block. */
1698 basic_block
1699 bit_test_cluster::hoist_edge_and_branch_if_true (gimple_stmt_iterator *gsip,
1700 tree cond, basic_block case_bb,
1701 profile_probability prob)
1703 tree tmp;
1704 gcond *cond_stmt;
1705 edge e_false;
1706 basic_block new_bb, split_bb = gsi_bb (*gsip);
1708 edge e_true = make_edge (split_bb, case_bb, EDGE_TRUE_VALUE);
1709 e_true->probability = prob;
1710 gcc_assert (e_true->src == split_bb);
1712 tmp = force_gimple_operand_gsi (gsip, cond, /*simple=*/true, NULL,
1713 /*before=*/true, GSI_SAME_STMT);
1714 cond_stmt = gimple_build_cond_from_tree (tmp, NULL_TREE, NULL_TREE);
1715 gsi_insert_before (gsip, cond_stmt, GSI_SAME_STMT);
1717 e_false = split_block (split_bb, cond_stmt);
1718 new_bb = e_false->dest;
1719 redirect_edge_pred (e_true, split_bb);
1721 e_false->flags &= ~EDGE_FALLTHRU;
1722 e_false->flags |= EDGE_FALSE_VALUE;
1723 e_false->probability = e_true->probability.invert ();
1724 new_bb->count = e_false->count ();
1726 return new_bb;
1729 /* Compute the number of case labels that correspond to each outgoing edge of
1730 switch statement. Record this information in the aux field of the edge. */
1732 void
1733 switch_decision_tree::compute_cases_per_edge ()
1735 reset_out_edges_aux (m_switch);
1736 int ncases = gimple_switch_num_labels (m_switch);
1737 for (int i = ncases - 1; i >= 1; --i)
1739 edge case_edge = gimple_switch_edge (cfun, m_switch, i);
1740 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + 1);
1744 /* Analyze the switch statement and return true when the statement is expanded
1745 as a decision tree. */
1747 bool
1748 switch_decision_tree::analyze_switch_statement ()
1750 unsigned l = gimple_switch_num_labels (m_switch);
1751 basic_block bb = gimple_bb (m_switch);
1752 auto_vec<cluster *> clusters;
1753 clusters.create (l - 1);
1755 basic_block default_bb = gimple_switch_default_bb (cfun, m_switch);
1756 m_case_bbs.reserve (l);
1757 m_case_bbs.quick_push (default_bb);
1759 compute_cases_per_edge ();
1761 for (unsigned i = 1; i < l; i++)
1763 tree elt = gimple_switch_label (m_switch, i);
1764 tree lab = CASE_LABEL (elt);
1765 basic_block case_bb = label_to_block (cfun, lab);
1766 edge case_edge = find_edge (bb, case_bb);
1767 tree low = CASE_LOW (elt);
1768 tree high = CASE_HIGH (elt);
1770 profile_probability p
1771 = case_edge->probability.apply_scale (1, (intptr_t) (case_edge->aux));
1772 clusters.quick_push (new simple_cluster (low, high, elt, case_edge->dest,
1773 p));
1774 m_case_bbs.quick_push (case_edge->dest);
1777 reset_out_edges_aux (m_switch);
1779 /* Find bit-test clusters. */
1780 vec<cluster *> output = bit_test_cluster::find_bit_tests (clusters);
1782 /* Find jump table clusters. */
1783 vec<cluster *> output2;
1784 auto_vec<cluster *> tmp;
1785 output2.create (1);
1786 tmp.create (1);
1788 for (unsigned i = 0; i < output.length (); i++)
1790 cluster *c = output[i];
1791 if (c->get_type () != SIMPLE_CASE)
1793 if (!tmp.is_empty ())
1795 vec<cluster *> n = jump_table_cluster::find_jump_tables (tmp);
1796 output2.safe_splice (n);
1797 n.release ();
1798 tmp.truncate (0);
1800 output2.safe_push (c);
1802 else
1803 tmp.safe_push (c);
1806 /* We may still have simple clusters left in the temporary vector. */
1807 if (!tmp.is_empty ())
1809 vec<cluster *> n = jump_table_cluster::find_jump_tables (tmp);
1810 output2.safe_splice (n);
1811 n.release ();
1814 if (dump_file)
1816 fprintf (dump_file, ";; GIMPLE switch case clusters: ");
1817 for (unsigned i = 0; i < output2.length (); i++)
1818 output2[i]->dump (dump_file, dump_flags & TDF_DETAILS);
1819 fprintf (dump_file, "\n");
1822 output.release ();
1824 bool expanded = try_switch_expansion (output2);
1826 for (unsigned i = 0; i < output2.length (); i++)
1827 delete output2[i];
1829 output2.release ();
1831 return expanded;
1834 /* Attempt to expand CLUSTERS as a decision tree. Return true when
1835 expanded. */
1837 bool
1838 switch_decision_tree::try_switch_expansion (vec<cluster *> &clusters)
1840 tree index_expr = gimple_switch_index (m_switch);
1841 tree index_type = TREE_TYPE (index_expr);
1842 basic_block bb = gimple_bb (m_switch);
1844 if (gimple_switch_num_labels (m_switch) == 1
1845 || range_check_type (index_type) == NULL_TREE)
1846 return false;
1848 /* Find the default case target label. */
1849 edge default_edge = gimple_switch_default_edge (cfun, m_switch);
1850 m_default_bb = default_edge->dest;
1852 /* Do the insertion of a case label into m_case_list. The labels are
1853 fed to us in descending order from the sorted vector of case labels used
1854 in the tree part of the middle end. So the list we construct is
1855 sorted in ascending order. */
1857 for (int i = clusters.length () - 1; i >= 0; i--)
1859 case_tree_node *r = m_case_list;
1860 m_case_list = m_case_node_pool.allocate ();
1861 m_case_list->m_right = r;
1862 m_case_list->m_c = clusters[i];
1865 record_phi_operand_mapping ();
1867 /* Split basic block that contains the gswitch statement. */
1868 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1869 edge e;
1870 if (gsi_end_p (gsi))
1871 e = split_block_after_labels (bb);
1872 else
1874 gsi_prev (&gsi);
1875 e = split_block (bb, gsi_stmt (gsi));
1877 bb = split_edge (e);
1879 /* Create new basic blocks for non-case clusters where specific expansion
1880 needs to happen. */
1881 for (unsigned i = 0; i < clusters.length (); i++)
1882 if (clusters[i]->get_type () != SIMPLE_CASE)
1884 clusters[i]->m_case_bb = create_empty_bb (bb);
1885 clusters[i]->m_case_bb->count = bb->count;
1886 clusters[i]->m_case_bb->loop_father = bb->loop_father;
1889 /* Do not do extra work for a single cluster. */
1890 if (clusters.length () == 1
1891 && clusters[0]->get_type () != SIMPLE_CASE)
1893 cluster *c = clusters[0];
1894 c->emit (index_expr, index_type,
1895 gimple_switch_default_label (m_switch), m_default_bb);
1896 redirect_edge_succ (single_succ_edge (bb), c->m_case_bb);
1898 else
1900 emit (bb, index_expr, default_edge->probability, index_type);
1902 /* Emit cluster-specific switch handling. */
1903 for (unsigned i = 0; i < clusters.length (); i++)
1904 if (clusters[i]->get_type () != SIMPLE_CASE)
1905 clusters[i]->emit (index_expr, index_type,
1906 gimple_switch_default_label (m_switch),
1907 m_default_bb);
1910 fix_phi_operands_for_edges ();
1912 return true;
1915 /* Before switch transformation, record all SSA_NAMEs defined in switch BB
1916 and used in a label basic block. */
1918 void
1919 switch_decision_tree::record_phi_operand_mapping ()
1921 basic_block switch_bb = gimple_bb (m_switch);
1922 /* Record all PHI nodes that have to be fixed after conversion. */
1923 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1925 gphi_iterator gsi;
1926 basic_block bb = m_case_bbs[i];
1927 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1929 gphi *phi = gsi.phi ();
1931 for (unsigned i = 0; i < gimple_phi_num_args (phi); i++)
1933 basic_block phi_src_bb = gimple_phi_arg_edge (phi, i)->src;
1934 if (phi_src_bb == switch_bb)
1936 tree def = gimple_phi_arg_def (phi, i);
1937 tree result = gimple_phi_result (phi);
1938 m_phi_mapping.put (result, def);
1939 break;
1946 /* Append the operands that are missing from PHI statements because new
1947 edges to the case labels were introduced. */
1949 void
1950 switch_decision_tree::fix_phi_operands_for_edges ()
1952 gphi_iterator gsi;
1954 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1956 basic_block bb = m_case_bbs[i];
1957 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1959 gphi *phi = gsi.phi ();
1960 for (unsigned j = 0; j < gimple_phi_num_args (phi); j++)
1962 tree def = gimple_phi_arg_def (phi, j);
1963 if (def == NULL_TREE)
1965 edge e = gimple_phi_arg_edge (phi, j);
1966 tree *definition
1967 = m_phi_mapping.get (gimple_phi_result (phi));
1968 gcc_assert (definition);
1969 add_phi_arg (phi, *definition, e, UNKNOWN_LOCATION);
1976 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1977 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1979 We generate a binary decision tree to select the appropriate target
1980 code. */
1982 void
1983 switch_decision_tree::emit (basic_block bb, tree index_expr,
1984 profile_probability default_prob, tree index_type)
1986 balance_case_nodes (&m_case_list, NULL);
1988 if (dump_file)
1989 dump_function_to_file (current_function_decl, dump_file, dump_flags);
1990 if (dump_file && (dump_flags & TDF_DETAILS))
1992 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1993 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1994 gcc_assert (m_case_list != NULL);
1995 dump_case_nodes (dump_file, m_case_list, indent_step, 0);
1998 bb = emit_case_nodes (bb, index_expr, m_case_list, default_prob, index_type,
1999 gimple_location (m_switch));
2001 if (bb)
2002 emit_jump (bb, m_default_bb);
2004 /* The gswitch and its block are now unreachable; remove the statement and delete its basic block. */
2005 bb = gimple_bb (m_switch);
2006 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2007 gsi_remove (&gsi, true);
2009 delete_basic_block (bb);
2012 /* Take an ordered list of case nodes
2013 and transform them into a near optimal binary tree,
2014 using the accumulated case probabilities rather than assuming that
2015 any target code selection value is as likely as any other.
2017 The transformation is performed by splitting the ordered
2018 list into two equal sections plus a pivot. The parts are
2019 then attached to the pivot as left and right branches. Each
2020 branch is then transformed recursively. */
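/* Worked example, assuming seven equally likely cases 1 .. 7: the pivot
   search stops at 4, so 1 -> 2 -> 3 becomes the left branch and
   5 -> 6 -> 7 the right one; balancing those recursively picks 2 and 6
   as their pivots, giving a tree three levels deep instead of a
   seven-entry chain.  */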
2022 void
2023 switch_decision_tree::balance_case_nodes (case_tree_node **head,
2024 case_tree_node *parent)
2026 case_tree_node *np;
2028 np = *head;
2029 if (np)
2031 int i = 0;
2032 int ranges = 0;
2033 case_tree_node **npp;
2034 case_tree_node *left;
2035 profile_probability prob = profile_probability::never ();
2037 /* Count the number of entries in this branch. Also count the ranges. */
2039 while (np)
2041 if (!tree_int_cst_equal (np->m_c->get_low (), np->m_c->get_high ()))
2042 ranges++;
2044 i++;
2045 prob += np->m_c->m_prob;
2046 np = np->m_right;
2049 if (i > 2)
2051 /* Split this list if it is long enough for that to help. */
2052 npp = head;
2053 left = *npp;
2054 profile_probability pivot_prob = prob.apply_scale (1, 2);
2056 /* Find the place in the list that bisects the list's total cost,
2057 where ranges count as 2. */
2058 while (1)
2060 /* Skip nodes while their probability does not reach
2061 that amount. */
2062 prob -= (*npp)->m_c->m_prob;
2063 if ((prob.initialized_p () && prob < pivot_prob)
2064 || ! (*npp)->m_right)
2065 break;
2066 npp = &(*npp)->m_right;
2069 np = *npp;
2070 *npp = 0;
2071 *head = np;
2072 np->m_parent = parent;
2073 np->m_left = left == np ? NULL : left;
2075 /* Optimize each of the two split parts. */
2076 balance_case_nodes (&np->m_left, np);
2077 balance_case_nodes (&np->m_right, np);
2078 np->m_c->m_subtree_prob = np->m_c->m_prob;
2079 if (np->m_left)
2080 np->m_c->m_subtree_prob += np->m_left->m_c->m_subtree_prob;
2081 if (np->m_right)
2082 np->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
2084 else
2086 /* Else leave this branch as one level,
2087 but fill in `parent' fields. */
2088 np = *head;
2089 np->m_parent = parent;
2090 np->m_c->m_subtree_prob = np->m_c->m_prob;
2091 for (; np->m_right; np = np->m_right)
2093 np->m_right->m_parent = np;
2094 (*head)->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
2100 /* Dump ROOT, a list or tree of case nodes, to file. */
2102 void
2103 switch_decision_tree::dump_case_nodes (FILE *f, case_tree_node *root,
2104 int indent_step, int indent_level)
2106 if (root == 0)
2107 return;
2108 indent_level++;
2110 dump_case_nodes (f, root->m_left, indent_step, indent_level);
2112 fputs (";; ", f);
2113 fprintf (f, "%*s", indent_step * indent_level, "");
2114 root->m_c->dump (f);
2115 root->m_c->m_prob.dump (f);
2116 fputs (" subtree: ", f);
2117 root->m_c->m_subtree_prob.dump (f);
2118 fputs (")\n", f);
2120 dump_case_nodes (f, root->m_right, indent_step, indent_level);
2124 /* Make BB jump unconditionally to CASE_BB by redirecting its single successor edge. */
2126 void
2127 switch_decision_tree::emit_jump (basic_block bb, basic_block case_bb)
2129 edge e = single_succ_edge (bb);
2130 redirect_edge_succ (e, case_bb);
2133 /* Generate code in basic block BB to compare OP0 with OP1 and jump to
2134 LABEL_BB when the comparison is true.
2135 COMPARISON is the GIMPLE comparison (EQ, NE, GT, etc.).
2136 PROB is the probability of jumping to LABEL_BB. */
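/* Illustrative shape of the result (names are made up): starting from a
   block BB with a single successor, the new condition is appended to BB,
   BB is split, and we end up with

     BB:  if (op0 <COMPARISON> op1)
            true  edge -> LABEL_BB          (probability PROB)
            false edge -> new empty block   (probability PROB.invert ())

   The new empty block is returned so callers can chain further tests onto
   the false path.  */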
2138 basic_block
2139 switch_decision_tree::emit_cmp_and_jump_insns (basic_block bb, tree op0,
2140 tree op1, tree_code comparison,
2141 basic_block label_bb,
2142 profile_probability prob,
2143 location_t loc)
2145 // TODO: this is called once with lhs != index (from the range test in emit_case_nodes).
2146 op1 = fold_convert (TREE_TYPE (op0), op1);
2148 gcond *cond = gimple_build_cond (comparison, op0, op1, NULL_TREE, NULL_TREE);
2149 gimple_set_location (cond, loc);
2150 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2151 gsi_insert_after (&gsi, cond, GSI_NEW_STMT);
2153 gcc_assert (single_succ_p (bb));
2155 /* Make a new basic block where the false branch will take place. */
2156 edge false_edge = split_block (bb, cond);
2157 false_edge->flags = EDGE_FALSE_VALUE;
2158 false_edge->probability = prob.invert ();
2160 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2161 true_edge->probability = prob;
2163 return false_edge->dest;
2166 /* Generate code to jump to LABEL_BB if OP0 and OP1 are equal.
2167 PROB is the probability of jumping to LABEL_BB.
2168 BB is a basic block where the new condition will be placed. */
2170 basic_block
2171 switch_decision_tree::do_jump_if_equal (basic_block bb, tree op0, tree op1,
2172 basic_block label_bb,
2173 profile_probability prob,
2174 location_t loc)
2176 op1 = fold_convert (TREE_TYPE (op0), op1);
2178 gcond *cond = gimple_build_cond (EQ_EXPR, op0, op1, NULL_TREE, NULL_TREE);
2179 gimple_set_location (cond, loc);
2180 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2181 gsi_insert_before (&gsi, cond, GSI_SAME_STMT);
2183 gcc_assert (single_succ_p (bb));
2185 /* Make a new basic block where the false branch will take place. */
2186 edge false_edge = split_block (bb, cond);
2187 false_edge->flags = EDGE_FALSE_VALUE;
2188 false_edge->probability = prob.invert ();
2190 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2191 true_edge->probability = prob;
2193 return false_edge->dest;
2196 /* Emit step-by-step code to select a case for the value of INDEX.
2197 The generated decision tree follows the form of the
2198 case-node binary tree NODE, whose nodes represent test conditions.
2199 DEFAULT_PROB is probability of cases leading to default BB.
2200 INDEX_TYPE is the type of the index of the switch. */
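/* Worked example (illustrative pseudo-GIMPLE): for simple cases 1 .. 5 the
   balanced tree has 3 at the root, 1 -> 2 on the left and 4 -> 5 on the
   right, and the emitted tests are roughly

     if (index == 3) goto case_3;
     if (index > 3)  goto right_bb;
     if (index == 1) goto case_1;
     if (index == 2) goto case_2;
     goto default_bb;
   right_bb:
     if (index == 4) goto case_4;
     if (index == 5) goto case_5;
     goto default_bb;  */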
2202 basic_block
2203 switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
2204 case_tree_node *node,
2205 profile_probability default_prob,
2206 tree index_type, location_t loc)
2208 profile_probability p;
2210 /* If node is null, we are done. */
2211 if (node == NULL)
2212 return bb;
2214 /* Single value case. */
2215 if (node->m_c->is_single_value_p ())
2217 /* Node is single valued. First see if the index expression matches
2218 this node and then check our children, if any. */
2219 p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
2220 bb = do_jump_if_equal (bb, index, node->m_c->get_low (),
2221 node->m_c->m_case_bb, p, loc);
2222 /* The test just emitted handles this case, so remove its share from
2223 the remaining subtree probability. */
2224 node->m_c->m_subtree_prob -= p;
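/* Illustrative numbers: with m_prob == 30%, m_subtree_prob == 60% and
   default_prob == 20%, the equality test above is given
   30 / (60 + 20) == 37.5%, i.e. the probability of this case relative to
   everything still reachable at this point.  */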
2226 if (node->m_left != NULL && node->m_right != NULL)
2228 /* 1) the node has both children
2230 If both children are single-valued cases with no
2231 children, finish up all the work. This way, we can save
2232 one ordered comparison. */
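/* E.g. for a node 5 with leaf children 4 and 6, three equality tests
   (== 5 above, then == 6 and == 4 below) suffice, instead of an equality
   test followed by a ">" test separating the two subtrees.  */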
2234 if (!node->m_left->has_child ()
2235 && node->m_left->m_c->is_single_value_p ()
2236 && !node->m_right->has_child ()
2237 && node->m_right->m_c->is_single_value_p ())
2239 p = (node->m_right->m_c->m_prob
2240 / (node->m_c->m_subtree_prob + default_prob));
2241 bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
2242 node->m_right->m_c->m_case_bb, p, loc);
2244 p = (node->m_left->m_c->m_prob
2245 / (node->m_c->m_subtree_prob + default_prob));
2246 bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
2247 node->m_left->m_c->m_case_bb, p, loc);
2249 else
2251 /* Branch to a label where we will handle it later. */
2252 basic_block test_bb = split_edge (single_succ_edge (bb));
2253 redirect_edge_succ (single_pred_edge (test_bb),
2254 single_succ_edge (bb)->dest);
2256 p = ((node->m_right->m_c->m_subtree_prob
2257 + default_prob.apply_scale (1, 2))
2258 / (node->m_c->m_subtree_prob + default_prob));
2259 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2260 GT_EXPR, test_bb, p, loc);
2261 default_prob = default_prob.apply_scale (1, 2);
2263 /* Handle the left-hand subtree. */
2264 bb = emit_case_nodes (bb, index, node->m_left,
2265 default_prob, index_type, loc);
2267 /* If the left-hand subtree fell through,
2268 don't let it fall into the right-hand subtree. */
2269 if (bb && m_default_bb)
2270 emit_jump (bb, m_default_bb);
2272 bb = emit_case_nodes (test_bb, index, node->m_right,
2273 default_prob, index_type, loc);
2276 else if (node->m_left == NULL && node->m_right != NULL)
2278 /* 2) the node has only a right child. */
2280 /* Here we have a right child but no left so we issue a conditional
2281 branch to default and process the right child.
2283 Omit the conditional branch to default if the right child
2284 does not have any children and is single valued; it would
2285 cost too much space to save so little time. */
2287 if (node->m_right->has_child ()
2288 || !node->m_right->m_c->is_single_value_p ())
2290 p = (default_prob.apply_scale (1, 2)
2291 / (node->m_c->m_subtree_prob + default_prob));
2292 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
2293 LT_EXPR, m_default_bb, p, loc);
2294 default_prob = default_prob.apply_scale (1, 2);
2296 bb = emit_case_nodes (bb, index, node->m_right, default_prob,
2297 index_type, loc);
2299 else
2301 /* We cannot process node->right normally
2302 since we haven't ruled out the numbers less than
2303 this node's value. So handle node->right explicitly. */
2304 p = (node->m_right->m_c->m_subtree_prob
2305 / (node->m_c->m_subtree_prob + default_prob));
2306 bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
2307 node->m_right->m_c->m_case_bb, p, loc);
2310 else if (node->m_left != NULL && node->m_right == NULL)
2312 /* 3) the node has only a left child; similar to the previous case. */
2314 if (node->m_left->has_child ()
2315 || !node->m_left->m_c->is_single_value_p ())
2317 p = (default_prob.apply_scale (1, 2)
2318 / (node->m_c->m_subtree_prob + default_prob));
2319 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2320 GT_EXPR, m_default_bb, p, loc);
2321 default_prob = default_prob.apply_scale (1, 2);
2323 bb = emit_case_nodes (bb, index, node->m_left, default_prob,
2324 index_type, loc);
2326 else
2328 /* We cannot process node->left normally
2329 since we haven't ruled out the numbers greater than
2330 this node's value. So handle node->left explicitly. */
2331 p = (node->m_left->m_c->m_subtree_prob
2332 / (node->m_c->m_subtree_prob + default_prob));
2333 bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
2334 node->m_left->m_c->m_case_bb, p, loc);
2338 else
2340 /* Node is a range. These cases are very similar to those for a single
2341 value, except that we do not start by testing whether this node
2342 is the one to branch to. */
2343 if (node->has_child () || node->m_c->get_type () != SIMPLE_CASE)
2345 /* Branch to a label where we will handle it later. */
2346 basic_block test_bb = split_edge (single_succ_edge (bb));
2347 redirect_edge_succ (single_pred_edge (test_bb),
2348 single_succ_edge (bb)->dest);
2351 profile_probability right_prob = profile_probability::never ();
2352 if (node->m_right)
2353 right_prob = node->m_right->m_c->m_subtree_prob;
2354 p = ((right_prob + default_prob.apply_scale (1, 2))
2355 / (node->m_c->m_subtree_prob + default_prob));
2357 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2358 GT_EXPR, test_bb, p, loc);
2359 default_prob = default_prob.apply_scale (1, 2);
2361 /* Value belongs to this node or to the left-hand subtree. */
2362 p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
2363 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
2364 GE_EXPR, node->m_c->m_case_bb, p, loc);
2366 /* Handle the left-hand subtree. */
2367 bb = emit_case_nodes (bb, index, node->m_left,
2368 default_prob, index_type, loc);
2370 /* If the left-hand subtree fell through,
2371 don't let it fall into the right-hand subtree. */
2372 if (bb && m_default_bb)
2373 emit_jump (bb, m_default_bb);
2375 bb = emit_case_nodes (test_bb, index, node->m_right,
2376 default_prob, index_type, loc);
2378 else
2380 /* Node is a leaf range: emit a single range test that branches to the
2381 default block when INDEX falls outside [low, high], and otherwise
2382 falls through to an unconditional jump to the case block. */
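/* The range test is expected to be folded into a single unsigned
   comparison; roughly (an illustration only, the details live in
   generate_range_test):

     lhs = (unsigned type) (index - low);
     rhs = (unsigned type) (high - low);
     if (lhs > rhs) goto default_bb; else goto case_bb;  */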
2383 tree lhs, rhs;
2384 generate_range_test (bb, index, node->m_c->get_low (),
2385 node->m_c->get_high (), &lhs, &rhs);
2386 p = default_prob / (node->m_c->m_subtree_prob + default_prob);
2388 bb = emit_cmp_and_jump_insns (bb, lhs, rhs, GT_EXPR,
2389 m_default_bb, p, loc);
2391 emit_jump (bb, node->m_c->m_case_bb);
2392 return NULL;
2396 return bb;
2399 /* The main function of the switch conversion pass: scan the function for
2400 GIMPLE_SWITCH statements and invoke switch_conversion::expand on each. */
2402 namespace {
2404 const pass_data pass_data_convert_switch =
2406 GIMPLE_PASS, /* type */
2407 "switchconv", /* name */
2408 OPTGROUP_NONE, /* optinfo_flags */
2409 TV_TREE_SWITCH_CONVERSION, /* tv_id */
2410 ( PROP_cfg | PROP_ssa ), /* properties_required */
2411 0, /* properties_provided */
2412 0, /* properties_destroyed */
2413 0, /* todo_flags_start */
2414 TODO_update_ssa, /* todo_flags_finish */
2417 class pass_convert_switch : public gimple_opt_pass
2419 public:
2420 pass_convert_switch (gcc::context *ctxt)
2421 : gimple_opt_pass (pass_data_convert_switch, ctxt)
2424 /* opt_pass methods: */
2425 virtual bool gate (function *) { return flag_tree_switch_conversion != 0; }
2426 virtual unsigned int execute (function *);
2428 }; // class pass_convert_switch
2430 unsigned int
2431 pass_convert_switch::execute (function *fun)
2433 basic_block bb;
2434 bool cfg_altered = false;
2436 FOR_EACH_BB_FN (bb, fun)
2438 gimple *stmt = last_stmt (bb);
2439 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
2441 if (dump_file)
2443 expanded_location loc = expand_location (gimple_location (stmt));
2445 fprintf (dump_file, "beginning to process the following "
2446 "SWITCH statement (%s:%d) : ------- \n",
2447 loc.file, loc.line);
2448 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2449 putc ('\n', dump_file);
2452 switch_conversion sconv;
2453 sconv.expand (as_a <gswitch *> (stmt));
2454 cfg_altered |= sconv.m_cfg_altered;
2455 if (!sconv.m_reason)
2457 if (dump_file)
2459 fputs ("Switch converted\n", dump_file);
2460 fputs ("--------------------------------\n", dump_file);
2463 /* Make no effort to update the post-dominator tree.
2464 It is actually not that hard for the transformations
2465 we have performed, but it is not supported
2466 by iterate_fix_dominators. */
2467 free_dominance_info (CDI_POST_DOMINATORS);
2469 else
2471 if (dump_file)
2473 fputs ("Bailing out - ", dump_file);
2474 fputs (sconv.m_reason, dump_file);
2475 fputs ("\n--------------------------------\n", dump_file);
2481 return cfg_altered ? TODO_cleanup_cfg : 0;
2484 } // anon namespace
2486 gimple_opt_pass *
2487 make_pass_convert_switch (gcc::context *ctxt)
2489 return new pass_convert_switch (ctxt);
2492 /* The switch lowering passes: scan the function for GIMPLE_SWITCH
2493 statements and lower each of them to a decision tree. */
2495 namespace {
2497 template <bool O0> class pass_lower_switch: public gimple_opt_pass
2499 public:
2500 pass_lower_switch (gcc::context *ctxt) : gimple_opt_pass (data, ctxt) {}
2502 static const pass_data data;
2503 opt_pass *
2504 clone ()
2506 return new pass_lower_switch<O0> (m_ctxt);
2509 virtual bool
2510 gate (function *)
2512 return !O0 || !optimize;
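/* I.e. the "switchlower_O0" instantiation runs only when optimizations are
   disabled, while the plain "switchlower" instantiation is always gated
   on.  */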
2515 virtual unsigned int execute (function *fun);
2516 }; // class pass_lower_switch
2518 template <bool O0>
2519 const pass_data pass_lower_switch<O0>::data = {
2520 GIMPLE_PASS, /* type */
2521 O0 ? "switchlower_O0" : "switchlower", /* name */
2522 OPTGROUP_NONE, /* optinfo_flags */
2523 TV_TREE_SWITCH_LOWERING, /* tv_id */
2524 ( PROP_cfg | PROP_ssa ), /* properties_required */
2525 0, /* properties_provided */
2526 0, /* properties_destroyed */
2527 0, /* todo_flags_start */
2528 TODO_update_ssa | TODO_cleanup_cfg, /* todo_flags_finish */
2531 template <bool O0>
2532 unsigned int
2533 pass_lower_switch<O0>::execute (function *fun)
2535 basic_block bb;
2536 bool expanded = false;
2538 auto_vec<gimple *> switch_statements;
2539 switch_statements.create (1);
2541 FOR_EACH_BB_FN (bb, fun)
2543 gimple *stmt = last_stmt (bb);
2544 gswitch *swtch;
2545 if (stmt && (swtch = dyn_cast<gswitch *> (stmt)))
2547 if (!O0)
2548 group_case_labels_stmt (swtch);
2549 switch_statements.safe_push (swtch);
2553 for (unsigned i = 0; i < switch_statements.length (); i++)
2555 gimple *stmt = switch_statements[i];
2556 if (dump_file)
2558 expanded_location loc = expand_location (gimple_location (stmt));
2560 fprintf (dump_file, "beginning to process the following "
2561 "SWITCH statement (%s:%d) : ------- \n",
2562 loc.file, loc.line);
2563 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2564 putc ('\n', dump_file);
2567 gswitch *swtch = dyn_cast<gswitch *> (stmt);
2568 if (swtch)
2570 switch_decision_tree dt (swtch);
2571 expanded |= dt.analyze_switch_statement ();
2575 if (expanded)
2577 free_dominance_info (CDI_DOMINATORS);
2578 free_dominance_info (CDI_POST_DOMINATORS);
2579 mark_virtual_operands_for_renaming (cfun);
2582 return 0;
2585 } // anon namespace
2587 gimple_opt_pass *
2588 make_pass_lower_switch_O0 (gcc::context *ctxt)
2590 return new pass_lower_switch<true> (ctxt);
2592 gimple_opt_pass *
2593 make_pass_lower_switch (gcc::context *ctxt)
2595 return new pass_lower_switch<false> (ctxt);