gcc/tree-switch-conversion.c
1 /* Lower GIMPLE_SWITCH expressions to something more efficient than
2 a jump table.
3 Copyright (C) 2006-2019 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /* This file handles the lowering of GIMPLE_SWITCH to an indexed
23 load, or a series of bit-test-and-branch expressions. */
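/* As a rough illustration (not taken from the sources below), a switch like

     switch (x)
       {
       case 1: a = 10; break;
       case 2: a = 20; break;
       case 3: a = 30; break;
       default: a = 0; break;
       }

   can be converted into an indexed load along the lines of

     static const int CSWTCH[] = { 10, 20, 30 };
     if ((unsigned) (x - 1) <= 2)
       a = CSWTCH[x - 1];
     else
       a = 0;

   where CSWTCH is the static array built by this pass.  */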
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "insn-codes.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "cfghooks.h"
34 #include "tree-pass.h"
35 #include "ssa.h"
36 #include "optabs-tree.h"
37 #include "cgraph.h"
38 #include "gimple-pretty-print.h"
39 #include "params.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "stor-layout.h"
43 #include "cfganal.h"
44 #include "gimplify.h"
45 #include "gimple-iterator.h"
46 #include "gimplify-me.h"
47 #include "gimple-fold.h"
48 #include "tree-cfg.h"
49 #include "cfgloop.h"
50 #include "alloc-pool.h"
51 #include "target.h"
52 #include "tree-into-ssa.h"
53 #include "omp-general.h"
55 /* ??? For lang_hooks.types.type_for_mode, but is there a word_mode
56 type in the GIMPLE type system that is language-independent? */
57 #include "langhooks.h"
59 #include "tree-switch-conversion.h"
61 using namespace tree_switch_conversion;
63 /* Constructor. */
65 switch_conversion::switch_conversion (): m_final_bb (NULL), m_other_count (),
66 m_constructors (NULL), m_default_values (NULL),
67 m_arr_ref_first (NULL), m_arr_ref_last (NULL),
68 m_reason (NULL), m_default_case_nonstandard (false), m_cfg_altered (false)
72 /* Collect information about the SWTCH statement.  */
74 void
75 switch_conversion::collect (gswitch *swtch)
77 unsigned int branch_num = gimple_switch_num_labels (swtch);
78 tree min_case, max_case;
79 unsigned int i;
80 edge e, e_default, e_first;
81 edge_iterator ei;
83 m_switch = swtch;
85 /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
86 is a default label which is the first in the vector.
87 Collect the bits we can deduce from the CFG. */
88 m_index_expr = gimple_switch_index (swtch);
89 m_switch_bb = gimple_bb (swtch);
90 e_default = gimple_switch_default_edge (cfun, swtch);
91 m_default_bb = e_default->dest;
92 m_default_prob = e_default->probability;
93 m_default_count = e_default->count ();
94 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
95 if (e != e_default)
96 m_other_count += e->count ();
98 /* Get upper and lower bounds of case values, and the covered range. */
99 min_case = gimple_switch_label (swtch, 1);
100 max_case = gimple_switch_label (swtch, branch_num - 1);
102 m_range_min = CASE_LOW (min_case);
103 if (CASE_HIGH (max_case) != NULL_TREE)
104 m_range_max = CASE_HIGH (max_case);
105 else
106 m_range_max = CASE_LOW (max_case);
108 m_contiguous_range = true;
109 tree last = CASE_HIGH (min_case) ? CASE_HIGH (min_case) : m_range_min;
110 for (i = 2; i < branch_num; i++)
112 tree elt = gimple_switch_label (swtch, i);
113 if (wi::to_wide (last) + 1 != wi::to_wide (CASE_LOW (elt)))
115 m_contiguous_range = false;
116 break;
118 last = CASE_HIGH (elt) ? CASE_HIGH (elt) : CASE_LOW (elt);
121 if (m_contiguous_range)
122 e_first = gimple_switch_edge (cfun, swtch, 1);
123 else
124 e_first = e_default;
126 /* See if there is one common successor block for all branch
127 targets. If it exists, record it in FINAL_BB.
128 As the initial guess, use the destination of the first non-default
129 case if the range is contiguous and of the default case otherwise,
130 or that block's single successor if it is a forwarder block. */
131 if (! single_pred_p (e_first->dest))
132 m_final_bb = e_first->dest;
133 else if (single_succ_p (e_first->dest)
134 && ! single_pred_p (single_succ (e_first->dest)))
135 m_final_bb = single_succ (e_first->dest);
136 /* Require that all switch destinations are either that common
137 FINAL_BB or a forwarder to it, except for the default
138 case if contiguous range. */
139 if (m_final_bb)
140 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
142 if (e->dest == m_final_bb)
143 continue;
145 if (single_pred_p (e->dest)
146 && single_succ_p (e->dest)
147 && single_succ (e->dest) == m_final_bb)
148 continue;
150 if (e == e_default && m_contiguous_range)
152 m_default_case_nonstandard = true;
153 continue;
156 m_final_bb = NULL;
157 break;
160 m_range_size
161 = int_const_binop (MINUS_EXPR, m_range_max, m_range_min);
163 /* Get a count of the number of case labels. Single-valued case labels
164 simply count as one, but a case range counts double, since it may
165 require two compares if it gets lowered as a branching tree. */
166 m_count = 0;
167 for (i = 1; i < branch_num; i++)
169 tree elt = gimple_switch_label (swtch, i);
170 m_count++;
171 if (CASE_HIGH (elt)
172 && ! tree_int_cst_equal (CASE_LOW (elt), CASE_HIGH (elt)))
173 m_count++;
176 /* Get the number of unique non-default targets out of the GIMPLE_SWITCH
177 block. Assume a CFG cleanup would have already removed degenerate
178 switch statements; this allows us to just use EDGE_COUNT. */
179 m_uniq = EDGE_COUNT (gimple_bb (swtch)->succs) - 1;
182 /* Checks whether the range covered by the case labels of the switch
183 statement isn't too big and whether the number of branches justifies
184 the size of the new array. */
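/* For instance, assuming the default branch-ratio parameter value of 8, a
   switch whose counted number of branches is 10 (ranges count double) may
   cover a range of at most 80 values; a sparse switch with those 10
   branches spread over a range of 1000 values is rejected here.  */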
186 bool
187 switch_conversion::check_range ()
189 gcc_assert (m_range_size);
190 if (!tree_fits_uhwi_p (m_range_size))
192 m_reason = "index range way too large or otherwise unusable";
193 return false;
196 if (tree_to_uhwi (m_range_size)
197 > ((unsigned) m_count * SWITCH_CONVERSION_BRANCH_RATIO))
199 m_reason = "the maximum range-branch ratio exceeded";
200 return false;
203 return true;
206 /* Checks whether all but the final BB basic blocks are empty. */
208 bool
209 switch_conversion::check_all_empty_except_final ()
211 edge e, e_default = find_edge (m_switch_bb, m_default_bb);
212 edge_iterator ei;
214 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
216 if (e->dest == m_final_bb)
217 continue;
219 if (!empty_block_p (e->dest))
221 if (m_contiguous_range && e == e_default)
223 m_default_case_nonstandard = true;
224 continue;
227 m_reason = "bad case - a non-final BB not empty";
228 return false;
232 return true;
235 /* This function checks whether all required values in phi nodes in final_bb
236 are constants. Required values are those that correspond to a basic block
237 which is a part of the examined switch statement. It returns true if the
238 phi nodes are OK, otherwise false. */
240 bool
241 switch_conversion::check_final_bb ()
243 gphi_iterator gsi;
245 m_phi_count = 0;
246 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
248 gphi *phi = gsi.phi ();
249 unsigned int i;
251 if (virtual_operand_p (gimple_phi_result (phi)))
252 continue;
254 m_phi_count++;
256 for (i = 0; i < gimple_phi_num_args (phi); i++)
258 basic_block bb = gimple_phi_arg_edge (phi, i)->src;
260 if (bb == m_switch_bb
261 || (single_pred_p (bb)
262 && single_pred (bb) == m_switch_bb
263 && (!m_default_case_nonstandard
264 || empty_block_p (bb))))
266 tree reloc, val;
267 const char *reason = NULL;
269 val = gimple_phi_arg_def (phi, i);
270 if (!is_gimple_ip_invariant (val))
271 reason = "non-invariant value from a case";
272 else
274 reloc = initializer_constant_valid_p (val, TREE_TYPE (val));
275 if ((flag_pic && reloc != null_pointer_node)
276 || (!flag_pic && reloc == NULL_TREE))
278 if (reloc)
279 reason
280 = "value from a case would need runtime relocations";
281 else
282 reason
283 = "value from a case is not a valid initializer";
286 if (reason)
288 /* For a contiguous range, we can allow a non-constant value or one
289 that needs relocation, as long as it is only reachable
290 from the default case. */
291 if (bb == m_switch_bb)
292 bb = m_final_bb;
293 if (!m_contiguous_range || bb != m_default_bb)
295 m_reason = reason;
296 return false;
299 unsigned int branch_num = gimple_switch_num_labels (m_switch);
300 for (unsigned int i = 1; i < branch_num; i++)
302 if (gimple_switch_label_bb (cfun, m_switch, i) == bb)
304 m_reason = reason;
305 return false;
308 m_default_case_nonstandard = true;
314 return true;
317 /* The following function allocates default_values, target_{in,out}_names and
318 constructors arrays. The last one is also populated with pointers to
319 vectors that will become constructors of new arrays. */
321 void
322 switch_conversion::create_temp_arrays ()
324 int i;
326 m_default_values = XCNEWVEC (tree, m_phi_count * 3);
327 /* ??? Macros do not support multi argument templates in their
328 argument list. We create a typedef to work around that problem. */
329 typedef vec<constructor_elt, va_gc> *vec_constructor_elt_gc;
330 m_constructors = XCNEWVEC (vec_constructor_elt_gc, m_phi_count);
331 m_target_inbound_names = m_default_values + m_phi_count;
332 m_target_outbound_names = m_target_inbound_names + m_phi_count;
333 for (i = 0; i < m_phi_count; i++)
334 vec_alloc (m_constructors[i], tree_to_uhwi (m_range_size) + 1);
337 /* Populate the array of default values in the order of phi nodes.
338 DEFAULT_CASE is the CASE_LABEL_EXPR for the default switch branch
339 if the range is non-contiguous or the default case has standard
340 structure, otherwise it is the first non-default case instead. */
342 void
343 switch_conversion::gather_default_values (tree default_case)
345 gphi_iterator gsi;
346 basic_block bb = label_to_block (cfun, CASE_LABEL (default_case));
347 edge e;
348 int i = 0;
350 gcc_assert (CASE_LOW (default_case) == NULL_TREE
351 || m_default_case_nonstandard);
353 if (bb == m_final_bb)
354 e = find_edge (m_switch_bb, bb);
355 else
356 e = single_succ_edge (bb);
358 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
360 gphi *phi = gsi.phi ();
361 if (virtual_operand_p (gimple_phi_result (phi)))
362 continue;
363 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
364 gcc_assert (val);
365 m_default_values[i++] = val;
369 /* The following function populates the vectors in the constructors array with
370 future contents of the static arrays. The vectors are populated in the
371 order of phi nodes. */
373 void
374 switch_conversion::build_constructors ()
376 unsigned i, branch_num = gimple_switch_num_labels (m_switch);
377 tree pos = m_range_min;
378 tree pos_one = build_int_cst (TREE_TYPE (pos), 1);
380 for (i = 1; i < branch_num; i++)
382 tree cs = gimple_switch_label (m_switch, i);
383 basic_block bb = label_to_block (cfun, CASE_LABEL (cs));
384 edge e;
385 tree high;
386 gphi_iterator gsi;
387 int j;
389 if (bb == m_final_bb)
390 e = find_edge (m_switch_bb, bb);
391 else
392 e = single_succ_edge (bb);
393 gcc_assert (e);
395 while (tree_int_cst_lt (pos, CASE_LOW (cs)))
397 int k;
398 for (k = 0; k < m_phi_count; k++)
400 constructor_elt elt;
402 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
403 elt.value
404 = unshare_expr_without_location (m_default_values[k]);
405 m_constructors[k]->quick_push (elt);
408 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
410 gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));
412 j = 0;
413 if (CASE_HIGH (cs))
414 high = CASE_HIGH (cs);
415 else
416 high = CASE_LOW (cs);
417 for (gsi = gsi_start_phis (m_final_bb);
418 !gsi_end_p (gsi); gsi_next (&gsi))
420 gphi *phi = gsi.phi ();
421 if (virtual_operand_p (gimple_phi_result (phi)))
422 continue;
423 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
424 tree low = CASE_LOW (cs);
425 pos = CASE_LOW (cs);
429 constructor_elt elt;
431 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
432 elt.value = unshare_expr_without_location (val);
433 m_constructors[j]->quick_push (elt);
435 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
436 } while (!tree_int_cst_lt (high, pos)
437 && tree_int_cst_lt (low, pos));
438 j++;
443 /* If all values in the constructor vector fit a linear function
444 a * x + b, then return true. When true, COEFF_A and COEFF_B are set to
445 the coefficients of the linear function. Note that a vector of equal
446 values is the special case of a linear function with a equal to zero. */
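/* A worked example (illustrative only): for a contiguous switch with
   m_range_min == 0 and constructor values 3, 5, 7, 9 for case values
   0, 1, 2, 3, we get a = 5 - 3 = 2 and b = 5 - 2 * 1 = 3, and every
   entry indeed equals 2 * x + 3, so the array can be replaced by that
   computation.  */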
448 bool
449 switch_conversion::contains_linear_function_p (vec<constructor_elt, va_gc> *vec,
450 wide_int *coeff_a,
451 wide_int *coeff_b)
453 unsigned int i;
454 constructor_elt *elt;
456 gcc_assert (vec->length () >= 2);
458 /* Let's try to find any linear function a * x + b that can apply to
459 given values. 'a' can be calculated as follows:
461 a = (y2 - y1) / (x2 - x1) where x2 - x1 = 1 (consecutive case indices)
462 a = y2 - y1
466 b = y2 - a * x2
470 tree elt0 = (*vec)[0].value;
471 tree elt1 = (*vec)[1].value;
473 if (TREE_CODE (elt0) != INTEGER_CST || TREE_CODE (elt1) != INTEGER_CST)
474 return false;
476 wide_int range_min
477 = wide_int::from (wi::to_wide (m_range_min),
478 TYPE_PRECISION (TREE_TYPE (elt0)),
479 TYPE_SIGN (TREE_TYPE (m_range_min)));
480 wide_int y1 = wi::to_wide (elt0);
481 wide_int y2 = wi::to_wide (elt1);
482 wide_int a = y2 - y1;
483 wide_int b = y2 - a * (range_min + 1);
485 /* Verify that all values fulfill the linear function. */
486 FOR_EACH_VEC_SAFE_ELT (vec, i, elt)
488 if (TREE_CODE (elt->value) != INTEGER_CST)
489 return false;
491 wide_int value = wi::to_wide (elt->value);
492 if (a * range_min + b != value)
493 return false;
495 ++range_min;
498 *coeff_a = a;
499 *coeff_b = b;
501 return true;
504 /* Return the type that should be used for array elements, either TYPE's
505 main variant or, for integral types, some smaller integral type
506 that can still hold all the constants. */
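/* For example, if every constructor value fits in 0 .. 255 and the table is
   large enough for the narrowing to pay off, the array can use an 8-bit
   element type such as unsigned char instead of int, shrinking the emitted
   table.  This is only a sketch of the intent; the exact mode chosen below
   depends on the target's available integer modes.  */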
508 tree
509 switch_conversion::array_value_type (tree type, int num)
511 unsigned int i, len = vec_safe_length (m_constructors[num]);
512 constructor_elt *elt;
513 int sign = 0;
514 tree smaller_type;
516 /* Types with alignments greater than their size can reach here, e.g. out of
517 SRA. We couldn't use these as an array component type so get back to the
518 main variant first, which, for our purposes, is fine for other types as
519 well. */
521 type = TYPE_MAIN_VARIANT (type);
523 if (!INTEGRAL_TYPE_P (type))
524 return type;
526 scalar_int_mode type_mode = SCALAR_INT_TYPE_MODE (type);
527 scalar_int_mode mode = get_narrowest_mode (type_mode);
528 if (GET_MODE_SIZE (type_mode) <= GET_MODE_SIZE (mode))
529 return type;
531 if (len < (optimize_bb_for_size_p (gimple_bb (m_switch)) ? 2 : 32))
532 return type;
534 FOR_EACH_VEC_SAFE_ELT (m_constructors[num], i, elt)
536 wide_int cst;
538 if (TREE_CODE (elt->value) != INTEGER_CST)
539 return type;
541 cst = wi::to_wide (elt->value);
542 while (1)
544 unsigned int prec = GET_MODE_BITSIZE (mode);
545 if (prec > HOST_BITS_PER_WIDE_INT)
546 return type;
548 if (sign >= 0 && cst == wi::zext (cst, prec))
550 if (sign == 0 && cst == wi::sext (cst, prec))
551 break;
552 sign = 1;
553 break;
555 if (sign <= 0 && cst == wi::sext (cst, prec))
557 sign = -1;
558 break;
561 if (sign == 1)
562 sign = 0;
564 if (!GET_MODE_WIDER_MODE (mode).exists (&mode)
565 || GET_MODE_SIZE (mode) >= GET_MODE_SIZE (type_mode))
566 return type;
570 if (sign == 0)
571 sign = TYPE_UNSIGNED (type) ? 1 : -1;
572 smaller_type = lang_hooks.types.type_for_mode (mode, sign >= 0);
573 if (GET_MODE_SIZE (type_mode)
574 <= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (smaller_type)))
575 return type;
577 return smaller_type;
580 /* Create an appropriate array type and declaration and assemble a static
581 array variable. Also create a load statement that initializes
582 the variable in question with a value from the static array. SWTCH is
583 the switch statement being converted, NUM is the index to
584 arrays of constructors, default values and target SSA names
585 for this particular array. ARR_INDEX_TYPE is the type of the index
586 of the new array, PHI is the phi node of the final BB that corresponds
587 to the value that will be loaded from the created array. TIDX
588 is an ssa name of a temporary variable holding the index for loads from the
589 new array. */
591 void
592 switch_conversion::build_one_array (int num, tree arr_index_type,
593 gphi *phi, tree tidx)
595 tree name;
596 gimple *load;
597 gimple_stmt_iterator gsi = gsi_for_stmt (m_switch);
598 location_t loc = gimple_location (m_switch);
600 gcc_assert (m_default_values[num]);
602 name = copy_ssa_name (PHI_RESULT (phi));
603 m_target_inbound_names[num] = name;
605 vec<constructor_elt, va_gc> *constructor = m_constructors[num];
606 wide_int coeff_a, coeff_b;
607 bool linear_p = contains_linear_function_p (constructor, &coeff_a, &coeff_b);
608 if (linear_p)
610 if (dump_file && coeff_a.to_uhwi () > 0)
611 fprintf (dump_file, "Linear transformation with A = %" PRId64
612 " and B = %" PRId64 "\n", coeff_a.to_shwi (),
613 coeff_b.to_shwi ());
615 /* We must use the type of the constructor values. */
616 tree t = unsigned_type_for (TREE_TYPE ((*constructor)[0].value));
617 gimple_seq seq = NULL;
618 tree tmp = gimple_convert (&seq, t, m_index_expr);
619 tree tmp2 = gimple_build (&seq, MULT_EXPR, t,
620 wide_int_to_tree (t, coeff_a), tmp);
621 tree tmp3 = gimple_build (&seq, PLUS_EXPR, t, tmp2,
622 wide_int_to_tree (t, coeff_b));
623 tree tmp4 = gimple_convert (&seq, TREE_TYPE (name), tmp3);
624 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
625 load = gimple_build_assign (name, tmp4);
627 else
629 tree array_type, ctor, decl, value_type, fetch, default_type;
631 default_type = TREE_TYPE (m_default_values[num]);
632 value_type = array_value_type (default_type, num);
633 array_type = build_array_type (value_type, arr_index_type);
634 if (default_type != value_type)
636 unsigned int i;
637 constructor_elt *elt;
639 FOR_EACH_VEC_SAFE_ELT (constructor, i, elt)
640 elt->value = fold_convert (value_type, elt->value);
642 ctor = build_constructor (array_type, constructor);
643 TREE_CONSTANT (ctor) = true;
644 TREE_STATIC (ctor) = true;
646 decl = build_decl (loc, VAR_DECL, NULL_TREE, array_type);
647 TREE_STATIC (decl) = 1;
648 DECL_INITIAL (decl) = ctor;
650 DECL_NAME (decl) = create_tmp_var_name ("CSWTCH");
651 DECL_ARTIFICIAL (decl) = 1;
652 DECL_IGNORED_P (decl) = 1;
653 TREE_CONSTANT (decl) = 1;
654 TREE_READONLY (decl) = 1;
655 DECL_IGNORED_P (decl) = 1;
656 if (offloading_function_p (cfun->decl))
657 DECL_ATTRIBUTES (decl)
658 = tree_cons (get_identifier ("omp declare target"), NULL_TREE,
659 NULL_TREE);
660 varpool_node::finalize_decl (decl);
662 fetch = build4 (ARRAY_REF, value_type, decl, tidx, NULL_TREE,
663 NULL_TREE);
664 if (default_type != value_type)
666 fetch = fold_convert (default_type, fetch);
667 fetch = force_gimple_operand_gsi (&gsi, fetch, true, NULL_TREE,
668 true, GSI_SAME_STMT);
670 load = gimple_build_assign (name, fetch);
673 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
674 update_stmt (load);
675 m_arr_ref_last = load;
678 /* Builds static arrays initialized with values gathered from the switch
679 statement. Also creates statements that load values from
680 them. */
682 void
683 switch_conversion::build_arrays ()
685 tree arr_index_type;
686 tree tidx, sub, utype;
687 gimple *stmt;
688 gimple_stmt_iterator gsi;
689 gphi_iterator gpi;
690 int i;
691 location_t loc = gimple_location (m_switch);
693 gsi = gsi_for_stmt (m_switch);
695 /* Make sure we do not generate arithmetic in a subrange. */
696 utype = TREE_TYPE (m_index_expr);
697 if (TREE_TYPE (utype))
698 utype = lang_hooks.types.type_for_mode (TYPE_MODE (TREE_TYPE (utype)), 1);
699 else
700 utype = lang_hooks.types.type_for_mode (TYPE_MODE (utype), 1);
702 arr_index_type = build_index_type (m_range_size);
703 tidx = make_ssa_name (utype);
704 sub = fold_build2_loc (loc, MINUS_EXPR, utype,
705 fold_convert_loc (loc, utype, m_index_expr),
706 fold_convert_loc (loc, utype, m_range_min));
707 sub = force_gimple_operand_gsi (&gsi, sub,
708 false, NULL, true, GSI_SAME_STMT);
709 stmt = gimple_build_assign (tidx, sub);
711 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
712 update_stmt (stmt);
713 m_arr_ref_first = stmt;
715 for (gpi = gsi_start_phis (m_final_bb), i = 0;
716 !gsi_end_p (gpi); gsi_next (&gpi))
718 gphi *phi = gpi.phi ();
719 if (!virtual_operand_p (gimple_phi_result (phi)))
720 build_one_array (i++, arr_index_type, phi, tidx);
721 else
723 edge e;
724 edge_iterator ei;
725 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
727 if (e->dest == m_final_bb)
728 break;
729 if (!m_default_case_nonstandard
730 || e->dest != m_default_bb)
732 e = single_succ_edge (e->dest);
733 break;
736 gcc_assert (e && e->dest == m_final_bb);
737 m_target_vop = PHI_ARG_DEF_FROM_EDGE (phi, e);
742 /* Generates and appropriately inserts loads of default values at the position
743 given by GSI. Returns the last inserted statement. */
745 gassign *
746 switch_conversion::gen_def_assigns (gimple_stmt_iterator *gsi)
748 int i;
749 gassign *assign = NULL;
751 for (i = 0; i < m_phi_count; i++)
753 tree name = copy_ssa_name (m_target_inbound_names[i]);
754 m_target_outbound_names[i] = name;
755 assign = gimple_build_assign (name, m_default_values[i]);
756 gsi_insert_before (gsi, assign, GSI_SAME_STMT);
757 update_stmt (assign);
759 return assign;
762 /* Deletes the unused bbs and edges that now contain the switch statement and
763 its empty branch bbs. BBD is the now dead BB containing the original
764 switch statement, FINAL is the last BB of the converted switch statement
765 (in terms of succession); DEFAULT_BB, if non-NULL, is preserved. */
767 void
768 switch_conversion::prune_bbs (basic_block bbd, basic_block final,
769 basic_block default_bb)
771 edge_iterator ei;
772 edge e;
774 for (ei = ei_start (bbd->succs); (e = ei_safe_edge (ei)); )
776 basic_block bb;
777 bb = e->dest;
778 remove_edge (e);
779 if (bb != final && bb != default_bb)
780 delete_basic_block (bb);
782 delete_basic_block (bbd);
785 /* Add values to phi nodes in final_bb for the two new edges. E1F is the edge
786 from the basic block loading values from an array and E2F from the basic
787 block loading default values. BBF is the last switch basic block (see the
788 bbf description in the comment below). */
790 void
791 switch_conversion::fix_phi_nodes (edge e1f, edge e2f, basic_block bbf)
793 gphi_iterator gsi;
794 int i;
796 for (gsi = gsi_start_phis (bbf), i = 0;
797 !gsi_end_p (gsi); gsi_next (&gsi))
799 gphi *phi = gsi.phi ();
800 tree inbound, outbound;
801 if (virtual_operand_p (gimple_phi_result (phi)))
802 inbound = outbound = m_target_vop;
803 else
805 inbound = m_target_inbound_names[i];
806 outbound = m_target_outbound_names[i++];
808 add_phi_arg (phi, inbound, e1f, UNKNOWN_LOCATION);
809 if (!m_default_case_nonstandard)
810 add_phi_arg (phi, outbound, e2f, UNKNOWN_LOCATION);
814 /* Creates a check whether the switch expression value actually falls into the
815 range given by all the cases. If it does not, the temporaries are loaded
816 with default values instead. */
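/* A rough sketch of the generated shape (names follow the local variables
   used below):

     bb0:  tidx = (unsigned type) x - range_min;
           if (tidx <= range_size) goto bb1; else goto bb2;
     bb1:  val = CSWTCH[tidx];       (the loads built by build_arrays)
           goto bbf;
     bb2:  val = default value;      (gen_def_assigns)
           goto bbf;
     bbf:  val = PHI <val (bb1), val (bb2)>; ...

   When the default case is nonstandard, bb2 is the preexisting default
   block and no default-value assignments are emitted.  */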
818 void
819 switch_conversion::gen_inbound_check ()
821 tree label_decl1 = create_artificial_label (UNKNOWN_LOCATION);
822 tree label_decl2 = create_artificial_label (UNKNOWN_LOCATION);
823 tree label_decl3 = create_artificial_label (UNKNOWN_LOCATION);
824 glabel *label1, *label2, *label3;
825 tree utype, tidx;
826 tree bound;
828 gcond *cond_stmt;
830 gassign *last_assign = NULL;
831 gimple_stmt_iterator gsi;
832 basic_block bb0, bb1, bb2, bbf, bbd;
833 edge e01 = NULL, e02, e21, e1d, e1f, e2f;
834 location_t loc = gimple_location (m_switch);
836 gcc_assert (m_default_values);
838 bb0 = gimple_bb (m_switch);
840 tidx = gimple_assign_lhs (m_arr_ref_first);
841 utype = TREE_TYPE (tidx);
843 /* (end of) block 0 */
844 gsi = gsi_for_stmt (m_arr_ref_first);
845 gsi_next (&gsi);
847 bound = fold_convert_loc (loc, utype, m_range_size);
848 cond_stmt = gimple_build_cond (LE_EXPR, tidx, bound, NULL_TREE, NULL_TREE);
849 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
850 update_stmt (cond_stmt);
852 /* block 2 */
853 if (!m_default_case_nonstandard)
855 label2 = gimple_build_label (label_decl2);
856 gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
857 last_assign = gen_def_assigns (&gsi);
860 /* block 1 */
861 label1 = gimple_build_label (label_decl1);
862 gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
864 /* block F */
865 gsi = gsi_start_bb (m_final_bb);
866 label3 = gimple_build_label (label_decl3);
867 gsi_insert_before (&gsi, label3, GSI_SAME_STMT);
869 /* cfg fix */
870 e02 = split_block (bb0, cond_stmt);
871 bb2 = e02->dest;
873 if (m_default_case_nonstandard)
875 bb1 = bb2;
876 bb2 = m_default_bb;
877 e01 = e02;
878 e01->flags = EDGE_TRUE_VALUE;
879 e02 = make_edge (bb0, bb2, EDGE_FALSE_VALUE);
880 edge e_default = find_edge (bb1, bb2);
881 for (gphi_iterator gsi = gsi_start_phis (bb2);
882 !gsi_end_p (gsi); gsi_next (&gsi))
884 gphi *phi = gsi.phi ();
885 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, e_default);
886 add_phi_arg (phi, arg, e02,
887 gimple_phi_arg_location_from_edge (phi, e_default));
889 /* Partially fix the dominator tree, if it is available. */
890 if (dom_info_available_p (CDI_DOMINATORS))
891 redirect_immediate_dominators (CDI_DOMINATORS, bb1, bb0);
893 else
895 e21 = split_block (bb2, last_assign);
896 bb1 = e21->dest;
897 remove_edge (e21);
900 e1d = split_block (bb1, m_arr_ref_last);
901 bbd = e1d->dest;
902 remove_edge (e1d);
904 /* Flags and profiles of the edge for in-range values. */
905 if (!m_default_case_nonstandard)
906 e01 = make_edge (bb0, bb1, EDGE_TRUE_VALUE);
907 e01->probability = m_default_prob.invert ();
909 /* Flags and profiles of the edge taking care of out-of-range values. */
910 e02->flags &= ~EDGE_FALLTHRU;
911 e02->flags |= EDGE_FALSE_VALUE;
912 e02->probability = m_default_prob;
914 bbf = m_final_bb;
916 e1f = make_edge (bb1, bbf, EDGE_FALLTHRU);
917 e1f->probability = profile_probability::always ();
919 if (m_default_case_nonstandard)
920 e2f = NULL;
921 else
923 e2f = make_edge (bb2, bbf, EDGE_FALLTHRU);
924 e2f->probability = profile_probability::always ();
927 /* frequencies of the new BBs */
928 bb1->count = e01->count ();
929 bb2->count = e02->count ();
930 if (!m_default_case_nonstandard)
931 bbf->count = e1f->count () + e2f->count ();
933 /* Tidy blocks that have become unreachable. */
934 prune_bbs (bbd, m_final_bb,
935 m_default_case_nonstandard ? m_default_bb : NULL);
937 /* Fixup the PHI nodes in bbF. */
938 fix_phi_nodes (e1f, e2f, bbf);
940 /* Fix the dominator tree, if it is available. */
941 if (dom_info_available_p (CDI_DOMINATORS))
943 vec<basic_block> bbs_to_fix_dom;
945 set_immediate_dominator (CDI_DOMINATORS, bb1, bb0);
946 if (!m_default_case_nonstandard)
947 set_immediate_dominator (CDI_DOMINATORS, bb2, bb0);
948 if (! get_immediate_dominator (CDI_DOMINATORS, bbf))
949 /* If bbD was the immediate dominator ... */
950 set_immediate_dominator (CDI_DOMINATORS, bbf, bb0);
952 bbs_to_fix_dom.create (3 + (bb2 != bbf));
953 bbs_to_fix_dom.quick_push (bb0);
954 bbs_to_fix_dom.quick_push (bb1);
955 if (bb2 != bbf)
956 bbs_to_fix_dom.quick_push (bb2);
957 bbs_to_fix_dom.quick_push (bbf);
959 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
960 bbs_to_fix_dom.release ();
964 /* The following function is invoked on every switch statement (the current
965 one is given in SWTCH) and runs the individual phases of switch
966 conversion on it one after another until one fails or the conversion
967 is completed. On success, m_reason is NULL; otherwise it points
968 to a string with the reason why the conversion failed. */
970 void
971 switch_conversion::expand (gswitch *swtch)
973 /* Group case labels so that we get the right results from the heuristics
974 that decide on the code generation approach for this switch. */
975 m_cfg_altered |= group_case_labels_stmt (swtch);
977 /* If this switch is now a degenerate case with only a default label,
978 there is nothing left for us to do. */
979 if (gimple_switch_num_labels (swtch) < 2)
981 m_reason = "switch is a degenerate case";
982 return;
985 collect (swtch);
987 /* No error markers should reach here (they should be filtered out
988 during gimplification). */
989 gcc_checking_assert (TREE_TYPE (m_index_expr) != error_mark_node);
991 /* A switch on a constant should have been optimized in tree-cfg-cleanup. */
992 gcc_checking_assert (!TREE_CONSTANT (m_index_expr));
994 /* Prefer bit test if possible. */
995 if (tree_fits_uhwi_p (m_range_size)
996 && bit_test_cluster::can_be_handled (tree_to_uhwi (m_range_size), m_uniq)
997 && bit_test_cluster::is_beneficial (m_count, m_uniq))
999 m_reason = "expanding as bit test is preferable";
1000 return;
1003 if (m_uniq <= 2)
1005 /* This will be expanded as a decision tree. */
1006 m_reason = "expanding as jumps is preferable";
1007 return;
1010 /* If there is no common successor, we cannot do the transformation. */
1011 if (!m_final_bb)
1013 m_reason = "no common successor to all case label target blocks found";
1014 return;
1017 /* Check that the case label values are within a reasonable range: */
1018 if (!check_range ())
1020 gcc_assert (m_reason);
1021 return;
1024 /* For all the cases, see whether they are empty, whether the values they
1025 assign are constant and so on... */
1026 if (!check_all_empty_except_final ())
1028 gcc_assert (m_reason);
1029 return;
1031 if (!check_final_bb ())
1033 gcc_assert (m_reason);
1034 return;
1037 /* At this point all checks have passed and we can proceed with the
1038 transformation. */
1040 create_temp_arrays ();
1041 gather_default_values (m_default_case_nonstandard
1042 ? gimple_switch_label (swtch, 1)
1043 : gimple_switch_default_label (swtch));
1044 build_constructors ();
1046 build_arrays (); /* Build the static arrays and assignments. */
1047 gen_inbound_check (); /* Build the bounds check. */
1049 m_cfg_altered = true;
1052 /* Destructor. */
1054 switch_conversion::~switch_conversion ()
1056 XDELETEVEC (m_constructors);
1057 XDELETEVEC (m_default_values);
1060 /* Constructor. */
1062 group_cluster::group_cluster (vec<cluster *> &clusters,
1063 unsigned start, unsigned end)
1065 gcc_checking_assert (end - start + 1 >= 1);
1066 m_prob = profile_probability::never ();
1067 m_cases.create (end - start + 1);
1068 for (unsigned i = start; i <= end; i++)
1070 m_cases.quick_push (static_cast<simple_cluster *> (clusters[i]));
1071 m_prob += clusters[i]->m_prob;
1073 m_subtree_prob = m_prob;
1076 /* Destructor. */
1078 group_cluster::~group_cluster ()
1080 for (unsigned i = 0; i < m_cases.length (); i++)
1081 delete m_cases[i];
1083 m_cases.release ();
1086 /* Dump content of a cluster. */
1088 void
1089 group_cluster::dump (FILE *f, bool details)
1091 unsigned total_values = 0;
1092 for (unsigned i = 0; i < m_cases.length (); i++)
1093 total_values += m_cases[i]->get_range (m_cases[i]->get_low (),
1094 m_cases[i]->get_high ());
1096 unsigned comparison_count = 0;
1097 for (unsigned i = 0; i < m_cases.length (); i++)
1099 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1100 comparison_count += sc->m_range_p ? 2 : 1;
1103 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1104 fprintf (f, "%s", get_type () == JUMP_TABLE ? "JT" : "BT");
1106 if (details)
1107 fprintf (f, "(values:%d comparisons:%d range:" HOST_WIDE_INT_PRINT_DEC
1108 " density: %.2f%%)", total_values, comparison_count, range,
1109 100.0f * comparison_count / range);
1111 fprintf (f, ":");
1112 PRINT_CASE (f, get_low ());
1113 fprintf (f, "-");
1114 PRINT_CASE (f, get_high ());
1115 fprintf (f, " ");
1118 /* Emit GIMPLE code to handle the cluster. */
1120 void
1121 jump_table_cluster::emit (tree index_expr, tree,
1122 tree default_label_expr, basic_block default_bb)
1124 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1125 unsigned HOST_WIDE_INT nondefault_range = 0;
1127 /* For a jump table we just emit a new gswitch statement that will
1128 later be lowered to a jump table. */
1129 auto_vec <tree> labels;
1130 labels.create (m_cases.length ());
1132 make_edge (m_case_bb, default_bb, 0);
1133 for (unsigned i = 0; i < m_cases.length (); i++)
1135 labels.quick_push (unshare_expr (m_cases[i]->m_case_label_expr));
1136 make_edge (m_case_bb, m_cases[i]->m_case_bb, 0);
1139 gswitch *s = gimple_build_switch (index_expr,
1140 unshare_expr (default_label_expr), labels);
1141 gimple_stmt_iterator gsi = gsi_start_bb (m_case_bb);
1142 gsi_insert_after (&gsi, s, GSI_NEW_STMT);
1144 /* Assuming all case values are equally likely, compute the number of values covered by each case edge. */
1145 for (unsigned i = 0; i < m_cases.length (); i++)
1147 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1148 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1149 unsigned HOST_WIDE_INT case_range
1150 = sc->get_range (sc->get_low (), sc->get_high ());
1151 nondefault_range += case_range;
1153 /* case_edge->aux is number of values in a jump-table that are covered
1154 by the case_edge. */
1155 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + case_range);
1158 edge default_edge = gimple_switch_default_edge (cfun, s);
1159 default_edge->probability = profile_probability::never ();
1161 for (unsigned i = 0; i < m_cases.length (); i++)
1163 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1164 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1165 case_edge->probability
1166 = profile_probability::always ().apply_scale ((intptr_t)case_edge->aux,
1167 range);
1170 /* The default edge takes the remaining probability, i.e. the fraction of the range not covered by any case. */
1171 default_edge->probability
1172 += profile_probability::always ().apply_scale (nondefault_range,
1173 range).invert ();
1175 switch_decision_tree::reset_out_edges_aux (s);
1178 /* Find jump tables of given CLUSTERS, where all members of the vector
1179 are of type simple_cluster. New clusters are returned. */
1181 vec<cluster *>
1182 jump_table_cluster::find_jump_tables (vec<cluster *> &clusters)
1184 if (!is_enabled ())
1185 return clusters.copy ();
1187 unsigned l = clusters.length ();
1188 auto_vec<min_cluster_item> min;
1189 min.reserve (l + 1);
1191 min.quick_push (min_cluster_item (0, 0, 0));
1193 for (unsigned i = 1; i <= l; i++)
1195 /* Set minimal # of clusters with i-th item to infinite. */
1196 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1198 for (unsigned j = 0; j < i; j++)
1200 unsigned HOST_WIDE_INT s = min[j].m_non_jt_cases;
1201 if (i - j < case_values_threshold ())
1202 s += i - j;
1204 /* Prefer fewer clusters; break ties in favor of fewer cases left outside jump tables. */
1205 if ((min[j].m_count + 1 < min[i].m_count
1206 || (min[j].m_count + 1 == min[i].m_count
1207 && s < min[i].m_non_jt_cases))
1208 && can_be_handled (clusters, j, i - 1))
1209 min[i] = min_cluster_item (min[j].m_count + 1, j, s);
1212 gcc_checking_assert (min[i].m_count != INT_MAX);
1215 /* No result. */
1216 if (min[l].m_count == INT_MAX)
1217 return clusters.copy ();
1219 vec<cluster *> output;
1220 output.create (4);
1222 /* Find and build the clusters. */
1223 for (int end = l;;)
1225 int start = min[end].m_start;
1227 /* Do not allow clusters with small number of cases. */
1228 if (is_beneficial (clusters, start, end - 1))
1229 output.safe_push (new jump_table_cluster (clusters, start, end - 1));
1230 else
1231 for (int i = end - 1; i >= start; i--)
1232 output.safe_push (clusters[i]);
1234 end = start;
1236 if (start <= 0)
1237 break;
1240 output.reverse ();
1241 return output;
1244 /* Return true when cluster starting at START and ending at END (inclusive)
1245 can build a jump-table. */
1247 bool
1248 jump_table_cluster::can_be_handled (const vec<cluster *> &clusters,
1249 unsigned start, unsigned end)
1251 /* If the switch is relatively small such that the cost of one
1252 indirect jump on the target is higher than the cost of a
1253 decision tree, go with the decision tree.
1255 If range of values is much bigger than number of values,
1256 or if it is too large to represent in a HOST_WIDE_INT,
1257 make a sequence of conditional branches instead of a dispatch.
1259 The definition of "much bigger" depends on whether we are
1260 optimizing for size or for speed. */
1261 if (!flag_jump_tables)
1262 return false;
1264 /* For algorithm correctness, this function must return true for a
1265 single-case cluster; we bail out in is_beneficial if it's called just for
1266 a single case. */
1267 if (start == end)
1268 return true;
1270 unsigned HOST_WIDE_INT max_ratio
1271 = (optimize_insn_for_size_p ()
1272 ? PARAM_VALUE (PARAM_JUMP_TABLE_MAX_GROWTH_RATIO_FOR_SIZE)
1273 : PARAM_VALUE (PARAM_JUMP_TABLE_MAX_GROWTH_RATIO_FOR_SPEED));
1274 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1275 clusters[end]->get_high ());
1276 /* Check overflow. */
1277 if (range == 0)
1278 return false;
1280 unsigned HOST_WIDE_INT comparison_count = 0;
1281 for (unsigned i = start; i <= end; i++)
1283 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1284 comparison_count += sc->m_range_p ? 2 : 1;
1287 unsigned HOST_WIDE_INT lhs = 100 * range;
1288 if (lhs < range)
1289 return false;
1291 return lhs <= max_ratio * comparison_count;
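/* In other words, the cluster is accepted when
     range <= (max_ratio / 100) * comparison_count.
   As an illustrative example, assuming a maximum growth ratio of 800
   (i.e. 8x, an assumed value for the speed parameter), ten single-valued
   cases spread over a range of 50 values pass the test
   (100 * 50 <= 800 * 10), whereas the same ten cases spread over a range
   of 100 values do not.  */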
1294 /* Return true if the cluster starting at START and ending at END (inclusive)
1295 is a profitable transformation. */
1297 bool
1298 jump_table_cluster::is_beneficial (const vec<cluster *> &,
1299 unsigned start, unsigned end)
1301 /* Single case bail out. */
1302 if (start == end)
1303 return false;
1305 return end - start + 1 >= case_values_threshold ();
1308 /* Find bit tests of given CLUSTERS, where all members of the vector
1309 are of type simple_cluster. New clusters are returned. */
1311 vec<cluster *>
1312 bit_test_cluster::find_bit_tests (vec<cluster *> &clusters)
1314 vec<cluster *> output;
1315 output.create (4);
1317 unsigned l = clusters.length ();
1318 auto_vec<min_cluster_item> min;
1319 min.reserve (l + 1);
1321 min.quick_push (min_cluster_item (0, 0, 0));
1323 for (unsigned i = 1; i <= l; i++)
1325 /* Set minimal # of clusters with i-th item to infinite. */
1326 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1328 for (unsigned j = 0; j < i; j++)
1330 if (min[j].m_count + 1 < min[i].m_count
1331 && can_be_handled (clusters, j, i - 1))
1332 min[i] = min_cluster_item (min[j].m_count + 1, j, INT_MAX);
1335 gcc_checking_assert (min[i].m_count != INT_MAX);
1338 /* No result. */
1339 if (min[l].m_count == INT_MAX)
1340 return clusters.copy ();
1342 /* Find and build the clusters. */
1343 for (unsigned end = l;;)
1345 int start = min[end].m_start;
1347 if (is_beneficial (clusters, start, end - 1))
1349 bool entire = start == 0 && end == clusters.length ();
1350 output.safe_push (new bit_test_cluster (clusters, start, end - 1,
1351 entire));
1353 else
1354 for (int i = end - 1; i >= start; i--)
1355 output.safe_push (clusters[i]);
1357 end = start;
1359 if (start <= 0)
1360 break;
1363 output.reverse ();
1364 return output;
1367 /* Return true when RANGE of case values with UNIQ labels
1368 can build a bit test. */
1370 bool
1371 bit_test_cluster::can_be_handled (unsigned HOST_WIDE_INT range,
1372 unsigned int uniq)
1374 /* Check overflow. */
1375 if (range == 0)
1376 return 0;
1378 if (range >= GET_MODE_BITSIZE (word_mode))
1379 return false;
1381 return uniq <= 3;
1384 /* Return true when cluster starting at START and ending at END (inclusive)
1385 can build a bit test. */
1387 bool
1388 bit_test_cluster::can_be_handled (const vec<cluster *> &clusters,
1389 unsigned start, unsigned end)
1391 /* For algorithm correctness, this function must return true for a
1392 single-case cluster; we bail out in is_beneficial if it's called just for
1393 a single case. */
1394 if (start == end)
1395 return true;
1397 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1398 clusters[end]->get_high ());
1399 auto_bitmap dest_bbs;
1401 for (unsigned i = start; i <= end; i++)
1403 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1404 bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
1407 return can_be_handled (range, bitmap_count_bits (dest_bbs));
1410 /* Return true when expanding COUNT cases with UNIQ distinct targets as a
1411 bit test is beneficial. */
1413 bool
1414 bit_test_cluster::is_beneficial (unsigned count, unsigned uniq)
1416 return (((uniq == 1 && count >= 3)
1417 || (uniq == 2 && count >= 5)
1418 || (uniq == 3 && count >= 6)));
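/* The intuition, roughly: with e.g. two distinct targets and five case
   values, the bit-test expansion needs one range check plus two
   mask-and-branch tests, which is expected to beat the up-to-five
   compare-and-branches of a plain decision tree.  */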
1421 /* Return true if the cluster starting at START and ending at END (inclusive)
1422 is a profitable transformation. */
1424 bool
1425 bit_test_cluster::is_beneficial (const vec<cluster *> &clusters,
1426 unsigned start, unsigned end)
1428 /* Single case bail out. */
1429 if (start == end)
1430 return false;
1432 auto_bitmap dest_bbs;
1434 for (unsigned i = start; i <= end; i++)
1436 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1437 bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
1440 unsigned uniq = bitmap_count_bits (dest_bbs);
1441 unsigned count = end - start + 1;
1442 return is_beneficial (count, uniq);
1445 /* Comparison function for qsort to order bit tests by decreasing
1446 probability of execution. */
1449 case_bit_test::cmp (const void *p1, const void *p2)
1451 const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
1452 const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
1454 if (d2->bits != d1->bits)
1455 return d2->bits - d1->bits;
1457 /* Stabilize the sort. */
1458 return (LABEL_DECL_UID (CASE_LABEL (d2->label))
1459 - LABEL_DECL_UID (CASE_LABEL (d1->label)));
1462 /* Expand a switch statement by a short sequence of bit-wise
1463 comparisons. "switch(x)" is effectively converted into
1464 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
1465 integer constants.
1467 INDEX_EXPR is the value being switched on.
1469 MINVAL is the lowest case value in the case nodes,
1470 and RANGE is highest value minus MINVAL. MINVAL and RANGE
1471 are not guaranteed to be of the same type as INDEX_EXPR
1472 (the gimplifier doesn't change the type of case label values,
1473 and MINVAL and RANGE are derived from those values).
1474 MAXVAL is MINVAL + RANGE.
1476 There *MUST* be max_case_bit_tests or less unique case
1477 node targets. */
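/* A small, simplified example: for case values 0, 2, 4 and 6 all branching
   to the same label L, with MINVAL == 0 the emitted test is essentially

     if ((1 << x) & 0x55) goto L;

   since 0x55 has exactly bits 0, 2, 4 and 6 set (the range check against
   MAXVAL - MINVAL is omitted here for brevity).  */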
1479 void
1480 bit_test_cluster::emit (tree index_expr, tree index_type,
1481 tree, basic_block default_bb)
1483 struct case_bit_test test[m_max_case_bit_tests] = { {} };
1484 unsigned int i, j, k;
1485 unsigned int count;
1487 tree unsigned_index_type = unsigned_type_for (index_type);
1489 gimple_stmt_iterator gsi;
1490 gassign *shift_stmt;
1492 tree idx, tmp, csui;
1493 tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
1494 tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
1495 tree word_mode_one = fold_convert (word_type_node, integer_one_node);
1496 int prec = TYPE_PRECISION (word_type_node);
1497 wide_int wone = wi::one (prec);
1499 tree minval = get_low ();
1500 tree maxval = get_high ();
1501 tree range = int_const_binop (MINUS_EXPR, maxval, minval);
1502 unsigned HOST_WIDE_INT bt_range = get_range (minval, maxval);
1504 /* Go through all the cases and collect the case labels, profile
1505 counts, and other information we need to build the branch tests. */
1506 count = 0;
1507 for (i = 0; i < m_cases.length (); i++)
1509 unsigned int lo, hi;
1510 simple_cluster *n = static_cast<simple_cluster *> (m_cases[i]);
1511 for (k = 0; k < count; k++)
1512 if (n->m_case_bb == test[k].target_bb)
1513 break;
1515 if (k == count)
1517 gcc_checking_assert (count < m_max_case_bit_tests);
1518 test[k].mask = wi::zero (prec);
1519 test[k].target_bb = n->m_case_bb;
1520 test[k].label = n->m_case_label_expr;
1521 test[k].bits = 0;
1522 count++;
1525 test[k].bits += n->get_range (n->get_low (), n->get_high ());
1527 lo = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_low (), minval));
1528 if (n->get_high () == NULL_TREE)
1529 hi = lo;
1530 else
1531 hi = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_high (),
1532 minval));
1534 for (j = lo; j <= hi; j++)
1535 test[k].mask |= wi::lshift (wone, j);
1538 qsort (test, count, sizeof (*test), case_bit_test::cmp);
1540 /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
1541 the minval subtractions, but it might make the mask constants more
1542 expensive. So, compare the costs. */
1543 if (compare_tree_int (minval, 0) > 0
1544 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
1546 int cost_diff;
1547 HOST_WIDE_INT m = tree_to_uhwi (minval);
1548 rtx reg = gen_raw_REG (word_mode, 10000);
1549 bool speed_p = optimize_insn_for_speed_p ();
1550 cost_diff = set_rtx_cost (gen_rtx_PLUS (word_mode, reg,
1551 GEN_INT (-m)), speed_p);
1552 for (i = 0; i < count; i++)
1554 rtx r = immed_wide_int_const (test[i].mask, word_mode);
1555 cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
1556 word_mode, speed_p);
1557 r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
1558 cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
1559 word_mode, speed_p);
1561 if (cost_diff > 0)
1563 for (i = 0; i < count; i++)
1564 test[i].mask = wi::lshift (test[i].mask, m);
1565 minval = build_zero_cst (TREE_TYPE (minval));
1566 range = maxval;
1570 /* Now build the test-and-branch code. */
1572 gsi = gsi_last_bb (m_case_bb);
1574 /* idx = (unsigned)x - minval. */
1575 idx = fold_convert (unsigned_index_type, index_expr);
1576 idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
1577 fold_convert (unsigned_index_type, minval));
1578 idx = force_gimple_operand_gsi (&gsi, idx,
1579 /*simple=*/true, NULL_TREE,
1580 /*before=*/true, GSI_SAME_STMT);
1582 if (m_handles_entire_switch)
1584 /* if (idx > range) goto default */
1585 range
1586 = force_gimple_operand_gsi (&gsi,
1587 fold_convert (unsigned_index_type, range),
1588 /*simple=*/true, NULL_TREE,
1589 /*before=*/true, GSI_SAME_STMT);
1590 tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
1591 basic_block new_bb
1592 = hoist_edge_and_branch_if_true (&gsi, tmp, default_bb,
1593 profile_probability::unlikely ());
1594 gsi = gsi_last_bb (new_bb);
1597 /* csui = (1 << (word_mode) idx) */
1598 csui = make_ssa_name (word_type_node);
1599 tmp = fold_build2 (LSHIFT_EXPR, word_type_node, word_mode_one,
1600 fold_convert (word_type_node, idx));
1601 tmp = force_gimple_operand_gsi (&gsi, tmp,
1602 /*simple=*/false, NULL_TREE,
1603 /*before=*/true, GSI_SAME_STMT);
1604 shift_stmt = gimple_build_assign (csui, tmp);
1605 gsi_insert_before (&gsi, shift_stmt, GSI_SAME_STMT);
1606 update_stmt (shift_stmt);
1608 profile_probability prob = profile_probability::always ();
1610 /* for each unique set of cases:
1611 if (const & csui) goto target */
1612 for (k = 0; k < count; k++)
1614 prob = profile_probability::always ().apply_scale (test[k].bits,
1615 bt_range);
1616 bt_range -= test[k].bits;
1617 tmp = wide_int_to_tree (word_type_node, test[k].mask);
1618 tmp = fold_build2 (BIT_AND_EXPR, word_type_node, csui, tmp);
1619 tmp = force_gimple_operand_gsi (&gsi, tmp,
1620 /*simple=*/true, NULL_TREE,
1621 /*before=*/true, GSI_SAME_STMT);
1622 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, word_mode_zero);
1623 basic_block new_bb
1624 = hoist_edge_and_branch_if_true (&gsi, tmp, test[k].target_bb, prob);
1625 gsi = gsi_last_bb (new_bb);
1628 /* We should have removed all edges now. */
1629 gcc_assert (EDGE_COUNT (gsi_bb (gsi)->succs) == 0);
1631 /* If nothing matched, go to the default label. */
1632 edge e = make_edge (gsi_bb (gsi), default_bb, EDGE_FALLTHRU);
1633 e->probability = profile_probability::always ();
1636 /* Split the basic block at the statement pointed to by GSIP, and insert
1637 a branch to basic block CASE_BB conditional on tree expression COND.
1640 The new true edge is given probability PROB; the fall-through edge of
1641 the split becomes the false edge and gets the complementary
1642 probability.
1645 The CFG is updated, but the dominator tree is not valid after
1646 this transformation.
1649 Returns the newly created basic block. */
1651 basic_block
1652 bit_test_cluster::hoist_edge_and_branch_if_true (gimple_stmt_iterator *gsip,
1653 tree cond, basic_block case_bb,
1654 profile_probability prob)
1656 tree tmp;
1657 gcond *cond_stmt;
1658 edge e_false;
1659 basic_block new_bb, split_bb = gsi_bb (*gsip);
1661 edge e_true = make_edge (split_bb, case_bb, EDGE_TRUE_VALUE);
1662 e_true->probability = prob;
1663 gcc_assert (e_true->src == split_bb);
1665 tmp = force_gimple_operand_gsi (gsip, cond, /*simple=*/true, NULL,
1666 /*before=*/true, GSI_SAME_STMT);
1667 cond_stmt = gimple_build_cond_from_tree (tmp, NULL_TREE, NULL_TREE);
1668 gsi_insert_before (gsip, cond_stmt, GSI_SAME_STMT);
1670 e_false = split_block (split_bb, cond_stmt);
1671 new_bb = e_false->dest;
1672 redirect_edge_pred (e_true, split_bb);
1674 e_false->flags &= ~EDGE_FALLTHRU;
1675 e_false->flags |= EDGE_FALSE_VALUE;
1676 e_false->probability = e_true->probability.invert ();
1677 new_bb->count = e_false->count ();
1679 return new_bb;
1682 /* Compute the number of case labels that correspond to each outgoing edge of
1683 switch statement. Record this information in the aux field of the edge. */
1685 void
1686 switch_decision_tree::compute_cases_per_edge ()
1688 reset_out_edges_aux (m_switch);
1689 int ncases = gimple_switch_num_labels (m_switch);
1690 for (int i = ncases - 1; i >= 1; --i)
1692 edge case_edge = gimple_switch_edge (cfun, m_switch, i);
1693 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + 1);
1697 /* Analyze the switch statement and return true when the statement is
1698 expanded as a decision tree. */
1700 bool
1701 switch_decision_tree::analyze_switch_statement ()
1703 unsigned l = gimple_switch_num_labels (m_switch);
1704 basic_block bb = gimple_bb (m_switch);
1705 auto_vec<cluster *> clusters;
1706 clusters.create (l - 1);
1708 basic_block default_bb = gimple_switch_default_bb (cfun, m_switch);
1709 m_case_bbs.reserve (l);
1710 m_case_bbs.quick_push (default_bb);
1712 compute_cases_per_edge ();
1714 for (unsigned i = 1; i < l; i++)
1716 tree elt = gimple_switch_label (m_switch, i);
1717 tree lab = CASE_LABEL (elt);
1718 basic_block case_bb = label_to_block (cfun, lab);
1719 edge case_edge = find_edge (bb, case_bb);
1720 tree low = CASE_LOW (elt);
1721 tree high = CASE_HIGH (elt);
1723 profile_probability p
1724 = case_edge->probability.apply_scale (1, (intptr_t) (case_edge->aux));
1725 clusters.quick_push (new simple_cluster (low, high, elt, case_edge->dest,
1726 p));
1727 m_case_bbs.quick_push (case_edge->dest);
1730 reset_out_edges_aux (m_switch);
1732 /* Find jump table clusters. */
1733 vec<cluster *> output = jump_table_cluster::find_jump_tables (clusters);
1735 /* Find bit test clusters. */
1736 vec<cluster *> output2;
1737 auto_vec<cluster *> tmp;
1738 output2.create (1);
1739 tmp.create (1);
1741 for (unsigned i = 0; i < output.length (); i++)
1743 cluster *c = output[i];
1744 if (c->get_type () != SIMPLE_CASE)
1746 if (!tmp.is_empty ())
1748 vec<cluster *> n = bit_test_cluster::find_bit_tests (tmp);
1749 output2.safe_splice (n);
1750 n.release ();
1751 tmp.truncate (0);
1753 output2.safe_push (c);
1755 else
1756 tmp.safe_push (c);
1759 /* We still can have a temporary vector to test. */
1760 if (!tmp.is_empty ())
1762 vec<cluster *> n = bit_test_cluster::find_bit_tests (tmp);
1763 output2.safe_splice (n);
1764 n.release ();
1767 if (dump_file)
1769 fprintf (dump_file, ";; GIMPLE switch case clusters: ");
1770 for (unsigned i = 0; i < output2.length (); i++)
1771 output2[i]->dump (dump_file, dump_flags & TDF_DETAILS);
1772 fprintf (dump_file, "\n");
1775 output.release ();
1777 bool expanded = try_switch_expansion (output2);
1779 for (unsigned i = 0; i < output2.length (); i++)
1780 delete output2[i];
1782 output2.release ();
1784 return expanded;
1787 /* Attempt to expand CLUSTERS as a decision tree. Return true when
1788 expanded. */
1790 bool
1791 switch_decision_tree::try_switch_expansion (vec<cluster *> &clusters)
1793 tree index_expr = gimple_switch_index (m_switch);
1794 tree index_type = TREE_TYPE (index_expr);
1795 basic_block bb = gimple_bb (m_switch);
1797 if (gimple_switch_num_labels (m_switch) == 1)
1798 return false;
1800 /* Find the default case target label. */
1801 edge default_edge = gimple_switch_default_edge (cfun, m_switch);
1802 m_default_bb = default_edge->dest;
1804 /* Do the insertion of a case label into m_case_list. The labels are
1805 fed to us in descending order from the sorted vector of case labels used
1806 in the tree part of the middle end. So the list we construct is
1807 sorted in ascending order. */
1809 for (int i = clusters.length () - 1; i >= 0; i--)
1811 case_tree_node *r = m_case_list;
1812 m_case_list = m_case_node_pool.allocate ();
1813 m_case_list->m_right = r;
1814 m_case_list->m_c = clusters[i];
1817 record_phi_operand_mapping ();
1819 /* Split basic block that contains the gswitch statement. */
1820 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1821 edge e;
1822 if (gsi_end_p (gsi))
1823 e = split_block_after_labels (bb);
1824 else
1826 gsi_prev (&gsi);
1827 e = split_block (bb, gsi_stmt (gsi));
1829 bb = split_edge (e);
1831 /* Create new basic blocks for non-case clusters where specific expansion
1832 needs to happen. */
1833 for (unsigned i = 0; i < clusters.length (); i++)
1834 if (clusters[i]->get_type () != SIMPLE_CASE)
1836 clusters[i]->m_case_bb = create_empty_bb (bb);
1837 clusters[i]->m_case_bb->loop_father = bb->loop_father;
1840 /* Do not do extra work for a single cluster. */
1841 if (clusters.length () == 1
1842 && clusters[0]->get_type () != SIMPLE_CASE)
1844 cluster *c = clusters[0];
1845 c->emit (index_expr, index_type,
1846 gimple_switch_default_label (m_switch), m_default_bb);
1847 redirect_edge_succ (single_succ_edge (bb), c->m_case_bb);
1849 else
1851 emit (bb, index_expr, default_edge->probability, index_type);
1853 /* Emit cluster-specific switch handling. */
1854 for (unsigned i = 0; i < clusters.length (); i++)
1855 if (clusters[i]->get_type () != SIMPLE_CASE)
1856 clusters[i]->emit (index_expr, index_type,
1857 gimple_switch_default_label (m_switch),
1858 m_default_bb);
1861 fix_phi_operands_for_edges ();
1863 return true;
1866 /* Before switch transformation, record all SSA_NAMEs defined in switch BB
1867 and used in a label basic block. */
1869 void
1870 switch_decision_tree::record_phi_operand_mapping ()
1872 basic_block switch_bb = gimple_bb (m_switch);
1873 /* Record all PHI nodes that have to be fixed after conversion. */
1874 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1876 gphi_iterator gsi;
1877 basic_block bb = m_case_bbs[i];
1878 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1880 gphi *phi = gsi.phi ();
1882 for (unsigned i = 0; i < gimple_phi_num_args (phi); i++)
1884 basic_block phi_src_bb = gimple_phi_arg_edge (phi, i)->src;
1885 if (phi_src_bb == switch_bb)
1887 tree def = gimple_phi_arg_def (phi, i);
1888 tree result = gimple_phi_result (phi);
1889 m_phi_mapping.put (result, def);
1890 break;
1897 /* Append new operands to PHI statements that were introduced due to
1898 addition of new edges to case labels. */
1900 void
1901 switch_decision_tree::fix_phi_operands_for_edges ()
1903 gphi_iterator gsi;
1905 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1907 basic_block bb = m_case_bbs[i];
1908 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1910 gphi *phi = gsi.phi ();
1911 for (unsigned j = 0; j < gimple_phi_num_args (phi); j++)
1913 tree def = gimple_phi_arg_def (phi, j);
1914 if (def == NULL_TREE)
1916 edge e = gimple_phi_arg_edge (phi, j);
1917 tree *definition
1918 = m_phi_mapping.get (gimple_phi_result (phi));
1919 gcc_assert (definition);
1920 add_phi_arg (phi, *definition, e, UNKNOWN_LOCATION);
1927 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1928 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1930 We generate a binary decision tree to select the appropriate target
1931 code. */
1933 void
1934 switch_decision_tree::emit (basic_block bb, tree index_expr,
1935 profile_probability default_prob, tree index_type)
1937 balance_case_nodes (&m_case_list, NULL);
1939 if (dump_file)
1940 dump_function_to_file (current_function_decl, dump_file, dump_flags);
1941 if (dump_file && (dump_flags & TDF_DETAILS))
1943 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1944 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1945 gcc_assert (m_case_list != NULL);
1946 dump_case_nodes (dump_file, m_case_list, indent_step, 0);
1949 bb = emit_case_nodes (bb, index_expr, m_case_list, default_prob, index_type,
1950 gimple_location (m_switch));
1952 if (bb)
1953 emit_jump (bb, m_default_bb);
 1955 	  /* Remove the gswitch statement and delete the basic block that contained it.  */
1956 bb = gimple_bb (m_switch);
1957 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1958 gsi_remove (&gsi, true);
1960 delete_basic_block (bb);
1963 /* Take an ordered list of case nodes
1964 and transform them into a near optimal binary tree,
1965 on the assumption that any target code selection value is as
1966 likely as any other.
 1968    The transformation is performed by splitting the ordered
 1969    list around a pivot into two parts of roughly equal probability
 1970    weight.  The parts are then attached to the pivot as left and
 1971    right branches, and each branch is transformed recursively.  */
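/* A small illustration, assuming uniform probabilities:  the ordered
   list 1 -> 3 -> 5 -> 7 -> 9 is split around the pivot 5, giving

                 5
               /   \
           {1,3}   {7,9}

   Sub-lists that are too short to be worth splitting (here two entries
   each) are kept as a single level with only the parent links filled
   in.  */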
1973 void
1974 switch_decision_tree::balance_case_nodes (case_tree_node **head,
1975 case_tree_node *parent)
1977 case_tree_node *np;
1979 np = *head;
1980 if (np)
1982 int i = 0;
1983 int ranges = 0;
1984 case_tree_node **npp;
1985 case_tree_node *left;
1986 profile_probability prob = profile_probability::never ();
 1988       /* Count the number of entries in the branch.  Also count the ranges.  */
1990 while (np)
1992 if (!tree_int_cst_equal (np->m_c->get_low (), np->m_c->get_high ()))
1993 ranges++;
1995 i++;
1996 prob += np->m_c->m_prob;
1997 np = np->m_right;
2000 if (i > 2)
2002 /* Split this list if it is long enough for that to help. */
2003 npp = head;
2004 left = *npp;
2005 profile_probability pivot_prob = prob.apply_scale (1, 2);
 2007 	  /* Find the place in the list that bisects the list's total
 2008 	     probability.  */
2009 while (1)
2011 /* Skip nodes while their probability does not reach
2012 that amount. */
2013 prob -= (*npp)->m_c->m_prob;
2014 if ((prob.initialized_p () && prob < pivot_prob)
2015 || ! (*npp)->m_right)
2016 break;
2017 npp = &(*npp)->m_right;
2020 np = *npp;
2021 *npp = 0;
2022 *head = np;
2023 np->m_parent = parent;
2024 np->m_left = left == np ? NULL : left;
2026 /* Optimize each of the two split parts. */
2027 balance_case_nodes (&np->m_left, np);
2028 balance_case_nodes (&np->m_right, np);
2029 np->m_c->m_subtree_prob = np->m_c->m_prob;
2030 if (np->m_left)
2031 np->m_c->m_subtree_prob += np->m_left->m_c->m_subtree_prob;
2032 if (np->m_right)
2033 np->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
2035 else
2037 /* Else leave this branch as one level,
2038 but fill in `parent' fields. */
2039 np = *head;
2040 np->m_parent = parent;
2041 np->m_c->m_subtree_prob = np->m_c->m_prob;
2042 for (; np->m_right; np = np->m_right)
2044 np->m_right->m_parent = np;
2045 (*head)->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
 2051 /* Dump ROOT, a list or tree of case nodes, to file F.  */
2053 void
2054 switch_decision_tree::dump_case_nodes (FILE *f, case_tree_node *root,
2055 int indent_step, int indent_level)
2057 if (root == 0)
2058 return;
2059 indent_level++;
2061 dump_case_nodes (f, root->m_left, indent_step, indent_level);
2063 fputs (";; ", f);
2064 fprintf (f, "%*s", indent_step * indent_level, "");
2065 root->m_c->dump (f);
2066 root->m_c->m_prob.dump (f);
2067 fputs (" subtree: ", f);
2068 root->m_c->m_subtree_prob.dump (f);
2069 fputs (")\n", f);
2071 dump_case_nodes (f, root->m_right, indent_step, indent_level);
 2075 /* Emit an unconditional jump from basic block BB to CASE_BB by redirecting
      BB's single successor edge.  */
2077 void
2078 switch_decision_tree::emit_jump (basic_block bb, basic_block case_bb)
2080 edge e = single_succ_edge (bb);
2081 redirect_edge_succ (e, case_bb);
 2084 /* Generate code in basic block BB to compare OP0 with OP1 and to jump
 2085    to LABEL_BB if the comparison is true.
 2086    COMPARISON is the GIMPLE comparison code (EQ_EXPR, NE_EXPR, GT_EXPR, etc.).
 2087    PROB is the probability of jumping to LABEL_BB.  */
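/* Sketch of the resulting control flow (names are illustrative):
   starting from a block BB that has a single successor, a call such as

     emit_cmp_and_jump_insns (bb, idx, val, GT_EXPR, label_bb, prob, loc)

   appends the condition "if (idx > val)" to BB, splits BB after it and
   wires the edges so that

     BB: if (idx > val)  --true,  probability PROB------>  label_bb
                         --false, probability 1 - PROB-->  new block

   The returned block is the false destination, where code emission
   continues.  */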
2089 basic_block
2090 switch_decision_tree::emit_cmp_and_jump_insns (basic_block bb, tree op0,
2091 tree op1, tree_code comparison,
2092 basic_block label_bb,
2093 profile_probability prob,
2094 location_t loc)
 2096   // TODO: this is called once with lhs != index.
2097 op1 = fold_convert (TREE_TYPE (op0), op1);
2099 gcond *cond = gimple_build_cond (comparison, op0, op1, NULL_TREE, NULL_TREE);
2100 gimple_set_location (cond, loc);
2101 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2102 gsi_insert_after (&gsi, cond, GSI_NEW_STMT);
2104 gcc_assert (single_succ_p (bb));
2106 /* Make a new basic block where false branch will take place. */
2107 edge false_edge = split_block (bb, cond);
2108 false_edge->flags = EDGE_FALSE_VALUE;
2109 false_edge->probability = prob.invert ();
2111 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2112 true_edge->probability = prob;
2114 return false_edge->dest;
 2117 /* Generate code to jump to LABEL_BB if OP0 and OP1 are equal.
 2118    PROB is the probability of jumping to LABEL_BB.
 2119    BB is the basic block where the new condition will be placed.  */
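/* This is the helper used for single-value case nodes; for instance, a
   case label "case 42:" routed to CASE_BB is emitted as the equivalent
   of

     if (index == 42) goto CASE_BB;

   and, as with emit_cmp_and_jump_insns above, the block reached when
   the comparison is false is returned (the value 42 is only an
   example).  */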
2121 basic_block
2122 switch_decision_tree::do_jump_if_equal (basic_block bb, tree op0, tree op1,
2123 basic_block label_bb,
2124 profile_probability prob,
2125 location_t loc)
2127 op1 = fold_convert (TREE_TYPE (op0), op1);
2129 gcond *cond = gimple_build_cond (EQ_EXPR, op0, op1, NULL_TREE, NULL_TREE);
2130 gimple_set_location (cond, loc);
2131 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2132 gsi_insert_before (&gsi, cond, GSI_SAME_STMT);
2134 gcc_assert (single_succ_p (bb));
2136 /* Make a new basic block where false branch will take place. */
2137 edge false_edge = split_block (bb, cond);
2138 false_edge->flags = EDGE_FALSE_VALUE;
2139 false_edge->probability = prob.invert ();
2141 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2142 true_edge->probability = prob;
2144 return false_edge->dest;
2147 /* Emit step-by-step code to select a case for the value of INDEX.
2148 The thus generated decision tree follows the form of the
2149 case-node binary tree NODE, whose nodes represent test conditions.
2150 DEFAULT_PROB is probability of cases leading to default BB.
2151 INDEX_TYPE is the type of the index of the switch. */
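/* A worked example of the probability scaling used below (numbers are
   made up):  for a single-value node with probability 0.30, a subtree
   probability of 0.60 and DEFAULT_PROB of 0.40, the branch emitted for
   the node gets

     p = 0.30 / (0.60 + 0.40) = 0.30

   and this amount is then subtracted from the subtree probability, so
   later tests are scaled against the probability mass that can still
   reach them.  */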
2153 basic_block
2154 switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
2155 case_tree_node *node,
2156 profile_probability default_prob,
2157 tree index_type, location_t loc)
2159 profile_probability p;
2161 /* If node is null, we are done. */
2162 if (node == NULL)
2163 return bb;
2165 /* Single value case. */
2166 if (node->m_c->is_single_value_p ())
2168 /* Node is single valued. First see if the index expression matches
2169 this node and then check our children, if any. */
2170 p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
2171 bb = do_jump_if_equal (bb, index, node->m_c->get_low (),
2172 node->m_c->m_case_bb, p, loc);
 2173       /* This case has now been handled, so subtract its probability from
 2174 	 the subtree probability.  */
2175 node->m_c->m_subtree_prob -= p;
2177 if (node->m_left != NULL && node->m_right != NULL)
2179 /* 1) the node has both children
2181 If both children are single-valued cases with no
2182 children, finish up all the work. This way, we can save
2183 one ordered comparison. */
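/* For instance (values are illustrative), a balanced triple {4, 5, 6}
   with 5 as the pivot and 4 and 6 as childless single-value leaves is
   finished with three equality tests and no ordered comparison:

     if (i == 5) goto case_5;
     if (i == 6) goto case_6;
     if (i == 4) goto case_4;  */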
2185 if (!node->m_left->has_child ()
2186 && node->m_left->m_c->is_single_value_p ()
2187 && !node->m_right->has_child ()
2188 && node->m_right->m_c->is_single_value_p ())
2190 p = (node->m_right->m_c->m_prob
2191 / (node->m_c->m_subtree_prob + default_prob));
2192 bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
2193 node->m_right->m_c->m_case_bb, p, loc);
2195 p = (node->m_left->m_c->m_prob
2196 / (node->m_c->m_subtree_prob + default_prob));
2197 bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
2198 node->m_left->m_c->m_case_bb, p, loc);
2200 else
2202 /* Branch to a label where we will handle it later. */
2203 basic_block test_bb = split_edge (single_succ_edge (bb));
2204 redirect_edge_succ (single_pred_edge (test_bb),
2205 single_succ_edge (bb)->dest);
2207 p = ((node->m_right->m_c->m_subtree_prob
2208 + default_prob.apply_scale (1, 2))
2209 / (node->m_c->m_subtree_prob + default_prob));
2210 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2211 GT_EXPR, test_bb, p, loc);
2212 default_prob = default_prob.apply_scale (1, 2);
2214 /* Handle the left-hand subtree. */
2215 bb = emit_case_nodes (bb, index, node->m_left,
2216 default_prob, index_type, loc);
2218 /* If the left-hand subtree fell through,
2219 don't let it fall into the right-hand subtree. */
2220 if (bb && m_default_bb)
2221 emit_jump (bb, m_default_bb);
2223 bb = emit_case_nodes (test_bb, index, node->m_right,
2224 default_prob, index_type, loc);
2227 else if (node->m_left == NULL && node->m_right != NULL)
 2229 	  /* 2) the node has only a right child.  */
2231 /* Here we have a right child but no left so we issue a conditional
2232 branch to default and process the right child.
2234 Omit the conditional branch to default if the right child
2235 does not have any children and is single valued; it would
2236 cost too much space to save so little time. */
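/* For example (values are illustrative), if node 10 has only the
   childless single-value case 12 as its right child, emitting

     if (i < 10) goto default_bb;

   would merely shorten the path to the default label, so the else
   branch below tests the child directly with

     if (i == 12) goto case_12;  */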
2238 if (node->m_right->has_child ()
2239 || !node->m_right->m_c->is_single_value_p ())
2241 p = (default_prob.apply_scale (1, 2)
2242 / (node->m_c->m_subtree_prob + default_prob));
2243 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
2244 LT_EXPR, m_default_bb, p, loc);
2245 default_prob = default_prob.apply_scale (1, 2);
2247 bb = emit_case_nodes (bb, index, node->m_right, default_prob,
2248 index_type, loc);
2250 else
2252 /* We cannot process node->right normally
2253 since we haven't ruled out the numbers less than
2254 this node's value. So handle node->right explicitly. */
2255 p = (node->m_right->m_c->m_subtree_prob
2256 / (node->m_c->m_subtree_prob + default_prob));
2257 bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
2258 node->m_right->m_c->m_case_bb, p, loc);
2261 else if (node->m_left != NULL && node->m_right == NULL)
 2263 	  /* 3) just one subtree, on the left.  Similar to the previous case.  */
2265 if (node->m_left->has_child ()
2266 || !node->m_left->m_c->is_single_value_p ())
2268 p = (default_prob.apply_scale (1, 2)
2269 / (node->m_c->m_subtree_prob + default_prob));
2270 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2271 GT_EXPR, m_default_bb, p, loc);
2272 default_prob = default_prob.apply_scale (1, 2);
2274 bb = emit_case_nodes (bb, index, node->m_left, default_prob,
2275 index_type, loc);
2277 else
 2279 	      /* We cannot process node->left normally
 2280 		 since we haven't ruled out the numbers greater than
 2281 		 this node's value.  So handle node->left explicitly.  */
2282 p = (node->m_left->m_c->m_subtree_prob
2283 / (node->m_c->m_subtree_prob + default_prob));
2284 bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
2285 node->m_left->m_c->m_case_bb, p, loc);
2289 else
2291 /* Node is a range. These cases are very similar to those for a single
2292 value, except that we do not start by testing whether this node
2293 is the one to branch to. */
2294 if (node->has_child () || node->m_c->get_type () != SIMPLE_CASE)
2296 /* Branch to a label where we will handle it later. */
2297 basic_block test_bb = split_edge (single_succ_edge (bb));
2298 redirect_edge_succ (single_pred_edge (test_bb),
2299 single_succ_edge (bb)->dest);
2302 profile_probability right_prob = profile_probability::never ();
2303 if (node->m_right)
2304 right_prob = node->m_right->m_c->m_subtree_prob;
2305 p = ((right_prob + default_prob.apply_scale (1, 2))
2306 / (node->m_c->m_subtree_prob + default_prob));
2308 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2309 GT_EXPR, test_bb, p, loc);
2310 default_prob = default_prob.apply_scale (1, 2);
2312 /* Value belongs to this node or to the left-hand subtree. */
2313 p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
2314 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
2315 GE_EXPR, node->m_c->m_case_bb, p, loc);
2317 /* Handle the left-hand subtree. */
2318 bb = emit_case_nodes (bb, index, node->m_left,
2319 default_prob, index_type, loc);
2321 /* If the left-hand subtree fell through,
2322 don't let it fall into the right-hand subtree. */
2323 if (bb && m_default_bb)
2324 emit_jump (bb, m_default_bb);
2326 bb = emit_case_nodes (test_bb, index, node->m_right,
2327 default_prob, index_type, loc);
2329 else
2331 /* Node has no children so we check low and high bounds to remove
2332 redundant tests. Only one of the bounds can exist,
2333 since otherwise this node is bounded--a case tested already. */
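/* generate_range_test is expected to fold the two-sided check
   LOW <= i && i <= HIGH into a single unsigned comparison, roughly

     lhs = (unsigned type) i - (unsigned type) LOW;
     rhs = (unsigned type) (HIGH - LOW);

   so that the GT_EXPR branch to the default block below is taken
   exactly when i lies outside the case range (a sketch only; see
   generate_range_test for the precise form).  */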
2334 tree lhs, rhs;
2335 generate_range_test (bb, index, node->m_c->get_low (),
2336 node->m_c->get_high (), &lhs, &rhs);
2337 p = default_prob / (node->m_c->m_subtree_prob + default_prob);
2339 bb = emit_cmp_and_jump_insns (bb, lhs, rhs, GT_EXPR,
2340 m_default_bb, p, loc);
2342 emit_jump (bb, node->m_c->m_case_bb);
2343 return NULL;
2347 return bb;
 2350 /* The main function of the pass scans statements for switches and invokes
 2351    switch_conversion::expand on them.  */
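/* A sketch of the kind of rewrite this pass performs when every case
   merely selects a value (the array name and the guard are
   illustrative):

     switch (i)
       {
       case 0: return 10;
       case 1: return 20;
       case 2: return 30;
       case 3: return 40;
       default: return 0;
       }

   becomes roughly

     static const int CSWTCH[] = { 10, 20, 30, 40 };
     if (i <= 3)
       return CSWTCH[i];
     return 0;

   The real work is done by switch_conversion::expand below.  */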
2353 namespace {
2355 const pass_data pass_data_convert_switch =
2357 GIMPLE_PASS, /* type */
2358 "switchconv", /* name */
2359 OPTGROUP_NONE, /* optinfo_flags */
2360 TV_TREE_SWITCH_CONVERSION, /* tv_id */
2361 ( PROP_cfg | PROP_ssa ), /* properties_required */
2362 0, /* properties_provided */
2363 0, /* properties_destroyed */
2364 0, /* todo_flags_start */
2365 TODO_update_ssa, /* todo_flags_finish */
2368 class pass_convert_switch : public gimple_opt_pass
2370 public:
2371 pass_convert_switch (gcc::context *ctxt)
2372 : gimple_opt_pass (pass_data_convert_switch, ctxt)
2375 /* opt_pass methods: */
2376 virtual bool gate (function *) { return flag_tree_switch_conversion != 0; }
2377 virtual unsigned int execute (function *);
2379 }; // class pass_convert_switch
2381 unsigned int
2382 pass_convert_switch::execute (function *fun)
2384 basic_block bb;
2385 bool cfg_altered = false;
2387 FOR_EACH_BB_FN (bb, fun)
2389 gimple *stmt = last_stmt (bb);
2390 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
2392 if (dump_file)
2394 expanded_location loc = expand_location (gimple_location (stmt));
2396 fprintf (dump_file, "beginning to process the following "
2397 "SWITCH statement (%s:%d) : ------- \n",
2398 loc.file, loc.line);
2399 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2400 putc ('\n', dump_file);
2403 switch_conversion sconv;
2404 sconv.expand (as_a <gswitch *> (stmt));
2405 cfg_altered |= sconv.m_cfg_altered;
2406 if (!sconv.m_reason)
2408 if (dump_file)
2410 fputs ("Switch converted\n", dump_file);
2411 fputs ("--------------------------------\n", dump_file);
2414 /* Make no effort to update the post-dominator tree.
2415 It is actually not that hard for the transformations
2416 we have performed, but it is not supported
2417 by iterate_fix_dominators. */
2418 free_dominance_info (CDI_POST_DOMINATORS);
2420 else
2422 if (dump_file)
2424 fputs ("Bailing out - ", dump_file);
2425 fputs (sconv.m_reason, dump_file);
2426 fputs ("\n--------------------------------\n", dump_file);
 2432   return cfg_altered ? TODO_cleanup_cfg : 0;
2435 } // anon namespace
2437 gimple_opt_pass *
2438 make_pass_convert_switch (gcc::context *ctxt)
2440 return new pass_convert_switch (ctxt);
 2443 /* The main function of the pass scans statements for switches and lowers
 2444    them to a decision tree, a jump table or a series of bit tests.  */
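/* Unlike the switchconv pass above, which replaces suitable switches
   with array loads, this pass lowers the remaining GIMPLE_SWITCH
   statements:  analyze_switch_statement partitions the case labels into
   clusters, and each cluster is then expanded as a jump table, a series
   of bit tests, or a plain branch in the decision tree emitted by
   switch_decision_tree::emit.  */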
2446 namespace {
2448 template <bool O0> class pass_lower_switch: public gimple_opt_pass
2450 public:
2451 pass_lower_switch (gcc::context *ctxt) : gimple_opt_pass (data, ctxt) {}
2453 static const pass_data data;
2454 opt_pass *
2455 clone ()
2457 return new pass_lower_switch<O0> (m_ctxt);
2460 virtual bool
2461 gate (function *)
2463 return !O0 || !optimize;
2466 virtual unsigned int execute (function *fun);
2467 }; // class pass_lower_switch
2469 template <bool O0>
2470 const pass_data pass_lower_switch<O0>::data = {
2471 GIMPLE_PASS, /* type */
2472 O0 ? "switchlower_O0" : "switchlower", /* name */
2473 OPTGROUP_NONE, /* optinfo_flags */
2474 TV_TREE_SWITCH_LOWERING, /* tv_id */
2475 ( PROP_cfg | PROP_ssa ), /* properties_required */
2476 0, /* properties_provided */
2477 0, /* properties_destroyed */
2478 0, /* todo_flags_start */
2479 TODO_update_ssa | TODO_cleanup_cfg, /* todo_flags_finish */
2482 template <bool O0>
2483 unsigned int
2484 pass_lower_switch<O0>::execute (function *fun)
2486 basic_block bb;
2487 bool expanded = false;
2489 auto_vec<gimple *> switch_statements;
2490 switch_statements.create (1);
2492 FOR_EACH_BB_FN (bb, fun)
2494 gimple *stmt = last_stmt (bb);
2495 gswitch *swtch;
2496 if (stmt && (swtch = dyn_cast<gswitch *> (stmt)))
2498 if (!O0)
2499 group_case_labels_stmt (swtch);
2500 switch_statements.safe_push (swtch);
2504 for (unsigned i = 0; i < switch_statements.length (); i++)
2506 gimple *stmt = switch_statements[i];
2507 if (dump_file)
2509 expanded_location loc = expand_location (gimple_location (stmt));
2511 fprintf (dump_file, "beginning to process the following "
2512 "SWITCH statement (%s:%d) : ------- \n",
2513 loc.file, loc.line);
2514 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2515 putc ('\n', dump_file);
2518 gswitch *swtch = dyn_cast<gswitch *> (stmt);
2519 if (swtch)
2521 switch_decision_tree dt (swtch);
2522 expanded |= dt.analyze_switch_statement ();
2526 if (expanded)
2528 free_dominance_info (CDI_DOMINATORS);
2529 free_dominance_info (CDI_POST_DOMINATORS);
2530 mark_virtual_operands_for_renaming (cfun);
2533 return 0;
2536 } // anon namespace
2538 gimple_opt_pass *
2539 make_pass_lower_switch_O0 (gcc::context *ctxt)
2541 return new pass_lower_switch<true> (ctxt);
2543 gimple_opt_pass *
2544 make_pass_lower_switch (gcc::context *ctxt)
2546 return new pass_lower_switch<false> (ctxt);