gcc/tree-switch-conversion.c
1 /* Lower GIMPLE_SWITCH expressions to something more efficient than
2 a jump table.
3 Copyright (C) 2006-2021 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /* This file handles the lowering of GIMPLE_SWITCH to an indexed
23 load, or a series of bit-test-and-branch expressions. */
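/* For orientation, a hedged sketch of the conversion performed here
   (simplified; the actual output is GIMPLE, and CSWTCH is only the
   conventional name prefix used below):

     int f (int i)
     {
       int r;
       switch (i)
         {
         case 0: r = 10; break;
         case 1: r = 201; break;
         case 2: r = 37; break;
         default: r = 0; break;
         }
       return r;
     }

   can be lowered to a load from a static array guarded by a range check,
   roughly

     static const int CSWTCH[3] = { 10, 201, 37 };
     int f (int i)
     {
       return (unsigned) i <= 2 ? CSWTCH[i] : 0;
     }

   and when the case values happen to form a linear function a * x + b,
   the array is dropped and the value is computed directly.  */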
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "insn-codes.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "cfghooks.h"
34 #include "tree-pass.h"
35 #include "ssa.h"
36 #include "optabs-tree.h"
37 #include "cgraph.h"
38 #include "gimple-pretty-print.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "stor-layout.h"
42 #include "cfganal.h"
43 #include "gimplify.h"
44 #include "gimple-iterator.h"
45 #include "gimplify-me.h"
46 #include "gimple-fold.h"
47 #include "tree-cfg.h"
48 #include "cfgloop.h"
49 #include "alloc-pool.h"
50 #include "target.h"
51 #include "tree-into-ssa.h"
52 #include "omp-general.h"
53 #include "gimple-range.h"
55 /* ??? For lang_hooks.types.type_for_mode, but is there a word_mode
56 type in the GIMPLE type system that is language-independent? */
57 #include "langhooks.h"
59 #include "tree-switch-conversion.h"
61 using namespace tree_switch_conversion;
63 /* Constructor. */
65 switch_conversion::switch_conversion (): m_final_bb (NULL),
66 m_constructors (NULL), m_default_values (NULL),
67 m_arr_ref_first (NULL), m_arr_ref_last (NULL),
68 m_reason (NULL), m_default_case_nonstandard (false), m_cfg_altered (false)
 72 /* Collect information about the SWTCH statement.  */
74 void
75 switch_conversion::collect (gswitch *swtch)
77 unsigned int branch_num = gimple_switch_num_labels (swtch);
78 tree min_case, max_case;
79 unsigned int i;
80 edge e, e_default, e_first;
81 edge_iterator ei;
83 m_switch = swtch;
85 /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
86 is a default label which is the first in the vector.
87 Collect the bits we can deduce from the CFG. */
88 m_index_expr = gimple_switch_index (swtch);
89 m_switch_bb = gimple_bb (swtch);
90 e_default = gimple_switch_default_edge (cfun, swtch);
91 m_default_bb = e_default->dest;
92 m_default_prob = e_default->probability;
94 /* Get upper and lower bounds of case values, and the covered range. */
95 min_case = gimple_switch_label (swtch, 1);
96 max_case = gimple_switch_label (swtch, branch_num - 1);
98 m_range_min = CASE_LOW (min_case);
99 if (CASE_HIGH (max_case) != NULL_TREE)
100 m_range_max = CASE_HIGH (max_case);
101 else
102 m_range_max = CASE_LOW (max_case);
104 m_contiguous_range = true;
105 tree last = CASE_HIGH (min_case) ? CASE_HIGH (min_case) : m_range_min;
106 for (i = 2; i < branch_num; i++)
108 tree elt = gimple_switch_label (swtch, i);
109 if (wi::to_wide (last) + 1 != wi::to_wide (CASE_LOW (elt)))
111 m_contiguous_range = false;
112 break;
114 last = CASE_HIGH (elt) ? CASE_HIGH (elt) : CASE_LOW (elt);
117 if (m_contiguous_range)
118 e_first = gimple_switch_edge (cfun, swtch, 1);
119 else
120 e_first = e_default;
122 /* See if there is one common successor block for all branch
123 targets. If it exists, record it in FINAL_BB.
 124 As the initial guess, take the destination of the first non-default
 125 case if the range is contiguous and of the default case otherwise,
 126 or that block's destination if it is a forwarder block.  */
127 if (! single_pred_p (e_first->dest))
128 m_final_bb = e_first->dest;
129 else if (single_succ_p (e_first->dest)
130 && ! single_pred_p (single_succ (e_first->dest)))
131 m_final_bb = single_succ (e_first->dest);
132 /* Require that all switch destinations are either that common
133 FINAL_BB or a forwarder to it, except for the default
134 case if contiguous range. */
135 if (m_final_bb)
136 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
138 if (e->dest == m_final_bb)
139 continue;
141 if (single_pred_p (e->dest)
142 && single_succ_p (e->dest)
143 && single_succ (e->dest) == m_final_bb)
144 continue;
146 if (e == e_default && m_contiguous_range)
148 m_default_case_nonstandard = true;
149 continue;
152 m_final_bb = NULL;
153 break;
156 m_range_size
157 = int_const_binop (MINUS_EXPR, m_range_max, m_range_min);
159 /* Get a count of the number of case labels. Single-valued case labels
160 simply count as one, but a case range counts double, since it may
161 require two compares if it gets lowered as a branching tree. */
162 m_count = 0;
163 for (i = 1; i < branch_num; i++)
165 tree elt = gimple_switch_label (swtch, i);
166 m_count++;
167 if (CASE_HIGH (elt)
168 && ! tree_int_cst_equal (CASE_LOW (elt), CASE_HIGH (elt)))
169 m_count++;
172 /* Get the number of unique non-default targets out of the GIMPLE_SWITCH
 173 block.  Assuming a CFG cleanup has already removed degenerate
 174 switch statements, we can simply use EDGE_COUNT.  */
175 m_uniq = EDGE_COUNT (gimple_bb (swtch)->succs) - 1;
 178 /* Checks whether the range covered by the case values of the switch
 179 statement isn't too big and whether the number of branches actually
 180 justifies the size of the new array.  */
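/* A hedged numeric illustration: with param_switch_conversion_branch_ratio
   equal to 8, a switch with the three case values 0, 5 and 1000 has a range
   size of 1000, far more than 3 * 8 = 24, so it is rejected here and left
   to the other switch expansion strategies.  */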
182 bool
183 switch_conversion::check_range ()
185 gcc_assert (m_range_size);
186 if (!tree_fits_uhwi_p (m_range_size))
188 m_reason = "index range way too large or otherwise unusable";
189 return false;
192 if (tree_to_uhwi (m_range_size)
193 > ((unsigned) m_count * param_switch_conversion_branch_ratio))
195 m_reason = "the maximum range-branch ratio exceeded";
196 return false;
199 return true;
202 /* Checks whether all but the final BB basic blocks are empty. */
204 bool
205 switch_conversion::check_all_empty_except_final ()
207 edge e, e_default = find_edge (m_switch_bb, m_default_bb);
208 edge_iterator ei;
210 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
212 if (e->dest == m_final_bb)
213 continue;
215 if (!empty_block_p (e->dest))
217 if (m_contiguous_range && e == e_default)
219 m_default_case_nonstandard = true;
220 continue;
223 m_reason = "bad case - a non-final BB not empty";
224 return false;
228 return true;
231 /* This function checks whether all required values in phi nodes in final_bb
232 are constants. Required values are those that correspond to a basic block
233 which is a part of the examined switch statement. It returns true if the
234 phi nodes are OK, otherwise false. */
236 bool
237 switch_conversion::check_final_bb ()
239 gphi_iterator gsi;
241 m_phi_count = 0;
242 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
244 gphi *phi = gsi.phi ();
245 unsigned int i;
247 if (virtual_operand_p (gimple_phi_result (phi)))
248 continue;
250 m_phi_count++;
252 for (i = 0; i < gimple_phi_num_args (phi); i++)
254 basic_block bb = gimple_phi_arg_edge (phi, i)->src;
256 if (bb == m_switch_bb
257 || (single_pred_p (bb)
258 && single_pred (bb) == m_switch_bb
259 && (!m_default_case_nonstandard
260 || empty_block_p (bb))))
262 tree reloc, val;
263 const char *reason = NULL;
265 val = gimple_phi_arg_def (phi, i);
266 if (!is_gimple_ip_invariant (val))
267 reason = "non-invariant value from a case";
268 else
270 reloc = initializer_constant_valid_p (val, TREE_TYPE (val));
271 if ((flag_pic && reloc != null_pointer_node)
272 || (!flag_pic && reloc == NULL_TREE))
274 if (reloc)
275 reason
276 = "value from a case would need runtime relocations";
277 else
278 reason
279 = "value from a case is not a valid initializer";
282 if (reason)
 284 /* For a contiguous range, we can allow a non-constant value
 285 or one that needs relocation, as long as it is
 286 only reachable from the default case.  */
287 if (bb == m_switch_bb)
288 bb = m_final_bb;
289 if (!m_contiguous_range || bb != m_default_bb)
291 m_reason = reason;
292 return false;
295 unsigned int branch_num = gimple_switch_num_labels (m_switch);
296 for (unsigned int i = 1; i < branch_num; i++)
298 if (gimple_switch_label_bb (cfun, m_switch, i) == bb)
300 m_reason = reason;
301 return false;
304 m_default_case_nonstandard = true;
310 return true;
313 /* The following function allocates default_values, target_{in,out}_names and
314 constructors arrays. The last one is also populated with pointers to
315 vectors that will become constructors of new arrays. */
317 void
318 switch_conversion::create_temp_arrays ()
320 int i;
322 m_default_values = XCNEWVEC (tree, m_phi_count * 3);
323 /* ??? Macros do not support multi argument templates in their
324 argument list. We create a typedef to work around that problem. */
325 typedef vec<constructor_elt, va_gc> *vec_constructor_elt_gc;
326 m_constructors = XCNEWVEC (vec_constructor_elt_gc, m_phi_count);
327 m_target_inbound_names = m_default_values + m_phi_count;
328 m_target_outbound_names = m_target_inbound_names + m_phi_count;
329 for (i = 0; i < m_phi_count; i++)
330 vec_alloc (m_constructors[i], tree_to_uhwi (m_range_size) + 1);
333 /* Populate the array of default values in the order of phi nodes.
334 DEFAULT_CASE is the CASE_LABEL_EXPR for the default switch branch
335 if the range is non-contiguous or the default case has standard
336 structure, otherwise it is the first non-default case instead. */
338 void
339 switch_conversion::gather_default_values (tree default_case)
341 gphi_iterator gsi;
342 basic_block bb = label_to_block (cfun, CASE_LABEL (default_case));
343 edge e;
344 int i = 0;
346 gcc_assert (CASE_LOW (default_case) == NULL_TREE
347 || m_default_case_nonstandard);
349 if (bb == m_final_bb)
350 e = find_edge (m_switch_bb, bb);
351 else
352 e = single_succ_edge (bb);
354 for (gsi = gsi_start_phis (m_final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
356 gphi *phi = gsi.phi ();
357 if (virtual_operand_p (gimple_phi_result (phi)))
358 continue;
359 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
360 gcc_assert (val);
361 m_default_values[i++] = val;
365 /* The following function populates the vectors in the constructors array with
366 future contents of the static arrays. The vectors are populated in the
367 order of phi nodes. */
369 void
370 switch_conversion::build_constructors ()
372 unsigned i, branch_num = gimple_switch_num_labels (m_switch);
373 tree pos = m_range_min;
374 tree pos_one = build_int_cst (TREE_TYPE (pos), 1);
376 for (i = 1; i < branch_num; i++)
378 tree cs = gimple_switch_label (m_switch, i);
379 basic_block bb = label_to_block (cfun, CASE_LABEL (cs));
380 edge e;
381 tree high;
382 gphi_iterator gsi;
383 int j;
385 if (bb == m_final_bb)
386 e = find_edge (m_switch_bb, bb);
387 else
388 e = single_succ_edge (bb);
389 gcc_assert (e);
391 while (tree_int_cst_lt (pos, CASE_LOW (cs)))
393 int k;
394 for (k = 0; k < m_phi_count; k++)
396 constructor_elt elt;
398 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
399 elt.value
400 = unshare_expr_without_location (m_default_values[k]);
401 m_constructors[k]->quick_push (elt);
404 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
406 gcc_assert (tree_int_cst_equal (pos, CASE_LOW (cs)));
408 j = 0;
409 if (CASE_HIGH (cs))
410 high = CASE_HIGH (cs);
411 else
412 high = CASE_LOW (cs);
413 for (gsi = gsi_start_phis (m_final_bb);
414 !gsi_end_p (gsi); gsi_next (&gsi))
416 gphi *phi = gsi.phi ();
417 if (virtual_operand_p (gimple_phi_result (phi)))
418 continue;
419 tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
420 tree low = CASE_LOW (cs);
421 pos = CASE_LOW (cs);
425 constructor_elt elt;
427 elt.index = int_const_binop (MINUS_EXPR, pos, m_range_min);
428 elt.value = unshare_expr_without_location (val);
429 m_constructors[j]->quick_push (elt);
431 pos = int_const_binop (PLUS_EXPR, pos, pos_one);
432 } while (!tree_int_cst_lt (high, pos)
433 && tree_int_cst_lt (low, pos));
434 j++;
 439 /* If all values in the constructor vector are values of a linear function
 440 a * x + b, then return true.  When true, COEFF_A and COEFF_B are set to the
 441 coefficients of the linear function.  Note that equal values are a special
 442 case of a linear function with a equal to zero.  */
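/* Worked example (for illustration only): case values 5, 6 and 7 mapping to
   11, 13 and 15 give a = 13 - 11 = 2 and b = 13 - 2 * (5 + 1) = 1, and every
   entry satisfies 2 * x + 1, so the array can be replaced by that
   computation.  */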
444 bool
445 switch_conversion::contains_linear_function_p (vec<constructor_elt, va_gc> *vec,
446 wide_int *coeff_a,
447 wide_int *coeff_b)
449 unsigned int i;
450 constructor_elt *elt;
452 gcc_assert (vec->length () >= 2);
454 /* Let's try to find any linear function a * x + y that can apply to
455 given values. 'a' can be calculated as follows:
457 a = (y2 - y1) / (x2 - x1) where x2 - x1 = 1 (consecutive case indices)
458 a = y2 - y1
462 b = y2 - a * x2
466 tree elt0 = (*vec)[0].value;
467 tree elt1 = (*vec)[1].value;
469 if (TREE_CODE (elt0) != INTEGER_CST || TREE_CODE (elt1) != INTEGER_CST)
470 return false;
472 wide_int range_min
473 = wide_int::from (wi::to_wide (m_range_min),
474 TYPE_PRECISION (TREE_TYPE (elt0)),
475 TYPE_SIGN (TREE_TYPE (m_range_min)));
476 wide_int y1 = wi::to_wide (elt0);
477 wide_int y2 = wi::to_wide (elt1);
478 wide_int a = y2 - y1;
479 wide_int b = y2 - a * (range_min + 1);
481 /* Verify that all values fulfill the linear function. */
482 FOR_EACH_VEC_SAFE_ELT (vec, i, elt)
484 if (TREE_CODE (elt->value) != INTEGER_CST)
485 return false;
487 wide_int value = wi::to_wide (elt->value);
488 if (a * range_min + b != value)
489 return false;
491 ++range_min;
494 *coeff_a = a;
495 *coeff_b = b;
497 return true;
500 /* Return type which should be used for array elements, either TYPE's
501 main variant or, for integral types, some smaller integral type
502 that can still hold all the constants. */
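/* For example (assuming a typical target with 4-byte int): if an int-valued
   switch only ever produces constants in [0, 255], the CSWTCH array can be
   emitted with an unsigned char element type and the loaded value converted
   back to int, shrinking the table to a quarter of its size.  */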
504 tree
505 switch_conversion::array_value_type (tree type, int num)
507 unsigned int i, len = vec_safe_length (m_constructors[num]);
508 constructor_elt *elt;
509 int sign = 0;
510 tree smaller_type;
512 /* Types with alignments greater than their size can reach here, e.g. out of
513 SRA. We couldn't use these as an array component type so get back to the
514 main variant first, which, for our purposes, is fine for other types as
515 well. */
517 type = TYPE_MAIN_VARIANT (type);
519 if (!INTEGRAL_TYPE_P (type))
520 return type;
522 scalar_int_mode type_mode = SCALAR_INT_TYPE_MODE (type);
523 scalar_int_mode mode = get_narrowest_mode (type_mode);
524 if (GET_MODE_SIZE (type_mode) <= GET_MODE_SIZE (mode))
525 return type;
527 if (len < (optimize_bb_for_size_p (gimple_bb (m_switch)) ? 2 : 32))
528 return type;
530 FOR_EACH_VEC_SAFE_ELT (m_constructors[num], i, elt)
532 wide_int cst;
534 if (TREE_CODE (elt->value) != INTEGER_CST)
535 return type;
537 cst = wi::to_wide (elt->value);
538 while (1)
540 unsigned int prec = GET_MODE_BITSIZE (mode);
541 if (prec > HOST_BITS_PER_WIDE_INT)
542 return type;
544 if (sign >= 0 && cst == wi::zext (cst, prec))
546 if (sign == 0 && cst == wi::sext (cst, prec))
547 break;
548 sign = 1;
549 break;
551 if (sign <= 0 && cst == wi::sext (cst, prec))
553 sign = -1;
554 break;
557 if (sign == 1)
558 sign = 0;
560 if (!GET_MODE_WIDER_MODE (mode).exists (&mode)
561 || GET_MODE_SIZE (mode) >= GET_MODE_SIZE (type_mode))
562 return type;
566 if (sign == 0)
567 sign = TYPE_UNSIGNED (type) ? 1 : -1;
568 smaller_type = lang_hooks.types.type_for_mode (mode, sign >= 0);
569 if (GET_MODE_SIZE (type_mode)
570 <= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (smaller_type)))
571 return type;
573 return smaller_type;
576 /* Create an appropriate array type and declaration and assemble a static
577 array variable. Also create a load statement that initializes
578 the variable in question with a value from the static array. SWTCH is
579 the switch statement being converted, NUM is the index to
580 arrays of constructors, default values and target SSA names
581 for this particular array. ARR_INDEX_TYPE is the type of the index
582 of the new array, PHI is the phi node of the final BB that corresponds
583 to the value that will be loaded from the created array. TIDX
584 is an ssa name of a temporary variable holding the index for loads from the
585 new array. */
587 void
588 switch_conversion::build_one_array (int num, tree arr_index_type,
589 gphi *phi, tree tidx)
591 tree name;
592 gimple *load;
593 gimple_stmt_iterator gsi = gsi_for_stmt (m_switch);
594 location_t loc = gimple_location (m_switch);
596 gcc_assert (m_default_values[num]);
598 name = copy_ssa_name (PHI_RESULT (phi));
599 m_target_inbound_names[num] = name;
601 vec<constructor_elt, va_gc> *constructor = m_constructors[num];
602 wide_int coeff_a, coeff_b;
603 bool linear_p = contains_linear_function_p (constructor, &coeff_a, &coeff_b);
604 tree type;
605 if (linear_p
606 && (type = range_check_type (TREE_TYPE ((*constructor)[0].value))))
608 if (dump_file && coeff_a.to_uhwi () > 0)
609 fprintf (dump_file, "Linear transformation with A = %" PRId64
610 " and B = %" PRId64 "\n", coeff_a.to_shwi (),
611 coeff_b.to_shwi ());
613 /* We must use type of constructor values. */
614 gimple_seq seq = NULL;
615 tree tmp = gimple_convert (&seq, type, m_index_expr);
616 tree tmp2 = gimple_build (&seq, MULT_EXPR, type,
617 wide_int_to_tree (type, coeff_a), tmp);
618 tree tmp3 = gimple_build (&seq, PLUS_EXPR, type, tmp2,
619 wide_int_to_tree (type, coeff_b));
620 tree tmp4 = gimple_convert (&seq, TREE_TYPE (name), tmp3);
621 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
622 load = gimple_build_assign (name, tmp4);
624 else
626 tree array_type, ctor, decl, value_type, fetch, default_type;
628 default_type = TREE_TYPE (m_default_values[num]);
629 value_type = array_value_type (default_type, num);
630 array_type = build_array_type (value_type, arr_index_type);
631 if (default_type != value_type)
633 unsigned int i;
634 constructor_elt *elt;
636 FOR_EACH_VEC_SAFE_ELT (constructor, i, elt)
637 elt->value = fold_convert (value_type, elt->value);
639 ctor = build_constructor (array_type, constructor);
640 TREE_CONSTANT (ctor) = true;
641 TREE_STATIC (ctor) = true;
643 decl = build_decl (loc, VAR_DECL, NULL_TREE, array_type);
644 TREE_STATIC (decl) = 1;
645 DECL_INITIAL (decl) = ctor;
647 DECL_NAME (decl) = create_tmp_var_name ("CSWTCH");
648 DECL_ARTIFICIAL (decl) = 1;
649 DECL_IGNORED_P (decl) = 1;
650 TREE_CONSTANT (decl) = 1;
651 TREE_READONLY (decl) = 1;
652 DECL_IGNORED_P (decl) = 1;
653 if (offloading_function_p (cfun->decl))
654 DECL_ATTRIBUTES (decl)
655 = tree_cons (get_identifier ("omp declare target"), NULL_TREE,
656 NULL_TREE);
657 varpool_node::finalize_decl (decl);
659 fetch = build4 (ARRAY_REF, value_type, decl, tidx, NULL_TREE,
660 NULL_TREE);
661 if (default_type != value_type)
663 fetch = fold_convert (default_type, fetch);
664 fetch = force_gimple_operand_gsi (&gsi, fetch, true, NULL_TREE,
665 true, GSI_SAME_STMT);
667 load = gimple_build_assign (name, fetch);
670 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
671 update_stmt (load);
672 m_arr_ref_last = load;
 675 /* Builds static arrays initialized with values gathered from
676 the switch statement. Also creates statements that load values from
677 them. */
679 void
680 switch_conversion::build_arrays ()
682 tree arr_index_type;
683 tree tidx, sub, utype;
684 gimple *stmt;
685 gimple_stmt_iterator gsi;
686 gphi_iterator gpi;
687 int i;
688 location_t loc = gimple_location (m_switch);
690 gsi = gsi_for_stmt (m_switch);
 692 /* Make sure we do not generate arithmetic in a subrange type.  */
693 utype = TREE_TYPE (m_index_expr);
694 if (TREE_TYPE (utype))
695 utype = lang_hooks.types.type_for_mode (TYPE_MODE (TREE_TYPE (utype)), 1);
696 else
697 utype = lang_hooks.types.type_for_mode (TYPE_MODE (utype), 1);
699 arr_index_type = build_index_type (m_range_size);
700 tidx = make_ssa_name (utype);
701 sub = fold_build2_loc (loc, MINUS_EXPR, utype,
702 fold_convert_loc (loc, utype, m_index_expr),
703 fold_convert_loc (loc, utype, m_range_min));
704 sub = force_gimple_operand_gsi (&gsi, sub,
705 false, NULL, true, GSI_SAME_STMT);
706 stmt = gimple_build_assign (tidx, sub);
708 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
709 update_stmt (stmt);
710 m_arr_ref_first = stmt;
712 for (gpi = gsi_start_phis (m_final_bb), i = 0;
713 !gsi_end_p (gpi); gsi_next (&gpi))
715 gphi *phi = gpi.phi ();
716 if (!virtual_operand_p (gimple_phi_result (phi)))
717 build_one_array (i++, arr_index_type, phi, tidx);
718 else
720 edge e;
721 edge_iterator ei;
722 FOR_EACH_EDGE (e, ei, m_switch_bb->succs)
724 if (e->dest == m_final_bb)
725 break;
726 if (!m_default_case_nonstandard
727 || e->dest != m_default_bb)
729 e = single_succ_edge (e->dest);
730 break;
733 gcc_assert (e && e->dest == m_final_bb);
734 m_target_vop = PHI_ARG_DEF_FROM_EDGE (phi, e);
739 /* Generates and appropriately inserts loads of default values at the position
740 given by GSI. Returns the last inserted statement. */
742 gassign *
743 switch_conversion::gen_def_assigns (gimple_stmt_iterator *gsi)
745 int i;
746 gassign *assign = NULL;
748 for (i = 0; i < m_phi_count; i++)
750 tree name = copy_ssa_name (m_target_inbound_names[i]);
751 m_target_outbound_names[i] = name;
752 assign = gimple_build_assign (name, m_default_values[i]);
753 gsi_insert_before (gsi, assign, GSI_SAME_STMT);
754 update_stmt (assign);
756 return assign;
759 /* Deletes the unused bbs and edges that now contain the switch statement and
760 its empty branch bbs. BBD is the now dead BB containing
761 the original switch statement, FINAL is the last BB of the converted
762 switch statement (in terms of succession). */
764 void
765 switch_conversion::prune_bbs (basic_block bbd, basic_block final,
766 basic_block default_bb)
768 edge_iterator ei;
769 edge e;
771 for (ei = ei_start (bbd->succs); (e = ei_safe_edge (ei)); )
773 basic_block bb;
774 bb = e->dest;
775 remove_edge (e);
776 if (bb != final && bb != default_bb)
777 delete_basic_block (bb);
779 delete_basic_block (bbd);
782 /* Add values to phi nodes in final_bb for the two new edges. E1F is the edge
783 from the basic block loading values from an array and E2F from the basic
784 block loading default values. BBF is the last switch basic block (see the
785 bbf description in the comment below). */
787 void
788 switch_conversion::fix_phi_nodes (edge e1f, edge e2f, basic_block bbf)
790 gphi_iterator gsi;
791 int i;
793 for (gsi = gsi_start_phis (bbf), i = 0;
794 !gsi_end_p (gsi); gsi_next (&gsi))
796 gphi *phi = gsi.phi ();
797 tree inbound, outbound;
798 if (virtual_operand_p (gimple_phi_result (phi)))
799 inbound = outbound = m_target_vop;
800 else
802 inbound = m_target_inbound_names[i];
803 outbound = m_target_outbound_names[i++];
805 add_phi_arg (phi, inbound, e1f, UNKNOWN_LOCATION);
806 if (!m_default_case_nonstandard)
807 add_phi_arg (phi, outbound, e2f, UNKNOWN_LOCATION);
811 /* Creates a check whether the switch expression value actually falls into the
812 range given by all the cases. If it does not, the temporaries are loaded
813 with default values instead. */
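/* A rough sketch of the control flow built below (illustrative only; bb2 is
   skipped when the default case is nonstandard and the existing default
   block is reused instead):

     bb0:  tidx = index - range_min;
           if (tidx <= range_size) goto bb1; else goto bb2;
     bb1:  loads from the static CSWTCH arrays;  goto bbF;
     bb2:  loads of the default values;  goto bbF;
     bbF:  PHI nodes merge the values from bb1 and bb2.  */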
815 void
816 switch_conversion::gen_inbound_check ()
818 tree label_decl1 = create_artificial_label (UNKNOWN_LOCATION);
819 tree label_decl2 = create_artificial_label (UNKNOWN_LOCATION);
820 tree label_decl3 = create_artificial_label (UNKNOWN_LOCATION);
821 glabel *label1, *label2, *label3;
822 tree utype, tidx;
823 tree bound;
825 gcond *cond_stmt;
827 gassign *last_assign = NULL;
828 gimple_stmt_iterator gsi;
829 basic_block bb0, bb1, bb2, bbf, bbd;
830 edge e01 = NULL, e02, e21, e1d, e1f, e2f;
831 location_t loc = gimple_location (m_switch);
833 gcc_assert (m_default_values);
835 bb0 = gimple_bb (m_switch);
837 tidx = gimple_assign_lhs (m_arr_ref_first);
838 utype = TREE_TYPE (tidx);
840 /* (end of) block 0 */
841 gsi = gsi_for_stmt (m_arr_ref_first);
842 gsi_next (&gsi);
844 bound = fold_convert_loc (loc, utype, m_range_size);
845 cond_stmt = gimple_build_cond (LE_EXPR, tidx, bound, NULL_TREE, NULL_TREE);
846 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
847 update_stmt (cond_stmt);
849 /* block 2 */
850 if (!m_default_case_nonstandard)
852 label2 = gimple_build_label (label_decl2);
853 gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
854 last_assign = gen_def_assigns (&gsi);
857 /* block 1 */
858 label1 = gimple_build_label (label_decl1);
859 gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
861 /* block F */
862 gsi = gsi_start_bb (m_final_bb);
863 label3 = gimple_build_label (label_decl3);
864 gsi_insert_before (&gsi, label3, GSI_SAME_STMT);
866 /* cfg fix */
867 e02 = split_block (bb0, cond_stmt);
868 bb2 = e02->dest;
870 if (m_default_case_nonstandard)
872 bb1 = bb2;
873 bb2 = m_default_bb;
874 e01 = e02;
875 e01->flags = EDGE_TRUE_VALUE;
876 e02 = make_edge (bb0, bb2, EDGE_FALSE_VALUE);
877 edge e_default = find_edge (bb1, bb2);
878 for (gphi_iterator gsi = gsi_start_phis (bb2);
879 !gsi_end_p (gsi); gsi_next (&gsi))
881 gphi *phi = gsi.phi ();
882 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, e_default);
883 add_phi_arg (phi, arg, e02,
884 gimple_phi_arg_location_from_edge (phi, e_default));
886 /* Partially fix the dominator tree, if it is available. */
887 if (dom_info_available_p (CDI_DOMINATORS))
888 redirect_immediate_dominators (CDI_DOMINATORS, bb1, bb0);
890 else
892 e21 = split_block (bb2, last_assign);
893 bb1 = e21->dest;
894 remove_edge (e21);
897 e1d = split_block (bb1, m_arr_ref_last);
898 bbd = e1d->dest;
899 remove_edge (e1d);
901 /* Flags and profiles of the edge for in-range values. */
902 if (!m_default_case_nonstandard)
903 e01 = make_edge (bb0, bb1, EDGE_TRUE_VALUE);
904 e01->probability = m_default_prob.invert ();
906 /* Flags and profiles of the edge taking care of out-of-range values. */
907 e02->flags &= ~EDGE_FALLTHRU;
908 e02->flags |= EDGE_FALSE_VALUE;
909 e02->probability = m_default_prob;
911 bbf = m_final_bb;
913 e1f = make_edge (bb1, bbf, EDGE_FALLTHRU);
914 e1f->probability = profile_probability::always ();
916 if (m_default_case_nonstandard)
917 e2f = NULL;
918 else
920 e2f = make_edge (bb2, bbf, EDGE_FALLTHRU);
921 e2f->probability = profile_probability::always ();
924 /* frequencies of the new BBs */
925 bb1->count = e01->count ();
926 bb2->count = e02->count ();
927 if (!m_default_case_nonstandard)
928 bbf->count = e1f->count () + e2f->count ();
930 /* Tidy blocks that have become unreachable. */
931 prune_bbs (bbd, m_final_bb,
932 m_default_case_nonstandard ? m_default_bb : NULL);
934 /* Fixup the PHI nodes in bbF. */
935 fix_phi_nodes (e1f, e2f, bbf);
937 /* Fix the dominator tree, if it is available. */
938 if (dom_info_available_p (CDI_DOMINATORS))
940 vec<basic_block> bbs_to_fix_dom;
942 set_immediate_dominator (CDI_DOMINATORS, bb1, bb0);
943 if (!m_default_case_nonstandard)
944 set_immediate_dominator (CDI_DOMINATORS, bb2, bb0);
945 if (! get_immediate_dominator (CDI_DOMINATORS, bbf))
946 /* If bbD was the immediate dominator ... */
947 set_immediate_dominator (CDI_DOMINATORS, bbf, bb0);
949 bbs_to_fix_dom.create (3 + (bb2 != bbf));
950 bbs_to_fix_dom.quick_push (bb0);
951 bbs_to_fix_dom.quick_push (bb1);
952 if (bb2 != bbf)
953 bbs_to_fix_dom.quick_push (bb2);
954 bbs_to_fix_dom.quick_push (bbf);
956 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
957 bbs_to_fix_dom.release ();
961 /* The following function is invoked on every switch statement (the current
962 one is given in SWTCH) and runs the individual phases of switch
963 conversion on it one after another until one fails or the conversion
 964 is completed.  On success, m_reason is NULL; otherwise it points
 965 to a string with the reason why the conversion failed.  */
967 void
968 switch_conversion::expand (gswitch *swtch)
970 /* Group case labels so that we get the right results from the heuristics
971 that decide on the code generation approach for this switch. */
972 m_cfg_altered |= group_case_labels_stmt (swtch);
974 /* If this switch is now a degenerate case with only a default label,
975 there is nothing left for us to do. */
976 if (gimple_switch_num_labels (swtch) < 2)
978 m_reason = "switch is a degenerate case";
979 return;
982 collect (swtch);
984 /* No error markers should reach here (they should be filtered out
985 during gimplification). */
986 gcc_checking_assert (TREE_TYPE (m_index_expr) != error_mark_node);
988 /* Prefer bit test if possible. */
989 if (tree_fits_uhwi_p (m_range_size)
990 && bit_test_cluster::can_be_handled (tree_to_uhwi (m_range_size), m_uniq)
991 && bit_test_cluster::is_beneficial (m_count, m_uniq))
993 m_reason = "expanding as bit test is preferable";
994 return;
997 if (m_uniq <= 2)
 999 /* This will be expanded as a decision tree.  */
1000 m_reason = "expanding as jumps is preferable";
1001 return;
1004 /* If there is no common successor, we cannot do the transformation. */
1005 if (!m_final_bb)
1007 m_reason = "no common successor to all case label target blocks found";
1008 return;
 1011 /* Check that the case label values are within a reasonable range.  */
1012 if (!check_range ())
1014 gcc_assert (m_reason);
1015 return;
 1018 /* For all the cases, check whether their blocks are empty, whether the
 1019 assignments they represent are constant, and so on.  */
1020 if (!check_all_empty_except_final ())
1022 gcc_assert (m_reason);
1023 return;
1025 if (!check_final_bb ())
1027 gcc_assert (m_reason);
1028 return;
1031 /* At this point all checks have passed and we can proceed with the
1032 transformation. */
1034 create_temp_arrays ();
1035 gather_default_values (m_default_case_nonstandard
1036 ? gimple_switch_label (swtch, 1)
1037 : gimple_switch_default_label (swtch));
1038 build_constructors ();
1040 build_arrays (); /* Build the static arrays and assignments. */
1041 gen_inbound_check (); /* Build the bounds check. */
1043 m_cfg_altered = true;
1046 /* Destructor. */
1048 switch_conversion::~switch_conversion ()
1050 XDELETEVEC (m_constructors);
1051 XDELETEVEC (m_default_values);
1054 /* Constructor. */
1056 group_cluster::group_cluster (vec<cluster *> &clusters,
1057 unsigned start, unsigned end)
1059 gcc_checking_assert (end - start + 1 >= 1);
1060 m_prob = profile_probability::never ();
1061 m_cases.create (end - start + 1);
1062 for (unsigned i = start; i <= end; i++)
1064 m_cases.quick_push (static_cast<simple_cluster *> (clusters[i]));
1065 m_prob += clusters[i]->m_prob;
1067 m_subtree_prob = m_prob;
1070 /* Destructor. */
1072 group_cluster::~group_cluster ()
1074 for (unsigned i = 0; i < m_cases.length (); i++)
1075 delete m_cases[i];
1077 m_cases.release ();
1080 /* Dump content of a cluster. */
1082 void
1083 group_cluster::dump (FILE *f, bool details)
1085 unsigned total_values = 0;
1086 for (unsigned i = 0; i < m_cases.length (); i++)
1087 total_values += m_cases[i]->get_range (m_cases[i]->get_low (),
1088 m_cases[i]->get_high ());
1090 unsigned comparison_count = 0;
1091 for (unsigned i = 0; i < m_cases.length (); i++)
1093 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1094 comparison_count += sc->m_range_p ? 2 : 1;
1097 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1098 fprintf (f, "%s", get_type () == JUMP_TABLE ? "JT" : "BT");
1100 if (details)
1101 fprintf (f, "(values:%d comparisons:%d range:" HOST_WIDE_INT_PRINT_DEC
1102 " density: %.2f%%)", total_values, comparison_count, range,
1103 100.0f * comparison_count / range);
1105 fprintf (f, ":");
1106 PRINT_CASE (f, get_low ());
1107 fprintf (f, "-");
1108 PRINT_CASE (f, get_high ());
1109 fprintf (f, " ");
1112 /* Emit GIMPLE code to handle the cluster. */
1114 void
1115 jump_table_cluster::emit (tree index_expr, tree,
1116 tree default_label_expr, basic_block default_bb,
1117 location_t loc)
1119 unsigned HOST_WIDE_INT range = get_range (get_low (), get_high ());
1120 unsigned HOST_WIDE_INT nondefault_range = 0;
 1122 /* For a jump table we just emit a new gswitch statement that will
 1123 later be lowered to a jump table.  */
1124 auto_vec <tree> labels;
1125 labels.create (m_cases.length ());
1127 make_edge (m_case_bb, default_bb, 0);
1128 for (unsigned i = 0; i < m_cases.length (); i++)
1130 labels.quick_push (unshare_expr (m_cases[i]->m_case_label_expr));
1131 make_edge (m_case_bb, m_cases[i]->m_case_bb, 0);
1134 gswitch *s = gimple_build_switch (index_expr,
1135 unshare_expr (default_label_expr), labels);
1136 gimple_set_location (s, loc);
1137 gimple_stmt_iterator gsi = gsi_start_bb (m_case_bb);
1138 gsi_insert_after (&gsi, s, GSI_NEW_STMT);
1140 /* Set up even probabilities for all cases. */
1141 for (unsigned i = 0; i < m_cases.length (); i++)
1143 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1144 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1145 unsigned HOST_WIDE_INT case_range
1146 = sc->get_range (sc->get_low (), sc->get_high ());
1147 nondefault_range += case_range;
1149 /* case_edge->aux is number of values in a jump-table that are covered
1150 by the case_edge. */
1151 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + case_range);
1154 edge default_edge = gimple_switch_default_edge (cfun, s);
1155 default_edge->probability = profile_probability::never ();
1157 for (unsigned i = 0; i < m_cases.length (); i++)
1159 simple_cluster *sc = static_cast<simple_cluster *> (m_cases[i]);
1160 edge case_edge = find_edge (m_case_bb, sc->m_case_bb);
1161 case_edge->probability
1162 = profile_probability::always ().apply_scale ((intptr_t)case_edge->aux,
1163 range);
 1166 /* The default edge gets the fraction of values in the range not covered by any case.  */
1167 default_edge->probability
1168 += profile_probability::always ().apply_scale (nondefault_range,
1169 range).invert ();
1171 switch_decision_tree::reset_out_edges_aux (s);
1174 /* Find jump tables of given CLUSTERS, where all members of the vector
1175 are of type simple_cluster. New clusters are returned. */
1177 vec<cluster *>
1178 jump_table_cluster::find_jump_tables (vec<cluster *> &clusters)
1180 if (!is_enabled ())
1181 return clusters.copy ();
1183 unsigned l = clusters.length ();
1184 auto_vec<min_cluster_item> min;
1185 min.reserve (l + 1);
1187 min.quick_push (min_cluster_item (0, 0, 0));
1189 for (unsigned i = 1; i <= l; i++)
 1191 /* Set the minimal # of clusters for the first i items to infinity.  */
1192 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1194 for (unsigned j = 0; j < i; j++)
1196 unsigned HOST_WIDE_INT s = min[j].m_non_jt_cases;
1197 if (i - j < case_values_threshold ())
1198 s += i - j;
 1200 /* Prefer a partition with fewer clusters; with equal counts, prefer fewer cases handled outside of jump tables.  */
1201 if ((min[j].m_count + 1 < min[i].m_count
1202 || (min[j].m_count + 1 == min[i].m_count
1203 && s < min[i].m_non_jt_cases))
1204 && can_be_handled (clusters, j, i - 1))
1205 min[i] = min_cluster_item (min[j].m_count + 1, j, s);
1208 gcc_checking_assert (min[i].m_count != INT_MAX);
1211 /* No result. */
1212 if (min[l].m_count == l)
1213 return clusters.copy ();
1215 vec<cluster *> output;
1216 output.create (4);
1218 /* Find and build the clusters. */
1219 for (unsigned int end = l;;)
1221 int start = min[end].m_start;
 1223 /* Do not allow clusters with a small number of cases.  */
1224 if (is_beneficial (clusters, start, end - 1))
1225 output.safe_push (new jump_table_cluster (clusters, start, end - 1));
1226 else
1227 for (int i = end - 1; i >= start; i--)
1228 output.safe_push (clusters[i]);
1230 end = start;
1232 if (start <= 0)
1233 break;
1236 output.reverse ();
1237 return output;
1240 /* Return true when cluster starting at START and ending at END (inclusive)
1241 can build a jump-table. */
1243 bool
1244 jump_table_cluster::can_be_handled (const vec<cluster *> &clusters,
1245 unsigned start, unsigned end)
1247 /* If the switch is relatively small such that the cost of one
 1248 indirect jump on the target is higher than the cost of a
1249 decision tree, go with the decision tree.
1251 If range of values is much bigger than number of values,
1252 or if it is too large to represent in a HOST_WIDE_INT,
1253 make a sequence of conditional branches instead of a dispatch.
1255 The definition of "much bigger" depends on whether we are
1256 optimizing for size or for speed.
1258 For algorithm correctness, jump table for a single case must return
1259 true. We bail out in is_beneficial if it's called just for
1260 a single case. */
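/* Illustrative check (the growth ratio parameters differ between the size
   and speed cases): five single-valued cases spread over a range of 1000
   values give 100 * 1000 = 100000 versus max_ratio * 5, so the jump table
   is accepted only if the allowed ratio is at least 20000.  */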
1261 if (start == end)
1262 return true;
1264 unsigned HOST_WIDE_INT max_ratio
1265 = (optimize_insn_for_size_p ()
1266 ? param_jump_table_max_growth_ratio_for_size
1267 : param_jump_table_max_growth_ratio_for_speed);
1268 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1269 clusters[end]->get_high ());
1270 /* Check overflow. */
1271 if (range == 0)
1272 return false;
1274 if (range > HOST_WIDE_INT_M1U / 100)
1275 return false;
1277 unsigned HOST_WIDE_INT lhs = 100 * range;
1278 if (lhs < range)
1279 return false;
 1281 /* First make a quick guess, as each cluster
 1282 can add at most 2 to the comparison_count.  */
1283 if (lhs > 2 * max_ratio * (end - start + 1))
1284 return false;
1286 unsigned HOST_WIDE_INT comparison_count = 0;
1287 for (unsigned i = start; i <= end; i++)
1289 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1290 comparison_count += sc->m_range_p ? 2 : 1;
1293 return lhs <= max_ratio * comparison_count;
1296 /* Return true if cluster starting at START and ending at END (inclusive)
 1297 is a profitable transformation.  */
1299 bool
1300 jump_table_cluster::is_beneficial (const vec<cluster *> &,
1301 unsigned start, unsigned end)
1303 /* Single case bail out. */
1304 if (start == end)
1305 return false;
1307 return end - start + 1 >= case_values_threshold ();
1310 /* Find bit tests of given CLUSTERS, where all members of the vector
1311 are of type simple_cluster. New clusters are returned. */
1313 vec<cluster *>
1314 bit_test_cluster::find_bit_tests (vec<cluster *> &clusters)
1316 if (!is_enabled ())
1317 return clusters.copy ();
1319 unsigned l = clusters.length ();
1320 auto_vec<min_cluster_item> min;
1321 min.reserve (l + 1);
1323 min.quick_push (min_cluster_item (0, 0, 0));
1325 for (unsigned i = 1; i <= l; i++)
 1327 /* Set the minimal # of clusters for the first i items to infinity.  */
1328 min.quick_push (min_cluster_item (INT_MAX, INT_MAX, INT_MAX));
1330 for (unsigned j = 0; j < i; j++)
1332 if (min[j].m_count + 1 < min[i].m_count
1333 && can_be_handled (clusters, j, i - 1))
1334 min[i] = min_cluster_item (min[j].m_count + 1, j, INT_MAX);
1337 gcc_checking_assert (min[i].m_count != INT_MAX);
1340 /* No result. */
1341 if (min[l].m_count == l)
1342 return clusters.copy ();
1344 vec<cluster *> output;
1345 output.create (4);
1347 /* Find and build the clusters. */
1348 for (unsigned end = l;;)
1350 int start = min[end].m_start;
1352 if (is_beneficial (clusters, start, end - 1))
1354 bool entire = start == 0 && end == clusters.length ();
1355 output.safe_push (new bit_test_cluster (clusters, start, end - 1,
1356 entire));
1358 else
1359 for (int i = end - 1; i >= start; i--)
1360 output.safe_push (clusters[i]);
1362 end = start;
1364 if (start <= 0)
1365 break;
1368 output.reverse ();
1369 return output;
1372 /* Return true when RANGE of case values with UNIQ labels
1373 can build a bit test. */
1375 bool
1376 bit_test_cluster::can_be_handled (unsigned HOST_WIDE_INT range,
1377 unsigned int uniq)
1379 /* Check overflow. */
1380 if (range == 0)
1381 return false;
1383 if (range >= GET_MODE_BITSIZE (word_mode))
1384 return false;
1386 return uniq <= m_max_case_bit_tests;
1389 /* Return true when cluster starting at START and ending at END (inclusive)
1390 can build a bit test. */
1392 bool
1393 bit_test_cluster::can_be_handled (const vec<cluster *> &clusters,
1394 unsigned start, unsigned end)
1396 auto_vec<int, m_max_case_bit_tests> dest_bbs;
1397 /* For algorithm correctness, bit test for a single case must return
1398 true. We bail out in is_beneficial if it's called just for
1399 a single case. */
1400 if (start == end)
1401 return true;
1403 unsigned HOST_WIDE_INT range = get_range (clusters[start]->get_low (),
1404 clusters[end]->get_high ());
1406 /* Make a guess first. */
1407 if (!can_be_handled (range, m_max_case_bit_tests))
1408 return false;
1410 for (unsigned i = start; i <= end; i++)
1412 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
 1413 /* m_max_case_bit_tests is a very small integer, thus this linear
 1414 search is effectively constant time.  */
1415 if (!dest_bbs.contains (sc->m_case_bb->index))
1417 if (dest_bbs.length () >= m_max_case_bit_tests)
1418 return false;
1419 dest_bbs.quick_push (sc->m_case_bb->index);
1423 return true;
 1426 /* Return true when COUNT cases with UNIQ distinct labels are beneficial for bit test
1427 transformation. */
1429 bool
1430 bit_test_cluster::is_beneficial (unsigned count, unsigned uniq)
1432 return (((uniq == 1 && count >= 3)
1433 || (uniq == 2 && count >= 5)
1434 || (uniq == 3 && count >= 6)));
1437 /* Return true if cluster starting at START and ending at END (inclusive)
 1438 is a profitable transformation.  */
1440 bool
1441 bit_test_cluster::is_beneficial (const vec<cluster *> &clusters,
1442 unsigned start, unsigned end)
1444 /* Single case bail out. */
1445 if (start == end)
1446 return false;
1448 auto_bitmap dest_bbs;
1450 for (unsigned i = start; i <= end; i++)
1452 simple_cluster *sc = static_cast<simple_cluster *> (clusters[i]);
1453 bitmap_set_bit (dest_bbs, sc->m_case_bb->index);
1456 unsigned uniq = bitmap_count_bits (dest_bbs);
1457 unsigned count = end - start + 1;
1458 return is_beneficial (count, uniq);
1461 /* Comparison function for qsort to order bit tests by decreasing
1462 probability of execution. */
1465 case_bit_test::cmp (const void *p1, const void *p2)
1467 const case_bit_test *const d1 = (const case_bit_test *) p1;
1468 const case_bit_test *const d2 = (const case_bit_test *) p2;
1470 if (d2->bits != d1->bits)
1471 return d2->bits - d1->bits;
1473 /* Stabilize the sort. */
1474 return (LABEL_DECL_UID (CASE_LABEL (d2->label))
1475 - LABEL_DECL_UID (CASE_LABEL (d1->label)));
1478 /* Expand a switch statement by a short sequence of bit-wise
1479 comparisons. "switch(x)" is effectively converted into
1480 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
1481 integer constants.
1483 INDEX_EXPR is the value being switched on.
 1485 MINVAL is the lowest case value in the case nodes,
1486 and RANGE is highest value minus MINVAL. MINVAL and RANGE
1487 are not guaranteed to be of the same type as INDEX_EXPR
1488 (the gimplifier doesn't change the type of case label values,
1489 and MINVAL and RANGE are derived from those values).
1490 MAXVAL is MINVAL + RANGE.
 1492 There *MUST* be max_case_bit_tests or fewer unique case
1493 node targets. */
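/* A hedged illustration (simplified; the real code below works on GIMPLE
   and in word_mode):

     switch (x) { case 1: case 3: case 5: foo (); break; default: bar (); }

   has MINVAL 1 and relative case values 0, 2 and 4, giving the mask
   (1 << 0) | (1 << 2) | (1 << 4) == 0x15, so the emitted test is roughly

     unsigned idx = x - 1;
     if (idx <= 4 && ((1 << idx) & 0x15))
       foo ();
     else
       bar ();  */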
1495 void
1496 bit_test_cluster::emit (tree index_expr, tree index_type,
1497 tree, basic_block default_bb, location_t)
1499 case_bit_test test[m_max_case_bit_tests] = { {} };
1500 unsigned int i, j, k;
1501 unsigned int count;
1503 tree unsigned_index_type = range_check_type (index_type);
1505 gimple_stmt_iterator gsi;
1506 gassign *shift_stmt;
1508 tree idx, tmp, csui;
1509 tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
1510 tree word_mode_zero = fold_convert (word_type_node, integer_zero_node);
1511 tree word_mode_one = fold_convert (word_type_node, integer_one_node);
1512 int prec = TYPE_PRECISION (word_type_node);
1513 wide_int wone = wi::one (prec);
1515 tree minval = get_low ();
1516 tree maxval = get_high ();
1517 unsigned HOST_WIDE_INT bt_range = get_range (minval, maxval);
1519 /* Go through all case labels, and collect the case labels, profile
1520 counts, and other information we need to build the branch tests. */
1521 count = 0;
1522 for (i = 0; i < m_cases.length (); i++)
1524 unsigned int lo, hi;
1525 simple_cluster *n = static_cast<simple_cluster *> (m_cases[i]);
1526 for (k = 0; k < count; k++)
1527 if (n->m_case_bb == test[k].target_bb)
1528 break;
1530 if (k == count)
1532 gcc_checking_assert (count < m_max_case_bit_tests);
1533 test[k].mask = wi::zero (prec);
1534 test[k].target_bb = n->m_case_bb;
1535 test[k].label = n->m_case_label_expr;
1536 test[k].bits = 0;
1537 count++;
1540 test[k].bits += n->get_range (n->get_low (), n->get_high ());
1542 lo = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_low (), minval));
1543 if (n->get_high () == NULL_TREE)
1544 hi = lo;
1545 else
1546 hi = tree_to_uhwi (int_const_binop (MINUS_EXPR, n->get_high (),
1547 minval));
1549 for (j = lo; j <= hi; j++)
1550 test[k].mask |= wi::lshift (wone, j);
1553 qsort (test, count, sizeof (*test), case_bit_test::cmp);
1555 /* If every possible relative value of the index expression is a valid shift
 1556 amount, then we can merge the entry test into the bit test.  */
1557 bool entry_test_needed;
1558 value_range r;
1559 if (TREE_CODE (index_expr) == SSA_NAME
1560 && get_range_query (cfun)->range_of_expr (r, index_expr)
1561 && r.kind () == VR_RANGE
1562 && wi::leu_p (r.upper_bound () - r.lower_bound (), prec - 1))
1564 wide_int min = r.lower_bound ();
1565 wide_int max = r.upper_bound ();
1566 tree index_type = TREE_TYPE (index_expr);
1567 minval = fold_convert (index_type, minval);
1568 wide_int iminval = wi::to_wide (minval);
1569 if (wi::lt_p (min, iminval, TYPE_SIGN (index_type)))
1571 minval = wide_int_to_tree (index_type, min);
1572 for (i = 0; i < count; i++)
1573 test[i].mask = wi::lshift (test[i].mask, iminval - min);
1575 else if (wi::gt_p (min, iminval, TYPE_SIGN (index_type)))
1577 minval = wide_int_to_tree (index_type, min);
1578 for (i = 0; i < count; i++)
1579 test[i].mask = wi::lrshift (test[i].mask, min - iminval);
1581 maxval = wide_int_to_tree (index_type, max);
1582 entry_test_needed = false;
1584 else
1585 entry_test_needed = true;
1587 /* If all values are in the 0 .. BITS_PER_WORD-1 range, we can get rid of
1588 the minval subtractions, but it might make the mask constants more
1589 expensive. So, compare the costs. */
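/* E.g. (illustrative only): with minval == 2 and mask 0x5, dropping the
   subtraction means testing against 0x5 << 2 == 0x14 instead; whether that
   is a win depends on the target cost of the PLUS versus the wider mask
   constants, which is what the rtx cost comparison below measures.  */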
1590 if (compare_tree_int (minval, 0) > 0 && compare_tree_int (maxval, prec) < 0)
1592 int cost_diff;
1593 HOST_WIDE_INT m = tree_to_uhwi (minval);
1594 rtx reg = gen_raw_REG (word_mode, 10000);
1595 bool speed_p = optimize_insn_for_speed_p ();
1596 cost_diff = set_src_cost (gen_rtx_PLUS (word_mode, reg,
1597 GEN_INT (-m)),
1598 word_mode, speed_p);
1599 for (i = 0; i < count; i++)
1601 rtx r = immed_wide_int_const (test[i].mask, word_mode);
1602 cost_diff += set_src_cost (gen_rtx_AND (word_mode, reg, r),
1603 word_mode, speed_p);
1604 r = immed_wide_int_const (wi::lshift (test[i].mask, m), word_mode);
1605 cost_diff -= set_src_cost (gen_rtx_AND (word_mode, reg, r),
1606 word_mode, speed_p);
1608 if (cost_diff > 0)
1610 for (i = 0; i < count; i++)
1611 test[i].mask = wi::lshift (test[i].mask, m);
1612 minval = build_zero_cst (TREE_TYPE (minval));
1616 /* Now build the test-and-branch code. */
1618 gsi = gsi_last_bb (m_case_bb);
1620 /* idx = (unsigned)x - minval. */
1621 idx = fold_convert (unsigned_index_type, index_expr);
1622 idx = fold_build2 (MINUS_EXPR, unsigned_index_type, idx,
1623 fold_convert (unsigned_index_type, minval));
1624 idx = force_gimple_operand_gsi (&gsi, idx,
1625 /*simple=*/true, NULL_TREE,
1626 /*before=*/true, GSI_SAME_STMT);
1628 if (m_handles_entire_switch && entry_test_needed)
1630 tree range = int_const_binop (MINUS_EXPR, maxval, minval);
1631 /* if (idx > range) goto default */
1632 range
1633 = force_gimple_operand_gsi (&gsi,
1634 fold_convert (unsigned_index_type, range),
1635 /*simple=*/true, NULL_TREE,
1636 /*before=*/true, GSI_SAME_STMT);
1637 tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
1638 basic_block new_bb
1639 = hoist_edge_and_branch_if_true (&gsi, tmp, default_bb,
1640 profile_probability::unlikely ());
1641 gsi = gsi_last_bb (new_bb);
1644 tmp = fold_build2 (LSHIFT_EXPR, word_type_node, word_mode_one,
1645 fold_convert (word_type_node, idx));
1647 /* csui = (1 << (word_mode) idx) */
1648 if (count > 1)
1650 csui = make_ssa_name (word_type_node);
1651 tmp = force_gimple_operand_gsi (&gsi, tmp,
1652 /*simple=*/false, NULL_TREE,
1653 /*before=*/true, GSI_SAME_STMT);
1654 shift_stmt = gimple_build_assign (csui, tmp);
1655 gsi_insert_before (&gsi, shift_stmt, GSI_SAME_STMT);
1656 update_stmt (shift_stmt);
1658 else
1659 csui = tmp;
1661 profile_probability prob = profile_probability::always ();
1663 /* for each unique set of cases:
1664 if (const & csui) goto target */
1665 for (k = 0; k < count; k++)
1667 prob = profile_probability::always ().apply_scale (test[k].bits,
1668 bt_range);
1669 bt_range -= test[k].bits;
1670 tmp = wide_int_to_tree (word_type_node, test[k].mask);
1671 tmp = fold_build2 (BIT_AND_EXPR, word_type_node, csui, tmp);
1672 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp, word_mode_zero);
1673 tmp = force_gimple_operand_gsi (&gsi, tmp,
1674 /*simple=*/true, NULL_TREE,
1675 /*before=*/true, GSI_SAME_STMT);
1676 basic_block new_bb
1677 = hoist_edge_and_branch_if_true (&gsi, tmp, test[k].target_bb, prob);
1678 gsi = gsi_last_bb (new_bb);
1681 /* We should have removed all edges now. */
1682 gcc_assert (EDGE_COUNT (gsi_bb (gsi)->succs) == 0);
1684 /* If nothing matched, go to the default label. */
1685 edge e = make_edge (gsi_bb (gsi), default_bb, EDGE_FALLTHRU);
1686 e->probability = profile_probability::always ();
 1689 /* Split the basic block at the statement pointed to by GSIP, and insert
 1690 a branch to the case basic block CASE_BB conditional on tree
 1691 expression COND.
 1693 A new edge from the to-be-split basic block to CASE_BB is created
 1694 with EDGE_TRUE_VALUE and probability PROB; the false edge falls
 1696 through to the newly created block with the inverted probability.
 1698 The CFG is updated.  The dominator tree will not be valid after
 1699 this transformation.
 1702 Returns the newly created basic block.  */
1704 basic_block
1705 bit_test_cluster::hoist_edge_and_branch_if_true (gimple_stmt_iterator *gsip,
1706 tree cond, basic_block case_bb,
1707 profile_probability prob)
1709 tree tmp;
1710 gcond *cond_stmt;
1711 edge e_false;
1712 basic_block new_bb, split_bb = gsi_bb (*gsip);
1714 edge e_true = make_edge (split_bb, case_bb, EDGE_TRUE_VALUE);
1715 e_true->probability = prob;
1716 gcc_assert (e_true->src == split_bb);
1718 tmp = force_gimple_operand_gsi (gsip, cond, /*simple=*/true, NULL,
1719 /*before=*/true, GSI_SAME_STMT);
1720 cond_stmt = gimple_build_cond_from_tree (tmp, NULL_TREE, NULL_TREE);
1721 gsi_insert_before (gsip, cond_stmt, GSI_SAME_STMT);
1723 e_false = split_block (split_bb, cond_stmt);
1724 new_bb = e_false->dest;
1725 redirect_edge_pred (e_true, split_bb);
1727 e_false->flags &= ~EDGE_FALLTHRU;
1728 e_false->flags |= EDGE_FALSE_VALUE;
1729 e_false->probability = e_true->probability.invert ();
1730 new_bb->count = e_false->count ();
1732 return new_bb;
1735 /* Compute the number of case labels that correspond to each outgoing edge of
 1736 the switch statement.  Record this information in the aux field of the edge.  */
1738 void
1739 switch_decision_tree::compute_cases_per_edge ()
1741 reset_out_edges_aux (m_switch);
1742 int ncases = gimple_switch_num_labels (m_switch);
1743 for (int i = ncases - 1; i >= 1; --i)
1745 edge case_edge = gimple_switch_edge (cfun, m_switch, i);
1746 case_edge->aux = (void *) ((intptr_t) (case_edge->aux) + 1);
 1750 /* Analyze the switch statement and return true when the statement is expanded
 1751 as a decision tree.  */
1753 bool
1754 switch_decision_tree::analyze_switch_statement ()
1756 unsigned l = gimple_switch_num_labels (m_switch);
1757 basic_block bb = gimple_bb (m_switch);
1758 auto_vec<cluster *> clusters;
1759 clusters.create (l - 1);
1761 basic_block default_bb = gimple_switch_default_bb (cfun, m_switch);
1762 m_case_bbs.reserve (l);
1763 m_case_bbs.quick_push (default_bb);
1765 compute_cases_per_edge ();
1767 for (unsigned i = 1; i < l; i++)
1769 tree elt = gimple_switch_label (m_switch, i);
1770 tree lab = CASE_LABEL (elt);
1771 basic_block case_bb = label_to_block (cfun, lab);
1772 edge case_edge = find_edge (bb, case_bb);
1773 tree low = CASE_LOW (elt);
1774 tree high = CASE_HIGH (elt);
1776 profile_probability p
1777 = case_edge->probability.apply_scale (1, (intptr_t) (case_edge->aux));
1778 clusters.quick_push (new simple_cluster (low, high, elt, case_edge->dest,
1779 p));
1780 m_case_bbs.quick_push (case_edge->dest);
1783 reset_out_edges_aux (m_switch);
1785 /* Find bit-test clusters. */
1786 vec<cluster *> output = bit_test_cluster::find_bit_tests (clusters);
1788 /* Find jump table clusters. */
1789 vec<cluster *> output2;
1790 auto_vec<cluster *> tmp;
1791 output2.create (1);
1792 tmp.create (1);
1794 for (unsigned i = 0; i < output.length (); i++)
1796 cluster *c = output[i];
1797 if (c->get_type () != SIMPLE_CASE)
1799 if (!tmp.is_empty ())
1801 vec<cluster *> n = jump_table_cluster::find_jump_tables (tmp);
1802 output2.safe_splice (n);
1803 n.release ();
1804 tmp.truncate (0);
1806 output2.safe_push (c);
1808 else
1809 tmp.safe_push (c);
 1812 /* We may still have a batch of simple clusters left to process.  */
1813 if (!tmp.is_empty ())
1815 vec<cluster *> n = jump_table_cluster::find_jump_tables (tmp);
1816 output2.safe_splice (n);
1817 n.release ();
1820 if (dump_file)
1822 fprintf (dump_file, ";; GIMPLE switch case clusters: ");
1823 for (unsigned i = 0; i < output2.length (); i++)
1824 output2[i]->dump (dump_file, dump_flags & TDF_DETAILS);
1825 fprintf (dump_file, "\n");
1828 output.release ();
1830 bool expanded = try_switch_expansion (output2);
1831 release_clusters (output2);
1832 return expanded;
1835 /* Attempt to expand CLUSTERS as a decision tree. Return true when
1836 expanded. */
1838 bool
1839 switch_decision_tree::try_switch_expansion (vec<cluster *> &clusters)
1841 tree index_expr = gimple_switch_index (m_switch);
1842 tree index_type = TREE_TYPE (index_expr);
1843 basic_block bb = gimple_bb (m_switch);
1845 if (gimple_switch_num_labels (m_switch) == 1
1846 || range_check_type (index_type) == NULL_TREE)
1847 return false;
1849 /* Find the default case target label. */
1850 edge default_edge = gimple_switch_default_edge (cfun, m_switch);
1851 m_default_bb = default_edge->dest;
1853 /* Do the insertion of a case label into m_case_list. The labels are
1854 fed to us in descending order from the sorted vector of case labels used
1855 in the tree part of the middle end. So the list we construct is
1856 sorted in ascending order. */
1858 for (int i = clusters.length () - 1; i >= 0; i--)
1860 case_tree_node *r = m_case_list;
1861 m_case_list = m_case_node_pool.allocate ();
1862 m_case_list->m_right = r;
1863 m_case_list->m_c = clusters[i];
1866 record_phi_operand_mapping ();
1868 /* Split basic block that contains the gswitch statement. */
1869 gimple_stmt_iterator gsi = gsi_last_bb (bb);
1870 edge e;
1871 if (gsi_end_p (gsi))
1872 e = split_block_after_labels (bb);
1873 else
1875 gsi_prev (&gsi);
1876 e = split_block (bb, gsi_stmt (gsi));
1878 bb = split_edge (e);
1880 /* Create new basic blocks for non-case clusters where specific expansion
1881 needs to happen. */
1882 for (unsigned i = 0; i < clusters.length (); i++)
1883 if (clusters[i]->get_type () != SIMPLE_CASE)
1885 clusters[i]->m_case_bb = create_empty_bb (bb);
1886 clusters[i]->m_case_bb->count = bb->count;
1887 clusters[i]->m_case_bb->loop_father = bb->loop_father;
 1890 /* Do not do extra work for a single cluster.  */
1891 if (clusters.length () == 1
1892 && clusters[0]->get_type () != SIMPLE_CASE)
1894 cluster *c = clusters[0];
1895 c->emit (index_expr, index_type,
1896 gimple_switch_default_label (m_switch), m_default_bb,
1897 gimple_location (m_switch));
1898 redirect_edge_succ (single_succ_edge (bb), c->m_case_bb);
1900 else
1902 emit (bb, index_expr, default_edge->probability, index_type);
1904 /* Emit cluster-specific switch handling. */
1905 for (unsigned i = 0; i < clusters.length (); i++)
1906 if (clusters[i]->get_type () != SIMPLE_CASE)
1907 clusters[i]->emit (index_expr, index_type,
1908 gimple_switch_default_label (m_switch),
1909 m_default_bb, gimple_location (m_switch));
1912 fix_phi_operands_for_edges ();
1914 return true;
 1917 /* Before the switch transformation, record all SSA_NAMEs defined in the switch BB
1918 and used in a label basic block. */
1920 void
1921 switch_decision_tree::record_phi_operand_mapping ()
1923 basic_block switch_bb = gimple_bb (m_switch);
1924 /* Record all PHI nodes that have to be fixed after conversion. */
1925 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1927 gphi_iterator gsi;
1928 basic_block bb = m_case_bbs[i];
1929 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1931 gphi *phi = gsi.phi ();
1933 for (unsigned i = 0; i < gimple_phi_num_args (phi); i++)
1935 basic_block phi_src_bb = gimple_phi_arg_edge (phi, i)->src;
1936 if (phi_src_bb == switch_bb)
1938 tree def = gimple_phi_arg_def (phi, i);
1939 tree result = gimple_phi_result (phi);
1940 m_phi_mapping.put (result, def);
1941 break;
1948 /* Append operands to PHI statements for the new incoming edges to case
1949 label basic blocks that were introduced by the transformation. */
1951 void
1952 switch_decision_tree::fix_phi_operands_for_edges ()
1954 gphi_iterator gsi;
1956 for (unsigned i = 0; i < m_case_bbs.length (); i++)
1958 basic_block bb = m_case_bbs[i];
1959 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1961 gphi *phi = gsi.phi ();
1962 for (unsigned j = 0; j < gimple_phi_num_args (phi); j++)
1964 tree def = gimple_phi_arg_def (phi, j);
1965 if (def == NULL_TREE)
1967 edge e = gimple_phi_arg_edge (phi, j);
1968 tree *definition
1969 = m_phi_mapping.get (gimple_phi_result (phi));
1970 gcc_assert (definition);
1971 add_phi_arg (phi, *definition, e, UNKNOWN_LOCATION);
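/* A schematic example of the PHI fix-up (GIMPLE shown informally): if the
   switch block defined  _5 = ...  and a case label block contained

     L3:
       x_1 = PHI <_5(switch_bb), x_2(other_bb)>

   then after expansion L3 is reached through newly created comparison
   blocks rather than directly from switch_bb, so the PHI lacks arguments
   for those new edges.  record_phi_operand_mapping remembered x_1 -> _5,
   and the loop above appends _5 as the argument for each such edge,
   keeping every PHI complete.  */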
1978 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1979 one of the case labels in m_case_list or to m_default_bb.
1981 We generate a binary decision tree to select the appropriate target
1982 code. */
1984 void
1985 switch_decision_tree::emit (basic_block bb, tree index_expr,
1986 profile_probability default_prob, tree index_type)
1988 balance_case_nodes (&m_case_list, NULL);
1990 if (dump_file)
1991 dump_function_to_file (current_function_decl, dump_file, dump_flags);
1992 if (dump_file && (dump_flags & TDF_DETAILS))
1994 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1995 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1996 gcc_assert (m_case_list != NULL);
1997 dump_case_nodes (dump_file, m_case_list, indent_step, 0);
2000 bb = emit_case_nodes (bb, index_expr, m_case_list, default_prob, index_type,
2001 gimple_location (m_switch));
2003 if (bb)
2004 emit_jump (bb, m_default_bb);
2006 /* Remove the GIMPLE_SWITCH statement and delete the block that contained it. */
2007 bb = gimple_bb (m_switch);
2008 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2009 gsi_remove (&gsi, true);
2011 delete_basic_block (bb);
2014 /* Take an ordered list of case nodes
2015 and transform it into a near optimal binary tree,
2016 using the profile probability of each case to decide
2017 where to split the list.
2019 The transformation is performed by splitting the ordered
2020 list into two sections of roughly equal probability plus a
2021 pivot. The parts are then attached to the pivot as left and
2022 right branches. Each branch is then transformed recursively. */
2024 void
2025 switch_decision_tree::balance_case_nodes (case_tree_node **head,
2026 case_tree_node *parent)
2028 case_tree_node *np;
2030 np = *head;
2031 if (np)
2033 int i = 0;
2034 int ranges = 0;
2035 case_tree_node **npp;
2036 case_tree_node *left;
2037 profile_probability prob = profile_probability::never ();
2039 /* Count the number of entries on the branch. Also count the ranges. */
2041 while (np)
2043 if (!tree_int_cst_equal (np->m_c->get_low (), np->m_c->get_high ()))
2044 ranges++;
2046 i++;
2047 prob += np->m_c->m_prob;
2048 np = np->m_right;
2051 if (i > 2)
2053 /* Split this list if it is long enough for that to help. */
2054 npp = head;
2055 left = *npp;
2056 profile_probability pivot_prob = prob.apply_scale (1, 2);
2058 /* Find the place in the list that bisects the list's total
2059 probability. */
2060 while (1)
2062 /* Skip nodes while their probability does not reach
2063 that amount. */
2064 prob -= (*npp)->m_c->m_prob;
2065 if ((prob.initialized_p () && prob < pivot_prob)
2066 || ! (*npp)->m_right)
2067 break;
2068 npp = &(*npp)->m_right;
2071 np = *npp;
2072 *npp = 0;
2073 *head = np;
2074 np->m_parent = parent;
2075 np->m_left = left == np ? NULL : left;
2077 /* Optimize each of the two split parts. */
2078 balance_case_nodes (&np->m_left, np);
2079 balance_case_nodes (&np->m_right, np);
2080 np->m_c->m_subtree_prob = np->m_c->m_prob;
2081 if (np->m_left)
2082 np->m_c->m_subtree_prob += np->m_left->m_c->m_subtree_prob;
2083 if (np->m_right)
2084 np->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
2086 else
2088 /* Else leave this branch as one level,
2089 but fill in `parent' fields. */
2090 np = *head;
2091 np->m_parent = parent;
2092 np->m_c->m_subtree_prob = np->m_c->m_prob;
2093 for (; np->m_right; np = np->m_right)
2095 np->m_right->m_parent = np;
2096 (*head)->m_c->m_subtree_prob += np->m_right->m_c->m_subtree_prob;
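/* A minimal standalone sketch of the same balancing idea, using plain C++
   nodes with a double weight in place of profile_probability and omitting
   the parent and subtree-probability bookkeeping (illustrative only, not
   the types used in this file):

     struct node { int value; double weight; node *left = 0, *right = 0; };

     // HEAD points to a right-linked ordered list; rebuild it in place as
     // a tree whose pivot splits the accumulated weight roughly in half.
     static void balance (node **head)
     {
       node *np = *head;
       if (!np || !np->right)
         return;
       double total = 0;
       for (node *p = np; p; p = p->right)
         total += p->weight;
       double remaining = total, pivot = total / 2;
       node **npp = head;
       // Advance while the weight remaining after the current node still
       // reaches half of the total and there is a node to advance to.
       while (remaining - (*npp)->weight >= pivot && (*npp)->right)
         {
           remaining -= (*npp)->weight;
           npp = &(*npp)->right;
         }
       node *left = *head;
       np = *npp;                 // the pivot node
       *npp = 0;                  // cut the list just before the pivot
       *head = np;
       np->left = (left == np ? 0 : left);
       balance (&np->left);       // everything before the pivot
       balance (&np->right);      // everything after the pivot
     }

   Unlike balance_case_nodes, this sketch also splits two-element lists and
   has no fallback for uninitialized probabilities.  */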
2102 /* Dump ROOT, a list or tree of case nodes, to file. */
2104 void
2105 switch_decision_tree::dump_case_nodes (FILE *f, case_tree_node *root,
2106 int indent_step, int indent_level)
2108 if (root == 0)
2109 return;
2110 indent_level++;
2112 dump_case_nodes (f, root->m_left, indent_step, indent_level);
2114 fputs (";; ", f);
2115 fprintf (f, "%*s", indent_step * indent_level, "");
2116 root->m_c->dump (f);
2117 root->m_c->m_prob.dump (f);
2118 fputs (" subtree: ", f);
2119 root->m_c->m_subtree_prob.dump (f);
2120 fputs (")\n", f);
2122 dump_case_nodes (f, root->m_right, indent_step, indent_level);
2126 /* Make BB jump unconditionally to CASE_BB by redirecting its single successor edge. */
2128 void
2129 switch_decision_tree::emit_jump (basic_block bb, basic_block case_bb)
2131 edge e = single_succ_edge (bb);
2132 redirect_edge_succ (e, case_bb);
2135 /* Generate a GIMPLE_COND in BB that compares OP0 with OP1 and branches to
2136 LABEL_BB when the condition is true; return the block the false edge leads to.
2137 COMPARISON is the tree comparison code (EQ_EXPR, NE_EXPR, GT_EXPR, etc.).
2138 PROB is the probability of jumping to LABEL_BB. */
2140 basic_block
2141 switch_decision_tree::emit_cmp_and_jump_insns (basic_block bb, tree op0,
2142 tree op1, tree_code comparison,
2143 basic_block label_bb,
2144 profile_probability prob,
2145 location_t loc)
2147 // TODO: one caller passes an OP0 that is not the switch index.
2148 op1 = fold_convert (TREE_TYPE (op0), op1);
2150 gcond *cond = gimple_build_cond (comparison, op0, op1, NULL_TREE, NULL_TREE);
2151 gimple_set_location (cond, loc);
2152 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2153 gsi_insert_after (&gsi, cond, GSI_NEW_STMT);
2155 gcc_assert (single_succ_p (bb));
2157 /* Make a new basic block where the false branch will take place. */
2158 edge false_edge = split_block (bb, cond);
2159 false_edge->flags = EDGE_FALSE_VALUE;
2160 false_edge->probability = prob.invert ();
2162 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2163 true_edge->probability = prob;
2165 return false_edge->dest;
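/* Schematically, the helper above rewrites the tail of BB as

     if (op0 <COMPARISON> op1)
       goto label_bb;        // probability PROB
     else
       goto <new block>;     // probability PROB.invert ()

   and returns the new block on the false edge so that the next test in the
   decision tree can be chained onto it.  do_jump_if_equal below is the
   same operation specialized to EQ_EXPR.  */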
2168 /* Generate code to jump to LABEL_BB if OP0 and OP1 are equal.
2169 PROB is the probability of jumping to LABEL_BB.
2170 BB is the basic block where the new condition will be placed. */
2172 basic_block
2173 switch_decision_tree::do_jump_if_equal (basic_block bb, tree op0, tree op1,
2174 basic_block label_bb,
2175 profile_probability prob,
2176 location_t loc)
2178 op1 = fold_convert (TREE_TYPE (op0), op1);
2180 gcond *cond = gimple_build_cond (EQ_EXPR, op0, op1, NULL_TREE, NULL_TREE);
2181 gimple_set_location (cond, loc);
2182 gimple_stmt_iterator gsi = gsi_last_bb (bb);
2183 gsi_insert_before (&gsi, cond, GSI_SAME_STMT);
2185 gcc_assert (single_succ_p (bb));
2187 /* Make a new basic block where the false branch will take place. */
2188 edge false_edge = split_block (bb, cond);
2189 false_edge->flags = EDGE_FALSE_VALUE;
2190 false_edge->probability = prob.invert ();
2192 edge true_edge = make_edge (bb, label_bb, EDGE_TRUE_VALUE);
2193 true_edge->probability = prob;
2195 return false_edge->dest;
2198 /* Emit step-by-step code to select a case for the value of INDEX.
2199 The generated decision tree follows the form of the
2200 case-node binary tree NODE, whose nodes represent test conditions.
2201 DEFAULT_PROB is the probability of reaching the default basic block.
2202 INDEX_TYPE is the type of the switch index. */
2204 basic_block
2205 switch_decision_tree::emit_case_nodes (basic_block bb, tree index,
2206 case_tree_node *node,
2207 profile_probability default_prob,
2208 tree index_type, location_t loc)
2210 profile_probability p;
2212 /* If node is null, we are done. */
2213 if (node == NULL)
2214 return bb;
2216 /* Single value case. */
2217 if (node->m_c->is_single_value_p ())
2219 /* Node is single valued. First see if the index expression matches
2220 this node and then check our children, if any. */
2221 p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
2222 bb = do_jump_if_equal (bb, index, node->m_c->get_low (),
2223 node->m_c->m_case_bb, p, loc);
2224 /* Since this case is handled at this point, subtract its probability
2225 from the remaining subtree probability. */
2226 node->m_c->m_subtree_prob -= p;
2228 if (node->m_left != NULL && node->m_right != NULL)
2230 /* 1) the node has both children
2232 If both children are single-valued cases with no
2233 children, finish up all the work. This way, we can save
2234 one ordered comparison. */
2236 if (!node->m_left->has_child ()
2237 && node->m_left->m_c->is_single_value_p ()
2238 && !node->m_right->has_child ()
2239 && node->m_right->m_c->is_single_value_p ())
2241 p = (node->m_right->m_c->m_prob
2242 / (node->m_c->m_subtree_prob + default_prob));
2243 bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
2244 node->m_right->m_c->m_case_bb, p, loc);
2246 p = (node->m_left->m_c->m_prob
2247 / (node->m_c->m_subtree_prob + default_prob));
2248 bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
2249 node->m_left->m_c->m_case_bb, p, loc);
2251 else
2253 /* Branch to a label where we will handle it later. */
2254 basic_block test_bb = split_edge (single_succ_edge (bb));
2255 redirect_edge_succ (single_pred_edge (test_bb),
2256 single_succ_edge (bb)->dest);
2258 p = ((node->m_right->m_c->m_subtree_prob
2259 + default_prob.apply_scale (1, 2))
2260 / (node->m_c->m_subtree_prob + default_prob));
2261 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2262 GT_EXPR, test_bb, p, loc);
2263 default_prob = default_prob.apply_scale (1, 2);
2265 /* Handle the left-hand subtree. */
2266 bb = emit_case_nodes (bb, index, node->m_left,
2267 default_prob, index_type, loc);
2269 /* If the left-hand subtree fell through,
2270 don't let it fall into the right-hand subtree. */
2271 if (bb && m_default_bb)
2272 emit_jump (bb, m_default_bb);
2274 bb = emit_case_nodes (test_bb, index, node->m_right,
2275 default_prob, index_type, loc);
2278 else if (node->m_left == NULL && node->m_right != NULL)
2280 /* 2) the node has only a right child. */
2282 /* Here we have a right child but no left child, so we issue a conditional
2283 branch to default and process the right child.
2285 Omit the conditional branch to default if the right child
2286 does not have any children and is single valued; it would
2287 cost too much space to save so little time. */
2289 if (node->m_right->has_child ()
2290 || !node->m_right->m_c->is_single_value_p ())
2292 p = (default_prob.apply_scale (1, 2)
2293 / (node->m_c->m_subtree_prob + default_prob));
2294 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
2295 LT_EXPR, m_default_bb, p, loc);
2296 default_prob = default_prob.apply_scale (1, 2);
2298 bb = emit_case_nodes (bb, index, node->m_right, default_prob,
2299 index_type, loc);
2301 else
2303 /* We cannot process node->right normally
2304 since we haven't ruled out the numbers less than
2305 this node's value. So handle node->right explicitly. */
2306 p = (node->m_right->m_c->m_subtree_prob
2307 / (node->m_c->m_subtree_prob + default_prob));
2308 bb = do_jump_if_equal (bb, index, node->m_right->m_c->get_low (),
2309 node->m_right->m_c->m_case_bb, p, loc);
2312 else if (node->m_left != NULL && node->m_right == NULL)
2314 /* 3) the node has only a left child. Similar to the previous case. */
2316 if (node->m_left->has_child ()
2317 || !node->m_left->m_c->is_single_value_p ())
2319 p = (default_prob.apply_scale (1, 2)
2320 / (node->m_c->m_subtree_prob + default_prob));
2321 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2322 GT_EXPR, m_default_bb, p, loc);
2323 default_prob = default_prob.apply_scale (1, 2);
2325 bb = emit_case_nodes (bb, index, node->m_left, default_prob,
2326 index_type, loc);
2328 else
2330 /* We cannot process node->left normally
2331 since we haven't ruled out the numbers less than
2332 this node's value. So handle node->left explicitly. */
2333 p = (node->m_left->m_c->m_subtree_prob
2334 / (node->m_c->m_subtree_prob + default_prob));
2335 bb = do_jump_if_equal (bb, index, node->m_left->m_c->get_low (),
2336 node->m_left->m_c->m_case_bb, p, loc);
2340 else
2342 /* Node is a range. These cases are very similar to those for a single
2343 value, except that we do not start by testing whether this node
2344 is the one to branch to. */
2345 if (node->has_child () || node->m_c->get_type () != SIMPLE_CASE)
2347 /* Branch to a label where we will handle it later. */
2348 basic_block test_bb = split_edge (single_succ_edge (bb));
2349 redirect_edge_succ (single_pred_edge (test_bb),
2350 single_succ_edge (bb)->dest);
2353 profile_probability right_prob = profile_probability::never ();
2354 if (node->m_right)
2355 right_prob = node->m_right->m_c->m_subtree_prob;
2356 p = ((right_prob + default_prob.apply_scale (1, 2))
2357 / (node->m_c->m_subtree_prob + default_prob));
2359 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_high (),
2360 GT_EXPR, test_bb, p, loc);
2361 default_prob = default_prob.apply_scale (1, 2);
2363 /* Value belongs to this node or to the left-hand subtree. */
2364 p = node->m_c->m_prob / (node->m_c->m_subtree_prob + default_prob);
2365 bb = emit_cmp_and_jump_insns (bb, index, node->m_c->get_low (),
2366 GE_EXPR, node->m_c->m_case_bb, p, loc);
2368 /* Handle the left-hand subtree. */
2369 bb = emit_case_nodes (bb, index, node->m_left,
2370 default_prob, index_type, loc);
2372 /* If the left-hand subtree fell through,
2373 don't let it fall into the right-hand subtree. */
2374 if (bb && m_default_bb)
2375 emit_jump (bb, m_default_bb);
2377 bb = emit_case_nodes (test_bb, index, node->m_right,
2378 default_prob, index_type, loc);
2380 else
2382 /* Node has no children, so emit a single range test against its low
2383 and high bounds: values outside the range go to the default label,
2384 values inside it go to this node's case label. */
2385 tree lhs, rhs;
2386 generate_range_test (bb, index, node->m_c->get_low (),
2387 node->m_c->get_high (), &lhs, &rhs);
2388 p = default_prob / (node->m_c->m_subtree_prob + default_prob);
2390 bb = emit_cmp_and_jump_insns (bb, lhs, rhs, GT_EXPR,
2391 m_default_bb, p, loc);
2393 emit_jump (bb, node->m_c->m_case_bb);
2394 return NULL;
2398 return bb;
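/* A rough numeric sketch of the probability bookkeeping above, assuming
   five equally likely cases of 16% each (values 1, 4, 9, 16, 25 with 9 as
   the balanced root) and a 20% default probability:

     - the root equality test "index == 9" gets
         p = 16% / (80% + 20%) = 16%,
       and the root's remaining subtree probability drops to 64%;
     - the following "index > 9" branch towards the right half gets
         p = (32% + 20%/2) / (64% + 20%) = 42% / 84% = 50%,
       after which default_prob is halved to 10% for the left half;
     - the halving of default_prob repeats at every ordered comparison, so
       the probability of reaching the default label is spread across the
       leaves of the tree.

   (profile_probability arithmetic is scaled and rounded internally, so the
   actual counts may differ slightly from these exact fractions.)  */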
2401 /* The main function of the pass scans the function for switches and
2402 converts each of them with switch_conversion::expand. */
2404 namespace {
2406 const pass_data pass_data_convert_switch =
2408 GIMPLE_PASS, /* type */
2409 "switchconv", /* name */
2410 OPTGROUP_NONE, /* optinfo_flags */
2411 TV_TREE_SWITCH_CONVERSION, /* tv_id */
2412 ( PROP_cfg | PROP_ssa ), /* properties_required */
2413 0, /* properties_provided */
2414 0, /* properties_destroyed */
2415 0, /* todo_flags_start */
2416 TODO_update_ssa, /* todo_flags_finish */
2419 class pass_convert_switch : public gimple_opt_pass
2421 public:
2422 pass_convert_switch (gcc::context *ctxt)
2423 : gimple_opt_pass (pass_data_convert_switch, ctxt)
2426 /* opt_pass methods: */
2427 virtual bool gate (function *) { return flag_tree_switch_conversion != 0; }
2428 virtual unsigned int execute (function *);
2430 }; // class pass_convert_switch
2432 unsigned int
2433 pass_convert_switch::execute (function *fun)
2435 basic_block bb;
2436 bool cfg_altered = false;
2438 FOR_EACH_BB_FN (bb, fun)
2440 gimple *stmt = last_stmt (bb);
2441 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
2443 if (dump_file)
2445 expanded_location loc = expand_location (gimple_location (stmt));
2447 fprintf (dump_file, "beginning to process the following "
2448 "SWITCH statement (%s:%d) : ------- \n",
2449 loc.file, loc.line);
2450 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2451 putc ('\n', dump_file);
2454 switch_conversion sconv;
2455 sconv.expand (as_a <gswitch *> (stmt));
2456 cfg_altered |= sconv.m_cfg_altered;
2457 if (!sconv.m_reason)
2459 if (dump_file)
2461 fputs ("Switch converted\n", dump_file);
2462 fputs ("--------------------------------\n", dump_file);
2465 /* Make no effort to update the post-dominator tree.
2466 It is actually not that hard for the transformations
2467 we have performed, but it is not supported
2468 by iterate_fix_dominators. */
2469 free_dominance_info (CDI_POST_DOMINATORS);
2471 else
2473 if (dump_file)
2475 fputs ("Bailing out - ", dump_file);
2476 fputs (sconv.m_reason, dump_file);
2477 fputs ("\n--------------------------------\n", dump_file);
2483 return cfg_altered ? TODO_cleanup_cfg : 0;
2486 } // anon namespace
2488 gimple_opt_pass *
2489 make_pass_convert_switch (gcc::context *ctxt)
2491 return new pass_convert_switch (ctxt);
2494 /* The main function of the pass scans the function for switches and
2495 lowers each of them to a jump table, a series of bit tests, or a decision tree. */
2497 namespace {
2499 template <bool O0> class pass_lower_switch: public gimple_opt_pass
2501 public:
2502 pass_lower_switch (gcc::context *ctxt) : gimple_opt_pass (data, ctxt) {}
2504 static const pass_data data;
2505 opt_pass *
2506 clone ()
2508 return new pass_lower_switch<O0> (m_ctxt);
2511 virtual bool
2512 gate (function *)
2514 return !O0 || !optimize;
2517 virtual unsigned int execute (function *fun);
2518 }; // class pass_lower_switch
2520 template <bool O0>
2521 const pass_data pass_lower_switch<O0>::data = {
2522 GIMPLE_PASS, /* type */
2523 O0 ? "switchlower_O0" : "switchlower", /* name */
2524 OPTGROUP_NONE, /* optinfo_flags */
2525 TV_TREE_SWITCH_LOWERING, /* tv_id */
2526 ( PROP_cfg | PROP_ssa ), /* properties_required */
2527 0, /* properties_provided */
2528 0, /* properties_destroyed */
2529 0, /* todo_flags_start */
2530 TODO_update_ssa | TODO_cleanup_cfg, /* todo_flags_finish */
2533 template <bool O0>
2534 unsigned int
2535 pass_lower_switch<O0>::execute (function *fun)
2537 basic_block bb;
2538 bool expanded = false;
2540 auto_vec<gimple *> switch_statements;
2541 switch_statements.create (1);
2543 FOR_EACH_BB_FN (bb, fun)
2545 gimple *stmt = last_stmt (bb);
2546 gswitch *swtch;
2547 if (stmt && (swtch = dyn_cast<gswitch *> (stmt)))
2549 if (!O0)
2550 group_case_labels_stmt (swtch);
2551 switch_statements.safe_push (swtch);
2555 for (unsigned i = 0; i < switch_statements.length (); i++)
2557 gimple *stmt = switch_statements[i];
2558 if (dump_file)
2560 expanded_location loc = expand_location (gimple_location (stmt));
2562 fprintf (dump_file, "beginning to process the following "
2563 "SWITCH statement (%s:%d) : ------- \n",
2564 loc.file, loc.line);
2565 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
2566 putc ('\n', dump_file);
2569 gswitch *swtch = dyn_cast<gswitch *> (stmt);
2570 if (swtch)
2572 switch_decision_tree dt (swtch);
2573 expanded |= dt.analyze_switch_statement ();
2577 if (expanded)
2579 free_dominance_info (CDI_DOMINATORS);
2580 free_dominance_info (CDI_POST_DOMINATORS);
2581 mark_virtual_operands_for_renaming (cfun);
2584 return 0;
2587 } // anon namespace
2589 gimple_opt_pass *
2590 make_pass_lower_switch_O0 (gcc::context *ctxt)
2592 return new pass_lower_switch<true> (ctxt);
2594 gimple_opt_pass *
2595 make_pass_lower_switch (gcc::context *ctxt)
2597 return new pass_lower_switch<false> (ctxt);