c, c++: attribute format on a ctor with a vbase [PR101833, PR47634]
[official-gcc.git] / gcc / gimple-isel.cc
blob4b309a05a9aea74621225111c1fb9d8fc9cb8d30
1 /* Schedule GIMPLE vector statements.
2 Copyright (C) 2020-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "expmed.h"
30 #include "optabs-tree.h"
31 #include "tree-eh.h"
32 #include "gimple-iterator.h"
33 #include "gimplify-me.h"
34 #include "gimplify.h"
35 #include "tree-cfg.h"
36 #include "bitmap.h"
37 #include "tree-ssa-dce.h"
38 #include "memmodel.h"
39 #include "optabs.h"
40 #include "gimple-fold.h"
41 #include "internal-fn.h"
/* Expand all ARRAY_REF(VIEW_CONVERT_EXPR) gimple assignments into calls to
   internal function based on vector type of selected expansion.
   i.e.:
     VIEW_CONVERT_EXPR<int[4]>(u)[_1] = i_4(D);
   =>
     _7 = u;
     _8 = .VEC_SET (_7, i_4(D), _1);
     u = _8;  */
52 static bool
53 gimple_expand_vec_set_expr (struct function *fun, gimple_stmt_iterator *gsi)
55 enum tree_code code;
56 gcall *new_stmt = NULL;
57 gassign *ass_stmt = NULL;
58 bool cfg_changed = false;
60 /* Only consider code == GIMPLE_ASSIGN. */
61 gassign *stmt = dyn_cast<gassign *> (gsi_stmt (*gsi));
62 if (!stmt)
63 return false;
65 tree lhs = gimple_assign_lhs (stmt);
66 code = TREE_CODE (lhs);
67 if (code != ARRAY_REF)
68 return false;
70 tree val = gimple_assign_rhs1 (stmt);
71 tree op0 = TREE_OPERAND (lhs, 0);
72 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR && DECL_P (TREE_OPERAND (op0, 0))
73 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
74 && TYPE_MODE (TREE_TYPE (lhs))
75 == TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 0)))))
77 tree pos = TREE_OPERAND (lhs, 1);
78 tree view_op0 = TREE_OPERAND (op0, 0);
79 machine_mode outermode = TYPE_MODE (TREE_TYPE (view_op0));
80 if (auto_var_in_fn_p (view_op0, fun->decl)
81 && !TREE_ADDRESSABLE (view_op0) && can_vec_set_var_idx_p (outermode))
83 location_t loc = gimple_location (stmt);
84 tree var_src = make_ssa_name (TREE_TYPE (view_op0));
85 tree var_dst = make_ssa_name (TREE_TYPE (view_op0));
87 ass_stmt = gimple_build_assign (var_src, view_op0);
88 gimple_set_vuse (ass_stmt, gimple_vuse (stmt));
89 gimple_set_location (ass_stmt, loc);
90 gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);
92 new_stmt
93 = gimple_build_call_internal (IFN_VEC_SET, 3, var_src, val, pos);
94 gimple_call_set_lhs (new_stmt, var_dst);
95 gimple_set_location (new_stmt, loc);
96 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
98 ass_stmt = gimple_build_assign (view_op0, var_dst);
99 gimple_set_location (ass_stmt, loc);
100 gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);
102 basic_block bb = gimple_bb (stmt);
103 gimple_move_vops (ass_stmt, stmt);
104 if (gsi_remove (gsi, true)
105 && gimple_purge_dead_eh_edges (bb))
106 cfg_changed = true;
110 return cfg_changed;
113 /* Expand all VEC_COND_EXPR gimple assignments into calls to internal
114 function based on type of selected expansion. */
116 static gimple *
117 gimple_expand_vec_cond_expr (struct function *fun, gimple_stmt_iterator *gsi,
118 hash_map<tree, unsigned int> *vec_cond_ssa_name_uses)
120 tree lhs, op0a = NULL_TREE, op0b = NULL_TREE;
121 enum tree_code code;
122 enum tree_code tcode;
123 machine_mode cmp_op_mode;
124 bool unsignedp;
125 enum insn_code icode;
126 imm_use_iterator imm_iter;
128 /* Only consider code == GIMPLE_ASSIGN. */
129 gassign *stmt = dyn_cast<gassign *> (gsi_stmt (*gsi));
130 if (!stmt)
131 return NULL;
133 code = gimple_assign_rhs_code (stmt);
134 if (code != VEC_COND_EXPR)
135 return NULL;
137 tree op0 = gimple_assign_rhs1 (stmt);
138 tree op1 = gimple_assign_rhs2 (stmt);
139 tree op2 = gimple_assign_rhs3 (stmt);
140 lhs = gimple_assign_lhs (stmt);
141 machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
143 /* Lower mask typed, non-vector mode VEC_COND_EXPRs to bitwise operations.
144 Those can end up generated by folding and at least for integer mode masks
145 we cannot expect vcond expanders to exist. We lower a ? b : c
146 to (b & a) | (c & ~a). */
147 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (lhs))
148 && !VECTOR_MODE_P (mode))
150 gcc_assert (types_compatible_p (TREE_TYPE (op0), TREE_TYPE (op1)));
151 gimple_seq stmts = NULL;
152 tree type = TREE_TYPE (lhs);
153 location_t loc = gimple_location (stmt);
154 tree tem0 = gimple_build (&stmts, loc, BIT_AND_EXPR, type, op1, op0);
155 tree tem1 = gimple_build (&stmts, loc, BIT_NOT_EXPR, type, op0);
156 tree tem2 = gimple_build (&stmts, loc, BIT_AND_EXPR, type, op2, tem1);
157 tree tem3 = gimple_build (&stmts, loc, BIT_IOR_EXPR, type, tem0, tem2);
158 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
159 return gimple_build_assign (lhs, tem3);
162 bool can_compute_op0 = true;
163 gcc_assert (!COMPARISON_CLASS_P (op0));
164 if (TREE_CODE (op0) == SSA_NAME)
166 unsigned int used_vec_cond_exprs = 0;
167 unsigned int *slot = vec_cond_ssa_name_uses->get (op0);
168 if (slot)
169 used_vec_cond_exprs = *slot;
170 else
172 gimple *use_stmt;
173 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, op0)
175 gassign *assign = dyn_cast<gassign *> (use_stmt);
176 if (assign != NULL
177 && gimple_assign_rhs_code (assign) == VEC_COND_EXPR
178 && gimple_assign_rhs1 (assign) == op0)
179 used_vec_cond_exprs++;
181 vec_cond_ssa_name_uses->put (op0, used_vec_cond_exprs);
184 gassign *def_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (op0));
185 if (def_stmt)
187 tcode = gimple_assign_rhs_code (def_stmt);
188 op0a = gimple_assign_rhs1 (def_stmt);
189 op0b = gimple_assign_rhs2 (def_stmt);
191 tree op0_type = TREE_TYPE (op0);
192 tree op0a_type = TREE_TYPE (op0a);
193 if (TREE_CODE_CLASS (tcode) == tcc_comparison)
194 can_compute_op0 = expand_vec_cmp_expr_p (op0a_type, op0_type,
195 tcode);
197 /* Try to fold x CMP y ? -1 : 0 to x CMP y. */
198 if (can_compute_op0
199 && integer_minus_onep (op1)
200 && integer_zerop (op2)
201 && TYPE_MODE (TREE_TYPE (lhs)) == TYPE_MODE (TREE_TYPE (op0)))
203 tree conv_op = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), op0);
204 gassign *new_stmt = gimple_build_assign (lhs, conv_op);
205 gsi_replace (gsi, new_stmt, true);
206 return new_stmt;
209 /* When the compare has EH we do not want to forward it when
210 it has multiple uses and in general because of the complication
211 with EH redirection. */
212 if (stmt_can_throw_internal (fun, def_stmt))
213 tcode = TREE_CODE (op0);
215 /* If we can compute op0 and have multiple uses, keep the SSA
216 name and use vcond_mask. */
217 else if (can_compute_op0
218 && used_vec_cond_exprs >= 2
219 && (get_vcond_mask_icode (mode, TYPE_MODE (op0_type))
220 != CODE_FOR_nothing))
221 tcode = TREE_CODE (op0);
223 else
224 tcode = TREE_CODE (op0);
226 else
227 tcode = TREE_CODE (op0);
229 if (TREE_CODE_CLASS (tcode) != tcc_comparison)
231 gcc_assert (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (op0)));
232 if (get_vcond_mask_icode (mode, TYPE_MODE (TREE_TYPE (op0)))
233 != CODE_FOR_nothing)
234 return gimple_build_call_internal (IFN_VCOND_MASK, 3, op0, op1, op2);
235 /* Fake op0 < 0. */
236 else
238 gcc_assert (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (op0)))
239 == MODE_VECTOR_INT);
240 op0a = op0;
241 op0b = build_zero_cst (TREE_TYPE (op0));
242 tcode = LT_EXPR;
245 cmp_op_mode = TYPE_MODE (TREE_TYPE (op0a));
246 unsignedp = TYPE_UNSIGNED (TREE_TYPE (op0a));
248 gcc_assert (known_eq (GET_MODE_NUNITS (mode),
249 GET_MODE_NUNITS (cmp_op_mode)));
251 icode = get_vcond_icode (mode, cmp_op_mode, unsignedp);
252 /* Some targets do not have vcondeq and only vcond with NE/EQ
253 but not vcondu, so make sure to also try vcond here as
254 vcond_icode_p would canonicalize the optab query to. */
255 if (icode == CODE_FOR_nothing
256 && (tcode == NE_EXPR || tcode == EQ_EXPR)
257 && ((icode = get_vcond_icode (mode, cmp_op_mode, !unsignedp))
258 != CODE_FOR_nothing))
259 unsignedp = !unsignedp;
260 if (icode == CODE_FOR_nothing)
262 if (tcode == LT_EXPR
263 && op0a == op0)
265 /* A VEC_COND_EXPR condition could be folded from EQ_EXPR/NE_EXPR
266 into a constant when only get_vcond_eq_icode is supported.
267 Try changing it to NE_EXPR. */
268 tcode = NE_EXPR;
270 if ((tcode == EQ_EXPR || tcode == NE_EXPR)
271 && direct_internal_fn_supported_p (IFN_VCONDEQ, TREE_TYPE (lhs),
272 TREE_TYPE (op0a),
273 OPTIMIZE_FOR_BOTH))
275 tree tcode_tree = build_int_cst (integer_type_node, tcode);
276 return gimple_build_call_internal (IFN_VCONDEQ, 5, op0a, op0b, op1,
277 op2, tcode_tree);
280 gcc_assert (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (op0))
281 && can_compute_op0
282 && (get_vcond_mask_icode (mode, TYPE_MODE (TREE_TYPE (op0)))
283 != CODE_FOR_nothing));
284 return gimple_build_call_internal (IFN_VCOND_MASK, 3, op0, op1, op2);
287 tree tcode_tree = build_int_cst (integer_type_node, tcode);
288 return gimple_build_call_internal (unsignedp ? IFN_VCONDU : IFN_VCOND,
289 5, op0a, op0b, op1, op2, tcode_tree);
/* Iterate all gimple statements and try to expand VEC_COND_EXPR assignments
   (and variable-index vector element stores) into internal function calls.  */
297 static unsigned int
298 gimple_expand_vec_exprs (struct function *fun)
300 gimple_stmt_iterator gsi;
301 basic_block bb;
302 hash_map<tree, unsigned int> vec_cond_ssa_name_uses;
303 auto_bitmap dce_ssa_names;
304 bool cfg_changed = false;
306 FOR_EACH_BB_FN (bb, fun)
308 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
310 gimple *g = gimple_expand_vec_cond_expr (fun, &gsi,
311 &vec_cond_ssa_name_uses);
312 if (g != NULL)
314 tree lhs = gimple_assign_lhs (gsi_stmt (gsi));
315 gimple_set_lhs (g, lhs);
316 gsi_replace (&gsi, g, false);
319 cfg_changed |= gimple_expand_vec_set_expr (fun, &gsi);
320 if (gsi_end_p (gsi))
321 break;
325 for (hash_map<tree, unsigned int>::iterator it = vec_cond_ssa_name_uses.begin ();
326 it != vec_cond_ssa_name_uses.end (); ++it)
327 bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION ((*it).first));
329 simple_dce_from_worklist (dce_ssa_names);
331 return cfg_changed ? TODO_cleanup_cfg : 0;
334 namespace {
336 const pass_data pass_data_gimple_isel =
338 GIMPLE_PASS, /* type */
339 "isel", /* name */
340 OPTGROUP_VEC, /* optinfo_flags */
341 TV_NONE, /* tv_id */
342 PROP_cfg, /* properties_required */
343 0, /* properties_provided */
344 0, /* properties_destroyed */
345 0, /* todo_flags_start */
346 TODO_update_ssa, /* todo_flags_finish */
349 class pass_gimple_isel : public gimple_opt_pass
351 public:
352 pass_gimple_isel (gcc::context *ctxt)
353 : gimple_opt_pass (pass_data_gimple_isel, ctxt)
356 /* opt_pass methods: */
357 virtual bool gate (function *)
359 return true;
362 virtual unsigned int execute (function *fun)
364 return gimple_expand_vec_exprs (fun);
367 }; // class pass_gimple_isel
369 } // anon namespace
371 gimple_opt_pass *
372 make_pass_gimple_isel (gcc::context *ctxt)
374 return new pass_gimple_isel (ctxt);