gcc/tree-vect-transform.c
1 /* Transformation Utilities for Loop Vectorization.
2 Copyright (C) 2003,2004,2005 Free Software Foundation, Inc.
3 Contributed by Dorit Naishlos <dorit@il.ibm.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "errors.h"
27 #include "ggc.h"
28 #include "tree.h"
29 #include "target.h"
30 #include "rtl.h"
31 #include "basic-block.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "timevar.h"
36 #include "cfgloop.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "tree-data-ref.h"
40 #include "tree-chrec.h"
41 #include "tree-scalar-evolution.h"
42 #include "tree-vectorizer.h"
43 #include "langhooks.h"
44 #include "tree-pass.h"
45 #include "toplev.h"
47 /* Utility functions for the code transformation. */
48 static bool vect_transform_stmt (tree, block_stmt_iterator *);
49 static void vect_align_data_ref (tree);
50 static tree vect_create_destination_var (tree, tree);
51 static tree vect_create_data_ref_ptr
52 (tree, block_stmt_iterator *, tree, tree *, bool);
53 static tree vect_create_index_for_vector_ref (loop_vec_info);
54 static tree vect_create_addr_base_for_vector_ref (tree, tree *, tree);
55 static tree vect_get_new_vect_var (tree, enum vect_var_kind, const char *);
56 static tree vect_get_vec_def_for_operand (tree, tree);
57 static tree vect_init_vector (tree, tree);
58 static void vect_finish_stmt_generation
59 (tree stmt, tree vec_stmt, block_stmt_iterator *bsi);
60 static bool vect_is_simple_cond (tree, loop_vec_info);
61 static void update_vuses_to_preheader (tree, struct loop*);
 63 /* Utility functions dealing with loop peeling (not the peeling itself). */
64 static void vect_generate_tmps_on_preheader
65 (loop_vec_info, tree *, tree *, tree *);
66 static tree vect_build_loop_niters (loop_vec_info);
67 static void vect_update_ivs_after_vectorizer (loop_vec_info, tree, edge);
68 static tree vect_gen_niters_for_prolog_loop (loop_vec_info, tree);
69 static void vect_update_init_of_dr (struct data_reference *, tree niters);
70 static void vect_update_inits_of_drs (loop_vec_info, tree);
71 static void vect_do_peeling_for_alignment (loop_vec_info, struct loops *);
72 static void vect_do_peeling_for_loop_bound
73 (loop_vec_info, tree *, struct loops *);
76 /* Function vect_get_new_vect_var.
 78    Returns a name for a new variable. The current naming scheme prepends the
 79    prefix "vect_" or "vect_p" (depending on the value of VAR_KIND) to NAME
 80    when NAME is provided; otherwise only the prefix is used for the name of
 81    the new variable. */
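/* Illustrative example (hypothetical names, not from the original source):
   for a scalar named "x", vect_get_new_vect_var (vectype, vect_simple_var, "x")
   creates a temporary named roughly "vect_x", while passing vect_pointer_var
   yields a name of roughly "vect_px".  */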
83 static tree
84 vect_get_new_vect_var (tree type, enum vect_var_kind var_kind, const char *name)
86 const char *prefix;
87 tree new_vect_var;
89 if (var_kind == vect_simple_var)
90 prefix = "vect_";
91 else
92 prefix = "vect_p";
94 if (name)
95 new_vect_var = create_tmp_var (type, concat (prefix, name, NULL));
96 else
97 new_vect_var = create_tmp_var (type, prefix);
99 return new_vect_var;
103 /* Function vect_create_index_for_vector_ref.
105    Create (and return) an index variable, along with its update chain in the
106 loop. This variable will be used to access a memory location in a vector
107 operation.
109 Input:
110 LOOP: The loop being vectorized.
111 BSI: The block_stmt_iterator where STMT is. Any new stmts created by this
112 function can be added here, or in the loop pre-header.
114 Output:
115 Return an index that will be used to index a vector array. It is expected
116 that a pointer to the first vector will be used as the base address for the
117 indexed reference.
119 FORNOW: we are not trying to be efficient, just creating a new index each
120 time from scratch. At this time all vector references could use the same
121 index.
123 TODO: create only one index to be used by all vector references. Record
124 the index in the LOOP_VINFO the first time this procedure is called and
125 return it on subsequent calls. The increment of this index must be placed
126 just before the conditional expression that ends the single block loop. */
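/* Illustrative sketch of what this function generates (hypothetical SSA names):

     loop header:  indx_before_incr = PHI <0 (preheader), indx_after_incr (latch)>
       ... vector accesses index with indx_before_incr ...
       indx_after_incr = indx_before_incr + 1;

   i.e. the index starts at 0 and is bumped by 1, matching the init/step
   values passed to create_iv below.  */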
128 static tree
129 vect_create_index_for_vector_ref (loop_vec_info loop_vinfo)
131 tree init, step;
132 block_stmt_iterator incr_bsi;
133 bool insert_after;
134 tree indx_before_incr, indx_after_incr;
135 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
136 tree incr;
138 /* It is assumed that the base pointer used for vectorized access contains
139 the address of the first vector. Therefore the index used for vectorized
140 access must be initialized to zero and incremented by 1. */
142 init = integer_zero_node;
143 step = integer_one_node;
145 standard_iv_increment_position (loop, &incr_bsi, &insert_after);
146 create_iv (init, step, NULL_TREE, loop, &incr_bsi, insert_after,
147 &indx_before_incr, &indx_after_incr);
148 incr = bsi_stmt (incr_bsi);
149 set_stmt_info (stmt_ann (incr), new_stmt_vec_info (incr, loop_vinfo));
151 return indx_before_incr;
155 /* Function vect_create_addr_base_for_vector_ref.
157 Create an expression that computes the address of the first memory location
158 that will be accessed for a data reference.
160 Input:
161 STMT: The statement containing the data reference.
162 NEW_STMT_LIST: Must be initialized to NULL_TREE or a statement list.
163    OFFSET: Optional. If supplied, it is added to the initial address.
165 Output:
166 1. Return an SSA_NAME whose value is the address of the memory location of
167 the first vector of the data reference.
168 2. If new_stmt_list is not NULL_TREE after return then the caller must insert
169 these statement(s) which define the returned SSA_NAME.
171 FORNOW: We are only handling array accesses with step 1. */
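/* Illustrative example (hypothetical data-ref): for an access 'a[i]' with base
   address '&a', initial offset 'init_off' (STMT_VINFO_VECT_INIT_OFFSET) and a
   caller-supplied OFFSET, the returned SSA_NAME holds roughly

     addr_base = &a + (init_off + OFFSET * step)

   where 'step' is the STMT_VINFO_VECT_STEP of the data-ref, as computed
   below.  */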
173 static tree
174 vect_create_addr_base_for_vector_ref (tree stmt,
175 tree *new_stmt_list,
176 tree offset)
178 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
179 struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
180 tree data_ref_base =
181 unshare_expr (STMT_VINFO_VECT_DR_BASE_ADDRESS (stmt_info));
182 tree base_name = build_fold_indirect_ref (data_ref_base);
183 tree ref = DR_REF (dr);
184 tree scalar_type = TREE_TYPE (ref);
185 tree scalar_ptr_type = build_pointer_type (scalar_type);
186 tree vec_stmt;
187 tree new_temp;
188 tree addr_base, addr_expr;
189 tree dest, new_stmt;
190 tree base_offset = unshare_expr (STMT_VINFO_VECT_INIT_OFFSET (stmt_info));
192 /* Create base_offset */
193 dest = create_tmp_var (TREE_TYPE (base_offset), "base_off");
194 add_referenced_tmp_var (dest);
195 base_offset = force_gimple_operand (base_offset, &new_stmt, false, dest);
196 append_to_statement_list_force (new_stmt, new_stmt_list);
198 if (offset)
200 tree tmp = create_tmp_var (TREE_TYPE (base_offset), "offset");
201 add_referenced_tmp_var (tmp);
202 offset = fold (build2 (MULT_EXPR, TREE_TYPE (offset), offset,
203 STMT_VINFO_VECT_STEP (stmt_info)));
204 base_offset = fold (build2 (PLUS_EXPR, TREE_TYPE (base_offset),
205 base_offset, offset));
206 base_offset = force_gimple_operand (base_offset, &new_stmt, false, tmp);
207 append_to_statement_list_force (new_stmt, new_stmt_list);
210 /* base + base_offset */
211 addr_base = fold (build2 (PLUS_EXPR, TREE_TYPE (data_ref_base), data_ref_base,
212 base_offset));
214 /* addr_expr = addr_base */
215 addr_expr = vect_get_new_vect_var (scalar_ptr_type, vect_pointer_var,
216 get_name (base_name));
217 add_referenced_tmp_var (addr_expr);
218 vec_stmt = build2 (MODIFY_EXPR, void_type_node, addr_expr, addr_base);
219 new_temp = make_ssa_name (addr_expr, vec_stmt);
220 TREE_OPERAND (vec_stmt, 0) = new_temp;
221 append_to_statement_list_force (vec_stmt, new_stmt_list);
223 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
225 fprintf (vect_dump, "created ");
226 print_generic_expr (vect_dump, vec_stmt, TDF_SLIM);
228 return new_temp;
232 /* Function vect_align_data_ref.
234    Handle misalignment of memory accesses.
236 FORNOW: Can't handle misaligned accesses.
237 Make sure that the dataref is aligned. */
239 static void
240 vect_align_data_ref (tree stmt)
242 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
243 struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
245 /* FORNOW: can't handle misaligned accesses;
246 all accesses expected to be aligned. */
247 gcc_assert (aligned_access_p (dr));
251 /* Function vect_create_data_ref_ptr.
253 Create a memory reference expression for vector access, to be used in a
254 vector load/store stmt. The reference is based on a new pointer to vector
255 type (vp).
257 Input:
258 1. STMT: a stmt that references memory. Expected to be of the form
259 MODIFY_EXPR <name, data-ref> or MODIFY_EXPR <data-ref, name>.
260 2. BSI: block_stmt_iterator where new stmts can be added.
261 3. OFFSET (optional): an offset to be added to the initial address accessed
262 by the data-ref in STMT.
263 4. ONLY_INIT: indicate if vp is to be updated in the loop, or remain
264 pointing to the initial address.
266 Output:
267 1. Declare a new ptr to vector_type, and have it point to the base of the
268       data reference (initial address accessed by the data reference).
269 For example, for vector of type V8HI, the following code is generated:
271 v8hi *vp;
272 vp = (v8hi *)initial_address;
274 if OFFSET is not supplied:
275 initial_address = &a[init];
276 if OFFSET is supplied:
277 initial_address = &a[init + OFFSET];
279 Return the initial_address in INITIAL_ADDRESS.
281 2. Create a data-reference in the loop based on the new vector pointer vp,
282 and using a new index variable 'idx' as follows:
284 vp' = vp + update
286 where if ONLY_INIT is true:
287 update = zero
288 and otherwise
289 update = idx + vector_type_size
291 Return the pointer vp'.
294 FORNOW: handle only aligned and consecutive accesses. */
296 static tree
297 vect_create_data_ref_ptr (tree stmt, block_stmt_iterator *bsi, tree offset,
298 tree *initial_address, bool only_init)
300 tree base_name;
301 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
302 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
303 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
304 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
305 tree vect_ptr_type;
306 tree vect_ptr;
307 tree tag;
308 tree new_temp;
309 tree vec_stmt;
310 tree new_stmt_list = NULL_TREE;
311 tree idx;
312 edge pe = loop_preheader_edge (loop);
313 basic_block new_bb;
314 tree vect_ptr_init;
315 tree vectype_size;
316 tree ptr_update;
317 tree data_ref_ptr;
318 tree type, tmp, size;
320 base_name = build_fold_indirect_ref (unshare_expr (
321 STMT_VINFO_VECT_DR_BASE_ADDRESS (stmt_info)));
323 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
325 tree data_ref_base = base_name;
326 fprintf (vect_dump, "create array_ref of type: ");
327 print_generic_expr (vect_dump, vectype, TDF_SLIM);
328 if (TREE_CODE (data_ref_base) == VAR_DECL)
329 fprintf (vect_dump, " vectorizing a one dimensional array ref: ");
330 else if (TREE_CODE (data_ref_base) == ARRAY_REF)
331 fprintf (vect_dump, " vectorizing a multidimensional array ref: ");
332 else if (TREE_CODE (data_ref_base) == COMPONENT_REF)
333 fprintf (vect_dump, " vectorizing a record based array ref: ");
334 else if (TREE_CODE (data_ref_base) == SSA_NAME)
335 fprintf (vect_dump, " vectorizing a pointer ref: ");
336 print_generic_expr (vect_dump, base_name, TDF_SLIM);
339 /** (1) Create the new vector-pointer variable: **/
341 vect_ptr_type = build_pointer_type (vectype);
342 vect_ptr = vect_get_new_vect_var (vect_ptr_type, vect_pointer_var,
343 get_name (base_name));
344 add_referenced_tmp_var (vect_ptr);
347 /** (2) Add aliasing information to the new vector-pointer:
348 (The points-to info (SSA_NAME_PTR_INFO) may be defined later.) **/
350 tag = STMT_VINFO_MEMTAG (stmt_info);
351 gcc_assert (tag);
353 /* If the memory tag of the original reference was not a type tag or
354 if the pointed-to type of VECT_PTR has an alias set number
355 different than TAG's, then we need to create a new type tag for
356 VECT_PTR and add TAG to its alias set. */
357 if (var_ann (tag)->mem_tag_kind == NOT_A_TAG
358 || get_alias_set (tag) != get_alias_set (TREE_TYPE (vect_ptr_type)))
359 add_type_alias (vect_ptr, tag);
360 else
361 var_ann (vect_ptr)->type_mem_tag = tag;
363 var_ann (vect_ptr)->subvars = STMT_VINFO_SUBVARS (stmt_info);
365   /** (3) Calculate the initial address of the vector-pointer, and set
366 the vector-pointer to point to it before the loop: **/
368 /* Create: (&(base[init_val+offset]) in the loop preheader. */
369 new_temp = vect_create_addr_base_for_vector_ref (stmt, &new_stmt_list,
370 offset);
371 pe = loop_preheader_edge (loop);
372 new_bb = bsi_insert_on_edge_immediate (pe, new_stmt_list);
373 gcc_assert (!new_bb);
374 *initial_address = new_temp;
376 /* Create: p = (vectype *) initial_base */
377 vec_stmt = fold_convert (vect_ptr_type, new_temp);
378 vec_stmt = build2 (MODIFY_EXPR, void_type_node, vect_ptr, vec_stmt);
379 new_temp = make_ssa_name (vect_ptr, vec_stmt);
380 TREE_OPERAND (vec_stmt, 0) = new_temp;
381 new_bb = bsi_insert_on_edge_immediate (pe, vec_stmt);
382 gcc_assert (!new_bb);
383 vect_ptr_init = TREE_OPERAND (vec_stmt, 0);
386 /** (4) Handle the updating of the vector-pointer inside the loop: **/
388 if (only_init) /* No update in loop is required. */
390 /* Copy the points-to information if it exists. */
391 if (STMT_VINFO_PTR_INFO (stmt_info))
392 duplicate_ssa_name_ptr_info (vect_ptr_init,
393 STMT_VINFO_PTR_INFO (stmt_info));
394 return vect_ptr_init;
397 idx = vect_create_index_for_vector_ref (loop_vinfo);
399 /* Create: update = idx * vectype_size */
400 tmp = create_tmp_var (integer_type_node, "update");
401 add_referenced_tmp_var (tmp);
402 size = TYPE_SIZE (vect_ptr_type);
403 type = lang_hooks.types.type_for_size (tree_low_cst (size, 1), 1);
404 ptr_update = create_tmp_var (type, "update");
405 add_referenced_tmp_var (ptr_update);
406 vectype_size = TYPE_SIZE_UNIT (vectype);
407 vec_stmt = build2 (MULT_EXPR, integer_type_node, idx, vectype_size);
408 vec_stmt = build2 (MODIFY_EXPR, void_type_node, tmp, vec_stmt);
409 new_temp = make_ssa_name (tmp, vec_stmt);
410 TREE_OPERAND (vec_stmt, 0) = new_temp;
411 bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);
412 vec_stmt = fold_convert (type, new_temp);
413 vec_stmt = build2 (MODIFY_EXPR, void_type_node, ptr_update, vec_stmt);
414 new_temp = make_ssa_name (ptr_update, vec_stmt);
415 TREE_OPERAND (vec_stmt, 0) = new_temp;
416 bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);
418 /* Create: data_ref_ptr = vect_ptr_init + update */
419 vec_stmt = build2 (PLUS_EXPR, vect_ptr_type, vect_ptr_init, new_temp);
420 vec_stmt = build2 (MODIFY_EXPR, void_type_node, vect_ptr, vec_stmt);
421 new_temp = make_ssa_name (vect_ptr, vec_stmt);
422 TREE_OPERAND (vec_stmt, 0) = new_temp;
423 bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);
424 data_ref_ptr = TREE_OPERAND (vec_stmt, 0);
426 /* Copy the points-to information if it exists. */
427 if (STMT_VINFO_PTR_INFO (stmt_info))
428 duplicate_ssa_name_ptr_info (data_ref_ptr, STMT_VINFO_PTR_INFO (stmt_info));
429 return data_ref_ptr;
433 /* Function vect_create_destination_var.
435 Create a new temporary of type VECTYPE. */
437 static tree
438 vect_create_destination_var (tree scalar_dest, tree vectype)
440 tree vec_dest;
441 const char *new_name;
443 gcc_assert (TREE_CODE (scalar_dest) == SSA_NAME);
445 new_name = get_name (scalar_dest);
446 if (!new_name)
447 new_name = "var_";
448 vec_dest = vect_get_new_vect_var (vectype, vect_simple_var, new_name);
449 add_referenced_tmp_var (vec_dest);
451 return vec_dest;
455 /* Function vect_init_vector.
457 Insert a new stmt (INIT_STMT) that initializes a new vector variable with
458 the vector elements of VECTOR_VAR. Return the DEF of INIT_STMT. It will be
459 used in the vectorization of STMT. */
461 static tree
462 vect_init_vector (tree stmt, tree vector_var)
464 stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
465 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
466 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
467 tree new_var;
468 tree init_stmt;
469 tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
470 tree vec_oprnd;
471 edge pe;
472 tree new_temp;
473 basic_block new_bb;
475 new_var = vect_get_new_vect_var (vectype, vect_simple_var, "cst_");
476 add_referenced_tmp_var (new_var);
478 init_stmt = build2 (MODIFY_EXPR, vectype, new_var, vector_var);
479 new_temp = make_ssa_name (new_var, init_stmt);
480 TREE_OPERAND (init_stmt, 0) = new_temp;
482 pe = loop_preheader_edge (loop);
483 new_bb = bsi_insert_on_edge_immediate (pe, init_stmt);
484 gcc_assert (!new_bb);
486 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
488 fprintf (vect_dump, "created new init_stmt: ");
489 print_generic_expr (vect_dump, init_stmt, TDF_SLIM);
492 vec_oprnd = TREE_OPERAND (init_stmt, 0);
493 return vec_oprnd;
497 /* Function vect_get_vec_def_for_operand.
499 OP is an operand in STMT. This function returns a (vector) def that will be
500 used in the vectorized stmt for STMT.
502 In the case that OP is an SSA_NAME which is defined in the loop, then
503 STMT_VINFO_VEC_STMT of the defining stmt holds the relevant def.
505 In case OP is an invariant or constant, a new stmt that creates a vector def
506 needs to be introduced. */
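/* Illustrative example (hypothetical types/values): if OP is the constant 5
   and the vectype is V4SI, case 1 below builds the vector constant
   {5,5,5,5} and returns the def of an init stmt of roughly the form

     vect_cst_ = {5,5,5,5};

   which vect_init_vector inserts on the loop preheader edge.  */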
508 static tree
509 vect_get_vec_def_for_operand (tree op, tree stmt)
511 tree vec_oprnd;
512 tree vec_stmt;
513 tree def_stmt;
514 stmt_vec_info def_stmt_info = NULL;
515 stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
516 tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
517 int nunits = TYPE_VECTOR_SUBPARTS (vectype);
518 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
519 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
520 basic_block bb;
521 tree vec_inv;
522 tree t = NULL_TREE;
523 tree def;
524 int i;
526 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
528 fprintf (vect_dump, "vect_get_vec_def_for_operand: ");
529 print_generic_expr (vect_dump, op, TDF_SLIM);
532 /** ===> Case 1: operand is a constant. **/
534 if (TREE_CODE (op) == INTEGER_CST || TREE_CODE (op) == REAL_CST)
536 /* Create 'vect_cst_ = {cst,cst,...,cst}' */
538 tree vec_cst;
540 /* Build a tree with vector elements. */
541 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
542 fprintf (vect_dump, "Create vector_cst. nunits = %d", nunits);
544 for (i = nunits - 1; i >= 0; --i)
546 t = tree_cons (NULL_TREE, op, t);
548 vec_cst = build_vector (vectype, t);
549 return vect_init_vector (stmt, vec_cst);
552 gcc_assert (TREE_CODE (op) == SSA_NAME);
554 /** ===> Case 2: operand is an SSA_NAME - find the stmt that defines it. **/
556 def_stmt = SSA_NAME_DEF_STMT (op);
557 def_stmt_info = vinfo_for_stmt (def_stmt);
559 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
561 fprintf (vect_dump, "vect_get_vec_def_for_operand: def_stmt: ");
562 print_generic_expr (vect_dump, def_stmt, TDF_SLIM);
566 /** ==> Case 2.1: operand is defined inside the loop. **/
568 if (def_stmt_info)
570 /* Get the def from the vectorized stmt. */
572 vec_stmt = STMT_VINFO_VEC_STMT (def_stmt_info);
573 gcc_assert (vec_stmt);
574 vec_oprnd = TREE_OPERAND (vec_stmt, 0);
575 return vec_oprnd;
579 /** ==> Case 2.2: operand is defined by the loop-header phi-node -
580 it is a reduction/induction. **/
582 bb = bb_for_stmt (def_stmt);
583 if (TREE_CODE (def_stmt) == PHI_NODE && flow_bb_inside_loop_p (loop, bb))
585 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
586 fprintf (vect_dump, "reduction/induction - unsupported.");
587 internal_error ("no support for reduction/induction"); /* FORNOW */
591 /** ==> Case 2.3: operand is defined outside the loop -
592 it is a loop invariant. */
594 switch (TREE_CODE (def_stmt))
596 case PHI_NODE:
597 def = PHI_RESULT (def_stmt);
598 break;
599 case MODIFY_EXPR:
600 def = TREE_OPERAND (def_stmt, 0);
601 break;
602 case NOP_EXPR:
603 def = TREE_OPERAND (def_stmt, 0);
604 gcc_assert (IS_EMPTY_STMT (def_stmt));
605 def = op;
606 break;
607 default:
608 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
610 fprintf (vect_dump, "unsupported defining stmt: ");
611 print_generic_expr (vect_dump, def_stmt, TDF_SLIM);
613 internal_error ("unsupported defining stmt");
616 /* Build a tree with vector elements.
617 Create 'vec_inv = {inv,inv,..,inv}' */
619 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
620 fprintf (vect_dump, "Create vector_inv.");
622 for (i = nunits - 1; i >= 0; --i)
624 t = tree_cons (NULL_TREE, def, t);
627 vec_inv = build_constructor (vectype, t);
628 return vect_init_vector (stmt, vec_inv);
632 /* Function vect_finish_stmt_generation.
634 Insert a new stmt. */
636 static void
637 vect_finish_stmt_generation (tree stmt, tree vec_stmt, block_stmt_iterator *bsi)
639 bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);
641 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
643 fprintf (vect_dump, "add new stmt: ");
644 print_generic_expr (vect_dump, vec_stmt, TDF_SLIM);
647 #ifdef ENABLE_CHECKING
648 /* Make sure bsi points to the stmt that is being vectorized. */
649 gcc_assert (stmt == bsi_stmt (*bsi));
650 #endif
652 #ifdef USE_MAPPED_LOCATION
653 SET_EXPR_LOCATION (vec_stmt, EXPR_LOCATION (stmt));
654 #else
655 SET_EXPR_LOCUS (vec_stmt, EXPR_LOCUS (stmt));
656 #endif
660 /* Function vectorizable_assignment.
662 Check if STMT performs an assignment (copy) that can be vectorized.
663 If VEC_STMT is also passed, vectorize the STMT: create a vectorized
664 stmt to replace it, put it in VEC_STMT, and insert it at BSI.
665 Return FALSE if not a vectorizable STMT, TRUE otherwise. */
667 bool
668 vectorizable_assignment (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
670 tree vec_dest;
671 tree scalar_dest;
672 tree op;
673 tree vec_oprnd;
674 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
675 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
676 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
677 tree new_temp;
679 /* Is vectorizable assignment? */
681 if (TREE_CODE (stmt) != MODIFY_EXPR)
682 return false;
684 scalar_dest = TREE_OPERAND (stmt, 0);
685 if (TREE_CODE (scalar_dest) != SSA_NAME)
686 return false;
688 op = TREE_OPERAND (stmt, 1);
689 if (!vect_is_simple_use (op, loop_vinfo, NULL))
691 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
692 fprintf (vect_dump, "use not simple.");
693 return false;
696 if (!vec_stmt) /* transformation not required. */
698 STMT_VINFO_TYPE (stmt_info) = assignment_vec_info_type;
699 return true;
702 /** Transform. **/
703 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
704 fprintf (vect_dump, "transform assignment.");
706 /* Handle def. */
707 vec_dest = vect_create_destination_var (scalar_dest, vectype);
709 /* Handle use. */
710 op = TREE_OPERAND (stmt, 1);
711 vec_oprnd = vect_get_vec_def_for_operand (op, stmt);
713   /* Arguments are ready. Create the new vector stmt. */
714 *vec_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, vec_oprnd);
715 new_temp = make_ssa_name (vec_dest, *vec_stmt);
716 TREE_OPERAND (*vec_stmt, 0) = new_temp;
717 vect_finish_stmt_generation (stmt, *vec_stmt, bsi);
719 return true;
723 /* Function vect_min_worthwhile_factor.
725 For a loop where we could vectorize the operation indicated by CODE,
726 return the minimum vectorization factor that makes it worthwhile
727 to use generic vectors. */
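/* For example: without vector hardware support (generic vectors only), a
   PLUS_EXPR is considered worthwhile only when the vectorization factor is at
   least 4, a BIT_AND_EXPR needs a factor of at least 2, and any other code
   effectively disables generic-vector vectorization (INT_MAX).  */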
728 static int
729 vect_min_worthwhile_factor (enum tree_code code)
731 switch (code)
733 case PLUS_EXPR:
734 case MINUS_EXPR:
735 case NEGATE_EXPR:
736 return 4;
738 case BIT_AND_EXPR:
739 case BIT_IOR_EXPR:
740 case BIT_XOR_EXPR:
741 case BIT_NOT_EXPR:
742 return 2;
744 default:
745 return INT_MAX;
749 /* Function vectorizable_operation.
751 Check if STMT performs a binary or unary operation that can be vectorized.
752 If VEC_STMT is also passed, vectorize the STMT: create a vectorized
753 stmt to replace it, put it in VEC_STMT, and insert it at BSI.
754 Return FALSE if not a vectorizable STMT, TRUE otherwise. */
756 bool
757 vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
759 tree vec_dest;
760 tree scalar_dest;
761 tree operation;
762 tree op0, op1 = NULL;
763 tree vec_oprnd0, vec_oprnd1=NULL;
764 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
765 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
766 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
767 int i;
768 enum tree_code code;
769 enum machine_mode vec_mode;
770 tree new_temp;
771 int op_type;
772 tree op;
773 optab optab;
775 /* Is STMT a vectorizable binary/unary operation? */
776 if (TREE_CODE (stmt) != MODIFY_EXPR)
777 return false;
779 if (TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
780 return false;
782 operation = TREE_OPERAND (stmt, 1);
783 code = TREE_CODE (operation);
784 optab = optab_for_tree_code (code, vectype);
786 /* Support only unary or binary operations. */
787 op_type = TREE_CODE_LENGTH (code);
788 if (op_type != unary_op && op_type != binary_op)
790 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
791 fprintf (vect_dump, "num. args = %d (not unary/binary op).", op_type);
792 return false;
795 for (i = 0; i < op_type; i++)
797 op = TREE_OPERAND (operation, i);
798 if (!vect_is_simple_use (op, loop_vinfo, NULL))
800 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
801 fprintf (vect_dump, "use not simple.");
802 return false;
806 /* Supportable by target? */
807 if (!optab)
809 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
810 fprintf (vect_dump, "no optab.");
811 return false;
813 vec_mode = TYPE_MODE (vectype);
814 if (optab->handlers[(int) vec_mode].insn_code == CODE_FOR_nothing)
816 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
817 fprintf (vect_dump, "op not supported by target.");
818 return false;
821 /* Worthwhile without SIMD support? */
822 if (!VECTOR_MODE_P (TYPE_MODE (vectype))
823 && LOOP_VINFO_VECT_FACTOR (loop_vinfo)
824 < vect_min_worthwhile_factor (code))
826 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
827 fprintf (vect_dump, "not worthwhile without SIMD support.");
828 return false;
831 if (!vec_stmt) /* transformation not required. */
833 STMT_VINFO_TYPE (stmt_info) = op_vec_info_type;
834 return true;
837 /** Transform. **/
839 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
840 fprintf (vect_dump, "transform binary/unary operation.");
842 /* Handle def. */
843 scalar_dest = TREE_OPERAND (stmt, 0);
844 vec_dest = vect_create_destination_var (scalar_dest, vectype);
846 /* Handle uses. */
847 op0 = TREE_OPERAND (operation, 0);
848 vec_oprnd0 = vect_get_vec_def_for_operand (op0, stmt);
850 if (op_type == binary_op)
852 op1 = TREE_OPERAND (operation, 1);
853 vec_oprnd1 = vect_get_vec_def_for_operand (op1, stmt);
856   /* Arguments are ready. Create the new vector stmt. */
858 if (op_type == binary_op)
859 *vec_stmt = build2 (MODIFY_EXPR, vectype, vec_dest,
860 build2 (code, vectype, vec_oprnd0, vec_oprnd1));
861 else
862 *vec_stmt = build2 (MODIFY_EXPR, vectype, vec_dest,
863 build1 (code, vectype, vec_oprnd0));
864 new_temp = make_ssa_name (vec_dest, *vec_stmt);
865 TREE_OPERAND (*vec_stmt, 0) = new_temp;
866 vect_finish_stmt_generation (stmt, *vec_stmt, bsi);
868 return true;
872 /* Function vectorizable_store.
874    Check if STMT defines a non-scalar data-ref (array/pointer/structure) that
875 can be vectorized.
876 If VEC_STMT is also passed, vectorize the STMT: create a vectorized
877 stmt to replace it, put it in VEC_STMT, and insert it at BSI.
878 Return FALSE if not a vectorizable STMT, TRUE otherwise. */
880 bool
881 vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
883 tree scalar_dest;
884 tree data_ref;
885 tree op;
886 tree vec_oprnd1;
887 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
888 struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
889 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
890 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
891 enum machine_mode vec_mode;
892 tree dummy;
893 enum dr_alignment_support alignment_support_cheme;
894 ssa_op_iter iter;
895 tree def;
897 /* Is vectorizable store? */
899 if (TREE_CODE (stmt) != MODIFY_EXPR)
900 return false;
902 scalar_dest = TREE_OPERAND (stmt, 0);
903 if (TREE_CODE (scalar_dest) != ARRAY_REF
904 && TREE_CODE (scalar_dest) != INDIRECT_REF)
905 return false;
907 op = TREE_OPERAND (stmt, 1);
908 if (!vect_is_simple_use (op, loop_vinfo, NULL))
910 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
911 fprintf (vect_dump, "use not simple.");
912 return false;
915 vec_mode = TYPE_MODE (vectype);
916 /* FORNOW. In some cases can vectorize even if data-type not supported
917 (e.g. - array initialization with 0). */
918 if (mov_optab->handlers[(int)vec_mode].insn_code == CODE_FOR_nothing)
919 return false;
921 if (!STMT_VINFO_DATA_REF (stmt_info))
922 return false;
925 if (!vec_stmt) /* transformation not required. */
927 STMT_VINFO_TYPE (stmt_info) = store_vec_info_type;
928 return true;
931 /** Transform. **/
933 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
934 fprintf (vect_dump, "transform store");
936 alignment_support_cheme = vect_supportable_dr_alignment (dr);
937 gcc_assert (alignment_support_cheme);
938 gcc_assert (alignment_support_cheme == dr_aligned); /* FORNOW */
940 /* Handle use - get the vectorized def from the defining stmt. */
941 vec_oprnd1 = vect_get_vec_def_for_operand (op, stmt);
943 /* Handle def. */
944 /* FORNOW: make sure the data reference is aligned. */
945 vect_align_data_ref (stmt);
946 data_ref = vect_create_data_ref_ptr (stmt, bsi, NULL_TREE, &dummy, false);
947 data_ref = build_fold_indirect_ref (data_ref);
949   /* Arguments are ready. Create the new vector stmt. */
950 *vec_stmt = build2 (MODIFY_EXPR, vectype, data_ref, vec_oprnd1);
951 vect_finish_stmt_generation (stmt, *vec_stmt, bsi);
953 /* Mark all non-SSA variables in the statement for rewriting. */
954 mark_new_vars_to_rename (*vec_stmt);
956 /* The new vectorized statement will have better aliasing
957 information, so some of the virtual definitions of the old
958 statement will likely disappear from the IL. Mark them to have
959 their SSA form updated. */
960 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
961 mark_sym_for_renaming (SSA_NAME_VAR (def));
963 return true;
967 /* vectorizable_load.
969    Check if STMT reads a non-scalar data-ref (array/pointer/structure) that
970 can be vectorized.
971 If VEC_STMT is also passed, vectorize the STMT: create a vectorized
972 stmt to replace it, put it in VEC_STMT, and insert it at BSI.
973 Return FALSE if not a vectorizable STMT, TRUE otherwise. */
975 bool
976 vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
978 tree scalar_dest;
979 tree vec_dest = NULL;
980 tree data_ref = NULL;
981 tree op;
982 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
983 struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
984 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
985 tree new_temp;
986 int mode;
987 tree init_addr;
988 tree new_stmt;
989 tree dummy;
990 basic_block new_bb;
991 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
992 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
993 edge pe = loop_preheader_edge (loop);
994 enum dr_alignment_support alignment_support_cheme;
996 /* Is vectorizable load? */
998 if (TREE_CODE (stmt) != MODIFY_EXPR)
999 return false;
1001 scalar_dest = TREE_OPERAND (stmt, 0);
1002 if (TREE_CODE (scalar_dest) != SSA_NAME)
1003 return false;
1005 op = TREE_OPERAND (stmt, 1);
1006 if (TREE_CODE (op) != ARRAY_REF && TREE_CODE (op) != INDIRECT_REF)
1007 return false;
1009 if (!STMT_VINFO_DATA_REF (stmt_info))
1010 return false;
1012 mode = (int) TYPE_MODE (vectype);
1014 /* FORNOW. In some cases can vectorize even if data-type not supported
1015 (e.g. - data copies). */
1016 if (mov_optab->handlers[mode].insn_code == CODE_FOR_nothing)
1018 if (vect_print_dump_info (REPORT_DETAILS, LOOP_LOC (loop_vinfo)))
1019 fprintf (vect_dump, "Aligned load, but unsupported type.");
1020 return false;
1023 if (!vec_stmt) /* transformation not required. */
1025 STMT_VINFO_TYPE (stmt_info) = load_vec_info_type;
1026 return true;
1029 /** Transform. **/
1031 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1032 fprintf (vect_dump, "transform load.");
1034 alignment_support_cheme = vect_supportable_dr_alignment (dr);
1035 gcc_assert (alignment_support_cheme);
1037 if (alignment_support_cheme == dr_aligned
1038 || alignment_support_cheme == dr_unaligned_supported)
1040 /* Create:
1041 p = initial_addr;
1042 indx = 0;
1043 loop {
1044 vec_dest = *(p);
1045 indx = indx + 1;
1049 vec_dest = vect_create_destination_var (scalar_dest, vectype);
1050 data_ref = vect_create_data_ref_ptr (stmt, bsi, NULL_TREE, &dummy, false);
1051 if (aligned_access_p (dr))
1052 data_ref = build_fold_indirect_ref (data_ref);
1053 else
1055 int mis = DR_MISALIGNMENT (dr);
1056 tree tmis = (mis == -1 ? size_zero_node : size_int (mis));
1057 tmis = size_binop (MULT_EXPR, tmis, size_int(BITS_PER_UNIT));
1058 data_ref = build2 (MISALIGNED_INDIRECT_REF, vectype, data_ref, tmis);
1060 new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, data_ref);
1061 new_temp = make_ssa_name (vec_dest, new_stmt);
1062 TREE_OPERAND (new_stmt, 0) = new_temp;
1063 vect_finish_stmt_generation (stmt, new_stmt, bsi);
1064 copy_virtual_operands (new_stmt, stmt);
1066 else if (alignment_support_cheme == dr_unaligned_software_pipeline)
1068 /* Create:
1069 p1 = initial_addr;
1070 msq_init = *(floor(p1))
1071 p2 = initial_addr + VS - 1;
1072 magic = have_builtin ? builtin_result : initial_address;
1073 indx = 0;
1074 loop {
1075 p2' = p2 + indx * vectype_size
1076 lsq = *(floor(p2'))
1077 vec_dest = realign_load (msq, lsq, magic)
1078 indx = indx + 1;
1079 msq = lsq;
1083 tree offset;
1084 tree magic;
1085 tree phi_stmt;
1086 tree msq_init;
1087 tree msq, lsq;
1088 tree dataref_ptr;
1089 tree params;
1091 /* <1> Create msq_init = *(floor(p1)) in the loop preheader */
1092 vec_dest = vect_create_destination_var (scalar_dest, vectype);
1093 data_ref = vect_create_data_ref_ptr (stmt, bsi, NULL_TREE,
1094 &init_addr, true);
1095 data_ref = build1 (ALIGN_INDIRECT_REF, vectype, data_ref);
1096 new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, data_ref);
1097 new_temp = make_ssa_name (vec_dest, new_stmt);
1098 TREE_OPERAND (new_stmt, 0) = new_temp;
1099 new_bb = bsi_insert_on_edge_immediate (pe, new_stmt);
1100 gcc_assert (!new_bb);
1101 msq_init = TREE_OPERAND (new_stmt, 0);
1102 copy_virtual_operands (new_stmt, stmt);
1103 update_vuses_to_preheader (new_stmt, loop);
1106 /* <2> Create lsq = *(floor(p2')) in the loop */
1107 offset = build_int_cst (integer_type_node,
1108 TYPE_VECTOR_SUBPARTS (vectype));
1109 offset = int_const_binop (MINUS_EXPR, offset, integer_one_node, 1);
1110 vec_dest = vect_create_destination_var (scalar_dest, vectype);
1111 dataref_ptr = vect_create_data_ref_ptr (stmt, bsi, offset, &dummy, false);
1112 data_ref = build1 (ALIGN_INDIRECT_REF, vectype, dataref_ptr);
1113 new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, data_ref);
1114 new_temp = make_ssa_name (vec_dest, new_stmt);
1115 TREE_OPERAND (new_stmt, 0) = new_temp;
1116 vect_finish_stmt_generation (stmt, new_stmt, bsi);
1117 lsq = TREE_OPERAND (new_stmt, 0);
1118 copy_virtual_operands (new_stmt, stmt);
1121 /* <3> */
1122 if (targetm.vectorize.builtin_mask_for_load)
1124 /* Create permutation mask, if required, in loop preheader. */
1125 tree builtin_decl;
1126 params = build_tree_list (NULL_TREE, init_addr);
1127 vec_dest = vect_create_destination_var (scalar_dest, vectype);
1128 builtin_decl = targetm.vectorize.builtin_mask_for_load ();
1129 new_stmt = build_function_call_expr (builtin_decl, params);
1130 new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, new_stmt);
1131 new_temp = make_ssa_name (vec_dest, new_stmt);
1132 TREE_OPERAND (new_stmt, 0) = new_temp;
1133 new_bb = bsi_insert_on_edge_immediate (pe, new_stmt);
1134 gcc_assert (!new_bb);
1135 magic = TREE_OPERAND (new_stmt, 0);
1137 /* The result of the CALL_EXPR to this builtin is determined from
1138 the value of the parameter and no global variables are touched
1139 which makes the builtin a "const" function. Requiring the
1140 builtin to have the "const" attribute makes it unnecessary
1141 to call mark_call_clobbered_vars_to_rename. */
1142 gcc_assert (TREE_READONLY (builtin_decl));
1144 else
1146       /* Use current address instead of init_addr for reduced reg pressure.  */
1148 magic = dataref_ptr;
1152 /* <4> Create msq = phi <msq_init, lsq> in loop */
1153 vec_dest = vect_create_destination_var (scalar_dest, vectype);
1154 msq = make_ssa_name (vec_dest, NULL_TREE);
1155 phi_stmt = create_phi_node (msq, loop->header); /* CHECKME */
1156 SSA_NAME_DEF_STMT (msq) = phi_stmt;
1157 add_phi_arg (phi_stmt, msq_init, loop_preheader_edge (loop));
1158 add_phi_arg (phi_stmt, lsq, loop_latch_edge (loop));
1161 /* <5> Create <vec_dest = realign_load (msq, lsq, magic)> in loop */
1162 vec_dest = vect_create_destination_var (scalar_dest, vectype);
1163 new_stmt = build3 (REALIGN_LOAD_EXPR, vectype, msq, lsq, magic);
1164 new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, new_stmt);
1165 new_temp = make_ssa_name (vec_dest, new_stmt);
1166 TREE_OPERAND (new_stmt, 0) = new_temp;
1167 vect_finish_stmt_generation (stmt, new_stmt, bsi);
1169 else
1170 gcc_unreachable ();
1172 *vec_stmt = new_stmt;
1173 return true;
1176 /* Function vect_is_simple_cond.
1178 Input:
1179 LOOP - the loop that is being vectorized.
1180 COND - Condition that is checked for simple use.
1182 Returns whether a COND can be vectorized. Checks whether
1183    condition operands are supportable using vect_is_simple_use.  */
1185 static bool
1186 vect_is_simple_cond (tree cond, loop_vec_info loop_vinfo)
1188 tree lhs, rhs;
1190 if (!COMPARISON_CLASS_P (cond))
1191 return false;
1193 lhs = TREE_OPERAND (cond, 0);
1194 rhs = TREE_OPERAND (cond, 1);
1196 if (TREE_CODE (lhs) == SSA_NAME)
1198 tree lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
1199 if (!vect_is_simple_use (lhs, loop_vinfo, &lhs_def_stmt))
1200 return false;
1202 else if (TREE_CODE (lhs) != INTEGER_CST && TREE_CODE (lhs) != REAL_CST)
1203 return false;
1205 if (TREE_CODE (rhs) == SSA_NAME)
1207 tree rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1208 if (!vect_is_simple_use (rhs, loop_vinfo, &rhs_def_stmt))
1209 return false;
1211 else if (TREE_CODE (rhs) != INTEGER_CST && TREE_CODE (rhs) != REAL_CST)
1212 return false;
1214 return true;
1217 /* vectorizable_condition.
1219    Check if STMT is a conditional modify expression that can be vectorized.
1220 If VEC_STMT is also passed, vectorize the STMT: create a vectorized
1221 stmt using VEC_COND_EXPR to replace it, put it in VEC_STMT, and insert it
1222 at BSI.
1224 Return FALSE if not a vectorizable STMT, TRUE otherwise. */
1226 bool
1227 vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
1229 tree scalar_dest = NULL_TREE;
1230 tree vec_dest = NULL_TREE;
1231 tree op = NULL_TREE;
1232 tree cond_expr, then_clause, else_clause;
1233 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
1234 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
1235 tree vec_cond_lhs, vec_cond_rhs, vec_then_clause, vec_else_clause;
1236 tree vec_compare, vec_cond_expr;
1237 tree new_temp;
1238 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
1239 enum machine_mode vec_mode;
1241 if (!STMT_VINFO_RELEVANT_P (stmt_info))
1242 return false;
1244 if (TREE_CODE (stmt) != MODIFY_EXPR)
1245 return false;
1247 op = TREE_OPERAND (stmt, 1);
1249 if (TREE_CODE (op) != COND_EXPR)
1250 return false;
1252 cond_expr = TREE_OPERAND (op, 0);
1253 then_clause = TREE_OPERAND (op, 1);
1254 else_clause = TREE_OPERAND (op, 2);
1256 if (!vect_is_simple_cond (cond_expr, loop_vinfo))
1257 return false;
1259 if (TREE_CODE (then_clause) == SSA_NAME)
1261 tree then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
1262 if (!vect_is_simple_use (then_clause, loop_vinfo, &then_def_stmt))
1263 return false;
1265 else if (TREE_CODE (then_clause) != INTEGER_CST
1266 && TREE_CODE (then_clause) != REAL_CST)
1267 return false;
1269 if (TREE_CODE (else_clause) == SSA_NAME)
1271 tree else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
1272 if (!vect_is_simple_use (else_clause, loop_vinfo, &else_def_stmt))
1273 return false;
1275 else if (TREE_CODE (else_clause) != INTEGER_CST
1276 && TREE_CODE (else_clause) != REAL_CST)
1277 return false;
1280 vec_mode = TYPE_MODE (vectype);
1282 if (!vec_stmt)
1284 STMT_VINFO_TYPE (stmt_info) = condition_vec_info_type;
1285 return expand_vec_cond_expr_p (op, vec_mode);
1288 /* Transform */
1290 /* Handle def. */
1291 scalar_dest = TREE_OPERAND (stmt, 0);
1292 vec_dest = vect_create_destination_var (scalar_dest, vectype);
1294 /* Handle cond expr. */
1295 vec_cond_lhs =
1296 vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 0), stmt);
1297 vec_cond_rhs =
1298 vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 1), stmt);
1299 vec_then_clause = vect_get_vec_def_for_operand (then_clause, stmt);
1300 vec_else_clause = vect_get_vec_def_for_operand (else_clause, stmt);
1302   /* Arguments are ready. Create the new vector stmt. */
1303 vec_compare = build2 (TREE_CODE (cond_expr), vectype,
1304 vec_cond_lhs, vec_cond_rhs);
1305 vec_cond_expr = build (VEC_COND_EXPR, vectype,
1306 vec_compare, vec_then_clause, vec_else_clause);
1308 *vec_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, vec_cond_expr);
1309 new_temp = make_ssa_name (vec_dest, *vec_stmt);
1310 TREE_OPERAND (*vec_stmt, 0) = new_temp;
1311 vect_finish_stmt_generation (stmt, *vec_stmt, bsi);
1313 return true;
1316 /* Function vect_transform_stmt.
1318 Create a vectorized stmt to replace STMT, and insert it at BSI. */
1320 bool
1321 vect_transform_stmt (tree stmt, block_stmt_iterator *bsi)
1323 bool is_store = false;
1324 tree vec_stmt = NULL_TREE;
1325 stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
1326 bool done;
1328 switch (STMT_VINFO_TYPE (stmt_info))
1330 case op_vec_info_type:
1331 done = vectorizable_operation (stmt, bsi, &vec_stmt);
1332 gcc_assert (done);
1333 break;
1335 case assignment_vec_info_type:
1336 done = vectorizable_assignment (stmt, bsi, &vec_stmt);
1337 gcc_assert (done);
1338 break;
1340 case load_vec_info_type:
1341 done = vectorizable_load (stmt, bsi, &vec_stmt);
1342 gcc_assert (done);
1343 break;
1345 case store_vec_info_type:
1346 done = vectorizable_store (stmt, bsi, &vec_stmt);
1347 gcc_assert (done);
1348 is_store = true;
1349 break;
1351 case condition_vec_info_type:
1352 done = vectorizable_condition (stmt, bsi, &vec_stmt);
1353 gcc_assert (done);
1354 break;
1356 default:
1357 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1358 fprintf (vect_dump, "stmt not supported.");
1359 gcc_unreachable ();
1362 STMT_VINFO_VEC_STMT (stmt_info) = vec_stmt;
1364 return is_store;
1368 /* This function builds ni_name = number of iterations the loop executes,
1369    inserting the computation on the loop preheader edge. */
1371 static tree
1372 vect_build_loop_niters (loop_vec_info loop_vinfo)
1374 tree ni_name, stmt, var;
1375 edge pe;
1376 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
1377 tree ni = unshare_expr (LOOP_VINFO_NITERS (loop_vinfo));
1379 var = create_tmp_var (TREE_TYPE (ni), "niters");
1380 add_referenced_tmp_var (var);
1381 ni_name = force_gimple_operand (ni, &stmt, false, var);
1383 pe = loop_preheader_edge (loop);
1384 if (stmt)
1386 basic_block new_bb = bsi_insert_on_edge_immediate (pe, stmt);
1387 gcc_assert (!new_bb);
1390 return ni_name;
1394 /* This function generates the following statements:
1396    ni_name = number of iterations the loop executes
1397 ratio = ni_name / vf
1398 ratio_mult_vf_name = ratio * vf
1400 and places them at the loop preheader edge. */
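/* Worked example (hypothetical values): with ni_name = 103 and a vectorization
   factor vf = 4 (log2(vf) = 2), the statements generated below compute

     ratio              = 103 >> 2 = 25
     ratio_mult_vf_name = 25 << 2  = 100

   so the vector loop runs 25 times and covers the first 100 iterations.  */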
1402 static void
1403 vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
1404 tree *ni_name_ptr,
1405 tree *ratio_mult_vf_name_ptr,
1406 tree *ratio_name_ptr)
1409 edge pe;
1410 basic_block new_bb;
1411 tree stmt, ni_name;
1412 tree var;
1413 tree ratio_name;
1414 tree ratio_mult_vf_name;
1415 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
1416 tree ni = LOOP_VINFO_NITERS (loop_vinfo);
1417 int vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
1418 tree log_vf = build_int_cst (unsigned_type_node, exact_log2 (vf));
1420 pe = loop_preheader_edge (loop);
1422   /* Generate a temporary variable that contains
1423      the number of iterations the loop executes. */
1425 ni_name = vect_build_loop_niters (loop_vinfo);
1427 /* Create: ratio = ni >> log2(vf) */
1429 var = create_tmp_var (TREE_TYPE (ni), "bnd");
1430 add_referenced_tmp_var (var);
1431 ratio_name = make_ssa_name (var, NULL_TREE);
1432 stmt = build2 (MODIFY_EXPR, void_type_node, ratio_name,
1433 build2 (RSHIFT_EXPR, TREE_TYPE (ni_name), ni_name, log_vf));
1434 SSA_NAME_DEF_STMT (ratio_name) = stmt;
1436 pe = loop_preheader_edge (loop);
1437 new_bb = bsi_insert_on_edge_immediate (pe, stmt);
1438 gcc_assert (!new_bb);
1440 /* Create: ratio_mult_vf = ratio << log2 (vf). */
1442 var = create_tmp_var (TREE_TYPE (ni), "ratio_mult_vf");
1443 add_referenced_tmp_var (var);
1444 ratio_mult_vf_name = make_ssa_name (var, NULL_TREE);
1445 stmt = build2 (MODIFY_EXPR, void_type_node, ratio_mult_vf_name,
1446 build2 (LSHIFT_EXPR, TREE_TYPE (ratio_name), ratio_name, log_vf));
1447 SSA_NAME_DEF_STMT (ratio_mult_vf_name) = stmt;
1449 pe = loop_preheader_edge (loop);
1450 new_bb = bsi_insert_on_edge_immediate (pe, stmt);
1451 gcc_assert (!new_bb);
1453 *ni_name_ptr = ni_name;
1454 *ratio_mult_vf_name_ptr = ratio_mult_vf_name;
1455 *ratio_name_ptr = ratio_name;
1457 return;
1461 /* Function update_vuses_to_preheader.
1463 Input:
1464 STMT - a statement with potential VUSEs.
1465 LOOP - the loop whose preheader will contain STMT.
1467 It's possible to vectorize a loop even though an SSA_NAME from a VUSE
1468 appears to be defined in a V_MAY_DEF in another statement in a loop.
1469 One such case is when the VUSE is at the dereference of a __restricted__
1470 pointer in a load and the V_MAY_DEF is at the dereference of a different
1471 __restricted__ pointer in a store. Vectorization may result in
1472    copy_virtual_operands being called to copy the problematic VUSE to a new
1473 statement that is being inserted in the loop preheader. This procedure
1474 is called to change the SSA_NAME in the new statement's VUSE from the
1475 SSA_NAME updated in the loop to the related SSA_NAME available on the
1476 path entering the loop.
1478 When this function is called, we have the following situation:
1480 # vuse <name1>
1481 S1: vload
1482 do {
1483 # name1 = phi < name0 , name2>
1485 # vuse <name1>
1486 S2: vload
1488 # name2 = vdef <name1>
1489 S3: vstore
1491 }while...
1493 Stmt S1 was created in the loop preheader block as part of misaligned-load
1494 handling. This function fixes the name of the vuse of S1 from 'name1' to
1495 'name0'. */
1497 static void
1498 update_vuses_to_preheader (tree stmt, struct loop *loop)
1500 basic_block header_bb = loop->header;
1501 edge preheader_e = loop_preheader_edge (loop);
1502 ssa_op_iter iter;
1503 use_operand_p use_p;
1505 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_VUSE)
1507 tree ssa_name = USE_FROM_PTR (use_p);
1508 tree def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1509 tree name_var = SSA_NAME_VAR (ssa_name);
1510 basic_block bb = bb_for_stmt (def_stmt);
1512 /* For a use before any definitions, def_stmt is a NOP_EXPR. */
1513 if (!IS_EMPTY_STMT (def_stmt)
1514 && flow_bb_inside_loop_p (loop, bb))
1516 /* If the block containing the statement defining the SSA_NAME
1517 is in the loop then it's necessary to find the definition
1518 outside the loop using the PHI nodes of the header. */
1519 tree phi;
1520 bool updated = false;
1522 for (phi = phi_nodes (header_bb); phi; phi = TREE_CHAIN (phi))
1524 if (SSA_NAME_VAR (PHI_RESULT (phi)) == name_var)
1526 SET_USE (use_p, PHI_ARG_DEF (phi, preheader_e->dest_idx));
1527 updated = true;
1528 break;
1531 gcc_assert (updated);
1537 /* Function vect_update_ivs_after_vectorizer.
1539 "Advance" the induction variables of LOOP to the value they should take
1540 after the execution of LOOP. This is currently necessary because the
1541 vectorizer does not handle induction variables that are used after the
1542 loop. Such a situation occurs when the last iterations of LOOP are
1543 peeled, because:
1544 1. We introduced new uses after LOOP for IVs that were not originally used
1545 after LOOP: the IVs of LOOP are now used by an epilog loop.
1546 2. LOOP is going to be vectorized; this means that it will iterate N/VF
1547 times, whereas the loop IVs should be bumped N times.
1549 Input:
1550 - LOOP - a loop that is going to be vectorized. The last few iterations
1551 of LOOP were peeled.
1552 - NITERS - the number of iterations that LOOP executes (before it is
1553        vectorized), i.e., the number of times the ivs should be bumped.
1554 - UPDATE_E - a successor edge of LOOP->exit that is on the (only) path
1555 coming out from LOOP on which there are uses of the LOOP ivs
1556 (this is the path from LOOP->exit to epilog_loop->preheader).
1558 The new definitions of the ivs are placed in LOOP->exit.
1559 The phi args associated with the edge UPDATE_E in the bb
1560 UPDATE_E->dest are updated accordingly.
1562 Assumption 1: Like the rest of the vectorizer, this function assumes
1563 a single loop exit that has a single predecessor.
1565 Assumption 2: The phi nodes in the LOOP header and in update_bb are
1566 organized in the same order.
1568 Assumption 3: The access function of the ivs is simple enough (see
1569 vect_can_advance_ivs_p). This assumption will be relaxed in the future.
1571 Assumption 4: Exactly one of the successors of LOOP exit-bb is on a path
1572 coming out of LOOP on which the ivs of LOOP are used (this is the path
1573 that leads to the epilog loop; other paths skip the epilog loop). This
1574 path starts with the edge UPDATE_E, and its destination (denoted update_bb)
1575    needs to have its phis updated.  */
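/* Illustrative example (hypothetical IV): for an induction variable with
   access function {init, +, step}, the code below creates

     ni = init + niters * step

   in the exit bb, and rewires the corresponding phi argument in update_bb
   to use this new value.  */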
1578 static void
1579 vect_update_ivs_after_vectorizer (loop_vec_info loop_vinfo, tree niters,
1580 edge update_e)
1582 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
1583 basic_block exit_bb = loop->single_exit->dest;
1584 tree phi, phi1;
1585 basic_block update_bb = update_e->dest;
1587 /* gcc_assert (vect_can_advance_ivs_p (loop_vinfo)); */
1589 /* Make sure there exists a single-predecessor exit bb: */
1590 gcc_assert (single_pred_p (exit_bb));
1592 for (phi = phi_nodes (loop->header), phi1 = phi_nodes (update_bb);
1593 phi && phi1;
1594 phi = PHI_CHAIN (phi), phi1 = PHI_CHAIN (phi1))
1596 tree access_fn = NULL;
1597 tree evolution_part;
1598 tree init_expr;
1599 tree step_expr;
1600 tree var, stmt, ni, ni_name;
1601 block_stmt_iterator last_bsi;
1603 /* Skip virtual phi's. */
1604 if (!is_gimple_reg (SSA_NAME_VAR (PHI_RESULT (phi))))
1606 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1607 fprintf (vect_dump, "virtual phi. skip.");
1608 continue;
1611 access_fn = analyze_scalar_evolution (loop, PHI_RESULT (phi));
1612 gcc_assert (access_fn);
1613 evolution_part =
1614 unshare_expr (evolution_part_in_loop_num (access_fn, loop->num));
1615 gcc_assert (evolution_part != NULL_TREE);
1617 /* FORNOW: We do not support IVs whose evolution function is a polynomial
1618 of degree >= 2 or exponential. */
1619 gcc_assert (!tree_is_chrec (evolution_part));
1621 step_expr = evolution_part;
1622 init_expr = unshare_expr (initial_condition_in_loop_num (access_fn,
1623 loop->num));
1625 ni = build2 (PLUS_EXPR, TREE_TYPE (init_expr),
1626 build2 (MULT_EXPR, TREE_TYPE (niters),
1627 niters, step_expr), init_expr);
1629 var = create_tmp_var (TREE_TYPE (init_expr), "tmp");
1630 add_referenced_tmp_var (var);
1632 ni_name = force_gimple_operand (ni, &stmt, false, var);
1634 /* Insert stmt into exit_bb. */
1635 last_bsi = bsi_last (exit_bb);
1636 if (stmt)
1637 bsi_insert_before (&last_bsi, stmt, BSI_SAME_STMT);
1639 /* Fix phi expressions in the successor bb. */
1640 SET_PHI_ARG_DEF (phi1, update_e->dest_idx, ni_name);
1645 /* Function vect_do_peeling_for_loop_bound
1647 Peel the last iterations of the loop represented by LOOP_VINFO.
1648 The peeled iterations form a new epilog loop. Given that the loop now
1649 iterates NITERS times, the new epilog loop iterates
1650 NITERS % VECTORIZATION_FACTOR times.
1652 The original loop will later be made to iterate
1653 NITERS / VECTORIZATION_FACTOR times (this value is placed into RATIO). */
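/* Worked example (hypothetical counts): with NITERS = 103 and a vectorization
   factor of 4, the vectorized loop iterates 103 / 4 = 25 times (this is RATIO)
   and the peeled epilog loop executes the remaining 103 % 4 = 3 scalar
   iterations.  */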
1655 static void
1656 vect_do_peeling_for_loop_bound (loop_vec_info loop_vinfo, tree *ratio,
1657 struct loops *loops)
1660 tree ni_name, ratio_mult_vf_name;
1661 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
1662 struct loop *new_loop;
1663 edge update_e;
1664 basic_block preheader;
1665 #ifdef ENABLE_CHECKING
1666 int loop_num;
1667 #endif
1669 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1670     fprintf (vect_dump, "=== vect_transform_for_unknown_loop_bound ===");
1672 /* Generate the following variables on the preheader of original loop:
1674      ni_name = number of iterations the original loop executes
1675 ratio = ni_name / vf
1676 ratio_mult_vf_name = ratio * vf */
1677 vect_generate_tmps_on_preheader (loop_vinfo, &ni_name,
1678 &ratio_mult_vf_name, ratio);
1680 #ifdef ENABLE_CHECKING
1681 loop_num = loop->num;
1682 #endif
1683 new_loop = slpeel_tree_peel_loop_to_edge (loop, loops, loop->single_exit,
1684 ratio_mult_vf_name, ni_name, false);
1685 #ifdef ENABLE_CHECKING
1686 gcc_assert (new_loop);
1687 gcc_assert (loop_num == loop->num);
1688 slpeel_verify_cfg_after_peeling (loop, new_loop);
1689 #endif
1691 /* A guard that controls whether the new_loop is to be executed or skipped
1692 is placed in LOOP->exit. LOOP->exit therefore has two successors - one
1693 is the preheader of NEW_LOOP, where the IVs from LOOP are used. The other
1694 is a bb after NEW_LOOP, where these IVs are not used. Find the edge that
1695 is on the path where the LOOP IVs are used and need to be updated. */
1697 preheader = loop_preheader_edge (new_loop)->src;
1698 if (EDGE_PRED (preheader, 0)->src == loop->single_exit->dest)
1699 update_e = EDGE_PRED (preheader, 0);
1700 else
1701 update_e = EDGE_PRED (preheader, 1);
1703 /* Update IVs of original loop as if they were advanced
1704 by ratio_mult_vf_name steps. */
1705 vect_update_ivs_after_vectorizer (loop_vinfo, ratio_mult_vf_name, update_e);
1707 /* After peeling we have to reset scalar evolution analyzer. */
1708 scev_reset ();
1710 return;
1714 /* Function vect_gen_niters_for_prolog_loop
1716 Set the number of iterations for the loop represented by LOOP_VINFO
1717 to the minimum between LOOP_NITERS (the original iteration count of the loop)
1718 and the misalignment of DR - the data reference recorded in
1719 LOOP_VINFO_UNALIGNED_DR (LOOP_VINFO). As a result, after the execution of
1720 this loop, the data reference DR will refer to an aligned location.
1722 The following computation is generated:
1724 If the misalignment of DR is known at compile time:
1725 addr_mis = int mis = DR_MISALIGNMENT (dr);
1726 Else, compute address misalignment in bytes:
1727 addr_mis = addr & (vectype_size - 1)
1729 prolog_niters = min ( LOOP_NITERS , (VF - addr_mis/elem_size)&(VF-1) )
1731 (elem_size = element type size; an element is the scalar element
1732 whose type is the inner type of the vectype) */
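/* Worked example (hypothetical target/values): for a V4SI access (VF = 4,
   4-byte elements) whose start address is misaligned by 8 bytes,
   elem_misalign = 8 / 4 = 2, so

     prolog_niters = min (LOOP_NITERS, (4 - 2) & 3) = min (LOOP_NITERS, 2)

   i.e. two scalar iterations are peeled before the access becomes
   16-byte aligned.  */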
1734 static tree
1735 vect_gen_niters_for_prolog_loop (loop_vec_info loop_vinfo, tree loop_niters)
1737 struct data_reference *dr = LOOP_VINFO_UNALIGNED_DR (loop_vinfo);
1738 int vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
1739 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
1740 tree var, stmt;
1741 tree iters, iters_name;
1742 edge pe;
1743 basic_block new_bb;
1744 tree dr_stmt = DR_STMT (dr);
1745 stmt_vec_info stmt_info = vinfo_for_stmt (dr_stmt);
1746 tree vectype = STMT_VINFO_VECTYPE (stmt_info);
1747 int vectype_align = TYPE_ALIGN (vectype) / BITS_PER_UNIT;
1748 tree vf_minus_1 = build_int_cst (unsigned_type_node, vf - 1);
1749 tree niters_type = TREE_TYPE (loop_niters);
1751 pe = loop_preheader_edge (loop);
1753 if (LOOP_PEELING_FOR_ALIGNMENT (loop_vinfo) > 0)
1755 int byte_misalign = LOOP_PEELING_FOR_ALIGNMENT (loop_vinfo);
1756 int element_size = vectype_align/vf;
1757 int elem_misalign = byte_misalign / element_size;
1759 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1760 fprintf (vect_dump, "known misalignment = %d.", byte_misalign);
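/* Worked example (editor's addition, hypothetical numbers): with vf = 4 and
   a 16-byte vectype, element_size = 16/4 = 4; if byte_misalign = 8 then
   elem_misalign = 8/4 = 2 and the prolog needs (4 - 2) & 3 = 2 scalar
   iterations to reach an aligned access.  */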
1761 iters = build_int_cst (niters_type, (vf - elem_misalign)&(vf-1));
1763 else
1765 tree new_stmts = NULL_TREE;
1766 tree start_addr =
1767 vect_create_addr_base_for_vector_ref (dr_stmt, &new_stmts, NULL_TREE);
1768 tree ptr_type = TREE_TYPE (start_addr);
1769 tree size = TYPE_SIZE (ptr_type);
1770 tree type = lang_hooks.types.type_for_size (tree_low_cst (size, 1), 1);
1771 tree vectype_size_minus_1 = build_int_cst (type, vectype_align - 1);
1772 tree elem_size_log =
1773 build_int_cst (unsigned_type_node, exact_log2 (vectype_align/vf));
1774 tree vf_tree = build_int_cst (unsigned_type_node, vf);
1775 tree byte_misalign;
1776 tree elem_misalign;
1778 new_bb = bsi_insert_on_edge_immediate (pe, new_stmts);
1779 gcc_assert (!new_bb);
1781 /* Create: byte_misalign = addr & (vectype_size - 1) */
1782 byte_misalign =
1783 build2 (BIT_AND_EXPR, type, start_addr, vectype_size_minus_1);
1785 /* Create: elem_misalign = byte_misalign / element_size */
1786 elem_misalign =
1787 build2 (RSHIFT_EXPR, unsigned_type_node, byte_misalign, elem_size_log);
1789 /* Create: (niters_type) (VF - elem_misalign)&(VF - 1) */
1790 iters = build2 (MINUS_EXPR, unsigned_type_node, vf_tree, elem_misalign);
1791 iters = build2 (BIT_AND_EXPR, unsigned_type_node, iters, vf_minus_1);
1792 iters = fold_convert (niters_type, iters);
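/* Worked example (editor's addition, hypothetical numbers): for a 16-byte
   vectype with vf = 4, vectype_size_minus_1 = 15 and
   elem_size_log = log2 (16/4) = 2; if start_addr ends in ...0x8, then
   byte_misalign = 8, elem_misalign = 8 >> 2 = 2, and
   iters = (4 - 2) & 3 = 2, matching the compile-time case above.  */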
1795 /* Create: prolog_loop_niters = min (iters, loop_niters) */
1796 /* If the loop bound is known at compile time we already verified that it is
1797 greater than vf; since the misalignment ('iters') is at most vf, there's
1798 no need to generate the MIN_EXPR in this case. */
1799 if (TREE_CODE (loop_niters) != INTEGER_CST)
1800 iters = build2 (MIN_EXPR, niters_type, iters, loop_niters);
1802 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1804 fprintf (vect_dump, "niters for prolog loop: ");
1805 print_generic_expr (vect_dump, iters, TDF_SLIM);
1808 var = create_tmp_var (niters_type, "prolog_loop_niters");
1809 add_referenced_tmp_var (var);
1810 iters_name = force_gimple_operand (iters, &stmt, false, var);
1812 /* Insert stmt on loop preheader edge. */
1813 if (stmt)
1815 basic_block new_bb = bsi_insert_on_edge_immediate (pe, stmt);
1816 gcc_assert (!new_bb);
1819 return iters_name;
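/* Editor's sketch (illustrative only; all names below are hypothetical):
   the prolog iteration count computed above corresponds to the following
   scalar calculation, assuming VF and the element size are powers of two,
   as the mask and shift operations above require.  */
#if 0
static unsigned long
prolog_niters_sketch (unsigned long start_addr, unsigned long vf,
                      unsigned long elem_size, unsigned long loop_niters)
{
  unsigned long vectype_size = vf * elem_size;
  /* addr_mis = addr & (vectype_size - 1), in bytes.  */
  unsigned long byte_misalign = start_addr & (vectype_size - 1);
  /* Convert the byte misalignment into a count of scalar elements.  */
  unsigned long elem_misalign = byte_misalign / elem_size;
  /* Number of scalar iterations needed to reach an aligned access.  */
  unsigned long iters = (vf - elem_misalign) & (vf - 1);
  /* MIN_EXPR: never peel more iterations than the loop executes.  */
  return iters < loop_niters ? iters : loop_niters;
}
#endif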
1823 /* Function vect_update_init_of_dr
1825 NITERS iterations were peeled from LOOP. DR represents a data reference
1826 in LOOP. This function updates the information recorded in DR to
1827 account for the fact that the first NITERS iterations had already been
1828 executed. Specifically, it updates the OFFSET field of stmt_info. */
1830 static void
1831 vect_update_init_of_dr (struct data_reference *dr, tree niters)
1833 stmt_vec_info stmt_info = vinfo_for_stmt (DR_STMT (dr));
1834 tree offset = STMT_VINFO_VECT_INIT_OFFSET (stmt_info);
1836 niters = fold (build2 (MULT_EXPR, TREE_TYPE (niters), niters,
1837 STMT_VINFO_VECT_STEP (stmt_info)));
1838 offset = fold (build2 (PLUS_EXPR, TREE_TYPE (offset), offset, niters));
1839 STMT_VINFO_VECT_INIT_OFFSET (stmt_info) = offset;
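/* Worked example (editor's addition, hypothetical numbers): if NITERS = 2
   iterations were peeled and STMT_VINFO_VECT_STEP (stmt_info) is 4, the
   recorded OFFSET is advanced by 2 * 4 = 8, so later address generation
   starts past the accesses already performed by the peeled iterations.  */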
1843 /* Function vect_update_inits_of_drs
1845 NITERS iterations were peeled from the loop represented by LOOP_VINFO.
1846 This function updates the information recorded for the data references in
1847 the loop to account for the fact that the first NITERS iterations had
1848 already been executed. Specifically, it updates the initial_condition of the
1849 access_function of all the data_references in the loop. */
1851 static void
1852 vect_update_inits_of_drs (loop_vec_info loop_vinfo, tree niters)
1854 unsigned int i;
1855 varray_type loop_write_datarefs = LOOP_VINFO_DATAREF_WRITES (loop_vinfo);
1856 varray_type loop_read_datarefs = LOOP_VINFO_DATAREF_READS (loop_vinfo);
1858 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1859 fprintf (vect_dump, "=== vect_update_inits_of_drs ===");
1861 for (i = 0; i < VARRAY_ACTIVE_SIZE (loop_write_datarefs); i++)
1863 struct data_reference *dr = VARRAY_GENERIC_PTR (loop_write_datarefs, i);
1864 vect_update_init_of_dr (dr, niters);
1867 for (i = 0; i < VARRAY_ACTIVE_SIZE (loop_read_datarefs); i++)
1869 struct data_reference *dr = VARRAY_GENERIC_PTR (loop_read_datarefs, i);
1870 vect_update_init_of_dr (dr, niters);
1875 /* Function vect_do_peeling_for_alignment
1877 Peel the first 'niters' iterations of the loop represented by LOOP_VINFO.
1878 'niters' is set to the misalignment of one of the data references in the
1879 loop, thereby forcing it to refer to an aligned location at the beginning
1880 of the execution of this loop. The data reference for which we are
1881 peeling is recorded in LOOP_VINFO_UNALIGNED_DR. */
1883 static void
1884 vect_do_peeling_for_alignment (loop_vec_info loop_vinfo, struct loops *loops)
1886 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
1887 tree niters_of_prolog_loop, ni_name;
1888 tree n_iters;
1889 struct loop *new_loop;
1891 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1892 fprintf (vect_dump, "=== vect_do_peeling_for_alignment ===");
1894 ni_name = vect_build_loop_niters (loop_vinfo);
1895 niters_of_prolog_loop = vect_gen_niters_for_prolog_loop (loop_vinfo, ni_name);
1897 /* Peel the prolog loop and iterate it niters_of_prolog_loop times. */
1898 new_loop =
1899 slpeel_tree_peel_loop_to_edge (loop, loops, loop_preheader_edge (loop),
1900 niters_of_prolog_loop, ni_name, true);
1901 #ifdef ENABLE_CHECKING
1902 gcc_assert (new_loop);
1903 slpeel_verify_cfg_after_peeling (new_loop, loop);
1904 #endif
1906 /* Update number of times loop executes. */
1907 n_iters = LOOP_VINFO_NITERS (loop_vinfo);
1908 LOOP_VINFO_NITERS (loop_vinfo) = fold (build2 (MINUS_EXPR,
1909 TREE_TYPE (n_iters), n_iters, niters_of_prolog_loop));
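/* Worked example (editor's addition, hypothetical numbers): if the loop
   originally executed n_iters = 100 iterations and the prolog peeled 3 of
   them, the loop to be vectorized now executes 100 - 3 = 97 iterations.  */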
1911 /* Update the init conditions of the access functions of all data refs. */
1912 vect_update_inits_of_drs (loop_vinfo, niters_of_prolog_loop);
1914 /* After peeling we have to reset scalar evolution analyzer. */
1915 scev_reset ();
1917 return;
1921 /* Function vect_transform_loop.
1923 The analysis phase has determined that the loop is vectorizable.
1924 Vectorize the loop - create vectorized stmts to replace the scalar
1925 stmts in the loop, and update the loop exit condition. */
1927 void
1928 vect_transform_loop (loop_vec_info loop_vinfo,
1929 struct loops *loops ATTRIBUTE_UNUSED)
1931 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
1932 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
1933 int nbbs = loop->num_nodes;
1934 block_stmt_iterator si;
1935 int i;
1936 tree ratio = NULL;
1937 int vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
1939 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1940 fprintf (vect_dump, "=== vect_transform_loop ===");
1943 /* Peel the loop if there are data refs with unknown alignment.
1944 Only one data ref with unknown alignment is supported. */
1946 if (LOOP_PEELING_FOR_ALIGNMENT (loop_vinfo))
1947 vect_do_peeling_for_alignment (loop_vinfo, loops);
1949 /* If the loop has a symbolic number of iterations 'n' (i.e. it's not a
1950 compile time constant), or it is a constant that is not a multiple of the
1951 vectorization factor, then an epilog loop needs to be created.
1952 We therefore duplicate the loop: the original loop will be vectorized,
1953 and will compute the first (n/VF) iterations. The second copy of the loop
1954 will remain scalar and will compute the remaining (n%VF) iterations.
1955 (VF is the vectorization factor). */
1957 if (!LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)
1958 || (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)
1959 && LOOP_VINFO_INT_NITERS (loop_vinfo) % vectorization_factor != 0))
1960 vect_do_peeling_for_loop_bound (loop_vinfo, &ratio, loops);
1961 else
1962 ratio = build_int_cst (TREE_TYPE (LOOP_VINFO_NITERS (loop_vinfo)),
1963 LOOP_VINFO_INT_NITERS (loop_vinfo) / vectorization_factor);
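/* Worked example (editor's addition, hypothetical numbers): with
   vectorization_factor = 4, a known iteration count of 100 takes the else
   branch and gives ratio = 100 / 4 = 25 with no epilog; a count of 103 (or a
   symbolic 'n') goes through vect_do_peeling_for_loop_bound, which creates a
   scalar epilog loop for the remaining 103 % 4 = 3 (or n % 4) iterations and
   computes 'ratio' itself.  */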
1965 /* 1) Make sure the loop header has exactly two entries
1966 2) Make sure we have a preheader basic block. */
1968 gcc_assert (EDGE_COUNT (loop->header->preds) == 2);
1970 loop_split_edge_with (loop_preheader_edge (loop), NULL);
1973 /* FORNOW: the vectorizer supports only loops whose body consists
1974 of one basic block (header + empty latch). When the vectorizer
1975 supports more involved loop forms, the order in which the BBs are
1976 traversed needs to be reconsidered. */
1978 for (i = 0; i < nbbs; i++)
1980 basic_block bb = bbs[i];
1982 for (si = bsi_start (bb); !bsi_end_p (si);)
1984 tree stmt = bsi_stmt (si);
1985 stmt_vec_info stmt_info;
1986 bool is_store;
1988 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
1990 fprintf (vect_dump, "------>vectorizing statement: ");
1991 print_generic_expr (vect_dump, stmt, TDF_SLIM);
1993 stmt_info = vinfo_for_stmt (stmt);
1994 gcc_assert (stmt_info);
1995 if (!STMT_VINFO_RELEVANT_P (stmt_info))
1997 bsi_next (&si);
1998 continue;
2000 #ifdef ENABLE_CHECKING
2001 /* FORNOW: Verify that all stmts operate on the same number of
2002 units and no inner unrolling is necessary. */
2003 gcc_assert
2004 (TYPE_VECTOR_SUBPARTS (STMT_VINFO_VECTYPE (stmt_info))
2005 == vectorization_factor);
2006 #endif
2007 /* -------- vectorize statement ------------ */
2008 if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
2009 fprintf (vect_dump, "transform statement.");
2011 is_store = vect_transform_stmt (stmt, &si);
2012 if (is_store)
2014 /* Free the attached stmt_vec_info and remove the stmt. */
2015 stmt_ann_t ann = stmt_ann (stmt);
2016 free (stmt_info);
2017 set_stmt_info (ann, NULL);
2018 bsi_remove (&si);
2019 continue;
2022 bsi_next (&si);
2023 } /* stmts in BB */
2024 } /* BBs in loop */
2026 slpeel_make_loop_iterate_ntimes (loop, ratio);
2028 /* The memory tags and pointers in vectorized statements need to
2029 have their SSA forms updated. FIXME, why can't this be delayed
2030 until all the loops have been transformed? */
2031 update_ssa (TODO_update_ssa);
2033 if (vect_print_dump_info (REPORT_VECTORIZED_LOOPS, LOOP_LOC (loop_vinfo)))
2034 fprintf (vect_dump, "LOOP VECTORIZED.");