/* Transformation Utilities for Loop Vectorization.
   Copyright (C) 2003,2004,2005 Free Software Foundation, Inc.
   Contributed by Dorit Naishlos <dorit@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-data-ref.h"
#include "tree-chrec.h"
#include "tree-scalar-evolution.h"
#include "tree-vectorizer.h"
#include "langhooks.h"
#include "tree-pass.h"
/* Utility functions for the code transformation.  */
static bool vect_transform_stmt (tree, block_stmt_iterator *);
static void vect_align_data_ref (tree);
static tree vect_create_destination_var (tree, tree);
static tree vect_create_data_ref_ptr
  (tree, block_stmt_iterator *, tree, tree *, bool);
static tree vect_create_index_for_vector_ref (loop_vec_info);
static tree vect_create_addr_base_for_vector_ref (tree, tree *, tree);
static tree vect_get_new_vect_var (tree, enum vect_var_kind, const char *);
static tree vect_get_vec_def_for_operand (tree, tree);
static tree vect_init_vector (tree, tree);
static void vect_finish_stmt_generation
  (tree stmt, tree vec_stmt, block_stmt_iterator *bsi);
static bool vect_is_simple_cond (tree, loop_vec_info);
static void update_vuses_to_preheader (tree, struct loop *);

/* Utility functions dealing with loop peeling (not the peeling itself).  */
static void vect_generate_tmps_on_preheader
  (loop_vec_info, tree *, tree *, tree *);
static tree vect_build_loop_niters (loop_vec_info);
static void vect_update_ivs_after_vectorizer (loop_vec_info, tree, edge);
static tree vect_gen_niters_for_prolog_loop (loop_vec_info, tree);
static void vect_update_init_of_dr (struct data_reference *, tree niters);
static void vect_update_inits_of_drs (loop_vec_info, tree);
static void vect_do_peeling_for_alignment (loop_vec_info, struct loops *);
static void vect_do_peeling_for_loop_bound
  (loop_vec_info, tree *, struct loops *);
/* Function vect_get_new_vect_var.

   Returns a name for a new variable.  The current naming scheme appends the
   prefix "vect_" or "vect_p" (depending on the value of VAR_KIND) to
   the name of vectorizer generated variables, and appends that to NAME if
   provided.  */

static tree
vect_get_new_vect_var (tree type, enum vect_var_kind var_kind, const char *name)
{
  const char *prefix;
  tree new_vect_var;

  if (var_kind == vect_simple_var)
    prefix = "vect_";
  else
    prefix = "vect_p";

  if (name)
    new_vect_var = create_tmp_var (type, concat (prefix, name, NULL));
  else
    new_vect_var = create_tmp_var (type, prefix);

  return new_vect_var;
}
/* Function vect_create_index_for_vector_ref.

   Create (and return) an index variable, along with its update chain in the
   loop.  This variable will be used to access a memory location in a vector
   operation.

   Input:
   LOOP: The loop being vectorized.
   BSI: The block_stmt_iterator where STMT is.  Any new stmts created by this
        function can be added here, or in the loop pre-header.

   Output:
   Return an index that will be used to index a vector array.  It is expected
   that a pointer to the first vector will be used as the base address for the
   indexed reference.

   FORNOW: we are not trying to be efficient, just creating a new index each
   time from scratch.  At this time all vector references could use the same
   index.

   TODO: create only one index to be used by all vector references.  Record
   the index in the LOOP_VINFO the first time this procedure is called and
   return it on subsequent calls.  The increment of this index must be placed
   just before the conditional expression that ends the single block loop.  */
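/* Illustrative sketch only (names such as 'vp' and the dump-like form are
   invented, not taken from an actual dump): for a single-block loop the
   index created below behaves like

       idx = 0;
       loop:
         ... vp[idx] ...        <-- vectorized accesses use idx
         idx = idx + 1;         <-- increment at the standard IV position,
                                    just before the loop exit test

   i.e. a plain {0, +, 1} induction variable.  */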
static tree
vect_create_index_for_vector_ref (loop_vec_info loop_vinfo)
{
  tree init, step;
  block_stmt_iterator incr_bsi;
  bool insert_after;
  tree indx_before_incr, indx_after_incr;
  tree incr;
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);

  /* It is assumed that the base pointer used for vectorized access contains
     the address of the first vector.  Therefore the index used for vectorized
     access must be initialized to zero and incremented by 1.  */

  init = integer_zero_node;
  step = integer_one_node;

  standard_iv_increment_position (loop, &incr_bsi, &insert_after);
  create_iv (init, step, NULL_TREE, loop, &incr_bsi, insert_after,
             &indx_before_incr, &indx_after_incr);
  incr = bsi_stmt (incr_bsi);
  set_stmt_info (stmt_ann (incr), new_stmt_vec_info (incr, loop_vinfo));

  return indx_before_incr;
}
/* Function vect_create_addr_base_for_vector_ref.

   Create an expression that computes the address of the first memory location
   that will be accessed for a data reference.

   Input:
   STMT: The statement containing the data reference.
   NEW_STMT_LIST: Must be initialized to NULL_TREE or a statement list.
   OFFSET: Optional.  If supplied, it is added to the initial address.

   Output:
   1. Return an SSA_NAME whose value is the address of the memory location of
      the first vector of the data reference.
   2. If new_stmt_list is not NULL_TREE after return then the caller must insert
      these statement(s) which define the returned SSA_NAME.

   FORNOW: We are only handling array accesses with step 1.  */
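/* Illustrative sketch of the computation emitted below (the names BASE,
   INIT_OFF, OFFSET and STEP are invented for the example): for a data
   reference with base address BASE, recorded init offset INIT_OFF, an
   optional OFFSET (in elements) and step STEP (in bytes), the generated
   statements compute roughly

       base_off  = INIT_OFF;
       if OFFSET was supplied:  base_off = base_off + OFFSET * STEP;
       addr_base = BASE + base_off;

   and the function returns addr_base as a new SSA_NAME.  */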
static tree
vect_create_addr_base_for_vector_ref (tree stmt,
                                      tree *new_stmt_list,
                                      tree offset)
{
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
  tree data_ref_base =
    unshare_expr (STMT_VINFO_VECT_DR_BASE_ADDRESS (stmt_info));
  tree base_name = build_fold_indirect_ref (data_ref_base);
  tree ref = DR_REF (dr);
  tree scalar_type = TREE_TYPE (ref);
  tree scalar_ptr_type = build_pointer_type (scalar_type);
  tree new_temp, vec_stmt;
  tree addr_base, addr_expr;
  tree dest, new_stmt;
  tree base_offset = unshare_expr (STMT_VINFO_VECT_INIT_OFFSET (stmt_info));

  /* Create base_offset.  */
  dest = create_tmp_var (TREE_TYPE (base_offset), "base_off");
  add_referenced_tmp_var (dest);
  base_offset = force_gimple_operand (base_offset, &new_stmt, false, dest);
  append_to_statement_list_force (new_stmt, new_stmt_list);

  if (offset)
    {
      tree tmp = create_tmp_var (TREE_TYPE (base_offset), "offset");
      add_referenced_tmp_var (tmp);
      offset = fold (build2 (MULT_EXPR, TREE_TYPE (offset), offset,
                             STMT_VINFO_VECT_STEP (stmt_info)));
      base_offset = fold (build2 (PLUS_EXPR, TREE_TYPE (base_offset),
                                  base_offset, offset));
      base_offset = force_gimple_operand (base_offset, &new_stmt, false, tmp);
      append_to_statement_list_force (new_stmt, new_stmt_list);
    }

  /* base + base_offset.  */
  addr_base = fold (build2 (PLUS_EXPR, TREE_TYPE (data_ref_base), data_ref_base,
                            base_offset));

  /* addr_expr = addr_base.  */
  addr_expr = vect_get_new_vect_var (scalar_ptr_type, vect_pointer_var,
                                     get_name (base_name));
  add_referenced_tmp_var (addr_expr);
  vec_stmt = build2 (MODIFY_EXPR, void_type_node, addr_expr, addr_base);
  new_temp = make_ssa_name (addr_expr, vec_stmt);
  TREE_OPERAND (vec_stmt, 0) = new_temp;
  append_to_statement_list_force (vec_stmt, new_stmt_list);

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    {
      fprintf (vect_dump, "created ");
      print_generic_expr (vect_dump, vec_stmt, TDF_SLIM);
    }

  return new_temp;
}
/* Function vect_align_data_ref.

   Handle misalignment of a memory access.

   FORNOW: Can't handle misaligned accesses.
   Make sure that the dataref is aligned.  */

static void
vect_align_data_ref (tree stmt)
{
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);

  /* FORNOW: can't handle misaligned accesses;
             all accesses expected to be aligned.  */
  gcc_assert (aligned_access_p (dr));
}
/* Function vect_create_data_ref_ptr.

   Create a memory reference expression for vector access, to be used in a
   vector load/store stmt.  The reference is based on a new pointer to vector
   type (vp).

   Input:
   1. STMT: a stmt that references memory.  Expected to be of the form
         MODIFY_EXPR <name, data-ref> or MODIFY_EXPR <data-ref, name>.
   2. BSI: block_stmt_iterator where new stmts can be added.
   3. OFFSET (optional): an offset to be added to the initial address accessed
         by the data-ref in STMT.
   4. ONLY_INIT: indicate if vp is to be updated in the loop, or remain
         pointing to the initial address.

   Output:
   1. Declare a new ptr to vector_type, and have it point to the base of the
      data reference (initial address accessed by the data reference).
      For example, for vector of type V8HI, the following code is generated:

      v8hi *vp;
      vp = (v8hi *)initial_address;

      if OFFSET is not supplied:
         initial_address = &a[init];
      if OFFSET is supplied:
         initial_address = &a[init + OFFSET];

      Return the initial_address in INITIAL_ADDRESS.

   2. Create a data-reference in the loop based on the new vector pointer vp,
      and using a new index variable 'idx' as follows:

      vp' = vp + update

      where if ONLY_INIT is true:
         update = zero
      and otherwise
         update = idx * vector_type_size

      Return the pointer vp'.

   FORNOW: handle only aligned and consecutive accesses.  */
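/* Illustrative sketch of the full sequence this function emits for a V8HI
   access (SSA names invented for the example):

     loop preheader:
       vect_pa.1 = (v8hi *) &a[init];        <-- steps (1)-(3)
     loop body:
       update.2  = idx * sizeof (v8hi);      <-- step (4), skipped if ONLY_INIT
       vect_pa.3 = vect_pa.1 + update.2;
       ... *vect_pa.3 ...                    <-- dereference emitted by caller

   The actual names and types depend on the data reference being handled.  */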
static tree
vect_create_data_ref_ptr (tree stmt, block_stmt_iterator *bsi, tree offset,
                          tree *initial_address, bool only_init)
{
  tree base_name;
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
  tree vect_ptr_type;
  tree vect_ptr;
  tree tag;
  tree new_temp;
  tree vec_stmt;
  tree new_stmt_list = NULL_TREE;
  tree idx;
  edge pe = loop_preheader_edge (loop);
  basic_block new_bb;
  tree vect_ptr_init;
  tree vectype_size;
  tree ptr_update;
  tree data_ref_ptr;
  tree type, tmp, size;

  base_name = build_fold_indirect_ref (unshare_expr (
                      STMT_VINFO_VECT_DR_BASE_ADDRESS (stmt_info)));

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    {
      tree data_ref_base = base_name;
      fprintf (vect_dump, "create array_ref of type: ");
      print_generic_expr (vect_dump, vectype, TDF_SLIM);
      if (TREE_CODE (data_ref_base) == VAR_DECL)
        fprintf (vect_dump, "  vectorizing a one dimensional array ref: ");
      else if (TREE_CODE (data_ref_base) == ARRAY_REF)
        fprintf (vect_dump, "  vectorizing a multidimensional array ref: ");
      else if (TREE_CODE (data_ref_base) == COMPONENT_REF)
        fprintf (vect_dump, "  vectorizing a record based array ref: ");
      else if (TREE_CODE (data_ref_base) == SSA_NAME)
        fprintf (vect_dump, "  vectorizing a pointer ref: ");
      print_generic_expr (vect_dump, base_name, TDF_SLIM);
    }

  /** (1) Create the new vector-pointer variable:  **/

  vect_ptr_type = build_pointer_type (vectype);
  vect_ptr = vect_get_new_vect_var (vect_ptr_type, vect_pointer_var,
                                    get_name (base_name));
  add_referenced_tmp_var (vect_ptr);

  /** (2) Add aliasing information to the new vector-pointer:
          (The points-to info (SSA_NAME_PTR_INFO) may be defined later.)  **/

  tag = STMT_VINFO_MEMTAG (stmt_info);

  /* If tag is a variable (and NOT_A_TAG) then a new type alias
     tag must be created with tag added to its may alias list.  */
  if (var_ann (tag)->mem_tag_kind == NOT_A_TAG)
    new_type_alias (vect_ptr, tag);
  else
    var_ann (vect_ptr)->type_mem_tag = tag;

  var_ann (vect_ptr)->subvars = STMT_VINFO_SUBVARS (stmt_info);

  /** (3) Calculate the initial address of the vector-pointer, and set
          the vector-pointer to point to it before the loop:  **/

  /* Create: &(base[init_val+offset]) in the loop preheader.  */
  new_temp = vect_create_addr_base_for_vector_ref (stmt, &new_stmt_list,
                                                   offset);
  pe = loop_preheader_edge (loop);
  new_bb = bsi_insert_on_edge_immediate (pe, new_stmt_list);
  gcc_assert (!new_bb);
  *initial_address = new_temp;

  /* Create: p = (vectype *) initial_base  */
  vec_stmt = fold_convert (vect_ptr_type, new_temp);
  vec_stmt = build2 (MODIFY_EXPR, void_type_node, vect_ptr, vec_stmt);
  new_temp = make_ssa_name (vect_ptr, vec_stmt);
  TREE_OPERAND (vec_stmt, 0) = new_temp;
  new_bb = bsi_insert_on_edge_immediate (pe, vec_stmt);
  gcc_assert (!new_bb);
  vect_ptr_init = TREE_OPERAND (vec_stmt, 0);

  /** (4) Handle the updating of the vector-pointer inside the loop:  **/

  if (only_init) /* No update in loop is required.  */
    {
      /* Copy the points-to information if it exists.  */
      if (STMT_VINFO_PTR_INFO (stmt_info))
        duplicate_ssa_name_ptr_info (vect_ptr_init,
                                     STMT_VINFO_PTR_INFO (stmt_info));
      return vect_ptr_init;
    }

  idx = vect_create_index_for_vector_ref (loop_vinfo);

  /* Create: update = idx * vectype_size.  */
  tmp = create_tmp_var (integer_type_node, "update");
  add_referenced_tmp_var (tmp);
  size = TYPE_SIZE (vect_ptr_type);
  type = lang_hooks.types.type_for_size (tree_low_cst (size, 1), 1);
  ptr_update = create_tmp_var (type, "update");
  add_referenced_tmp_var (ptr_update);
  vectype_size = TYPE_SIZE_UNIT (vectype);
  vec_stmt = build2 (MULT_EXPR, integer_type_node, idx, vectype_size);
  vec_stmt = build2 (MODIFY_EXPR, void_type_node, tmp, vec_stmt);
  new_temp = make_ssa_name (tmp, vec_stmt);
  TREE_OPERAND (vec_stmt, 0) = new_temp;
  bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);
  vec_stmt = fold_convert (type, new_temp);
  vec_stmt = build2 (MODIFY_EXPR, void_type_node, ptr_update, vec_stmt);
  new_temp = make_ssa_name (ptr_update, vec_stmt);
  TREE_OPERAND (vec_stmt, 0) = new_temp;
  bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);

  /* Create: data_ref_ptr = vect_ptr_init + update.  */
  vec_stmt = build2 (PLUS_EXPR, vect_ptr_type, vect_ptr_init, new_temp);
  vec_stmt = build2 (MODIFY_EXPR, void_type_node, vect_ptr, vec_stmt);
  new_temp = make_ssa_name (vect_ptr, vec_stmt);
  TREE_OPERAND (vec_stmt, 0) = new_temp;
  bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);
  data_ref_ptr = TREE_OPERAND (vec_stmt, 0);

  /* Copy the points-to information if it exists.  */
  if (STMT_VINFO_PTR_INFO (stmt_info))
    duplicate_ssa_name_ptr_info (data_ref_ptr, STMT_VINFO_PTR_INFO (stmt_info));

  return data_ref_ptr;
}
/* Function vect_create_destination_var.

   Create a new temporary of type VECTYPE.  */

static tree
vect_create_destination_var (tree scalar_dest, tree vectype)
{
  tree vec_dest;
  const char *new_name;

  gcc_assert (TREE_CODE (scalar_dest) == SSA_NAME);

  new_name = get_name (scalar_dest);
  vec_dest = vect_get_new_vect_var (vectype, vect_simple_var, new_name);
  add_referenced_tmp_var (vec_dest);

  return vec_dest;
}
/* Function vect_init_vector.

   Insert a new stmt (INIT_STMT) that initializes a new vector variable with
   the vector elements of VECTOR_VAR.  Return the DEF of INIT_STMT.  It will be
   used in the vectorization of STMT.  */
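/* Illustrative example (names invented): when vectorizing
     a[i] = b[i] + 5
   with V4SI vectors, the invariant operand 5 gives rise to a preheader stmt
     vect_cst_.7 = {5,5,5,5};
   and vect_init_vector returns the SSA_NAME defined by that statement.  */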
static tree
vect_init_vector (tree stmt, tree vector_var)
{
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  tree new_var;
  tree init_stmt;
  tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
  tree vec_oprnd;
  edge pe;
  tree new_temp;
  basic_block new_bb;

  new_var = vect_get_new_vect_var (vectype, vect_simple_var, "cst_");
  add_referenced_tmp_var (new_var);

  init_stmt = build2 (MODIFY_EXPR, vectype, new_var, vector_var);
  new_temp = make_ssa_name (new_var, init_stmt);
  TREE_OPERAND (init_stmt, 0) = new_temp;

  pe = loop_preheader_edge (loop);
  new_bb = bsi_insert_on_edge_immediate (pe, init_stmt);
  gcc_assert (!new_bb);

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    {
      fprintf (vect_dump, "created new init_stmt: ");
      print_generic_expr (vect_dump, init_stmt, TDF_SLIM);
    }

  vec_oprnd = TREE_OPERAND (init_stmt, 0);
  return vec_oprnd;
}
/* Function vect_get_vec_def_for_operand.

   OP is an operand in STMT.  This function returns a (vector) def that will be
   used in the vectorized stmt for STMT.

   In the case that OP is an SSA_NAME which is defined in the loop, then
   STMT_VINFO_VEC_STMT of the defining stmt holds the relevant def.

   In case OP is an invariant or constant, a new stmt that creates a vector def
   needs to be introduced.  */
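/* Illustrative summary of the cases handled below (example names invented):
   for 'x_1 = y_2 * 3' inside the loop,
     - the constant 3          -> a new {3,3,...,3} vector built in the
                                  preheader (case 1),
     - y_2 defined in the loop -> the def of the already-vectorized defining
                                  stmt (case 2.1),
     - y_2 loop-invariant      -> a vector constructor {y_2,...,y_2} built in
                                  the preheader (case 2.3).  */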
static tree
vect_get_vec_def_for_operand (tree op, tree stmt)
{
  tree vec_oprnd;
  tree vec_stmt;
  tree def_stmt;
  stmt_vec_info def_stmt_info = NULL;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
  tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
  int nunits = TYPE_VECTOR_SUBPARTS (vectype);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  tree vec_cst;
  tree vec_inv;
  tree t = NULL_TREE;
  tree def;
  int i;
  basic_block bb;

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    {
      fprintf (vect_dump, "vect_get_vec_def_for_operand: ");
      print_generic_expr (vect_dump, op, TDF_SLIM);
    }

  /** ===> Case 1: operand is a constant.  **/

  if (TREE_CODE (op) == INTEGER_CST || TREE_CODE (op) == REAL_CST)
    {
      /* Create 'vect_cst_ = {cst,cst,...,cst}'  */

      /* Build a tree with vector elements.  */
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "Create vector_cst. nunits = %d", nunits);

      for (i = nunits - 1; i >= 0; --i)
        {
          t = tree_cons (NULL_TREE, op, t);
        }
      vec_cst = build_vector (vectype, t);
      return vect_init_vector (stmt, vec_cst);
    }

  gcc_assert (TREE_CODE (op) == SSA_NAME);

  /** ===> Case 2: operand is an SSA_NAME - find the stmt that defines it.  **/

  def_stmt = SSA_NAME_DEF_STMT (op);
  def_stmt_info = vinfo_for_stmt (def_stmt);

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    {
      fprintf (vect_dump, "vect_get_vec_def_for_operand: def_stmt: ");
      print_generic_expr (vect_dump, def_stmt, TDF_SLIM);
    }

  /** ==> Case 2.1: operand is defined inside the loop.  **/

  if (def_stmt_info)
    {
      /* Get the def from the vectorized stmt.  */
      vec_stmt = STMT_VINFO_VEC_STMT (def_stmt_info);
      gcc_assert (vec_stmt);
      vec_oprnd = TREE_OPERAND (vec_stmt, 0);
      return vec_oprnd;
    }

  /** ==> Case 2.2: operand is defined by the loop-header phi-node -
         it is a reduction/induction.  **/

  bb = bb_for_stmt (def_stmt);
  if (TREE_CODE (def_stmt) == PHI_NODE && flow_bb_inside_loop_p (loop, bb))
    {
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "reduction/induction - unsupported.");
      internal_error ("no support for reduction/induction"); /* FORNOW */
    }

  /** ==> Case 2.3: operand is defined outside the loop -
         it is a loop invariant.  */

  switch (TREE_CODE (def_stmt))
    {
    case PHI_NODE:
      def = PHI_RESULT (def_stmt);
      break;
    case MODIFY_EXPR:
      def = TREE_OPERAND (def_stmt, 0);
      break;
    case NOP_EXPR:
      def = TREE_OPERAND (def_stmt, 0);
      gcc_assert (IS_EMPTY_STMT (def_stmt));
      break;
    default:
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        {
          fprintf (vect_dump, "unsupported defining stmt: ");
          print_generic_expr (vect_dump, def_stmt, TDF_SLIM);
        }
      internal_error ("unsupported defining stmt");
    }

  /* Build a tree with vector elements.
     Create 'vec_inv = {inv,inv,..,inv}'  */

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    fprintf (vect_dump, "Create vector_inv.");

  for (i = nunits - 1; i >= 0; --i)
    {
      t = tree_cons (NULL_TREE, def, t);
    }

  vec_inv = build_constructor (vectype, t);
  return vect_init_vector (stmt, vec_inv);
}
/* Function vect_finish_stmt_generation.

   Insert a new stmt.  */

static void
vect_finish_stmt_generation (tree stmt, tree vec_stmt, block_stmt_iterator *bsi)
{
  bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    {
      fprintf (vect_dump, "add new stmt: ");
      print_generic_expr (vect_dump, vec_stmt, TDF_SLIM);
    }

#ifdef ENABLE_CHECKING
  /* Make sure bsi points to the stmt that is being vectorized.  */
  gcc_assert (stmt == bsi_stmt (*bsi));
#endif

#ifdef USE_MAPPED_LOCATION
  SET_EXPR_LOCATION (vec_stmt, EXPR_LOCATION (stmt));
#else
  SET_EXPR_LOCUS (vec_stmt, EXPR_LOCUS (stmt));
#endif
}
/* Function vectorizable_assignment.

   Check if STMT performs an assignment (copy) that can be vectorized.
   If VEC_STMT is also passed, vectorize the STMT: create a vectorized
   stmt to replace it, put it in VEC_STMT, and insert it at BSI.
   Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
static bool
vectorizable_assignment (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
{
  tree vec_dest;
  tree scalar_dest;
  tree op;
  tree vec_oprnd;
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
  tree new_temp;

  /* Is vectorizable assignment?  */

  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return false;

  scalar_dest = TREE_OPERAND (stmt, 0);
  if (TREE_CODE (scalar_dest) != SSA_NAME)
    return false;

  op = TREE_OPERAND (stmt, 1);
  if (!vect_is_simple_use (op, loop_vinfo, NULL))
    {
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "use not simple.");
      return false;
    }

  if (!vec_stmt) /* transformation not required.  */
    {
      STMT_VINFO_TYPE (stmt_info) = assignment_vec_info_type;
      return true;
    }

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    fprintf (vect_dump, "transform assignment.");

  vec_dest = vect_create_destination_var (scalar_dest, vectype);

  op = TREE_OPERAND (stmt, 1);
  vec_oprnd = vect_get_vec_def_for_operand (op, stmt);

  /* Arguments are ready.  Create the new vector stmt.  */
  *vec_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, vec_oprnd);
  new_temp = make_ssa_name (vec_dest, *vec_stmt);
  TREE_OPERAND (*vec_stmt, 0) = new_temp;
  vect_finish_stmt_generation (stmt, *vec_stmt, bsi);

  return true;
}
/* Function vect_min_worthwhile_factor.

   For a loop where we could vectorize the operation indicated by CODE,
   return the minimum vectorization factor that makes it worthwhile
   to use generic vectors.  */

static int
vect_min_worthwhile_factor (enum tree_code code)
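/* Illustration only (the thresholds mentioned here are invented, not the
   actual table): a plain addition emulated with generic word-mode vectors
   might only pay off once the vectorization factor reaches 4, while cheap
   bitwise operations might already be worthwhile at a factor of 2.
   vectorizable_operation compares LOOP_VINFO_VECT_FACTOR against the value
   returned here.  */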
/* Function vectorizable_operation.

   Check if STMT performs a binary or unary operation that can be vectorized.
   If VEC_STMT is also passed, vectorize the STMT: create a vectorized
   stmt to replace it, put it in VEC_STMT, and insert it at BSI.
   Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
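/* Illustrative example (SSA names invented): for the scalar stmt
     a_5 = b_3 + c_4
   in a loop vectorized with V4SI, the transformation below emits
     vect_a.9 = vect_b.7 + vect_c.8
   where each vector operand is obtained via vect_get_vec_def_for_operand,
   provided the target supports the operation in the vector (or word) mode.  */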
static bool
vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
{
  tree vec_dest;
  tree scalar_dest;
  tree operation;
  tree op0, op1 = NULL;
  tree vec_oprnd0, vec_oprnd1 = NULL;
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
  int i;
  enum tree_code code;
  enum machine_mode vec_mode;
  tree new_temp;
  int op_type;
  tree op;
  optab optab;

  /* Is STMT a vectorizable binary/unary operation?   */
  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return false;

  if (TREE_CODE (TREE_OPERAND (stmt, 0)) != SSA_NAME)
    return false;

  operation = TREE_OPERAND (stmt, 1);
  code = TREE_CODE (operation);
  optab = optab_for_tree_code (code, vectype);

  /* Support only unary or binary operations.  */
  op_type = TREE_CODE_LENGTH (code);
  if (op_type != unary_op && op_type != binary_op)
    {
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "num. args = %d (not unary/binary op).", op_type);
      return false;
    }

  for (i = 0; i < op_type; i++)
    {
      op = TREE_OPERAND (operation, i);
      if (!vect_is_simple_use (op, loop_vinfo, NULL))
        {
          if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
            fprintf (vect_dump, "use not simple.");
          return false;
        }
    }

  /* Supportable by target?  */
  if (!optab)
    {
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "no optab.");
      return false;
    }
  vec_mode = TYPE_MODE (vectype);
  if (optab->handlers[(int) vec_mode].insn_code == CODE_FOR_nothing)
    {
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "op not supported by target.");
      if (GET_MODE_SIZE (vec_mode) != UNITS_PER_WORD
          || LOOP_VINFO_VECT_FACTOR (loop_vinfo)
             < vect_min_worthwhile_factor (code))
        return false;
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "proceeding using word mode.");
    }

  /* Worthwhile without SIMD support?  */
  if (!VECTOR_MODE_P (TYPE_MODE (vectype))
      && LOOP_VINFO_VECT_FACTOR (loop_vinfo)
         < vect_min_worthwhile_factor (code))
    {
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "not worthwhile without SIMD support.");
      return false;
    }

  if (!vec_stmt) /* transformation not required.  */
    {
      STMT_VINFO_TYPE (stmt_info) = op_vec_info_type;
      return true;
    }

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    fprintf (vect_dump, "transform binary/unary operation.");

  scalar_dest = TREE_OPERAND (stmt, 0);
  vec_dest = vect_create_destination_var (scalar_dest, vectype);

  op0 = TREE_OPERAND (operation, 0);
  vec_oprnd0 = vect_get_vec_def_for_operand (op0, stmt);

  if (op_type == binary_op)
    {
      op1 = TREE_OPERAND (operation, 1);
      vec_oprnd1 = vect_get_vec_def_for_operand (op1, stmt);
    }

  /* Arguments are ready.  Create the new vector stmt.  */

  if (op_type == binary_op)
    *vec_stmt = build2 (MODIFY_EXPR, vectype, vec_dest,
                        build2 (code, vectype, vec_oprnd0, vec_oprnd1));
  else
    *vec_stmt = build2 (MODIFY_EXPR, vectype, vec_dest,
                        build1 (code, vectype, vec_oprnd0));
  new_temp = make_ssa_name (vec_dest, *vec_stmt);
  TREE_OPERAND (*vec_stmt, 0) = new_temp;
  vect_finish_stmt_generation (stmt, *vec_stmt, bsi);

  return true;
}
/* Function vectorizable_store.

   Check if STMT defines a non-scalar data-ref (array/pointer/structure) that
   can be vectorized.
   If VEC_STMT is also passed, vectorize the STMT: create a vectorized
   stmt to replace it, put it in VEC_STMT, and insert it at BSI.
   Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
static bool
vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
{
  tree scalar_dest;
  tree data_ref;
  tree op;
  tree vec_oprnd1;
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
  enum machine_mode vec_mode;
  tree dummy;
  enum dr_alignment_support alignment_support_cheme;
  ssa_op_iter iter;
  tree def;

  /* Is vectorizable store?  */

  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return false;

  scalar_dest = TREE_OPERAND (stmt, 0);
  if (TREE_CODE (scalar_dest) != ARRAY_REF
      && TREE_CODE (scalar_dest) != INDIRECT_REF)
    return false;

  op = TREE_OPERAND (stmt, 1);
  if (!vect_is_simple_use (op, loop_vinfo, NULL))
    {
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "use not simple.");
      return false;
    }

  vec_mode = TYPE_MODE (vectype);
  /* FORNOW.  In some cases can vectorize even if data-type not supported
     (e.g. - array initialization with 0).  */
  if (mov_optab->handlers[(int)vec_mode].insn_code == CODE_FOR_nothing)
    return false;

  if (!STMT_VINFO_DATA_REF (stmt_info))
    return false;

  if (!vec_stmt) /* transformation not required.  */
    {
      STMT_VINFO_TYPE (stmt_info) = store_vec_info_type;
      return true;
    }

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    fprintf (vect_dump, "transform store");

  alignment_support_cheme = vect_supportable_dr_alignment (dr);
  gcc_assert (alignment_support_cheme);
  gcc_assert (alignment_support_cheme == dr_aligned);  /* FORNOW */

  /* Handle use - get the vectorized def from the defining stmt.  */
  vec_oprnd1 = vect_get_vec_def_for_operand (op, stmt);

  /* FORNOW: make sure the data reference is aligned.  */
  vect_align_data_ref (stmt);
  data_ref = vect_create_data_ref_ptr (stmt, bsi, NULL_TREE, &dummy, false);
  data_ref = build_fold_indirect_ref (data_ref);

  /* Arguments are ready.  Create the new vector stmt.  */
  *vec_stmt = build2 (MODIFY_EXPR, vectype, data_ref, vec_oprnd1);
  vect_finish_stmt_generation (stmt, *vec_stmt, bsi);

  /* Copy the V_MAY_DEFS representing the aliasing of the original array
     element's definition to the vector's definition then update the
     defining statement.  The original is being deleted so the same
     SSA_NAMEs can be used.  */
  copy_virtual_operands (*vec_stmt, stmt);

  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VMAYDEF)
    {
      SSA_NAME_DEF_STMT (def) = *vec_stmt;

      /* If this virtual def has a use outside the loop and a loop peel is
         performed then the def may be renamed by the peel.  Mark it for
         renaming so the later use will also be renamed.  */
      mark_sym_for_renaming (SSA_NAME_VAR (def));
    }

  return true;
}
/* vectorizable_load.

   Check if STMT reads a non-scalar data-ref (array/pointer/structure) that
   can be vectorized.
   If VEC_STMT is also passed, vectorize the STMT: create a vectorized
   stmt to replace it, put it in VEC_STMT, and insert it at BSI.
   Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
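/* Illustrative summary (SSA names invented): for 'x_1 = a[i]' with V4SI,
   the aligned case below simply emits
     vect_pa.5 = &a[0];          (preheader)
     vect_x.6  = *vect_pa.5;     (loop)
   while the software-pipelined misaligned case emits two aligned loads
   (msq/lsq) per iteration plus a REALIGN_LOAD_EXPR that extracts the
   desired unaligned vector from them.  */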
static bool
vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
{
  tree scalar_dest;
  tree vec_dest = NULL;
  tree data_ref = NULL;
  tree op;
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
  tree new_temp;
  int mode;
  tree init_addr;
  tree new_stmt;
  tree dummy;
  tree offset;
  tree magic;
  tree phi_stmt;
  tree msq_init, msq, lsq;
  tree dataref_ptr;
  tree params;
  basic_block new_bb;
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  edge pe = loop_preheader_edge (loop);
  enum dr_alignment_support alignment_support_cheme;

  /* Is vectorizable load?  */
  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return false;

  scalar_dest = TREE_OPERAND (stmt, 0);
  if (TREE_CODE (scalar_dest) != SSA_NAME)
    return false;

  op = TREE_OPERAND (stmt, 1);
  if (TREE_CODE (op) != ARRAY_REF && TREE_CODE (op) != INDIRECT_REF)
    return false;

  if (!STMT_VINFO_DATA_REF (stmt_info))
    return false;

  mode = (int) TYPE_MODE (vectype);

  /* FORNOW.  In some cases can vectorize even if data-type not supported
     (e.g. - data copies).  */
  if (mov_optab->handlers[mode].insn_code == CODE_FOR_nothing)
    {
      if (vect_print_dump_info (REPORT_DETAILS, LOOP_LOC (loop_vinfo)))
        fprintf (vect_dump, "Aligned load, but unsupported type.");
      return false;
    }

  if (!vec_stmt) /* transformation not required.  */
    {
      STMT_VINFO_TYPE (stmt_info) = load_vec_info_type;
      return true;
    }

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    fprintf (vect_dump, "transform load.");

  alignment_support_cheme = vect_supportable_dr_alignment (dr);
  gcc_assert (alignment_support_cheme);

  if (alignment_support_cheme == dr_aligned
      || alignment_support_cheme == dr_unaligned_supported)
    {
      vec_dest = vect_create_destination_var (scalar_dest, vectype);
      data_ref = vect_create_data_ref_ptr (stmt, bsi, NULL_TREE, &dummy, false);
      if (aligned_access_p (dr))
        data_ref = build_fold_indirect_ref (data_ref);
      else
        {
          int mis = DR_MISALIGNMENT (dr);
          tree tmis = (mis == -1 ? size_zero_node : size_int (mis));
          tmis = size_binop (MULT_EXPR, tmis, size_int (BITS_PER_UNIT));
          data_ref = build2 (MISALIGNED_INDIRECT_REF, vectype, data_ref, tmis);
        }
      new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, data_ref);
      new_temp = make_ssa_name (vec_dest, new_stmt);
      TREE_OPERAND (new_stmt, 0) = new_temp;
      vect_finish_stmt_generation (stmt, new_stmt, bsi);
      copy_virtual_operands (new_stmt, stmt);
    }
  else if (alignment_support_cheme == dr_unaligned_software_pipeline)
    {
      /* Software-pipelined realignment.  In the loop preheader:
           msq_init = *(floor(p1))
           p2 = initial_addr + VS - 1;
           magic = have_builtin ? builtin_result : initial_address;
         and in the loop body:
           p2' = p2 + indx * vectype_size
           lsq = *(floor(p2'))
           vec_dest = realign_load (msq, lsq, magic)
           msq = lsq;  */

      /* <1> Create msq_init = *(floor(p1)) in the loop preheader  */
      vec_dest = vect_create_destination_var (scalar_dest, vectype);
      data_ref = vect_create_data_ref_ptr (stmt, bsi, NULL_TREE,
                                           &init_addr, true);
      data_ref = build1 (ALIGN_INDIRECT_REF, vectype, data_ref);
      new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, data_ref);
      new_temp = make_ssa_name (vec_dest, new_stmt);
      TREE_OPERAND (new_stmt, 0) = new_temp;
      new_bb = bsi_insert_on_edge_immediate (pe, new_stmt);
      gcc_assert (!new_bb);
      msq_init = TREE_OPERAND (new_stmt, 0);
      copy_virtual_operands (new_stmt, stmt);
      update_vuses_to_preheader (new_stmt, loop);

      /* <2> Create lsq = *(floor(p2')) in the loop  */
      offset = build_int_cst (integer_type_node,
                              TYPE_VECTOR_SUBPARTS (vectype));
      offset = int_const_binop (MINUS_EXPR, offset, integer_one_node, 1);
      vec_dest = vect_create_destination_var (scalar_dest, vectype);
      dataref_ptr = vect_create_data_ref_ptr (stmt, bsi, offset, &dummy, false);
      data_ref = build1 (ALIGN_INDIRECT_REF, vectype, dataref_ptr);
      new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, data_ref);
      new_temp = make_ssa_name (vec_dest, new_stmt);
      TREE_OPERAND (new_stmt, 0) = new_temp;
      vect_finish_stmt_generation (stmt, new_stmt, bsi);
      lsq = TREE_OPERAND (new_stmt, 0);
      copy_virtual_operands (new_stmt, stmt);

      if (targetm.vectorize.builtin_mask_for_load)
        {
          /* Create permutation mask, if required, in loop preheader.  */
          tree builtin_decl;
          params = build_tree_list (NULL_TREE, init_addr);
          vec_dest = vect_create_destination_var (scalar_dest, vectype);
          builtin_decl = targetm.vectorize.builtin_mask_for_load ();
          new_stmt = build_function_call_expr (builtin_decl, params);
          new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, new_stmt);
          new_temp = make_ssa_name (vec_dest, new_stmt);
          TREE_OPERAND (new_stmt, 0) = new_temp;
          new_bb = bsi_insert_on_edge_immediate (pe, new_stmt);
          gcc_assert (!new_bb);
          magic = TREE_OPERAND (new_stmt, 0);

          /* The result of the CALL_EXPR to this builtin is determined from
             the value of the parameter and no global variables are touched
             which makes the builtin a "const" function.  Requiring the
             builtin to have the "const" attribute makes it unnecessary
             to call mark_call_clobbered_vars_to_rename.  */
          gcc_assert (TREE_READONLY (builtin_decl));
        }
      else
        {
          /* Use current address instead of init_addr for reduced reg
             pressure.  */
          magic = dataref_ptr;
        }

      /* <4> Create msq = phi <msq_init, lsq> in loop  */
      vec_dest = vect_create_destination_var (scalar_dest, vectype);
      msq = make_ssa_name (vec_dest, NULL_TREE);
      phi_stmt = create_phi_node (msq, loop->header); /* CHECKME */
      SSA_NAME_DEF_STMT (msq) = phi_stmt;
      add_phi_arg (phi_stmt, msq_init, loop_preheader_edge (loop));
      add_phi_arg (phi_stmt, lsq, loop_latch_edge (loop));

      /* <5> Create <vec_dest = realign_load (msq, lsq, magic)> in loop  */
      vec_dest = vect_create_destination_var (scalar_dest, vectype);
      new_stmt = build3 (REALIGN_LOAD_EXPR, vectype, msq, lsq, magic);
      new_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, new_stmt);
      new_temp = make_ssa_name (vec_dest, new_stmt);
      TREE_OPERAND (new_stmt, 0) = new_temp;
      vect_finish_stmt_generation (stmt, new_stmt, bsi);
    }
  else
    gcc_unreachable ();

  *vec_stmt = new_stmt;
  return true;
}
/* Function vect_is_simple_cond.

   Input:
   LOOP - the loop that is being vectorized.
   COND - Condition that is checked for simple use.

   Returns whether a COND can be vectorized.  Checks whether
   condition operands are supportable using vect_is_simple_use.  */
static bool
vect_is_simple_cond (tree cond, loop_vec_info loop_vinfo)
{
  tree lhs, rhs;

  if (!COMPARISON_CLASS_P (cond))
    return false;

  lhs = TREE_OPERAND (cond, 0);
  rhs = TREE_OPERAND (cond, 1);

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
      if (!vect_is_simple_use (lhs, loop_vinfo, &lhs_def_stmt))
        return false;
    }
  else if (TREE_CODE (lhs) != INTEGER_CST && TREE_CODE (lhs) != REAL_CST)
    return false;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      tree rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
      if (!vect_is_simple_use (rhs, loop_vinfo, &rhs_def_stmt))
        return false;
    }
  else if (TREE_CODE (rhs) != INTEGER_CST && TREE_CODE (rhs) != REAL_CST)
    return false;

  return true;
}
/* vectorizable_condition.

   Check if STMT is a conditional modify expression that can be vectorized.
   If VEC_STMT is also passed, vectorize the STMT: create a vectorized
   stmt using VEC_COND_EXPR to replace it, put it in VEC_STMT, and insert it
   at BSI.

   Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
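/* Illustrative example (SSA names invented): the scalar stmt
     x_5 = a_1 < b_2 ? c_3 : d_4
   is transformed below into
     vect_x.9 = VEC_COND_EXPR <vect_a.6 < vect_b.7, vect_c.8, vect_d.5>
   provided the target can expand a vector condition in the chosen vector
   mode (expand_vec_cond_expr_p).  */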
static bool
vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
{
  tree scalar_dest = NULL_TREE;
  tree vec_dest = NULL_TREE;
  tree op = NULL_TREE;
  tree cond_expr, then_clause, else_clause;
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
  tree vec_cond_lhs, vec_cond_rhs, vec_then_clause, vec_else_clause;
  tree vec_compare, vec_cond_expr;
  tree new_temp;
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
  enum machine_mode vec_mode;

  if (!STMT_VINFO_RELEVANT_P (stmt_info))
    return false;

  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return false;

  op = TREE_OPERAND (stmt, 1);

  if (TREE_CODE (op) != COND_EXPR)
    return false;

  cond_expr = TREE_OPERAND (op, 0);
  then_clause = TREE_OPERAND (op, 1);
  else_clause = TREE_OPERAND (op, 2);

  if (!vect_is_simple_cond (cond_expr, loop_vinfo))
    return false;

  if (TREE_CODE (then_clause) == SSA_NAME)
    {
      tree then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
      if (!vect_is_simple_use (then_clause, loop_vinfo, &then_def_stmt))
        return false;
    }
  else if (TREE_CODE (then_clause) != INTEGER_CST
           && TREE_CODE (then_clause) != REAL_CST)
    return false;

  if (TREE_CODE (else_clause) == SSA_NAME)
    {
      tree else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
      if (!vect_is_simple_use (else_clause, loop_vinfo, &else_def_stmt))
        return false;
    }
  else if (TREE_CODE (else_clause) != INTEGER_CST
           && TREE_CODE (else_clause) != REAL_CST)
    return false;

  vec_mode = TYPE_MODE (vectype);

  if (!vec_stmt)
    {
      STMT_VINFO_TYPE (stmt_info) = condition_vec_info_type;
      return expand_vec_cond_expr_p (op, vec_mode);
    }

  scalar_dest = TREE_OPERAND (stmt, 0);
  vec_dest = vect_create_destination_var (scalar_dest, vectype);

  /* Handle cond expr.  */
  vec_cond_lhs =
    vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 0), stmt);
  vec_cond_rhs =
    vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 1), stmt);
  vec_then_clause = vect_get_vec_def_for_operand (then_clause, stmt);
  vec_else_clause = vect_get_vec_def_for_operand (else_clause, stmt);

  /* Arguments are ready.  Create the new vector stmt.  */
  vec_compare = build2 (TREE_CODE (cond_expr), vectype,
                        vec_cond_lhs, vec_cond_rhs);
  vec_cond_expr = build3 (VEC_COND_EXPR, vectype,
                          vec_compare, vec_then_clause, vec_else_clause);

  *vec_stmt = build2 (MODIFY_EXPR, vectype, vec_dest, vec_cond_expr);
  new_temp = make_ssa_name (vec_dest, *vec_stmt);
  TREE_OPERAND (*vec_stmt, 0) = new_temp;
  vect_finish_stmt_generation (stmt, *vec_stmt, bsi);

  return true;
}
/* Function vect_transform_stmt.

   Create a vectorized stmt to replace STMT, and insert it at BSI.  */
static bool
vect_transform_stmt (tree stmt, block_stmt_iterator *bsi)
{
  bool is_store = false;
  tree vec_stmt = NULL_TREE;
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  bool done;

  switch (STMT_VINFO_TYPE (stmt_info))
    {
    case op_vec_info_type:
      done = vectorizable_operation (stmt, bsi, &vec_stmt);
      gcc_assert (done);
      break;

    case assignment_vec_info_type:
      done = vectorizable_assignment (stmt, bsi, &vec_stmt);
      gcc_assert (done);
      break;

    case load_vec_info_type:
      done = vectorizable_load (stmt, bsi, &vec_stmt);
      gcc_assert (done);
      break;

    case store_vec_info_type:
      done = vectorizable_store (stmt, bsi, &vec_stmt);
      gcc_assert (done);
      is_store = true;
      break;

    case condition_vec_info_type:
      done = vectorizable_condition (stmt, bsi, &vec_stmt);
      gcc_assert (done);
      break;

    default:
      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "stmt not supported.");
      gcc_unreachable ();
    }

  STMT_VINFO_VEC_STMT (stmt_info) = vec_stmt;

  return is_store;
}
/* This function builds ni_name = number of iterations the loop executes,
   and places the computation on the loop preheader edge.  */

static tree
vect_build_loop_niters (loop_vec_info loop_vinfo)
{
  tree ni_name, stmt, var;
  edge pe;
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  tree ni = unshare_expr (LOOP_VINFO_NITERS (loop_vinfo));

  var = create_tmp_var (TREE_TYPE (ni), "niters");
  add_referenced_tmp_var (var);
  ni_name = force_gimple_operand (ni, &stmt, false, var);

  pe = loop_preheader_edge (loop);
  if (stmt)
    {
      basic_block new_bb = bsi_insert_on_edge_immediate (pe, stmt);
      gcc_assert (!new_bb);
    }

  return ni_name;
}
/* This function generates the following statements:

   ni_name = number of iterations loop executes
   ratio = ni_name / vf
   ratio_mult_vf_name = ratio * vf

   and places them at the loop preheader edge.  */
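/* Worked example of the computation below (numbers invented): if the loop
   executes ni = 103 iterations and vf = 4, then log2(vf) = 2 and
     ratio              = 103 >> 2 = 25
     ratio_mult_vf_name = 25  << 2 = 100
   so the vector loop runs 25 times and covers the first 100 iterations.  */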
static void
vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
                                 tree *ni_name_ptr,
                                 tree *ratio_mult_vf_name_ptr,
                                 tree *ratio_name_ptr)
{
  edge pe;
  basic_block new_bb;
  tree stmt, var;
  tree ni_name;
  tree ratio_name;
  tree ratio_mult_vf_name;
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  tree ni = LOOP_VINFO_NITERS (loop_vinfo);
  int vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
  tree log_vf = build_int_cst (unsigned_type_node, exact_log2 (vf));

  pe = loop_preheader_edge (loop);

  /* Generate temporary variable that contains
     number of iterations loop executes.  */

  ni_name = vect_build_loop_niters (loop_vinfo);

  /* Create: ratio = ni >> log2(vf)  */

  var = create_tmp_var (TREE_TYPE (ni), "bnd");
  add_referenced_tmp_var (var);
  ratio_name = make_ssa_name (var, NULL_TREE);
  stmt = build2 (MODIFY_EXPR, void_type_node, ratio_name,
           build2 (RSHIFT_EXPR, TREE_TYPE (ni_name), ni_name, log_vf));
  SSA_NAME_DEF_STMT (ratio_name) = stmt;

  pe = loop_preheader_edge (loop);
  new_bb = bsi_insert_on_edge_immediate (pe, stmt);
  gcc_assert (!new_bb);

  /* Create: ratio_mult_vf = ratio << log2 (vf).  */

  var = create_tmp_var (TREE_TYPE (ni), "ratio_mult_vf");
  add_referenced_tmp_var (var);
  ratio_mult_vf_name = make_ssa_name (var, NULL_TREE);
  stmt = build2 (MODIFY_EXPR, void_type_node, ratio_mult_vf_name,
           build2 (LSHIFT_EXPR, TREE_TYPE (ratio_name), ratio_name, log_vf));
  SSA_NAME_DEF_STMT (ratio_mult_vf_name) = stmt;

  pe = loop_preheader_edge (loop);
  new_bb = bsi_insert_on_edge_immediate (pe, stmt);
  gcc_assert (!new_bb);

  *ni_name_ptr = ni_name;
  *ratio_mult_vf_name_ptr = ratio_mult_vf_name;
  *ratio_name_ptr = ratio_name;
}
/* Function update_vuses_to_preheader.

   Input:
   STMT - a statement with potential VUSEs.
   LOOP - the loop whose preheader will contain STMT.

   It's possible to vectorize a loop even though an SSA_NAME from a VUSE
   appears to be defined in a V_MAY_DEF in another statement in a loop.
   One such case is when the VUSE is at the dereference of a __restricted__
   pointer in a load and the V_MAY_DEF is at the dereference of a different
   __restricted__ pointer in a store.  Vectorization may result in
   copy_virtual_uses being called to copy the problematic VUSE to a new
   statement that is being inserted in the loop preheader.  This procedure
   is called to change the SSA_NAME in the new statement's VUSE from the
   SSA_NAME updated in the loop to the related SSA_NAME available on the
   path entering the loop.

   When this function is called, we have the following situation
   (sketched; only the relevant virtual operands are shown):

     loop preheader:
        S1: vload                      # vuse <name1>
     loop:
        # name1 = phi < name0 , name2>
        ...
        S3: vstore                     # name2 = vdef <name1>

   Stmt S1 was created in the loop preheader block as part of misaligned-load
   handling.  This function fixes the name of the vuse of S1 from 'name1' to
   'name0'.  */

static void
update_vuses_to_preheader (tree stmt, struct loop *loop)
{
  basic_block header_bb = loop->header;
  edge preheader_e = loop_preheader_edge (loop);
  ssa_op_iter iter;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_VUSE)
    {
      tree ssa_name = USE_FROM_PTR (use_p);
      tree def_stmt = SSA_NAME_DEF_STMT (ssa_name);
      tree name_var = SSA_NAME_VAR (ssa_name);
      basic_block bb = bb_for_stmt (def_stmt);

      /* For a use before any definitions, def_stmt is a NOP_EXPR.  */
      if (!IS_EMPTY_STMT (def_stmt)
          && flow_bb_inside_loop_p (loop, bb))
        {
          /* If the block containing the statement defining the SSA_NAME
             is in the loop then it's necessary to find the definition
             outside the loop using the PHI nodes of the header.  */
          tree phi;
          bool updated = false;

          for (phi = phi_nodes (header_bb); phi; phi = TREE_CHAIN (phi))
            {
              if (SSA_NAME_VAR (PHI_RESULT (phi)) == name_var)
                {
                  SET_USE (use_p, PHI_ARG_DEF (phi, preheader_e->dest_idx));
                  updated = true;
                  break;
                }
            }
          gcc_assert (updated);
        }
    }
}
/* Function vect_update_ivs_after_vectorizer.

   "Advance" the induction variables of LOOP to the value they should take
   after the execution of LOOP.  This is currently necessary because the
   vectorizer does not handle induction variables that are used after the
   loop.  Such a situation occurs when the last iterations of LOOP are
   peeled, because:
   1. We introduced new uses after LOOP for IVs that were not originally used
      after LOOP: the IVs of LOOP are now used by an epilog loop.
   2. LOOP is going to be vectorized; this means that it will iterate N/VF
      times, whereas the loop IVs should be bumped N times.

   Input:
   - LOOP - a loop that is going to be vectorized.  The last few iterations
     of LOOP were peeled.
   - NITERS - the number of iterations that LOOP executes (before it is
     vectorized).  i.e, the number of times the ivs should be bumped.
   - UPDATE_E - a successor edge of LOOP->exit that is on the (only) path
     coming out from LOOP on which there are uses of the LOOP ivs
     (this is the path from LOOP->exit to epilog_loop->preheader).

   The new definitions of the ivs are placed in LOOP->exit.
   The phi args associated with the edge UPDATE_E in the bb
   UPDATE_E->dest are updated accordingly.

   Assumption 1: Like the rest of the vectorizer, this function assumes
   a single loop exit that has a single predecessor.

   Assumption 2: The phi nodes in the LOOP header and in update_bb are
   organized in the same order.

   Assumption 3: The access function of the ivs is simple enough (see
   vect_can_advance_ivs_p).  This assumption will be relaxed in the future.

   Assumption 4: Exactly one of the successors of LOOP exit-bb is on a path
   coming out of LOOP on which the ivs of LOOP are used (this is the path
   that leads to the epilog loop; other paths skip the epilog loop).  This
   path starts with the edge UPDATE_E, and its destination (denoted update_bb)
   needs to have its phis updated.  */
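/* Worked example (numbers invented): for an IV with initial value init = 0
   and step 2 in a loop that originally ran niters = 103 times, the code
   below emits, in the exit block,
     ni = 103 * 2 + 0
   and rewrites the corresponding phi argument on UPDATE_E so that uses after
   the loop (e.g. in the epilog loop) see the value 206.  */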
static void
vect_update_ivs_after_vectorizer (loop_vec_info loop_vinfo, tree niters,
                                  edge update_e)
{
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  basic_block exit_bb = loop->single_exit->dest;
  tree phi, phi1;
  basic_block update_bb = update_e->dest;

  /* gcc_assert (vect_can_advance_ivs_p (loop_vinfo)); */

  /* Make sure there exists a single-predecessor exit bb:  */
  gcc_assert (single_pred_p (exit_bb));

  for (phi = phi_nodes (loop->header), phi1 = phi_nodes (update_bb);
       phi && phi1;
       phi = PHI_CHAIN (phi), phi1 = PHI_CHAIN (phi1))
    {
      tree access_fn = NULL;
      tree evolution_part;
      tree init_expr;
      tree step_expr;
      tree var, stmt, ni, ni_name;
      block_stmt_iterator last_bsi;

      /* Skip virtual phi's.  */
      if (!is_gimple_reg (SSA_NAME_VAR (PHI_RESULT (phi))))
        {
          if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
            fprintf (vect_dump, "virtual phi. skip.");
          continue;
        }

      access_fn = analyze_scalar_evolution (loop, PHI_RESULT (phi));
      gcc_assert (access_fn);
      evolution_part =
        unshare_expr (evolution_part_in_loop_num (access_fn, loop->num));
      gcc_assert (evolution_part != NULL_TREE);

      /* FORNOW: We do not support IVs whose evolution function is a polynomial
         of degree >= 2 or exponential.  */
      gcc_assert (!tree_is_chrec (evolution_part));

      step_expr = evolution_part;
      init_expr = unshare_expr (initial_condition_in_loop_num (access_fn,
                                                               loop->num));

      ni = build2 (PLUS_EXPR, TREE_TYPE (init_expr),
                   build2 (MULT_EXPR, TREE_TYPE (niters),
                           niters, step_expr), init_expr);

      var = create_tmp_var (TREE_TYPE (init_expr), "tmp");
      add_referenced_tmp_var (var);

      ni_name = force_gimple_operand (ni, &stmt, false, var);

      /* Insert stmt into exit_bb.  */
      last_bsi = bsi_last (exit_bb);
      if (stmt)
        bsi_insert_before (&last_bsi, stmt, BSI_SAME_STMT);

      /* Fix phi expressions in the successor bb.  */
      SET_PHI_ARG_DEF (phi1, update_e->dest_idx, ni_name);
    }
}
/* Function vect_do_peeling_for_loop_bound.

   Peel the last iterations of the loop represented by LOOP_VINFO.
   The peeled iterations form a new epilog loop.  Given that the loop now
   iterates NITERS times, the new epilog loop iterates
   NITERS % VECTORIZATION_FACTOR times.

   The original loop will later be made to iterate
   NITERS / VECTORIZATION_FACTOR times (this value is placed into RATIO).  */
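/* Worked example (numbers invented): with NITERS = 103 and a vectorization
   factor of 4, the vectorized loop executes 103 / 4 = 25 iterations (RATIO),
   covering 100 scalar iterations, and the peeled epilog loop executes the
   remaining 103 % 4 = 3 iterations in scalar form.  */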
static void
vect_do_peeling_for_loop_bound (loop_vec_info loop_vinfo, tree *ratio,
                                struct loops *loops)
{
  tree ni_name, ratio_mult_vf_name;
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  struct loop *new_loop;
  edge update_e;
  basic_block preheader;
#ifdef ENABLE_CHECKING
  int loop_num;
#endif

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    fprintf (vect_dump, "=== vect_transform_for_unknown_loop_bound ===");

  /* Generate the following variables on the preheader of original loop:

     ni_name = number of iteration the original loop executes
     ratio = ni_name / vf
     ratio_mult_vf_name = ratio * vf  */
  vect_generate_tmps_on_preheader (loop_vinfo, &ni_name,
                                   &ratio_mult_vf_name, ratio);

#ifdef ENABLE_CHECKING
  loop_num = loop->num;
#endif
  new_loop = slpeel_tree_peel_loop_to_edge (loop, loops, loop->single_exit,
                                            ratio_mult_vf_name, ni_name, false);
#ifdef ENABLE_CHECKING
  gcc_assert (new_loop);
  gcc_assert (loop_num == loop->num);
  slpeel_verify_cfg_after_peeling (loop, new_loop);
#endif

  /* A guard that controls whether the new_loop is to be executed or skipped
     is placed in LOOP->exit.  LOOP->exit therefore has two successors - one
     is the preheader of NEW_LOOP, where the IVs from LOOP are used.  The other
     is a bb after NEW_LOOP, where these IVs are not used.  Find the edge that
     is on the path where the LOOP IVs are used and need to be updated.  */

  preheader = loop_preheader_edge (new_loop)->src;
  if (EDGE_PRED (preheader, 0)->src == loop->single_exit->dest)
    update_e = EDGE_PRED (preheader, 0);
  else
    update_e = EDGE_PRED (preheader, 1);

  /* Update IVs of original loop as if they were advanced
     by ratio_mult_vf_name steps.  */
  vect_update_ivs_after_vectorizer (loop_vinfo, ratio_mult_vf_name, update_e);

  /* After peeling we have to reset scalar evolution analyzer.  */
  scev_reset ();
}
/* Function vect_gen_niters_for_prolog_loop.

   Set the number of iterations for the loop represented by LOOP_VINFO
   to the minimum between LOOP_NITERS (the original iteration count of the loop)
   and the misalignment of DR - the data reference recorded in
   LOOP_VINFO_UNALIGNED_DR (LOOP_VINFO).  As a result, after the execution of
   this loop, the data reference DR will refer to an aligned location.

   The following computation is generated:

   If the misalignment of DR is known at compile time:
     addr_mis = int mis = DR_MISALIGNMENT (dr);
   Else, compute address misalignment in bytes:
     addr_mis = addr & (vectype_size - 1)

   prolog_niters = min ( LOOP_NITERS , (VF - addr_mis/elem_size)&(VF-1) )

   (elem_size = element type size; an element is the scalar element
    whose type is the inner type of the vectype)  */
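/* Worked example (numbers invented): for V4SI (VF = 4, elem_size = 4 bytes)
   and a data reference whose address is misaligned by addr_mis = 8 bytes,
     addr_mis / elem_size = 2
     (VF - 2) & (VF - 1)  = 2
   so the prolog loop runs 2 scalar iterations, after which the access is
   16-byte aligned (assuming LOOP_NITERS >= 2).  */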
static tree
vect_gen_niters_for_prolog_loop (loop_vec_info loop_vinfo, tree loop_niters)
{
  struct data_reference *dr = LOOP_VINFO_UNALIGNED_DR (loop_vinfo);
  int vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  tree var, stmt;
  tree iters, iters_name;
  edge pe;
  basic_block new_bb;
  tree dr_stmt = DR_STMT (dr);
  stmt_vec_info stmt_info = vinfo_for_stmt (dr_stmt);
  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
  int vectype_align = TYPE_ALIGN (vectype) / BITS_PER_UNIT;
  tree vf_minus_1 = build_int_cst (unsigned_type_node, vf - 1);
  tree niters_type = TREE_TYPE (loop_niters);

  pe = loop_preheader_edge (loop);

  if (LOOP_PEELING_FOR_ALIGNMENT (loop_vinfo) > 0)
    {
      int byte_misalign = LOOP_PEELING_FOR_ALIGNMENT (loop_vinfo);
      int element_size = vectype_align/vf;
      int elem_misalign = byte_misalign / element_size;

      if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
        fprintf (vect_dump, "known alignment = %d.", byte_misalign);
      iters = build_int_cst (niters_type, (vf - elem_misalign)&(vf-1));
    }
  else
    {
      tree new_stmts = NULL_TREE;
      tree start_addr =
        vect_create_addr_base_for_vector_ref (dr_stmt, &new_stmts, NULL_TREE);
      tree ptr_type = TREE_TYPE (start_addr);
      tree size = TYPE_SIZE (ptr_type);
      tree type = lang_hooks.types.type_for_size (tree_low_cst (size, 1), 1);
      tree vectype_size_minus_1 = build_int_cst (type, vectype_align - 1);
      tree elem_size_log =
        build_int_cst (unsigned_type_node, exact_log2 (vectype_align/vf));
      tree vf_tree = build_int_cst (unsigned_type_node, vf);
      tree byte_misalign;
      tree elem_misalign;

      new_bb = bsi_insert_on_edge_immediate (pe, new_stmts);
      gcc_assert (!new_bb);

      /* Create:  byte_misalign = addr & (vectype_size - 1)  */
      byte_misalign =
        build2 (BIT_AND_EXPR, type, start_addr, vectype_size_minus_1);

      /* Create:  elem_misalign = byte_misalign / element_size  */
      elem_misalign =
        build2 (RSHIFT_EXPR, unsigned_type_node, byte_misalign, elem_size_log);

      /* Create:  (niters_type) (VF - elem_misalign)&(VF - 1)  */
      iters = build2 (MINUS_EXPR, unsigned_type_node, vf_tree, elem_misalign);
      iters = build2 (BIT_AND_EXPR, unsigned_type_node, iters, vf_minus_1);
      iters = fold_convert (niters_type, iters);
    }

  /* Create:  prolog_loop_niters = min (iters, loop_niters)  */
  /* If the loop bound is known at compile time we already verified that it is
     greater than vf; since the misalignment ('iters') is at most vf, there's
     no need to generate the MIN_EXPR in this case.  */
  if (TREE_CODE (loop_niters) != INTEGER_CST)
    iters = build2 (MIN_EXPR, niters_type, iters, loop_niters);

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    {
      fprintf (vect_dump, "niters for prolog loop: ");
      print_generic_expr (vect_dump, iters, TDF_SLIM);
    }

  var = create_tmp_var (niters_type, "prolog_loop_niters");
  add_referenced_tmp_var (var);
  iters_name = force_gimple_operand (iters, &stmt, false, var);

  /* Insert stmt on loop preheader edge.  */
  if (stmt)
    {
      basic_block new_bb = bsi_insert_on_edge_immediate (pe, stmt);
      gcc_assert (!new_bb);
    }

  return iters_name;
}
/* Function vect_update_init_of_dr.

   NITERS iterations were peeled from LOOP.  DR represents a data reference
   in LOOP.  This function updates the information recorded in DR to
   account for the fact that the first NITERS iterations had already been
   executed.  Specifically, it updates the OFFSET field of stmt_info.  */

static void
vect_update_init_of_dr (struct data_reference *dr, tree niters)
{
  stmt_vec_info stmt_info = vinfo_for_stmt (DR_STMT (dr));
  tree offset = STMT_VINFO_VECT_INIT_OFFSET (stmt_info);

  niters = fold (build2 (MULT_EXPR, TREE_TYPE (niters), niters,
                         STMT_VINFO_VECT_STEP (stmt_info)));
  offset = fold (build2 (PLUS_EXPR, TREE_TYPE (offset), offset, niters));
  STMT_VINFO_VECT_INIT_OFFSET (stmt_info) = offset;
}
/* Function vect_update_inits_of_drs.

   NITERS iterations were peeled from the loop represented by LOOP_VINFO.
   This function updates the information recorded for the data references in
   the loop to account for the fact that the first NITERS iterations had
   already been executed.  Specifically, it updates the initial_condition of
   the access_function of all the data_references in the loop.  */

static void
vect_update_inits_of_drs (loop_vec_info loop_vinfo, tree niters)
{
  unsigned int i;
  varray_type loop_write_datarefs = LOOP_VINFO_DATAREF_WRITES (loop_vinfo);
  varray_type loop_read_datarefs = LOOP_VINFO_DATAREF_READS (loop_vinfo);

  if (vect_dump && (dump_flags & TDF_DETAILS))
    fprintf (vect_dump, "=== vect_update_inits_of_drs ===");

  for (i = 0; i < VARRAY_ACTIVE_SIZE (loop_write_datarefs); i++)
    {
      struct data_reference *dr = VARRAY_GENERIC_PTR (loop_write_datarefs, i);
      vect_update_init_of_dr (dr, niters);
    }

  for (i = 0; i < VARRAY_ACTIVE_SIZE (loop_read_datarefs); i++)
    {
      struct data_reference *dr = VARRAY_GENERIC_PTR (loop_read_datarefs, i);
      vect_update_init_of_dr (dr, niters);
    }
}
/* Function vect_do_peeling_for_alignment.

   Peel the first 'niters' iterations of the loop represented by LOOP_VINFO.
   'niters' is set to the misalignment of one of the data references in the
   loop, thereby forcing it to refer to an aligned location at the beginning
   of the execution of this loop.  The data reference for which we are
   peeling is recorded in LOOP_VINFO_UNALIGNED_DR.  */
static void
vect_do_peeling_for_alignment (loop_vec_info loop_vinfo, struct loops *loops)
{
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  tree niters_of_prolog_loop, ni_name;
  tree n_iters;
  struct loop *new_loop;

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    fprintf (vect_dump, "=== vect_do_peeling_for_alignment ===");

  ni_name = vect_build_loop_niters (loop_vinfo);
  niters_of_prolog_loop = vect_gen_niters_for_prolog_loop (loop_vinfo, ni_name);

  /* Peel the prolog loop and iterate it niters_of_prolog_loop times.  */
  new_loop =
    slpeel_tree_peel_loop_to_edge (loop, loops, loop_preheader_edge (loop),
                                   niters_of_prolog_loop, ni_name, true);
#ifdef ENABLE_CHECKING
  gcc_assert (new_loop);
  slpeel_verify_cfg_after_peeling (new_loop, loop);
#endif

  /* Update number of times loop executes.  */
  n_iters = LOOP_VINFO_NITERS (loop_vinfo);
  LOOP_VINFO_NITERS (loop_vinfo) = fold (build2 (MINUS_EXPR,
                TREE_TYPE (n_iters), n_iters, niters_of_prolog_loop));

  /* Update the init conditions of the access functions of all data refs.  */
  vect_update_inits_of_drs (loop_vinfo, niters_of_prolog_loop);

  /* After peeling we have to reset scalar evolution analyzer.  */
  scev_reset ();
}
/* Function vect_transform_loop.

   The analysis phase has determined that the loop is vectorizable.
   Vectorize the loop - create vectorized stmts to replace the scalar
   stmts in the loop, and update the loop exit condition.  */
void
vect_transform_loop (loop_vec_info loop_vinfo,
                     struct loops *loops ATTRIBUTE_UNUSED)
{
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
  int nbbs = loop->num_nodes;
  block_stmt_iterator si;
  int i;
  tree ratio = NULL;
  int vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);

  if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
    fprintf (vect_dump, "=== vect_transform_loop ===");

  /* Peel the loop if there are data refs with unknown alignment.
     Only one data ref with unknown store is allowed.  */

  if (LOOP_PEELING_FOR_ALIGNMENT (loop_vinfo))
    vect_do_peeling_for_alignment (loop_vinfo, loops);

  /* If the loop has a symbolic number of iterations 'n' (i.e. it's not a
     compile time constant), or it is a constant that doesn't divide by the
     vectorization factor, then an epilog loop needs to be created.
     We therefore duplicate the loop: the original loop will be vectorized,
     and will compute the first (n/VF) iterations.  The second copy of the loop
     will remain scalar and will compute the remaining (n%VF) iterations.
     (VF is the vectorization factor).  */

  if (!LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)
      || (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)
          && LOOP_VINFO_INT_NITERS (loop_vinfo) % vectorization_factor != 0))
    vect_do_peeling_for_loop_bound (loop_vinfo, &ratio, loops);
  else
    ratio = build_int_cst (TREE_TYPE (LOOP_VINFO_NITERS (loop_vinfo)),
                LOOP_VINFO_INT_NITERS (loop_vinfo) / vectorization_factor);

  /* 1) Make sure the loop header has exactly two entries
     2) Make sure we have a preheader basic block.  */

  gcc_assert (EDGE_COUNT (loop->header->preds) == 2);

  loop_split_edge_with (loop_preheader_edge (loop), NULL);

  /* FORNOW: the vectorizer supports only loops whose body consists
     of one basic block (header + empty latch).  When the vectorizer
     supports more involved loop forms, the order in which the BBs are
     traversed will need to be reconsidered.  */

  for (i = 0; i < nbbs; i++)
    {
      basic_block bb = bbs[i];

      for (si = bsi_start (bb); !bsi_end_p (si);)
        {
          tree stmt = bsi_stmt (si);
          stmt_vec_info stmt_info;
          bool is_store;

          if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
            {
              fprintf (vect_dump, "------>vectorizing statement: ");
              print_generic_expr (vect_dump, stmt, TDF_SLIM);
            }
          stmt_info = vinfo_for_stmt (stmt);
          gcc_assert (stmt_info);
          if (!STMT_VINFO_RELEVANT_P (stmt_info))
            {
              bsi_next (&si);
              continue;
            }
#ifdef ENABLE_CHECKING
          /* FORNOW: Verify that all stmts operate on the same number of
                     units and no inner unrolling is necessary.  */
          gcc_assert
            (TYPE_VECTOR_SUBPARTS (STMT_VINFO_VECTYPE (stmt_info))
             == vectorization_factor);
#endif
          /* -------- vectorize statement ------------ */
          if (vect_print_dump_info (REPORT_DETAILS, UNKNOWN_LOC))
            fprintf (vect_dump, "transform statement.");

          is_store = vect_transform_stmt (stmt, &si);
          if (is_store)
            {
              /* Free the attached stmt_vec_info and remove the stmt.  */
              stmt_ann_t ann = stmt_ann (stmt);
              set_stmt_info (ann, NULL);
              bsi_remove (&si);
              continue;
            }

          bsi_next (&si);
        }
    }

  slpeel_make_loop_iterate_ntimes (loop, ratio);

  /* The memory tags and pointers in vectorized statements need to
     have their SSA forms updated.  FIXME, why can't this be delayed
     until all the loops have been transformed?  */
  update_ssa (TODO_update_ssa);

  if (vect_print_dump_info (REPORT_VECTORIZED_LOOPS, LOOP_LOC (loop_vinfo)))
    fprintf (vect_dump, "LOOP VECTORIZED.");
}