From eab42b582d062c2215ba1b4968c51195298564ff Mon Sep 17 00:00:00 2001
From: rsandifo
Date: Thu, 14 Sep 2017 15:46:08 +0000
Subject: [PATCH] Use vec<> in build_vector

This patch makes build_vector take the elements as a vec<> rather than
a tree *.  This is useful for SVE because it bundles the number of
elements with the elements themselves, and enforces the fact that the
number is constant.  Also, I think things like the folds can be used
with any generic GNU vector, not just those that match machine vectors,
so the arguments to XALLOCAVEC had no clear limit.

2017-09-14  Richard Sandiford
	    Alan Hayward
	    David Sherwood

gcc/
	* tree.h (build_vector): Take a vec<tree> instead of a tree *.
	* tree.c (build_vector): Likewise.
	(build_vector_from_ctor): Update accordingly.
	(build_vector_from_val): Likewise.
	* gimple-fold.c (gimple_fold_stmt_to_constant_1): Likewise.
	* tree-ssa-forwprop.c (simplify_vector_constructor): Likewise.
	* tree-vect-generic.c (add_rshift): Likewise.
	(expand_vector_divmod): Likewise.
	(optimize_vector_constructor): Likewise.
	* tree-vect-slp.c (vect_get_constant_vectors): Likewise.
	(vect_transform_slp_perm_load): Likewise.
	(vect_schedule_slp_instance): Likewise.
	* tree-vect-stmts.c (vectorizable_bswap): Likewise.
	(vectorizable_call): Likewise.
	(vect_gen_perm_mask_any): Likewise.  Add elements in order.
	* expmed.c (make_tree): Likewise.
	* fold-const.c (fold_negate_expr_1): Use auto_vec<tree> when building
	a vector passed to build_vector.
	(fold_convert_const): Likewise.
	(exact_inverse): Likewise.
	(fold_ternary_loc): Likewise.
	(fold_relational_const): Likewise.
	(const_binop): Likewise.  Use VECTOR_CST_ELT directly when operating
	on VECTOR_CSTs, rather than going through vec_cst_ctor_to_array.
	(const_unop): Likewise.  Store the reduction accumulator in a
	variable rather than an array.
	(vec_cst_ctor_to_array): Take the number of elements as a parameter.
	(fold_vec_perm): Update calls accordingly.  Use auto_vec<tree> for
	the new vector, rather than constructing it after the input arrays.
	(native_interpret_vector): Use auto_vec<tree> when building
	a vector passed to build_vector.  Add elements in order.
	* tree-vect-loop.c (get_initial_defs_for_reduction): Use
	auto_vec<tree> when building a vector passed to build_vector.
	(vect_create_epilog_for_reduction): Likewise.
	(vectorizable_induction): Likewise.
	(get_initial_def_for_reduction): Likewise.  Fix indentation of
	case statements.
	* config/sparc/sparc.c (sparc_handle_vis_mul8x16): Change n_elts
	to a vec<tree> *.
	(sparc_fold_builtin): Use auto_vec<tree> when building a vector
	passed to build_vector.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@252760 138bc75d-0d04-0410-961f-82ee72b054a4
---
 gcc/ChangeLog            |  46 ++++++++++++
 gcc/config/sparc/sparc.c |  29 ++++----
 gcc/expmed.c             |   7 +-
 gcc/fold-const.c         | 177 +++++++++++++++++++++++------------------------
 gcc/gimple-fold.c        |  10 +--
 gcc/tree-ssa-forwprop.c  |   6 +-
 gcc/tree-vect-generic.c  |  42 +++++------
 gcc/tree-vect-loop.c     |  64 +++++++++--------
 gcc/tree-vect-slp.c      |  19 ++---
 gcc/tree-vect-stmts.c    |  28 ++++----
 gcc/tree.c               |  27 ++++----
 gcc/tree.h               |   2 +-
 12 files changed, 253 insertions(+), 204 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index fb9c448efaf..b52eced6e1a 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -2,6 +2,52 @@
 	    Alan Hayward
 	    David Sherwood
 
+	* tree.h (build_vector): Take a vec<tree> instead of a tree *.
+	* tree.c (build_vector): Likewise.
+	(build_vector_from_ctor): Update accordingly.
+	(build_vector_from_val): Likewise.
+	* gimple-fold.c (gimple_fold_stmt_to_constant_1): Likewise.
+	* tree-ssa-forwprop.c (simplify_vector_constructor): Likewise.
+	* tree-vect-generic.c (add_rshift): Likewise.
+	(expand_vector_divmod): Likewise.
+	(optimize_vector_constructor): Likewise.
+	* tree-vect-slp.c (vect_get_constant_vectors): Likewise.
+	(vect_transform_slp_perm_load): Likewise.
+	(vect_schedule_slp_instance): Likewise.
+	* tree-vect-stmts.c (vectorizable_bswap): Likewise.
+	(vectorizable_call): Likewise.
+	(vect_gen_perm_mask_any): Likewise.  Add elements in order.
+	* expmed.c (make_tree): Likewise.
+	* fold-const.c (fold_negate_expr_1): Use auto_vec<tree> when building
+	a vector passed to build_vector.
+	(fold_convert_const): Likewise.
+	(exact_inverse): Likewise.
+	(fold_ternary_loc): Likewise.
+	(fold_relational_const): Likewise.
+	(const_binop): Likewise.  Use VECTOR_CST_ELT directly when operating
+	on VECTOR_CSTs, rather than going through vec_cst_ctor_to_array.
+	(const_unop): Likewise.  Store the reduction accumulator in a
+	variable rather than an array.
+	(vec_cst_ctor_to_array): Take the number of elements as a parameter.
+	(fold_vec_perm): Update calls accordingly.  Use auto_vec<tree> for
+	the new vector, rather than constructing it after the input arrays.
+	(native_interpret_vector): Use auto_vec<tree> when building
+	a vector passed to build_vector.  Add elements in order.
+	* tree-vect-loop.c (get_initial_defs_for_reduction): Use
+	auto_vec<tree> when building a vector passed to build_vector.
+	(vect_create_epilog_for_reduction): Likewise.
+	(vectorizable_induction): Likewise.
+	(get_initial_def_for_reduction): Likewise.  Fix indentation of
+	case statements.
+	* config/sparc/sparc.c (sparc_handle_vis_mul8x16): Change n_elts
+	to a vec<tree> *.
+	(sparc_fold_builtin): Use auto_vec<tree> when building a vector
+	passed to build_vector.
+
+2017-09-14  Richard Sandiford
+	    Alan Hayward
+	    David Sherwood
+
 	* tree-core.h (tree_base::u): Add an "nelts" field.
 	(tree_vector): Use VECTOR_CST_NELTS as the length.
 	* tree.c (tree_size): Likewise.
diff --git a/gcc/config/sparc/sparc.c b/gcc/config/sparc/sparc.c
index aa66f24f1b3..53689a1ccfa 100644
--- a/gcc/config/sparc/sparc.c
+++ b/gcc/config/sparc/sparc.c
@@ -11446,7 +11446,7 @@ sparc_vis_mul8x16 (int e8, int e16)
    the result into the array N_ELTS, whose elements are of INNER_TYPE.  */
 
 static void
-sparc_handle_vis_mul8x16 (tree *n_elts, enum sparc_builtins fncode,
+sparc_handle_vis_mul8x16 (vec<tree> *n_elts, enum sparc_builtins fncode,
 			  tree inner_type, tree cst0, tree cst1)
 {
   unsigned i, num = VECTOR_CST_NELTS (cst0);
@@ -11460,7 +11460,7 @@ sparc_handle_vis_mul8x16 (tree *n_elts, enum sparc_builtins fncode,
 	  int val
 	    = sparc_vis_mul8x16 (TREE_INT_CST_LOW (VECTOR_CST_ELT (cst0, i)),
 				 TREE_INT_CST_LOW (VECTOR_CST_ELT (cst1, i)));
-	  n_elts[i] = build_int_cst (inner_type, val);
+	  n_elts->quick_push (build_int_cst (inner_type, val));
 	}
       break;
 
@@ -11472,7 +11472,7 @@ sparc_handle_vis_mul8x16 (tree *n_elts, enum sparc_builtins fncode,
 	  int val
 	    = sparc_vis_mul8x16 (TREE_INT_CST_LOW (VECTOR_CST_ELT (cst0, i)),
 				 scale);
-	  n_elts[i] = build_int_cst (inner_type, val);
+	  n_elts->quick_push (build_int_cst (inner_type, val));
 	}
       break;
 
@@ -11484,7 +11484,7 @@ sparc_handle_vis_mul8x16 (tree *n_elts, enum sparc_builtins fncode,
 	  int val
 	    = sparc_vis_mul8x16 (TREE_INT_CST_LOW (VECTOR_CST_ELT (cst0, i)),
 				 scale);
-	  n_elts[i] = build_int_cst (inner_type, val);
+	  n_elts->quick_push (build_int_cst (inner_type, val));
 	}
       break;
 
@@ -11533,14 +11533,15 @@ sparc_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED,
       if (TREE_CODE (arg0) == VECTOR_CST)
 	{
 	  tree inner_type = TREE_TYPE (rtype);
-	  tree *n_elts;
 	  unsigned i;
 
-	  n_elts = XALLOCAVEC (tree, VECTOR_CST_NELTS (arg0));
+	  auto_vec<tree> n_elts (VECTOR_CST_NELTS (arg0));
 	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
-	    n_elts[i] = build_int_cst (inner_type,
-				       TREE_INT_CST_LOW
-				       (VECTOR_CST_ELT (arg0, i)) << 4);
+	    {
+	      unsigned HOST_WIDE_INT val
+		= TREE_INT_CST_LOW (VECTOR_CST_ELT (arg0, i));
+	      n_elts.quick_push (build_int_cst (inner_type, val << 4));
+	    }
 	  return build_vector (rtype, n_elts);
 	}
       break;
@@ -11556,8 +11557,8 @@ sparc_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED,
       if (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
 	{
 	  tree inner_type = TREE_TYPE (rtype);
-	  tree *n_elts = XALLOCAVEC (tree, VECTOR_CST_NELTS (arg0));
-	  sparc_handle_vis_mul8x16 (n_elts, code, inner_type, arg0, arg1);
+	  auto_vec<tree> n_elts (VECTOR_CST_NELTS (arg0));
+	  sparc_handle_vis_mul8x16 (&n_elts, code, inner_type, arg0, arg1);
 	  return build_vector (rtype, n_elts);
 	}
       break;
@@ -11570,12 +11571,12 @@ sparc_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED,
       if (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
 	{
-	  tree *n_elts = XALLOCAVEC (tree, 2 * VECTOR_CST_NELTS (arg0));
+	  auto_vec<tree> n_elts (2 * VECTOR_CST_NELTS (arg0));
 	  unsigned i;
 
 	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
 	    {
-	      n_elts[2*i] = VECTOR_CST_ELT (arg0, i);
-	      n_elts[2*i+1] = VECTOR_CST_ELT (arg1, i);
+	      n_elts.quick_push (VECTOR_CST_ELT (arg0, i));
+	      n_elts.quick_push (VECTOR_CST_ELT (arg1, i));
 	    }
 
 	  return build_vector (rtype, n_elts);
diff --git a/gcc/expmed.c b/gcc/expmed.c
index f81163e15a5..b9d31921047 100644
--- a/gcc/expmed.c
+++ b/gcc/expmed.c
@@ -5180,15 +5180,14 @@ make_tree (tree type, rtx x)
     {
       int units = CONST_VECTOR_NUNITS (x);
       tree itype = TREE_TYPE (type);
-      tree *elts;
       int i;
 
       /* Build a tree with vector elements.
*/ - elts = XALLOCAVEC (tree, units); - for (i = units - 1; i >= 0; --i) + auto_vec elts (units); + for (i = 0; i < units; ++i) { rtx elt = CONST_VECTOR_ELT (x, i); - elts[i] = make_tree (itype, elt); + elts.quick_push (make_tree (itype, elt)); } return build_vector (type, elts); diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 66e7cc7b189..013081da673 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -136,7 +136,6 @@ static tree fold_not_const (const_tree, tree); static tree fold_relational_const (enum tree_code, tree, tree, tree); static tree fold_convert_const (enum tree_code, tree, tree); static tree fold_view_convert_expr (tree, tree); -static bool vec_cst_ctor_to_array (tree, tree *); static tree fold_negate_expr (location_t, tree); @@ -565,13 +564,14 @@ fold_negate_expr_1 (location_t loc, tree t) case VECTOR_CST: { int count = VECTOR_CST_NELTS (t), i; - tree *elts = XALLOCAVEC (tree, count); + auto_vec elts (count); for (i = 0; i < count; i++) { - elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i)); - if (elts[i] == NULL_TREE) + tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i)); + if (elt == NULL_TREE) return NULL_TREE; + elts.quick_push (elt); } return build_vector (type, elts); @@ -1414,19 +1414,20 @@ const_binop (enum tree_code code, tree arg1, tree arg2) { tree type = TREE_TYPE (arg1); int count = VECTOR_CST_NELTS (arg1), i; - tree *elts = XALLOCAVEC (tree, count); + auto_vec elts (count); for (i = 0; i < count; i++) { tree elem1 = VECTOR_CST_ELT (arg1, i); tree elem2 = VECTOR_CST_ELT (arg2, i); - elts[i] = const_binop (code, elem1, elem2); + tree elt = const_binop (code, elem1, elem2); /* It is possible that const_binop cannot handle the given code and return NULL_TREE */ - if (elts[i] == NULL_TREE) + if (elt == NULL_TREE) return NULL_TREE; + elts.quick_push (elt); } return build_vector (type, elts); @@ -1438,18 +1439,19 @@ const_binop (enum tree_code code, tree arg1, tree arg2) { tree type = TREE_TYPE (arg1); int count = VECTOR_CST_NELTS (arg1), i; - tree *elts = XALLOCAVEC (tree, count); + auto_vec elts (count); for (i = 0; i < count; i++) { tree elem1 = VECTOR_CST_ELT (arg1, i); - elts[i] = const_binop (code, elem1, arg2); + tree elt = const_binop (code, elem1, arg2); /* It is possible that const_binop cannot handle the given code and return NULL_TREE. */ - if (elts[i] == NULL_TREE) + if (elt == NULL_TREE) return NULL_TREE; + elts.quick_push (elt); } return build_vector (type, elts); @@ -1481,7 +1483,6 @@ const_binop (enum tree_code code, tree type, tree arg1, tree arg2) case VEC_PACK_TRUNC_EXPR: case VEC_PACK_FIX_TRUNC_EXPR: { - tree *elts; unsigned int out_nelts, in_nelts, i; if (TREE_CODE (arg1) != VECTOR_CST @@ -1493,18 +1494,18 @@ const_binop (enum tree_code code, tree type, tree arg1, tree arg2) gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2) && out_nelts == TYPE_VECTOR_SUBPARTS (type)); - elts = XALLOCAVEC (tree, out_nelts); - if (!vec_cst_ctor_to_array (arg1, elts) - || !vec_cst_ctor_to_array (arg2, elts + in_nelts)) - return NULL_TREE; - + auto_vec elts (out_nelts); for (i = 0; i < out_nelts; i++) { - elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR - ? NOP_EXPR : FIX_TRUNC_EXPR, - TREE_TYPE (type), elts[i]); - if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i])) + tree elt = (i < in_nelts + ? VECTOR_CST_ELT (arg1, i) + : VECTOR_CST_ELT (arg2, i - in_nelts)); + elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR + ? 
NOP_EXPR : FIX_TRUNC_EXPR, + TREE_TYPE (type), elt); + if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) return NULL_TREE; + elts.quick_push (elt); } return build_vector (type, elts); @@ -1516,7 +1517,6 @@ const_binop (enum tree_code code, tree type, tree arg1, tree arg2) case VEC_WIDEN_MULT_ODD_EXPR: { unsigned int out_nelts, in_nelts, out, ofs, scale; - tree *elts; if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST) return NULL_TREE; @@ -1526,11 +1526,6 @@ const_binop (enum tree_code code, tree type, tree arg1, tree arg2) gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2) && out_nelts == TYPE_VECTOR_SUBPARTS (type)); - elts = XALLOCAVEC (tree, in_nelts * 2); - if (!vec_cst_ctor_to_array (arg1, elts) - || !vec_cst_ctor_to_array (arg2, elts + in_nelts)) - return NULL_TREE; - if (code == VEC_WIDEN_MULT_LO_EXPR) scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0; else if (code == VEC_WIDEN_MULT_HI_EXPR) @@ -1540,20 +1535,21 @@ const_binop (enum tree_code code, tree type, tree arg1, tree arg2) else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */ scale = 1, ofs = 1; + auto_vec elts (out_nelts); for (out = 0; out < out_nelts; out++) { - unsigned int in1 = (out << scale) + ofs; - unsigned int in2 = in1 + in_nelts; - tree t1, t2; - - t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]); - t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]); + unsigned int in = (out << scale) + ofs; + tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), + VECTOR_CST_ELT (arg1, in)); + tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), + VECTOR_CST_ELT (arg2, in)); if (t1 == NULL_TREE || t2 == NULL_TREE) return NULL_TREE; - elts[out] = const_binop (MULT_EXPR, t1, t2); - if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out])) + tree elt = const_binop (MULT_EXPR, t1, t2); + if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) return NULL_TREE; + elts.quick_push (elt); } return build_vector (type, elts); @@ -1638,18 +1634,17 @@ const_unop (enum tree_code code, tree type, tree arg0) /* Perform BIT_NOT_EXPR on each element individually. 
*/ else if (TREE_CODE (arg0) == VECTOR_CST) { - tree *elements; tree elem; unsigned count = VECTOR_CST_NELTS (arg0), i; - elements = XALLOCAVEC (tree, count); + auto_vec elements (count); for (i = 0; i < count; i++) { elem = VECTOR_CST_ELT (arg0, i); elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem); if (elem == NULL_TREE) break; - elements[i] = elem; + elements.quick_push (elem); } if (i == count) return build_vector (type, elements); @@ -1677,7 +1672,6 @@ const_unop (enum tree_code code, tree type, tree arg0) case VEC_UNPACK_FLOAT_HI_EXPR: { unsigned int out_nelts, in_nelts, i; - tree *elts; enum tree_code subcode; if (TREE_CODE (arg0) != VECTOR_CST) @@ -1687,24 +1681,24 @@ const_unop (enum tree_code code, tree type, tree arg0) out_nelts = in_nelts / 2; gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type)); - elts = XALLOCAVEC (tree, in_nelts); - if (!vec_cst_ctor_to_array (arg0, elts)) - return NULL_TREE; - + unsigned int offset = 0; if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_FLOAT_LO_EXPR)) - elts += out_nelts; + offset = out_nelts; if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR) subcode = NOP_EXPR; else subcode = FLOAT_EXPR; + auto_vec elts (out_nelts); for (i = 0; i < out_nelts; i++) { - elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]); - if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i])) + tree elt = fold_convert_const (subcode, TREE_TYPE (type), + VECTOR_CST_ELT (arg0, i + offset)); + if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt)) return NULL_TREE; + elts.quick_push (elt); } return build_vector (type, elts); @@ -1715,17 +1709,12 @@ const_unop (enum tree_code code, tree type, tree arg0) case REDUC_PLUS_EXPR: { unsigned int nelts, i; - tree *elts; enum tree_code subcode; if (TREE_CODE (arg0) != VECTOR_CST) return NULL_TREE; nelts = VECTOR_CST_NELTS (arg0); - elts = XALLOCAVEC (tree, nelts); - if (!vec_cst_ctor_to_array (arg0, elts)) - return NULL_TREE; - switch (code) { case REDUC_MIN_EXPR: subcode = MIN_EXPR; break; @@ -1734,14 +1723,15 @@ const_unop (enum tree_code code, tree type, tree arg0) default: gcc_unreachable (); } + tree res = VECTOR_CST_ELT (arg0, 0); for (i = 1; i < nelts; i++) { - elts[0] = const_binop (subcode, elts[0], elts[i]); - if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0])) + res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i)); + if (res == NULL_TREE || !CONSTANT_CLASS_P (res)) return NULL_TREE; } - return elts[0]; + return res; } default: @@ -2163,14 +2153,14 @@ fold_convert_const (enum tree_code code, tree type, tree arg1) { int len = VECTOR_CST_NELTS (arg1); tree elttype = TREE_TYPE (type); - tree *v = XALLOCAVEC (tree, len); + auto_vec v (len); for (int i = 0; i < len; ++i) { tree elt = VECTOR_CST_ELT (arg1, i); tree cvt = fold_convert_const (code, elttype, elt); if (cvt == NULL_TREE) return NULL_TREE; - v[i] = cvt; + v.quick_push (cvt); } return build_vector (type, v); } @@ -7392,7 +7382,6 @@ native_interpret_vector (tree type, const unsigned char *ptr, int len) { tree etype, elem; int i, size, count; - tree *elements; etype = TREE_TYPE (type); size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype)); @@ -7400,13 +7389,13 @@ native_interpret_vector (tree type, const unsigned char *ptr, int len) if (size * count > len) return NULL_TREE; - elements = XALLOCAVEC (tree, count); - for (i = count - 1; i >= 0; i--) + auto_vec elements (count); + for (i = 0; i < count; ++i) { elem = native_interpret_expr (etype, ptr+(i*size), size); if (!elem) return NULL_TREE; - elements[i] = elem; 
+ elements.quick_push (elem); } return build_vector (type, elements); } @@ -8761,12 +8750,13 @@ fold_mult_zconjz (location_t loc, tree type, tree expr) /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or - CONSTRUCTOR ARG into array ELTS and return true if successful. */ + CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return + true if successful. */ static bool -vec_cst_ctor_to_array (tree arg, tree *elts) +vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts) { - unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i; + unsigned int i; if (TREE_CODE (arg) == VECTOR_CST) { @@ -8799,7 +8789,6 @@ static tree fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel) { unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; - tree *elts; bool need_ctor = false; gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts @@ -8808,16 +8797,17 @@ fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel) || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type)) return NULL_TREE; - elts = XALLOCAVEC (tree, nelts * 3); - if (!vec_cst_ctor_to_array (arg0, elts) - || !vec_cst_ctor_to_array (arg1, elts + nelts)) + tree *in_elts = XALLOCAVEC (tree, nelts * 2); + if (!vec_cst_ctor_to_array (arg0, nelts, in_elts) + || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts)) return NULL_TREE; + auto_vec out_elts (nelts); for (i = 0; i < nelts; i++) { - if (!CONSTANT_CLASS_P (elts[sel[i]])) + if (!CONSTANT_CLASS_P (in_elts[sel[i]])) need_ctor = true; - elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]); + out_elts.quick_push (unshare_expr (in_elts[sel[i]])); } if (need_ctor) @@ -8825,11 +8815,11 @@ fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel) vec *v; vec_alloc (v, nelts); for (i = 0; i < nelts; i++) - CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]); + CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]); return build_constructor (type, v); } else - return build_vector (type, &elts[2 * nelts]); + return build_vector (type, out_elts); } /* Try to fold a pointer difference of type TYPE two address expressions of @@ -8879,7 +8869,7 @@ tree exact_inverse (tree type, tree cst) { REAL_VALUE_TYPE r; - tree unit_type, *elts; + tree unit_type; machine_mode mode; unsigned vec_nelts, i; @@ -8894,20 +8884,22 @@ exact_inverse (tree type, tree cst) return NULL_TREE; case VECTOR_CST: - vec_nelts = VECTOR_CST_NELTS (cst); - elts = XALLOCAVEC (tree, vec_nelts); - unit_type = TREE_TYPE (type); - mode = TYPE_MODE (unit_type); + { + vec_nelts = VECTOR_CST_NELTS (cst); + unit_type = TREE_TYPE (type); + mode = TYPE_MODE (unit_type); - for (i = 0; i < vec_nelts; i++) - { - r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i)); - if (!exact_real_inverse (mode, &r)) - return NULL_TREE; - elts[i] = build_real (unit_type, r); - } + auto_vec elts (vec_nelts); + for (i = 0; i < vec_nelts; i++) + { + r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i)); + if (!exact_real_inverse (mode, &r)) + return NULL_TREE; + elts.quick_push (build_real (unit_type, r)); + } - return build_vector (type, elts); + return build_vector (type, elts); + } default: return NULL_TREE; @@ -11596,9 +11588,9 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type, if (n == 1) return VECTOR_CST_ELT (arg0, idx); - tree *vals = XALLOCAVEC (tree, n); + auto_vec vals (n); for (unsigned i = 0; i < n; ++i) - vals[i] = VECTOR_CST_ELT (arg0, idx + i); + vals.quick_push (VECTOR_CST_ELT (arg0, idx + i)); return build_vector (type, vals); } } @@ -11731,10 +11723,10 @@ 
fold_ternary_loc (location_t loc, enum tree_code code, tree type, if (need_mask_canon && arg2 == op2) { - tree *tsel = XALLOCAVEC (tree, nelts); tree eltype = TREE_TYPE (TREE_TYPE (arg2)); + auto_vec tsel (nelts); for (i = 0; i < nelts; i++) - tsel[i] = build_int_cst (eltype, sel[i]); + tsel.quick_push (build_int_cst (eltype, sel[i])); op2 = build_vector (TREE_TYPE (arg2), tsel); changed = true; } @@ -11775,8 +11767,10 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type, else { unsigned int nelts = VECTOR_CST_NELTS (arg0); - tree *elts = XALLOCAVEC (tree, nelts); - memcpy (elts, VECTOR_CST_ELTS (arg0), sizeof (tree) * nelts); + auto_vec elts (nelts); + elts.quick_grow (nelts); + memcpy (&elts[0], VECTOR_CST_ELTS (arg0), + sizeof (tree) * nelts); elts[k] = arg1; return build_vector (type, elts); } @@ -13894,10 +13888,10 @@ fold_relational_const (enum tree_code code, tree type, tree op0, tree op1) return constant_boolean_node (true, type); } unsigned count = VECTOR_CST_NELTS (op0); - tree *elts = XALLOCAVEC (tree, count); gcc_assert (VECTOR_CST_NELTS (op1) == count && TYPE_VECTOR_SUBPARTS (type) == count); + auto_vec elts (count); for (unsigned i = 0; i < count; i++) { tree elem_type = TREE_TYPE (type); @@ -13910,7 +13904,8 @@ fold_relational_const (enum tree_code code, tree type, tree op0, tree op1) if (tem == NULL_TREE) return NULL_TREE; - elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1); + elts.quick_push (build_int_cst (elem_type, + integer_zerop (tem) ? 0 : -1)); } return build_vector (type, elts); diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c index 0ec225610aa..bde20a34bfb 100644 --- a/gcc/gimple-fold.c +++ b/gcc/gimple-fold.c @@ -5919,18 +5919,18 @@ gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree), && (CONSTRUCTOR_NELTS (rhs) == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)))) { - unsigned i; - tree val, *vec; + unsigned i, nelts; + tree val; - vec = XALLOCAVEC (tree, - TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))); + nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)); + auto_vec vec (nelts); FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) { val = (*valueize) (val); if (TREE_CODE (val) == INTEGER_CST || TREE_CODE (val) == REAL_CST || TREE_CODE (val) == FIXED_CST) - vec[i] = val; + vec.quick_push (val); else return NULL_TREE; } diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c index 52962067237..82d940bd36e 100644 --- a/gcc/tree-ssa-forwprop.c +++ b/gcc/tree-ssa-forwprop.c @@ -2051,7 +2051,7 @@ simplify_vector_constructor (gimple_stmt_iterator *gsi) } else { - tree mask_type, *mask_elts; + tree mask_type; if (!can_vec_perm_p (TYPE_MODE (type), false, sel)) return false; @@ -2062,9 +2062,9 @@ simplify_vector_constructor (gimple_stmt_iterator *gsi) || GET_MODE_SIZE (TYPE_MODE (mask_type)) != GET_MODE_SIZE (TYPE_MODE (type))) return false; - mask_elts = XALLOCAVEC (tree, nelts); + auto_vec mask_elts (nelts); for (i = 0; i < nelts; i++) - mask_elts[i] = build_int_cst (TREE_TYPE (mask_type), sel[i]); + mask_elts.quick_push (build_int_cst (TREE_TYPE (mask_type), sel[i])); op2 = build_vector (mask_type, mask_elts); if (conv_code == ERROR_MARK) gimple_assign_set_rhs_with_ops (gsi, VEC_PERM_EXPR, orig, orig, op2); diff --git a/gcc/tree-vect-generic.c b/gcc/tree-vect-generic.c index dc8b2ed38ad..1341d66deed 100644 --- a/gcc/tree-vect-generic.c +++ b/gcc/tree-vect-generic.c @@ -398,9 +398,9 @@ add_rshift (gimple_stmt_iterator *gsi, tree type, tree op0, int *shiftcnts) if (op != unknown_optab && optab_handler (op, TYPE_MODE (type)) 
!= CODE_FOR_nothing) { - tree *vec = XALLOCAVEC (tree, nunits); + auto_vec vec (nunits); for (i = 0; i < nunits; i++) - vec[i] = build_int_cst (TREE_TYPE (type), shiftcnts[i]); + vec.quick_push (build_int_cst (TREE_TYPE (type), shiftcnts[i])); return gimplify_build2 (gsi, RSHIFT_EXPR, type, op0, build_vector (type, vec)); } @@ -429,7 +429,6 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, unsigned int i; signop sign_p = TYPE_SIGN (TREE_TYPE (type)); unsigned HOST_WIDE_INT mask = GET_MODE_MASK (TYPE_MODE (TREE_TYPE (type))); - tree *vec; tree cur_op, mulcst, tem; optab op; @@ -593,8 +592,6 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, mode = -2; } - vec = XALLOCAVEC (tree, nunits); - if (use_pow2) { tree addend = NULL_TREE; @@ -638,10 +635,11 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, mask_type = build_same_sized_truth_vector_type (type); zero = build_zero_cst (type); cond = build2 (LT_EXPR, mask_type, op0, zero); + auto_vec vec (nunits); for (i = 0; i < nunits; i++) - vec[i] = build_int_cst (TREE_TYPE (type), - (HOST_WIDE_INT_1U - << shifts[i]) - 1); + vec.quick_push (build_int_cst (TREE_TYPE (type), + (HOST_WIDE_INT_1U + << shifts[i]) - 1)); cst = build_vector (type, vec); addend = make_ssa_name (type); stmt = gimple_build_assign (addend, VEC_COND_EXPR, cond, @@ -676,10 +674,11 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, else { tree mask; + auto_vec vec (nunits); for (i = 0; i < nunits; i++) - vec[i] = build_int_cst (TREE_TYPE (type), - (HOST_WIDE_INT_1U - << shifts[i]) - 1); + vec.quick_push (build_int_cst (TREE_TYPE (type), + (HOST_WIDE_INT_1U + << shifts[i]) - 1)); mask = build_vector (type, vec); op = optab_for_tree_code (BIT_AND_EXPR, type, optab_default); if (op != unknown_optab @@ -754,8 +753,9 @@ expand_vector_divmod (gimple_stmt_iterator *gsi, tree type, tree op0, return NULL_TREE; } + auto_vec vec (nunits); for (i = 0; i < nunits; i++) - vec[i] = build_int_cst (TREE_TYPE (type), mulc[i]); + vec.quick_push (build_int_cst (TREE_TYPE (type), mulc[i])); mulcst = build_vector (type, vec); cur_op = gimplify_build2 (gsi, MULT_HIGHPART_EXPR, type, cur_op, mulcst); @@ -1066,7 +1066,6 @@ optimize_vector_constructor (gimple_stmt_iterator *gsi) unsigned int i, j, nelts = TYPE_VECTOR_SUBPARTS (type); bool all_same = true; constructor_elt *elt; - tree *cst; gimple *g; tree base = NULL_TREE; optab op; @@ -1105,22 +1104,23 @@ optimize_vector_constructor (gimple_stmt_iterator *gsi) } if (all_same) return; - cst = XALLOCAVEC (tree, nelts); + auto_vec cst (nelts); for (i = 0; i < nelts; i++) { - tree this_base = CONSTRUCTOR_ELT (rhs, i)->value;; - cst[i] = build_zero_cst (TREE_TYPE (base)); + tree this_base = CONSTRUCTOR_ELT (rhs, i)->value; + tree elt = build_zero_cst (TREE_TYPE (base)); while (this_base != base) { g = SSA_NAME_DEF_STMT (this_base); - cst[i] = fold_binary (PLUS_EXPR, TREE_TYPE (base), - cst[i], gimple_assign_rhs2 (g)); - if (cst[i] == NULL_TREE - || TREE_CODE (cst[i]) != INTEGER_CST - || TREE_OVERFLOW (cst[i])) + elt = fold_binary (PLUS_EXPR, TREE_TYPE (base), + elt, gimple_assign_rhs2 (g)); + if (elt == NULL_TREE + || TREE_CODE (elt) != INTEGER_CST + || TREE_OVERFLOW (elt)) return; this_base = gimple_assign_rhs1 (g); } + cst.quick_push (elt); } for (i = 0; i < nelts; i++) CONSTRUCTOR_ELT (rhs, i)->value = base; diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c index 5a1d9ff7a5a..3b4a71eba89 100644 --- a/gcc/tree-vect-loop.c +++ b/gcc/tree-vect-loop.c @@ -3969,7 
+3969,6 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val, enum tree_code code = gimple_assign_rhs_code (stmt); tree def_for_init; tree init_def; - tree *elts; int i; bool nested_in_vect_loop = false; REAL_VALUE_TYPE real_init_val = dconst0; @@ -4015,15 +4014,16 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val, switch (code) { - case WIDEN_SUM_EXPR: - case DOT_PROD_EXPR: - case SAD_EXPR: - case PLUS_EXPR: - case MINUS_EXPR: - case BIT_IOR_EXPR: - case BIT_XOR_EXPR: - case MULT_EXPR: - case BIT_AND_EXPR: + case WIDEN_SUM_EXPR: + case DOT_PROD_EXPR: + case SAD_EXPR: + case PLUS_EXPR: + case MINUS_EXPR: + case BIT_IOR_EXPR: + case BIT_XOR_EXPR: + case MULT_EXPR: + case BIT_AND_EXPR: + { /* ADJUSMENT_DEF is NULL when called from vect_create_epilog_for_reduction to vectorize double reduction. */ if (adjustment_def) @@ -4044,17 +4044,19 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val, def_for_init = build_int_cst (scalar_type, int_init_val); /* Create a vector of '0' or '1' except the first element. */ - elts = XALLOCAVEC (tree, nunits); + auto_vec elts (nunits); + elts.quick_grow (nunits); for (i = nunits - 2; i >= 0; --i) elts[i + 1] = def_for_init; /* Option1: the first element is '0' or '1' as well. */ - if (adjustment_def) - { + if (adjustment_def) + { elts[0] = def_for_init; - init_def = build_vector (vectype, elts); - break; - } + + init_def = build_vector (vectype, elts); + break; + } /* Option2: the first element is INIT_VAL. */ elts[0] = init_val; @@ -4069,12 +4071,13 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val, CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]); init_def = build_constructor (vectype, v); } + } + break; - break; - - case MIN_EXPR: - case MAX_EXPR: - case COND_EXPR: + case MIN_EXPR: + case MAX_EXPR: + case COND_EXPR: + { if (adjustment_def) { *adjustment_def = NULL_TREE; @@ -4088,10 +4091,11 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val, if (! gimple_seq_empty_p (stmts)) gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts); init_def = build_vector_from_val (vectype, init_val); - break; + } + break; - default: - gcc_unreachable (); + default: + gcc_unreachable (); } return init_def; @@ -4111,7 +4115,6 @@ get_initial_defs_for_reduction (slp_tree slp_node, stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt); unsigned nunits; tree vec_cst; - tree *elts; unsigned j, number_of_places_left_in_vector; tree vector_type, scalar_type; tree vop; @@ -4195,7 +4198,8 @@ get_initial_defs_for_reduction (slp_tree slp_node, number_of_places_left_in_vector = nunits; constant_p = true; - elts = XALLOCAVEC (tree, nunits); + auto_vec elts (nunits); + elts.quick_grow (nunits); for (j = 0; j < number_of_copies; j++) { for (i = group_size - 1; stmts.iterate (i, &stmt); i--) @@ -4533,9 +4537,9 @@ vect_create_epilog_for_reduction (vec vect_defs, gimple *stmt, vector size (STEP). */ /* Create a {1,2,3,...} vector. */ - tree *vtemp = XALLOCAVEC (tree, nunits_out); + auto_vec vtemp (nunits_out); for (k = 0; k < nunits_out; ++k) - vtemp[k] = build_int_cst (cr_index_scalar_type, k + 1); + vtemp.quick_push (build_int_cst (cr_index_scalar_type, k + 1)); tree series_vect = build_vector (cr_index_vector_type, vtemp); /* Create a vector of the step value. 
*/ @@ -6731,7 +6735,7 @@ vectorizable_induction (gimple *phi, unsigned ivn; for (ivn = 0; ivn < nivs; ++ivn) { - tree *elts = XALLOCAVEC (tree, nunits); + auto_vec elts (nunits); bool constant_p = true; for (unsigned eltn = 0; eltn < nunits; ++eltn) { @@ -6749,7 +6753,7 @@ vectorizable_induction (gimple *phi, } if (! CONSTANT_CLASS_P (elt)) constant_p = false; - elts[eltn] = elt; + elts.quick_push (elt); } if (constant_p) new_vec = build_vector (vectype, elts); diff --git a/gcc/tree-vect-slp.c b/gcc/tree-vect-slp.c index 38738930be3..32ca6afa614 100644 --- a/gcc/tree-vect-slp.c +++ b/gcc/tree-vect-slp.c @@ -3105,7 +3105,6 @@ vect_get_constant_vectors (tree op, slp_tree slp_node, stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt); unsigned nunits; tree vec_cst; - tree *elts; unsigned j, number_of_places_left_in_vector; tree vector_type; tree vop; @@ -3158,7 +3157,8 @@ vect_get_constant_vectors (tree op, slp_tree slp_node, number_of_places_left_in_vector = nunits; constant_p = true; - elts = XALLOCAVEC (tree, nunits); + auto_vec elts (nunits); + elts.quick_grow (nunits); bool place_after_defs = false; for (j = 0; j < number_of_copies; j++) { @@ -3600,10 +3600,10 @@ vect_transform_slp_perm_load (slp_tree node, vec dr_chain, if (! noop_p) { - tree *mask_elts = XALLOCAVEC (tree, nunits); + auto_vec mask_elts (nunits); for (int l = 0; l < nunits; ++l) - mask_elts[l] = build_int_cst (mask_element_type, - mask[l]); + mask_elts.quick_push (build_int_cst (mask_element_type, + mask[l])); mask_vec = build_vector (mask_type, mask_elts); } @@ -3759,13 +3759,14 @@ vect_schedule_slp_instance (slp_tree node, slp_instance instance, unsigned k = 0, l; for (j = 0; j < v0.length (); ++j) { - tree *melts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (vectype)); - for (l = 0; l < TYPE_VECTOR_SUBPARTS (vectype); ++l) + unsigned int nunits = TYPE_VECTOR_SUBPARTS (vectype); + auto_vec melts (nunits); + for (l = 0; l < nunits; ++l) { if (k >= group_size) k = 0; - melts[l] = build_int_cst - (meltype, mask[k++] * TYPE_VECTOR_SUBPARTS (vectype) + l); + tree t = build_int_cst (meltype, mask[k++] * nunits + l); + melts.quick_push (t); } tmask = build_vector (mvectype, melts); diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c index df93022f7cc..b5f706c1f31 100644 --- a/gcc/tree-vect-stmts.c +++ b/gcc/tree-vect-stmts.c @@ -2480,10 +2480,10 @@ vectorizable_bswap (gimple *stmt, gimple_stmt_iterator *gsi, if (! char_vectype) return false; - unsigned char *elts - = XALLOCAVEC (unsigned char, TYPE_VECTOR_SUBPARTS (char_vectype)); + unsigned int num_bytes = TYPE_VECTOR_SUBPARTS (char_vectype); + unsigned char *elts = XALLOCAVEC (unsigned char, num_bytes); unsigned char *elt = elts; - unsigned word_bytes = TYPE_VECTOR_SUBPARTS (char_vectype) / nunits; + unsigned word_bytes = num_bytes / nunits; for (unsigned i = 0; i < nunits; ++i) for (unsigned j = 0; j < word_bytes; ++j) *elt++ = (i + 1) * word_bytes - j - 1; @@ -2507,9 +2507,9 @@ vectorizable_bswap (gimple *stmt, gimple_stmt_iterator *gsi, return true; } - tree *telts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (char_vectype)); - for (unsigned i = 0; i < TYPE_VECTOR_SUBPARTS (char_vectype); ++i) - telts[i] = build_int_cst (char_type_node, elts[i]); + auto_vec telts (num_bytes); + for (unsigned i = 0; i < num_bytes; ++i) + telts.quick_push (build_int_cst (char_type_node, elts[i])); tree bswap_vconst = build_vector (char_vectype, telts); /* Transform. 
 */
@@ -2928,10 +2928,10 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
 	  if (gimple_call_internal_p (stmt)
 	      && gimple_call_internal_fn (stmt) == IFN_GOMP_SIMD_LANE)
 	    {
-	      tree *v = XALLOCAVEC (tree, nunits_out);
-	      int k;
-	      for (k = 0; k < nunits_out; ++k)
-		v[k] = build_int_cst (unsigned_type_node, j * nunits_out + k);
+	      auto_vec<tree> v (nunits_out);
+	      for (int k = 0; k < nunits_out; ++k)
+		v.quick_push (build_int_cst (unsigned_type_node,
+					     j * nunits_out + k));
 	      tree cst = build_vector (vectype_out, v);
 	      tree new_var
 		= vect_get_new_ssa_name (vectype_out, vect_simple_var, "cst_");
@@ -6505,7 +6505,7 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
 tree
 vect_gen_perm_mask_any (tree vectype, const unsigned char *sel)
 {
-  tree mask_elt_type, mask_type, mask_vec, *mask_elts;
+  tree mask_elt_type, mask_type, mask_vec;
   int i, nunits;
 
   nunits = TYPE_VECTOR_SUBPARTS (vectype);
@@ -6514,9 +6514,9 @@ vect_gen_perm_mask_any (tree vectype, const unsigned char *sel)
     (int_mode_for_mode (TYPE_MODE (TREE_TYPE (vectype))).require (), 1);
   mask_type = get_vectype_for_scalar_type (mask_elt_type);
 
-  mask_elts = XALLOCAVEC (tree, nunits);
-  for (i = nunits - 1; i >= 0; i--)
-    mask_elts[i] = build_int_cst (mask_elt_type, sel[i]);
+  auto_vec<tree> mask_elts (nunits);
+  for (i = 0; i < nunits; ++i)
+    mask_elts.quick_push (build_int_cst (mask_elt_type, sel[i]));
   mask_vec = build_vector (mask_type, mask_elts);
 
   return mask_vec;
diff --git a/gcc/tree.c b/gcc/tree.c
index 0f505c2db01..788a84b511e 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -1702,18 +1702,20 @@ make_vector (unsigned len MEM_STAT_DECL)
 }
 
 /* Return a new VECTOR_CST node whose type is TYPE and whose values
-   are in a list pointed to by VALS.  */
+   are given by VALS.  */
 
 tree
-build_vector (tree type, tree *vals MEM_STAT_DECL)
+build_vector (tree type, vec<tree> vals MEM_STAT_DECL)
 {
+  unsigned int nelts = vals.length ();
+  gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
   int over = 0;
   unsigned cnt = 0;
-  tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
+  tree v = make_vector (nelts);
   TREE_TYPE (v) = type;
 
   /* Iterate through elements and check for overflow.  */
-  for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
+  for (cnt = 0; cnt < nelts; ++cnt)
     {
       tree value = vals[cnt];
 
@@ -1736,20 +1738,21 @@ build_vector (tree type, tree *vals MEM_STAT_DECL)
 tree
 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
 {
-  tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
-  unsigned HOST_WIDE_INT idx, pos = 0;
+  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
+  unsigned HOST_WIDE_INT idx;
   tree value;
 
+  auto_vec<tree> vec (nelts);
   FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
     {
       if (TREE_CODE (value) == VECTOR_CST)
 	for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
-	  vec[pos++] = VECTOR_CST_ELT (value, i);
+	  vec.quick_push (VECTOR_CST_ELT (value, i));
       else
-	vec[pos++] = value;
+	vec.quick_push (value);
     }
-  while (pos < TYPE_VECTOR_SUBPARTS (type))
-    vec[pos++] = build_zero_cst (TREE_TYPE (type));
+  while (vec.length () < nelts)
+    vec.quick_push (build_zero_cst (TREE_TYPE (type)));
 
   return build_vector (type, vec);
 }
@@ -1774,9 +1777,9 @@ build_vector_from_val (tree vectype, tree sc)
 
   if (CONSTANT_CLASS_P (sc))
     {
-      tree *v = XALLOCAVEC (tree, nunits);
+      auto_vec<tree> v (nunits);
       for (i = 0; i < nunits; ++i)
-	v[i] = sc;
+	v.quick_push (sc);
       return build_vector (vectype, v);
     }
   else
diff --git a/gcc/tree.h b/gcc/tree.h
index 490c3b6e51d..caa4a69977d 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -4026,7 +4026,7 @@ extern tree build_int_cst (tree, HOST_WIDE_INT);
 extern tree build_int_cstu (tree type, unsigned HOST_WIDE_INT cst);
 extern tree build_int_cst_type (tree, HOST_WIDE_INT);
 extern tree make_vector (unsigned CXX_MEM_STAT_INFO);
-extern tree build_vector (tree, tree * CXX_MEM_STAT_INFO);
+extern tree build_vector (tree, vec<tree> CXX_MEM_STAT_INFO);
 extern tree build_vector_from_ctor (tree, vec<constructor_elt, va_gc> *);
 extern tree build_vector_from_val (tree, tree);
 extern void recompute_constructor_flags (tree);
-- 
2.11.4.GIT
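
As an illustration of the interface described in the commit message (not part of
the patch itself), the sketch below shows the shape every converted caller now
takes: an auto_vec<tree> sized with the element count, a loop of quick_push
calls in element order, and a final build_vector call.  build_iota_vector is a
hypothetical helper used only for this example; it assumes the usual GCC
internal headers and the post-patch build_vector signature.

  /* Hypothetical example, not part of the patch: build a VECTOR_CST of
     TYPE whose elements are 0, 1, 2, ...  Under the old interface this
     needed XALLOCAVEC (tree, nelts) and a bare tree * argument, with no
     check that the right number of elements was supplied.  */

  static tree
  build_iota_vector (tree type)
  {
    unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);

    /* Reserve NELTS slots up front; quick_push then fills them without
       reallocating.  */
    auto_vec<tree> elts (nelts);
    for (unsigned int i = 0; i < nelts; ++i)
      elts.quick_push (build_int_cst (TREE_TYPE (type), i));

    /* build_vector now receives the vec<tree> itself, so the element
       count travels with the elements and is asserted to match
       TYPE_VECTOR_SUBPARTS (type).  */
    return build_vector (type, elts);
  }

Because the vector carries its own length, the number of elements no longer has
to be derived separately at each call site, which is what makes the interface
usable once TYPE_VECTOR_SUBPARTS is not a compile-time constant for SVE.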