/* Data references and dependences detectors.
   Copyright (C) 2003-2023 Free Software Foundation, Inc.
   Contributed by Sebastian Pop <pop@cri.ensmp.fr>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#ifndef GCC_TREE_DATA_REF_H
#define GCC_TREE_DATA_REF_H

#include "tree-chrec.h"
#include "opt-problem.h"
/* innermost_loop_behavior describes the evolution of the address of the memory
   reference in the innermost enclosing loop.  The address is expressed as
   BASE + STEP * # of iteration, and base is further decomposed as the base
   pointer (BASE_ADDRESS), loop invariant offset (OFFSET) and
   constant offset (INIT).  Examples, in loop nest

   for (i = 0; i < 100; i++)
     for (j = 3; j < 100; j++)

                        Example 1                      Example 2
      data-ref          a[j].b[i][j]                   *(p + x + 16B + 4B * j)

   innermost_loop_behavior
      base_address      &a                             p
      offset            i * D_i                        x
      init              3 * D_j + offsetof (b)         28
      step              D_j                            4
  */
struct innermost_loop_behavior
{
  tree base_address;
  tree offset;
  tree init;
  tree step;

  /* BASE_ADDRESS is known to be misaligned by BASE_MISALIGNMENT bytes
     from an alignment boundary of BASE_ALIGNMENT bytes.  For example,
     if we had:

        struct S __attribute__((aligned(16))) { ... };

        char *ptr;
        ... *(struct S *) (ptr - 4) ...;

     the information would be:

        base_address:      ptr
        base_alignment:     16
        base_misalignment:   4
        init:               -4

     where init cancels the base misalignment.  If instead we had a
     reference to a particular field:

        struct S __attribute__((aligned(16))) { ... int f; ... };

        char *ptr;
        ... ((struct S *) (ptr - 4))->f ...;

     the information would be:

        base_address:      ptr
        base_alignment:     16
        base_misalignment:   4
        init:               -4 + offsetof (S, f)

     where base_address + init might also be misaligned, and by a different
     amount from base_address.  */
  unsigned int base_alignment;
  unsigned int base_misalignment;

  /* The largest power of two that divides OFFSET, capped to a suitably
     high value if the offset is zero.  This is a byte rather than a bit
     quantity.  */
  unsigned int offset_alignment;

  /* Likewise for STEP.  */
  unsigned int step_alignment;
};
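
/* Illustrative sketch (hypothetical values, for explanation only): suppose
   an access has base_alignment 16, base_misalignment 4, init -4, a zero
   OFFSET and a STEP of 8 with step_alignment 8.  The first access then
   lands on a 16-byte boundary (the misalignment of 4 is cancelled by
   init -4), and every later access moves by a multiple of 8, so 8-byte
   alignment is guaranteed on all iterations.  Combining the fields this
   way is the job of dr_alignment (innermost_loop_behavior *), declared
   later in this file.  */
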
/* Describes the evolutions of indices of the memory reference.  The indices
   are indices of the ARRAY_REFs, indexes in artificial dimensions
   added for member selection of records and the operands of MEM_REFs.
   BASE_OBJECT is the part of the reference that is loop-invariant
   (note that this reference does not have to cover the whole object
   being accessed, in which case UNCONSTRAINED_BASE is set; hence it is
   not recommended to use BASE_OBJECT in any code generation).
   For the examples above,

   base_object:        a                              *(p + x + 4B * j_0)
   indices:            {j_0, +, 1}_2                  {16, +, 4}_2
                       4
                       {i_0, +, 1}_1
                       {j_0, +, 1}_2
*/

struct indices
{
  /* The object.  */
  tree base_object;

  /* A list of chrecs.  Access functions of the indices.  */
  vec<tree> access_fns;

  /* Whether BASE_OBJECT is an access representing the whole object
     or whether the access could not be constrained.  */
  bool unconstrained_base;
};

struct dr_alias
{
  /* The alias information that should be used for new pointers to this
     location.  */
  struct ptr_info_def *ptr_info;
};

/* An integer vector.  A vector formally consists of an element of a vector
   space.  A vector space is a set that is closed under vector addition
   and scalar multiplication.  In this vector space, an element is a list of
   integers.  */
typedef HOST_WIDE_INT lambda_int;
typedef lambda_int *lambda_vector;

/* An integer matrix.  A matrix consists of m vectors of length n (IE
   all vectors are the same length).  */
typedef lambda_vector *lambda_matrix;
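
/* Illustrative note (a sketch, not a definition from this header):
   lambda_vectors are used below to hold the classic distance and direction
   vectors of a dependence.  For a two-deep loop nest, a lambda_vector such
   as {1, 0} would describe a dependence distance of 1 in the outer loop
   and 0 in the inner loop.  */
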
struct data_reference
{
  /* A pointer to the statement that contains this DR.  */
  gimple *stmt;

  /* A pointer to the memory reference.  */
  tree ref;

  /* Auxiliary info specific to a pass.  */
  void *aux;

  /* True when the data reference is in RHS of a stmt.  */
  bool is_read;

  /* True when the data reference is conditional within STMT,
     i.e. if it might not occur even when the statement is executed
     and runs to completion.  */
  bool is_conditional_in_stmt;

  /* Alias information for the data reference.  */
  struct dr_alias alias;

  /* Behavior of the memory reference in the innermost loop.  */
  struct innermost_loop_behavior innermost;

  /* Subscripts of this data reference.  */
  struct indices indices;

  /* Alternate subscripts initialized lazily and used by data-dependence
     analysis only when the main indices of two DRs are not comparable.
     Keep last to keep vec_info_shared::check_datarefs happy.  */
  struct indices alt_indices;
};

#define DR_STMT(DR)                   (DR)->stmt
#define DR_REF(DR)                    (DR)->ref
#define DR_BASE_OBJECT(DR)            (DR)->indices.base_object
#define DR_UNCONSTRAINED_BASE(DR)     (DR)->indices.unconstrained_base
#define DR_ACCESS_FNS(DR)             (DR)->indices.access_fns
#define DR_ACCESS_FN(DR, I)           DR_ACCESS_FNS (DR)[I]
#define DR_NUM_DIMENSIONS(DR)         DR_ACCESS_FNS (DR).length ()
#define DR_IS_READ(DR)                (DR)->is_read
#define DR_IS_WRITE(DR)               (!DR_IS_READ (DR))
#define DR_IS_CONDITIONAL_IN_STMT(DR) (DR)->is_conditional_in_stmt
#define DR_BASE_ADDRESS(DR)           (DR)->innermost.base_address
#define DR_OFFSET(DR)                 (DR)->innermost.offset
#define DR_INIT(DR)                   (DR)->innermost.init
#define DR_STEP(DR)                   (DR)->innermost.step
#define DR_PTR_INFO(DR)               (DR)->alias.ptr_info
#define DR_BASE_ALIGNMENT(DR)         (DR)->innermost.base_alignment
#define DR_BASE_MISALIGNMENT(DR)      (DR)->innermost.base_misalignment
#define DR_OFFSET_ALIGNMENT(DR)       (DR)->innermost.offset_alignment
#define DR_STEP_ALIGNMENT(DR)         (DR)->innermost.step_alignment
#define DR_INNERMOST(DR)              (DR)->innermost

typedef struct data_reference *data_reference_p;
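
/* Usage sketch, illustrative only: given an analyzed data reference DR,
   its subscripts can be walked dimension by dimension with

     for (unsigned i = 0; i < DR_NUM_DIMENSIONS (dr); i++)
       {
         tree access_fn = DR_ACCESS_FN (dr, i);
         ...
       }

   which is the pattern used by same_data_refs later in this file.  */
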
/* This struct is used to store the information of a data reference,
   including the data ref itself and the segment length for aliasing
   checks.  This is used to merge alias checks.  */

class dr_with_seg_len
{
public:
  dr_with_seg_len (data_reference_p d, tree len, unsigned HOST_WIDE_INT size,
                   unsigned int a)
    : dr (d), seg_len (len), access_size (size), align (a) {}

  data_reference_p dr;
  /* The offset of the last access that needs to be checked minus
     the offset of the first.  */
  tree seg_len;
  /* A value that, when added to abs (SEG_LEN), gives the total number of
     bytes in the segment.  */
  poly_uint64 access_size;
  /* The minimum common alignment of DR's start address, SEG_LEN and
     ACCESS_SIZE.  */
  unsigned int align;
};

/* Flags that describe a potential alias between two dr_with_seg_lens.
   In general, each pair of dr_with_seg_lens represents a composite of
   multiple access pairs P, so testing flags like DR_IS_READ on the DRs
   does not give meaningful information.

   DR_ALIAS_RAW:
        There is a pair in P for which the second reference is a read
        and the first is a write.

   DR_ALIAS_WAR:
        There is a pair in P for which the second reference is a write
        and the first is a read.

   DR_ALIAS_WAW:
        There is a pair in P for which both references are writes.

   DR_ALIAS_ARBITRARY:
        Either
        (a) it isn't possible to classify one pair in P as RAW, WAW or WAR; or
        (b) there is a pair in P that breaks the ordering assumption below.

        This flag overrides the RAW, WAR and WAW flags above.

   DR_ALIAS_UNSWAPPED:
   DR_ALIAS_SWAPPED:
        Temporary flags that indicate whether there is a pair P whose
        DRs have or haven't been swapped around.

   DR_ALIAS_MIXED_STEPS:
        The DR_STEP for one of the data references in the pair does not
        accurately describe that reference for all members of P.  (Note
        that the flag does not say anything about whether the DR_STEPs
        of the two references in the pair are the same.)

   The ordering assumption mentioned above is that for every pair
   (DR_A, DR_B) in P:

   (1) The original code accesses n elements for DR_A and n elements for DR_B,
       interleaved as follows:

         one access of size DR_A.access_size at DR_A.dr
         one access of size DR_B.access_size at DR_B.dr
         one access of size DR_A.access_size at DR_A.dr + STEP_A
         one access of size DR_B.access_size at DR_B.dr + STEP_B
         one access of size DR_A.access_size at DR_A.dr + STEP_A * 2
         one access of size DR_B.access_size at DR_B.dr + STEP_B * 2
         ...

   (2) The new code accesses the same data in exactly two chunks:

         one group of accesses spanning |DR_A.seg_len| + DR_A.access_size
         one group of accesses spanning |DR_B.seg_len| + DR_B.access_size

   A pair might break this assumption if the DR_A and DR_B accesses
   in the original or the new code are mingled in some way.  For example,
   if DR_A.access_size represents the effect of two individual writes
   to nearby locations, the pair breaks the assumption if those writes
   occur either side of the access for DR_B.

   Note that DR_ALIAS_ARBITRARY describes whether the ordering assumption
   fails to hold for any individual pair in P.  If the assumption *does*
   hold for every pair in P, it doesn't matter whether it holds for the
   composite pair or not.  In other words, P should represent the complete
   set of pairs that the composite pair is testing, so only the ordering
   of two accesses in the same member of P matters.  */
const unsigned int DR_ALIAS_RAW = 1U << 0;
const unsigned int DR_ALIAS_WAR = 1U << 1;
const unsigned int DR_ALIAS_WAW = 1U << 2;
const unsigned int DR_ALIAS_ARBITRARY = 1U << 3;
const unsigned int DR_ALIAS_SWAPPED = 1U << 4;
const unsigned int DR_ALIAS_UNSWAPPED = 1U << 5;
const unsigned int DR_ALIAS_MIXED_STEPS = 1U << 6;

/* This struct contains two dr_with_seg_len objects with aliasing data
   refs.  Two comparisons are generated from them.  */

class dr_with_seg_len_pair_t
{
public:
  /* WELL_ORDERED indicates that the ordering assumption described above
     DR_ALIAS_ARBITRARY holds.  REORDERED indicates that it doesn't.  */
  enum sequencing { WELL_ORDERED, REORDERED };

  dr_with_seg_len_pair_t (const dr_with_seg_len &,
                          const dr_with_seg_len &, sequencing);

  dr_with_seg_len first;
  dr_with_seg_len second;
  unsigned int flags;
};

inline dr_with_seg_len_pair_t::
dr_with_seg_len_pair_t (const dr_with_seg_len &d1, const dr_with_seg_len &d2,
                        sequencing seq)
  : first (d1), second (d2), flags (0)
{
  if (DR_IS_READ (d1.dr) && DR_IS_WRITE (d2.dr))
    flags |= DR_ALIAS_WAR;
  else if (DR_IS_WRITE (d1.dr) && DR_IS_READ (d2.dr))
    flags |= DR_ALIAS_RAW;
  else if (DR_IS_WRITE (d1.dr) && DR_IS_WRITE (d2.dr))
    flags |= DR_ALIAS_WAW;
  else
    flags |= DR_ALIAS_ARBITRARY;
  if (seq == REORDERED)
    flags |= DR_ALIAS_ARBITRARY;
}
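
/* Illustrative sketch, using hypothetical variables: given a read
   reference DR_R and a write reference DR_W, each with a 4-byte access
   size, 4-byte alignment and segment lengths SEG_R and SEG_W, a pair
   could be built as

     dr_with_seg_len a (dr_r, seg_r, 4, 4);
     dr_with_seg_len b (dr_w, seg_w, 4, 4);
     dr_with_seg_len_pair_t pair (a, b, dr_with_seg_len_pair_t::WELL_ORDERED);

   Since the first reference is a read and the second a write, the
   constructor above sets DR_ALIAS_WAR in pair.flags.  */
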
enum data_dependence_direction {
  dir_positive,
  dir_negative,
  dir_equal,
  dir_positive_or_negative,
  dir_positive_or_equal,
  dir_negative_or_equal,
  dir_star,
  dir_independent
};

/* The description of the grid of iterations that overlap.  At most
   two loops are considered at the same time just now, hence at most
   two functions are needed.  For each of the functions, we store
   the vector of coefficients, f[0] + x * f[1] + y * f[2] + ...,
   where x, y, ... are variables.  */

#define MAX_DIM 2

/* Special values of N.  */
#define NO_DEPENDENCE 0
#define NOT_KNOWN (MAX_DIM + 1)
#define CF_NONTRIVIAL_P(CF) ((CF)->n != NO_DEPENDENCE && (CF)->n != NOT_KNOWN)
#define CF_NOT_KNOWN_P(CF) ((CF)->n == NOT_KNOWN)
#define CF_NO_DEPENDENCE_P(CF) ((CF)->n == NO_DEPENDENCE)

typedef vec<tree> affine_fn;

struct conflict_function
{
  unsigned n;
  affine_fn fns[MAX_DIM];
};

/* What is a subscript?  Given two array accesses a subscript is the
   tuple composed of the access functions for a given dimension.
   Example: Given A[f1][f2][f3] and B[g1][g2][g3], there are three
   subscripts: (f1, g1), (f2, g2), (f3, g3).  These three subscripts
   are stored in the data_dependence_relation structure under the form
   of an array of subscripts.  */

struct subscript
{
  /* The access functions of the two references.  */
  tree access_fn[2];

  /* A description of the iterations for which the elements are
     accessed twice.  */
  conflict_function *conflicting_iterations_in_a;
  conflict_function *conflicting_iterations_in_b;

  /* This field stores the information about the iteration domain
     validity of the dependence relation.  */
  tree last_conflict;

  /* Distance from the iteration that accesses a conflicting element in
     A to the iteration that accesses this same conflicting element in
     B.  The distance is a tree scalar expression, i.e. a constant or a
     symbolic expression, but certainly not a chrec function.  */
  tree distance;
};

typedef struct subscript *subscript_p;

#define SUB_ACCESS_FN(SUB, I) (SUB)->access_fn[I]
#define SUB_CONFLICTS_IN_A(SUB) (SUB)->conflicting_iterations_in_a
#define SUB_CONFLICTS_IN_B(SUB) (SUB)->conflicting_iterations_in_b
#define SUB_LAST_CONFLICT(SUB) (SUB)->last_conflict
#define SUB_DISTANCE(SUB) (SUB)->distance
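
/* Illustrative example, a sketch only: for the accesses A[i+1][j] and
   A[i][j] in a two-deep loop nest, the dependence test builds two
   subscripts, one pairing the access functions (i+1, i) for the first
   dimension and one pairing (j, j) for the second.  */
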
/* A data_dependence_relation represents a relation between two
   data_references A and B.  */

struct data_dependence_relation
{
  struct data_reference *a;
  struct data_reference *b;

  /* A "yes/no/maybe" field for the dependence relation:

     - when "ARE_DEPENDENT == NULL_TREE", there exists a dependence
       relation between A and B, and the description of this relation
       is given in the SUBSCRIPTS array,

     - when "ARE_DEPENDENT == chrec_known", there is no dependence and
       SUBSCRIPTS is empty,

     - when "ARE_DEPENDENT == chrec_dont_know", there may be a dependence,
       but the analyzer cannot be more specific.  */
  tree are_dependent;

  /* If nonnull, COULD_BE_INDEPENDENT_P is true and the accesses are
     independent when the runtime addresses of OBJECT_A and OBJECT_B
     are different.  The addresses of both objects are invariant in the
     loop nest.  */
  tree object_a;
  tree object_b;

  /* For each subscript in the dependence test, there is an element in
     this array.  This is the attribute that labels the edge A->B of
     the data_dependence_relation.  */
  vec<subscript_p> subscripts;

  /* The analyzed loop nest.  */
  vec<loop_p> loop_nest;

  /* The classic direction vector.  */
  vec<lambda_vector> dir_vects;

  /* The classic distance vector.  */
  vec<lambda_vector> dist_vects;

  /* Is the dependence reversed with respect to the lexicographic order?  */
  bool reversed_p;

  /* When the dependence relation is affine, it can be represented by
     a distance vector.  */
  bool affine_p;

  /* Set to true when the dependence relation is on the same data
     access.  */
  bool self_reference_p;

  /* True if the dependence described is conservatively correct rather
     than exact, and if it is still possible for the accesses to be
     conditionally independent.  For example, the a and b references in:

       struct s *a, *b;
       for (int i = 0; i < n; ++i)
         a->f[i] += b->f[i];

     conservatively have a distance vector of (0), for the case in which
     a == b, but the accesses are independent if a != b.  Similarly,
     the a and b references in:

       struct s *a, *b;
       for (int i = 0; i < n; ++i)
         a[0].f[i] += b[i].f[i];

     conservatively have a distance vector of (0), but they are independent
     when a != b + i.  In contrast, the references in:

       struct s *a;
       for (int i = 0; i < n; ++i)
         a->f[i] += a->f[i];

     have the same distance vector of (0), but the accesses can never be
     independent.  */
  bool could_be_independent_p;
};

typedef struct data_dependence_relation *ddr_p;

#define DDR_A(DDR) (DDR)->a
#define DDR_B(DDR) (DDR)->b
#define DDR_AFFINE_P(DDR) (DDR)->affine_p
#define DDR_ARE_DEPENDENT(DDR) (DDR)->are_dependent
#define DDR_OBJECT_A(DDR) (DDR)->object_a
#define DDR_OBJECT_B(DDR) (DDR)->object_b
#define DDR_SUBSCRIPTS(DDR) (DDR)->subscripts
#define DDR_SUBSCRIPT(DDR, I) DDR_SUBSCRIPTS (DDR)[I]
#define DDR_NUM_SUBSCRIPTS(DDR) DDR_SUBSCRIPTS (DDR).length ()

#define DDR_LOOP_NEST(DDR) (DDR)->loop_nest
/* The size of the direction/distance vectors: the number of loops in
   the loop nest.  */
#define DDR_NB_LOOPS(DDR) (DDR_LOOP_NEST (DDR).length ())
#define DDR_SELF_REFERENCE(DDR) (DDR)->self_reference_p

#define DDR_DIST_VECTS(DDR) ((DDR)->dist_vects)
#define DDR_DIR_VECTS(DDR) ((DDR)->dir_vects)
#define DDR_NUM_DIST_VECTS(DDR) \
  (DDR_DIST_VECTS (DDR).length ())
#define DDR_NUM_DIR_VECTS(DDR) \
  (DDR_DIR_VECTS (DDR).length ())
#define DDR_DIR_VECT(DDR, I) \
  DDR_DIR_VECTS (DDR)[I]
#define DDR_DIST_VECT(DDR, I) \
  DDR_DIST_VECTS (DDR)[I]
#define DDR_REVERSED_P(DDR) (DDR)->reversed_p
#define DDR_COULD_BE_INDEPENDENT_P(DDR) (DDR)->could_be_independent_p
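
/* Illustrative example, a sketch only: for the loop

     for (i = 1; i < n; i++)
       a[i] = a[i - 1] + 1;

   the relation between the a[i] write and the a[i - 1] read has a classic
   distance vector of (1): the conflicting read happens one iteration after
   the write.  For such a DDR, DDR_NUM_DIST_VECTS (ddr) would be 1 and
   DDR_DIST_VECT (ddr, 0)[0] would be 1.  */
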
opt_result dr_analyze_innermost (innermost_loop_behavior *, tree,
                                 class loop *, const gimple *);
extern bool compute_data_dependences_for_loop (class loop *, bool,
                                               vec<loop_p> *,
                                               vec<data_reference_p> *,
                                               vec<ddr_p> *);
extern void debug_ddrs (vec<ddr_p>);
extern void dump_data_reference (FILE *, struct data_reference *);
extern void debug (data_reference &ref);
extern void debug (data_reference *ptr);
extern void debug_data_reference (struct data_reference *);
extern void debug_data_references (vec<data_reference_p>);
extern void debug (vec<data_reference_p> &ref);
extern void debug (vec<data_reference_p> *ptr);
extern void debug_data_dependence_relation (const data_dependence_relation *);
extern void dump_data_dependence_relations (FILE *, const vec<ddr_p> &);
extern void debug (vec<ddr_p> &ref);
extern void debug (vec<ddr_p> *ptr);
extern void debug_data_dependence_relations (vec<ddr_p>);
extern void free_dependence_relation (struct data_dependence_relation *);
extern void free_dependence_relations (vec<ddr_p> &);
extern void free_data_ref (data_reference_p);
extern void free_data_refs (vec<data_reference_p> &);
extern opt_result find_data_references_in_stmt (class loop *, gimple *,
                                                vec<data_reference_p> *);
extern bool graphite_find_data_references_in_stmt (edge, loop_p, gimple *,
                                                   vec<data_reference_p> *);
tree find_data_references_in_loop (class loop *, vec<data_reference_p> *);
bool loop_nest_has_data_refs (loop_p loop);
struct data_reference *create_data_ref (edge, loop_p, tree, gimple *, bool,
                                        bool);
extern bool find_loop_nest (class loop *, vec<loop_p> *);
extern struct data_dependence_relation *initialize_data_dependence_relation
     (struct data_reference *, struct data_reference *, vec<loop_p>);
extern void compute_affine_dependence (struct data_dependence_relation *,
                                       loop_p);
extern void compute_self_dependence (struct data_dependence_relation *);
extern bool compute_all_dependences (const vec<data_reference_p> &,
                                     vec<ddr_p> *,
                                     const vec<loop_p> &, bool);
extern tree find_data_references_in_bb (class loop *, basic_block,
                                        vec<data_reference_p> *);
extern unsigned int dr_alignment (innermost_loop_behavior *);
extern tree get_base_for_alignment (tree, unsigned int *);

/* Return the alignment in bytes that DR is guaranteed to have at all
   times.  */

inline unsigned int
dr_alignment (data_reference *dr)
{
  return dr_alignment (&DR_INNERMOST (dr));
}

extern bool dr_may_alias_p (const struct data_reference *,
                            const struct data_reference *, class loop *);
extern bool dr_equal_offsets_p (struct data_reference *,
                                struct data_reference *);

extern opt_result runtime_alias_check_p (ddr_p, class loop *, bool);
extern int data_ref_compare_tree (tree, tree);
extern void prune_runtime_alias_test_list (vec<dr_with_seg_len_pair_t> *,
                                           poly_uint64);
extern void create_runtime_alias_checks (class loop *,
                                         const vec<dr_with_seg_len_pair_t> *,
                                         tree *);
extern tree dr_direction_indicator (struct data_reference *);
extern tree dr_zero_step_indicator (struct data_reference *);
extern bool dr_known_forward_stride_p (struct data_reference *);

/* Return true when the base objects of data references A and B are
   the same memory object.  */

inline bool
same_data_refs_base_objects (data_reference_p a, data_reference_p b)
{
  return DR_NUM_DIMENSIONS (a) == DR_NUM_DIMENSIONS (b)
    && operand_equal_p (DR_BASE_OBJECT (a), DR_BASE_OBJECT (b), 0);
}

/* Return true when the data references A and B are accessing the same
   memory object with the same access functions.  Optionally skip the
   last OFFSET dimensions in the data reference.  */

inline bool
same_data_refs (data_reference_p a, data_reference_p b, int offset = 0)
{
  unsigned int i;

  /* The references are exactly the same.  */
  if (operand_equal_p (DR_REF (a), DR_REF (b), 0))
    return true;

  if (!same_data_refs_base_objects (a, b))
    return false;

  for (i = offset; i < DR_NUM_DIMENSIONS (a); i++)
    if (!eq_evolutions_p (DR_ACCESS_FN (a, i), DR_ACCESS_FN (b, i)))
      return false;

  return true;
}

/* Returns true when all the dependences are computable.  */

inline bool
known_dependences_p (vec<ddr_p> dependence_relations)
{
  ddr_p ddr;
  unsigned int i;

  FOR_EACH_VEC_ELT (dependence_relations, i, ddr)
    if (DDR_ARE_DEPENDENT (ddr) == chrec_dont_know)
      return false;

  return true;
}

/* Returns the dependence level for a vector DIST of size LENGTH.
   LEVEL = 0 means a lexicographic dependence, i.e. a dependence due
   to the sequence of statements, not carried by any loop.  */

inline unsigned
dependence_level (lambda_vector dist_vect, int length)
{
  int i;

  for (i = 0; i < length; i++)
    if (dist_vect[i] != 0)
      return i + 1;

  return 0;
}
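
/* Illustrative example, a sketch only: for a two-deep loop nest with
   distance vector {0, 1}, dependence_level returns 2, meaning the
   dependence is carried by the second (inner) loop of the nest; an
   all-zero vector yields level 0, a purely lexicographic dependence.  */
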
/* Return the dependence level for the DDR relation.  */

inline unsigned
ddr_dependence_level (ddr_p ddr)
{
  unsigned vector;
  unsigned level = 0;

  if (DDR_DIST_VECTS (ddr).exists ())
    level = dependence_level (DDR_DIST_VECT (ddr, 0), DDR_NB_LOOPS (ddr));

  for (vector = 1; vector < DDR_NUM_DIST_VECTS (ddr); vector++)
    level = MIN (level, dependence_level (DDR_DIST_VECT (ddr, vector),
                                          DDR_NB_LOOPS (ddr)));
  return level;
}

/* Return the index of the variable VAR in the LOOP_NEST array.  */

inline int
index_in_loop_nest (int var, const vec<loop_p> &loop_nest)
{
  struct loop *loopi;
  int var_index;

  for (var_index = 0; loop_nest.iterate (var_index, &loopi); var_index++)
    if (loopi->num == var)
      return var_index;

  gcc_unreachable ();
}

/* Returns true when the data reference DR has the form "A[i] = ..."
   with a stride equal to its unit type size.  */

inline bool
adjacent_dr_p (struct data_reference *dr)
{
  /* If this is a bitfield store bail out.  */
  if (TREE_CODE (DR_REF (dr)) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (DR_REF (dr), 1)))
    return false;

  if (!DR_STEP (dr)
      || TREE_CODE (DR_STEP (dr)) != INTEGER_CST)
    return false;

  return tree_int_cst_equal (fold_unary (ABS_EXPR, TREE_TYPE (DR_STEP (dr)),
                                         DR_STEP (dr)),
                             TYPE_SIZE_UNIT (TREE_TYPE (DR_REF (dr))));
}
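
/* Illustrative example, a sketch only: on a target with 4-byte int, an
   int array accessed as a[i] inside a loop has DR_STEP 4 and a unit type
   size of 4, so adjacent_dr_p returns true; for a[2 * i] the step is 8
   and the predicate returns false.  */
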
void split_constant_offset (tree, tree *, tree *);

/* Compute the greatest common divisor of a VECTOR of SIZE numbers.  */

inline lambda_int
lambda_vector_gcd (lambda_vector vector, int size)
{
  int i;
  lambda_int gcd1 = 0;

  if (size > 0)
    {
      gcd1 = vector[0];
      for (i = 1; i < size; i++)
        gcd1 = gcd (gcd1, vector[i]);
    }
  return gcd1;
}

/* Allocate a new vector of given SIZE.  */

inline lambda_vector
lambda_vector_new (int size)
{
  /* ??? We shouldn't abuse the GC allocator here.  */
  return ggc_cleared_vec_alloc<lambda_int> (size);
}

/* Clear out vector VEC1 of length SIZE.  */

inline void
lambda_vector_clear (lambda_vector vec1, int size)
{
  memset (vec1, 0, size * sizeof (*vec1));
}

/* Returns true when the vector V is lexicographically positive, in
   other words, when the first nonzero element is positive.  */

inline bool
lambda_vector_lexico_pos (lambda_vector v,
                          unsigned n)
{
  unsigned i;
  for (i = 0; i < n; i++)
    {
      if (v[i] == 0)
        continue;
      if (v[i] < 0)
        return false;
      if (v[i] > 0)
        return true;
    }
  return true;
}
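
/* Illustrative example, a sketch only: {0, 2, -1} is lexicographically
   positive because its first nonzero element (2) is positive, whereas
   {0, -1, 3} is not; the all-zero vector counts as positive.  */
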
/* Return true if vector VEC1 of length SIZE is the zero vector.  */

inline bool
lambda_vector_zerop (lambda_vector vec1, int size)
{
  int i;
  for (i = 0; i < size; i++)
    if (vec1[i] != 0)
      return false;
  return true;
}

/* Allocate a matrix of M rows x N cols.  */

inline lambda_matrix
lambda_matrix_new (int m, int n, struct obstack *lambda_obstack)
{
  lambda_matrix mat;
  int i;

  mat = XOBNEWVEC (lambda_obstack, lambda_vector, m);

  for (i = 0; i < m; i++)
    mat[i] = XOBNEWVEC (lambda_obstack, lambda_int, n);

  return mat;
}

#endif  /* GCC_TREE_DATA_REF_H */