1 /* Array translation routines
2 Copyright (C) 2002-2016 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* trans-array.c-- Various array related code, including scalarization,
23 allocation, initialization and other support routines. */
25 /* How the scalarizer works.
26 In gfortran, array expressions use the same core routines as scalar
28 First, a Scalarization State (SS) chain is built. This is done by walking
29 the expression tree, and building a linear list of the terms in the
30 expression. As the tree is walked, scalar subexpressions are translated.
32 The scalarization parameters are stored in a gfc_loopinfo structure.
33 First the start and stride of each term is calculated by
34 gfc_conv_ss_startstride. During this process the expressions for the array
35 descriptors and data pointers are also translated.
37 If the expression is an assignment, we must then resolve any dependencies.
38 In Fortran all the rhs values of an assignment must be evaluated before
39 any assignments take place. This can require a temporary array to store the
40 values. We also require a temporary when we are passing array expressions
41 or vector subscripts as procedure parameters.
43 Array sections are passed without copying to a temporary. These use the
44 scalarizer to determine the shape of the section. The flag
45 loop->array_parameter tells the scalarizer that the actual values and loop
46 variables will not be required.
48 The function gfc_conv_loop_setup generates the scalarization setup code.
49 It determines the range of the scalarizing loop variables. If a temporary
50 is required, this is created and initialized. Code for scalar expressions
51 taken outside the loop is also generated at this time. Next the offset and
52 scaling required to translate from loop variables to array indices for each
55 A call to gfc_start_scalarized_body marks the start of the scalarized
56 expression. This creates a scope and declares the loop variables. Before
57 calling this gfc_make_ss_chain_used must be used to indicate which terms
58 will be used inside this loop.
60 The scalar gfc_conv_* functions are then used to build the main body of the
61 scalarization loop. Scalarization loop variables and precalculated scalar
62 values are automatically substituted. Note that gfc_advance_se_ss_chain
63 must be used, rather than changing the se->ss directly.
65 For assignment expressions requiring a temporary two sub loops are
66 generated. The first stores the result of the expression in the temporary,
67 the second copies it to the result. A call to
68 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
69 the start of the copying loop. The temporary may be less than full rank.
71 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
72 loops. The loops are added to the pre chain of the loopinfo. The post
73 chain may still contain cleanup code.
75 After the loop code has been added into its parent scope gfc_cleanup_loop
76 is called to free all the SS allocated by the scalarizer. */
80 #include "coretypes.h"
84 #include "gimple-expr.h"
86 #include "fold-const.h"
87 #include "constructor.h"
88 #include "trans-types.h"
89 #include "trans-array.h"
90 #include "trans-const.h"
91 #include "dependency.h"
93 static bool gfc_get_array_constructor_size (mpz_t
*, gfc_constructor_base
);
95 /* The contents of this structure aren't actually used, just the address. */
96 static gfc_ss gfc_ss_terminator_var
;
97 gfc_ss
* const gfc_ss_terminator
= &gfc_ss_terminator_var
;
101 gfc_array_dataptr_type (tree desc
)
103 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc
)));
107 /* Build expressions to access the members of an array descriptor.
108 It's surprisingly easy to mess up here, so never access
109 an array descriptor by "brute force", always use these
110 functions. This also avoids problems if we change the format
111 of an array descriptor.
113 To understand these magic numbers, look at the comments
114 before gfc_build_array_type() in trans-types.c.
116 The code within these defines should be the only code which knows the format
117 of an array descriptor.
119 Any code just needing to read obtain the bounds of an array should use
120 gfc_conv_array_* rather than the following functions as these will return
121 know constant values, and work with arrays which do not have descriptors.
123 Don't forget to #undef these! */
/* Field indices within the array descriptor record type.  DATA_FIELD must
   be 0: several accessors below assert this and use TYPE_FIELDS directly.  */
#define DATA_FIELD 0
#define OFFSET_FIELD 1
#define DTYPE_FIELD 2
#define DIMENSION_FIELD 3
#define CAF_TOKEN_FIELD 4

/* Sub-field indices within one entry of the per-dimension array.  */
#define STRIDE_SUBFIELD 0
#define LBOUND_SUBFIELD 1
#define UBOUND_SUBFIELD 2
135 /* This provides READ-ONLY access to the data field. The field itself
136 doesn't have the proper type. */
139 gfc_conv_descriptor_data_get (tree desc
)
143 type
= TREE_TYPE (desc
);
144 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
146 field
= TYPE_FIELDS (type
);
147 gcc_assert (DATA_FIELD
== 0);
149 t
= fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
), desc
,
151 t
= fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type
), t
);
156 /* This provides WRITE access to the data field.
158 TUPLES_P is true if we are generating tuples.
160 This function gets called through the following macros:
161 gfc_conv_descriptor_data_set
162 gfc_conv_descriptor_data_set. */
165 gfc_conv_descriptor_data_set (stmtblock_t
*block
, tree desc
, tree value
)
169 type
= TREE_TYPE (desc
);
170 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
172 field
= TYPE_FIELDS (type
);
173 gcc_assert (DATA_FIELD
== 0);
175 t
= fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
), desc
,
177 gfc_add_modify (block
, t
, fold_convert (TREE_TYPE (field
), value
));
181 /* This provides address access to the data field. This should only be
182 used by array allocation, passing this on to the runtime. */
185 gfc_conv_descriptor_data_addr (tree desc
)
189 type
= TREE_TYPE (desc
);
190 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
192 field
= TYPE_FIELDS (type
);
193 gcc_assert (DATA_FIELD
== 0);
195 t
= fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
), desc
,
197 return gfc_build_addr_expr (NULL_TREE
, t
);
201 gfc_conv_descriptor_offset (tree desc
)
206 type
= TREE_TYPE (desc
);
207 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
209 field
= gfc_advance_chain (TYPE_FIELDS (type
), OFFSET_FIELD
);
210 gcc_assert (field
!= NULL_TREE
&& TREE_TYPE (field
) == gfc_array_index_type
);
212 return fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
),
213 desc
, field
, NULL_TREE
);
217 gfc_conv_descriptor_offset_get (tree desc
)
219 return gfc_conv_descriptor_offset (desc
);
223 gfc_conv_descriptor_offset_set (stmtblock_t
*block
, tree desc
,
226 tree t
= gfc_conv_descriptor_offset (desc
);
227 gfc_add_modify (block
, t
, fold_convert (TREE_TYPE (t
), value
));
232 gfc_conv_descriptor_dtype (tree desc
)
237 type
= TREE_TYPE (desc
);
238 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
240 field
= gfc_advance_chain (TYPE_FIELDS (type
), DTYPE_FIELD
);
241 gcc_assert (field
!= NULL_TREE
&& TREE_TYPE (field
) == gfc_array_index_type
);
243 return fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
),
244 desc
, field
, NULL_TREE
);
249 gfc_conv_descriptor_rank (tree desc
)
254 dtype
= gfc_conv_descriptor_dtype (desc
);
255 tmp
= build_int_cst (TREE_TYPE (dtype
), GFC_DTYPE_RANK_MASK
);
256 tmp
= fold_build2_loc (input_location
, BIT_AND_EXPR
, TREE_TYPE (dtype
),
258 return fold_convert (gfc_get_int_type (gfc_default_integer_kind
), tmp
);
263 gfc_get_descriptor_dimension (tree desc
)
267 type
= TREE_TYPE (desc
);
268 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
270 field
= gfc_advance_chain (TYPE_FIELDS (type
), DIMENSION_FIELD
);
271 gcc_assert (field
!= NULL_TREE
272 && TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
273 && TREE_CODE (TREE_TYPE (TREE_TYPE (field
))) == RECORD_TYPE
);
275 return fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
),
276 desc
, field
, NULL_TREE
);
281 gfc_conv_descriptor_dimension (tree desc
, tree dim
)
285 tmp
= gfc_get_descriptor_dimension (desc
);
287 return gfc_build_array_ref (tmp
, dim
, NULL
);
292 gfc_conv_descriptor_token (tree desc
)
297 type
= TREE_TYPE (desc
);
298 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
299 gcc_assert (flag_coarray
== GFC_FCOARRAY_LIB
);
300 field
= gfc_advance_chain (TYPE_FIELDS (type
), CAF_TOKEN_FIELD
);
302 /* Should be a restricted pointer - except in the finalization wrapper. */
303 gcc_assert (field
!= NULL_TREE
304 && (TREE_TYPE (field
) == prvoid_type_node
305 || TREE_TYPE (field
) == pvoid_type_node
));
307 return fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
),
308 desc
, field
, NULL_TREE
);
313 gfc_conv_descriptor_stride (tree desc
, tree dim
)
318 tmp
= gfc_conv_descriptor_dimension (desc
, dim
);
319 field
= TYPE_FIELDS (TREE_TYPE (tmp
));
320 field
= gfc_advance_chain (field
, STRIDE_SUBFIELD
);
321 gcc_assert (field
!= NULL_TREE
&& TREE_TYPE (field
) == gfc_array_index_type
);
323 tmp
= fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
),
324 tmp
, field
, NULL_TREE
);
329 gfc_conv_descriptor_stride_get (tree desc
, tree dim
)
331 tree type
= TREE_TYPE (desc
);
332 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
333 if (integer_zerop (dim
)
334 && (GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ALLOCATABLE
335 ||GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ASSUMED_SHAPE_CONT
336 ||GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_ASSUMED_RANK_CONT
337 ||GFC_TYPE_ARRAY_AKIND (type
) == GFC_ARRAY_POINTER_CONT
))
338 return gfc_index_one_node
;
340 return gfc_conv_descriptor_stride (desc
, dim
);
344 gfc_conv_descriptor_stride_set (stmtblock_t
*block
, tree desc
,
345 tree dim
, tree value
)
347 tree t
= gfc_conv_descriptor_stride (desc
, dim
);
348 gfc_add_modify (block
, t
, fold_convert (TREE_TYPE (t
), value
));
352 gfc_conv_descriptor_lbound (tree desc
, tree dim
)
357 tmp
= gfc_conv_descriptor_dimension (desc
, dim
);
358 field
= TYPE_FIELDS (TREE_TYPE (tmp
));
359 field
= gfc_advance_chain (field
, LBOUND_SUBFIELD
);
360 gcc_assert (field
!= NULL_TREE
&& TREE_TYPE (field
) == gfc_array_index_type
);
362 tmp
= fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
),
363 tmp
, field
, NULL_TREE
);
368 gfc_conv_descriptor_lbound_get (tree desc
, tree dim
)
370 return gfc_conv_descriptor_lbound (desc
, dim
);
374 gfc_conv_descriptor_lbound_set (stmtblock_t
*block
, tree desc
,
375 tree dim
, tree value
)
377 tree t
= gfc_conv_descriptor_lbound (desc
, dim
);
378 gfc_add_modify (block
, t
, fold_convert (TREE_TYPE (t
), value
));
382 gfc_conv_descriptor_ubound (tree desc
, tree dim
)
387 tmp
= gfc_conv_descriptor_dimension (desc
, dim
);
388 field
= TYPE_FIELDS (TREE_TYPE (tmp
));
389 field
= gfc_advance_chain (field
, UBOUND_SUBFIELD
);
390 gcc_assert (field
!= NULL_TREE
&& TREE_TYPE (field
) == gfc_array_index_type
);
392 tmp
= fold_build3_loc (input_location
, COMPONENT_REF
, TREE_TYPE (field
),
393 tmp
, field
, NULL_TREE
);
398 gfc_conv_descriptor_ubound_get (tree desc
, tree dim
)
400 return gfc_conv_descriptor_ubound (desc
, dim
);
404 gfc_conv_descriptor_ubound_set (stmtblock_t
*block
, tree desc
,
405 tree dim
, tree value
)
407 tree t
= gfc_conv_descriptor_ubound (desc
, dim
);
408 gfc_add_modify (block
, t
, fold_convert (TREE_TYPE (t
), value
));
411 /* Build a null array descriptor constructor. */
414 gfc_build_null_descriptor (tree type
)
419 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type
));
420 gcc_assert (DATA_FIELD
== 0);
421 field
= TYPE_FIELDS (type
);
423 /* Set a NULL data pointer. */
424 tmp
= build_constructor_single (type
, field
, null_pointer_node
);
425 TREE_CONSTANT (tmp
) = 1;
426 /* All other fields are ignored. */
432 /* Modify a descriptor such that the lbound of a given dimension is the value
433 specified. This also updates ubound and offset accordingly. */
436 gfc_conv_shift_descriptor_lbound (stmtblock_t
* block
, tree desc
,
437 int dim
, tree new_lbound
)
439 tree offs
, ubound
, lbound
, stride
;
440 tree diff
, offs_diff
;
442 new_lbound
= fold_convert (gfc_array_index_type
, new_lbound
);
444 offs
= gfc_conv_descriptor_offset_get (desc
);
445 lbound
= gfc_conv_descriptor_lbound_get (desc
, gfc_rank_cst
[dim
]);
446 ubound
= gfc_conv_descriptor_ubound_get (desc
, gfc_rank_cst
[dim
]);
447 stride
= gfc_conv_descriptor_stride_get (desc
, gfc_rank_cst
[dim
]);
449 /* Get difference (new - old) by which to shift stuff. */
450 diff
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
453 /* Shift ubound and offset accordingly. This has to be done before
454 updating the lbound, as they depend on the lbound expression! */
455 ubound
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
457 gfc_conv_descriptor_ubound_set (block
, desc
, gfc_rank_cst
[dim
], ubound
);
458 offs_diff
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
460 offs
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
462 gfc_conv_descriptor_offset_set (block
, desc
, offs
);
464 /* Finally set lbound to value we want. */
465 gfc_conv_descriptor_lbound_set (block
, desc
, gfc_rank_cst
[dim
], new_lbound
);
/* Cleanup those #defines.  */

#undef DATA_FIELD
#undef OFFSET_FIELD
#undef DTYPE_FIELD
#undef DIMENSION_FIELD
#undef CAF_TOKEN_FIELD
#undef STRIDE_SUBFIELD
#undef LBOUND_SUBFIELD
#undef UBOUND_SUBFIELD
481 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
482 flags & 1 = Main loop body.
483 flags & 2 = temp copy loop. */
486 gfc_mark_ss_chain_used (gfc_ss
* ss
, unsigned flags
)
488 for (; ss
!= gfc_ss_terminator
; ss
= ss
->next
)
489 ss
->info
->useflags
= flags
;
493 /* Free a gfc_ss chain. */
496 gfc_free_ss_chain (gfc_ss
* ss
)
500 while (ss
!= gfc_ss_terminator
)
502 gcc_assert (ss
!= NULL
);
511 free_ss_info (gfc_ss_info
*ss_info
)
516 if (ss_info
->refcount
> 0)
519 gcc_assert (ss_info
->refcount
== 0);
521 switch (ss_info
->type
)
524 for (n
= 0; n
< GFC_MAX_DIMENSIONS
; n
++)
525 if (ss_info
->data
.array
.subscript
[n
])
526 gfc_free_ss_chain (ss_info
->data
.array
.subscript
[n
]);
540 gfc_free_ss (gfc_ss
* ss
)
542 free_ss_info (ss
->info
);
547 /* Creates and initializes an array type gfc_ss struct. */
550 gfc_get_array_ss (gfc_ss
*next
, gfc_expr
*expr
, int dimen
, gfc_ss_type type
)
553 gfc_ss_info
*ss_info
;
556 ss_info
= gfc_get_ss_info ();
558 ss_info
->type
= type
;
559 ss_info
->expr
= expr
;
565 for (i
= 0; i
< ss
->dimen
; i
++)
572 /* Creates and initializes a temporary type gfc_ss struct. */
575 gfc_get_temp_ss (tree type
, tree string_length
, int dimen
)
578 gfc_ss_info
*ss_info
;
581 ss_info
= gfc_get_ss_info ();
583 ss_info
->type
= GFC_SS_TEMP
;
584 ss_info
->string_length
= string_length
;
585 ss_info
->data
.temp
.type
= type
;
589 ss
->next
= gfc_ss_terminator
;
591 for (i
= 0; i
< ss
->dimen
; i
++)
598 /* Creates and initializes a scalar type gfc_ss struct. */
601 gfc_get_scalar_ss (gfc_ss
*next
, gfc_expr
*expr
)
604 gfc_ss_info
*ss_info
;
606 ss_info
= gfc_get_ss_info ();
608 ss_info
->type
= GFC_SS_SCALAR
;
609 ss_info
->expr
= expr
;
619 /* Free all the SS associated with a loop. */
622 gfc_cleanup_loop (gfc_loopinfo
* loop
)
624 gfc_loopinfo
*loop_next
, **ploop
;
629 while (ss
!= gfc_ss_terminator
)
631 gcc_assert (ss
!= NULL
);
632 next
= ss
->loop_chain
;
637 /* Remove reference to self in the parent loop. */
639 for (ploop
= &loop
->parent
->nested
; *ploop
; ploop
= &(*ploop
)->next
)
646 /* Free non-freed nested loops. */
647 for (loop
= loop
->nested
; loop
; loop
= loop_next
)
649 loop_next
= loop
->next
;
650 gfc_cleanup_loop (loop
);
657 set_ss_loop (gfc_ss
*ss
, gfc_loopinfo
*loop
)
661 for (; ss
!= gfc_ss_terminator
; ss
= ss
->next
)
665 if (ss
->info
->type
== GFC_SS_SCALAR
666 || ss
->info
->type
== GFC_SS_REFERENCE
667 || ss
->info
->type
== GFC_SS_TEMP
)
670 for (n
= 0; n
< GFC_MAX_DIMENSIONS
; n
++)
671 if (ss
->info
->data
.array
.subscript
[n
] != NULL
)
672 set_ss_loop (ss
->info
->data
.array
.subscript
[n
], loop
);
677 /* Associate a SS chain with a loop. */
680 gfc_add_ss_to_loop (gfc_loopinfo
* loop
, gfc_ss
* head
)
683 gfc_loopinfo
*nested_loop
;
685 if (head
== gfc_ss_terminator
)
688 set_ss_loop (head
, loop
);
691 for (; ss
&& ss
!= gfc_ss_terminator
; ss
= ss
->next
)
695 nested_loop
= ss
->nested_ss
->loop
;
697 /* More than one ss can belong to the same loop. Hence, we add the
698 loop to the chain only if it is different from the previously
699 added one, to avoid duplicate nested loops. */
700 if (nested_loop
!= loop
->nested
)
702 gcc_assert (nested_loop
->parent
== NULL
);
703 nested_loop
->parent
= loop
;
705 gcc_assert (nested_loop
->next
== NULL
);
706 nested_loop
->next
= loop
->nested
;
707 loop
->nested
= nested_loop
;
710 gcc_assert (nested_loop
->parent
== loop
);
713 if (ss
->next
== gfc_ss_terminator
)
714 ss
->loop_chain
= loop
->ss
;
716 ss
->loop_chain
= ss
->next
;
718 gcc_assert (ss
== gfc_ss_terminator
);
723 /* Generate an initializer for a static pointer or allocatable array. */
726 gfc_trans_static_array_pointer (gfc_symbol
* sym
)
730 gcc_assert (TREE_STATIC (sym
->backend_decl
));
731 /* Just zero the data member. */
732 type
= TREE_TYPE (sym
->backend_decl
);
733 DECL_INITIAL (sym
->backend_decl
) = gfc_build_null_descriptor (type
);
737 /* If the bounds of SE's loop have not yet been set, see if they can be
738 determined from array spec AS, which is the array spec of a called
739 function. MAPPING maps the callee's dummy arguments to the values
740 that the caller is passing. Add any initialization and finalization
744 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping
* mapping
,
745 gfc_se
* se
, gfc_array_spec
* as
)
747 int n
, dim
, total_dim
;
756 if (!as
|| as
->type
!= AS_EXPLICIT
)
759 for (ss
= se
->ss
; ss
; ss
= ss
->parent
)
761 total_dim
+= ss
->loop
->dimen
;
762 for (n
= 0; n
< ss
->loop
->dimen
; n
++)
764 /* The bound is known, nothing to do. */
765 if (ss
->loop
->to
[n
] != NULL_TREE
)
769 gcc_assert (dim
< as
->rank
);
770 gcc_assert (ss
->loop
->dimen
<= as
->rank
);
772 /* Evaluate the lower bound. */
773 gfc_init_se (&tmpse
, NULL
);
774 gfc_apply_interface_mapping (mapping
, &tmpse
, as
->lower
[dim
]);
775 gfc_add_block_to_block (&se
->pre
, &tmpse
.pre
);
776 gfc_add_block_to_block (&se
->post
, &tmpse
.post
);
777 lower
= fold_convert (gfc_array_index_type
, tmpse
.expr
);
779 /* ...and the upper bound. */
780 gfc_init_se (&tmpse
, NULL
);
781 gfc_apply_interface_mapping (mapping
, &tmpse
, as
->upper
[dim
]);
782 gfc_add_block_to_block (&se
->pre
, &tmpse
.pre
);
783 gfc_add_block_to_block (&se
->post
, &tmpse
.post
);
784 upper
= fold_convert (gfc_array_index_type
, tmpse
.expr
);
786 /* Set the upper bound of the loop to UPPER - LOWER. */
787 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
788 gfc_array_index_type
, upper
, lower
);
789 tmp
= gfc_evaluate_now (tmp
, &se
->pre
);
790 ss
->loop
->to
[n
] = tmp
;
794 gcc_assert (total_dim
== as
->rank
);
798 /* Generate code to allocate an array temporary, or create a variable to
799 hold the data. If size is NULL, zero the descriptor so that the
800 callee will allocate the array. If DEALLOC is true, also generate code to
801 free the array afterwards.
803 If INITIAL is not NULL, it is packed using internal_pack and the result used
804 as data instead of allocating a fresh, unitialized area of memory.
806 Initialization code is added to PRE and finalization code to POST.
807 DYNAMIC is true if the caller may want to extend the array later
808 using realloc. This prevents us from putting the array on the stack. */
811 gfc_trans_allocate_array_storage (stmtblock_t
* pre
, stmtblock_t
* post
,
812 gfc_array_info
* info
, tree size
, tree nelem
,
813 tree initial
, bool dynamic
, bool dealloc
)
819 desc
= info
->descriptor
;
820 info
->offset
= gfc_index_zero_node
;
821 if (size
== NULL_TREE
|| integer_zerop (size
))
823 /* A callee allocated array. */
824 gfc_conv_descriptor_data_set (pre
, desc
, null_pointer_node
);
829 /* Allocate the temporary. */
830 onstack
= !dynamic
&& initial
== NULL_TREE
831 && (flag_stack_arrays
832 || gfc_can_put_var_on_stack (size
));
836 /* Make a temporary variable to hold the data. */
837 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (nelem
),
838 nelem
, gfc_index_one_node
);
839 tmp
= gfc_evaluate_now (tmp
, pre
);
840 tmp
= build_range_type (gfc_array_index_type
, gfc_index_zero_node
,
842 tmp
= build_array_type (gfc_get_element_type (TREE_TYPE (desc
)),
844 tmp
= gfc_create_var (tmp
, "A");
845 /* If we're here only because of -fstack-arrays we have to
846 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
847 if (!gfc_can_put_var_on_stack (size
))
848 gfc_add_expr_to_block (pre
,
849 fold_build1_loc (input_location
,
850 DECL_EXPR
, TREE_TYPE (tmp
),
852 tmp
= gfc_build_addr_expr (NULL_TREE
, tmp
);
853 gfc_conv_descriptor_data_set (pre
, desc
, tmp
);
857 /* Allocate memory to hold the data or call internal_pack. */
858 if (initial
== NULL_TREE
)
860 tmp
= gfc_call_malloc (pre
, NULL
, size
);
861 tmp
= gfc_evaluate_now (tmp
, pre
);
868 stmtblock_t do_copying
;
870 tmp
= TREE_TYPE (initial
); /* Pointer to descriptor. */
871 gcc_assert (TREE_CODE (tmp
) == POINTER_TYPE
);
872 tmp
= TREE_TYPE (tmp
); /* The descriptor itself. */
873 tmp
= gfc_get_element_type (tmp
);
874 gcc_assert (tmp
== gfc_get_element_type (TREE_TYPE (desc
)));
875 packed
= gfc_create_var (build_pointer_type (tmp
), "data");
877 tmp
= build_call_expr_loc (input_location
,
878 gfor_fndecl_in_pack
, 1, initial
);
879 tmp
= fold_convert (TREE_TYPE (packed
), tmp
);
880 gfc_add_modify (pre
, packed
, tmp
);
882 tmp
= build_fold_indirect_ref_loc (input_location
,
884 source_data
= gfc_conv_descriptor_data_get (tmp
);
886 /* internal_pack may return source->data without any allocation
887 or copying if it is already packed. If that's the case, we
888 need to allocate and copy manually. */
890 gfc_start_block (&do_copying
);
891 tmp
= gfc_call_malloc (&do_copying
, NULL
, size
);
892 tmp
= fold_convert (TREE_TYPE (packed
), tmp
);
893 gfc_add_modify (&do_copying
, packed
, tmp
);
894 tmp
= gfc_build_memcpy_call (packed
, source_data
, size
);
895 gfc_add_expr_to_block (&do_copying
, tmp
);
897 was_packed
= fold_build2_loc (input_location
, EQ_EXPR
,
898 boolean_type_node
, packed
,
900 tmp
= gfc_finish_block (&do_copying
);
901 tmp
= build3_v (COND_EXPR
, was_packed
, tmp
,
902 build_empty_stmt (input_location
));
903 gfc_add_expr_to_block (pre
, tmp
);
905 tmp
= fold_convert (pvoid_type_node
, packed
);
908 gfc_conv_descriptor_data_set (pre
, desc
, tmp
);
911 info
->data
= gfc_conv_descriptor_data_get (desc
);
913 /* The offset is zero because we create temporaries with a zero
915 gfc_conv_descriptor_offset_set (pre
, desc
, gfc_index_zero_node
);
917 if (dealloc
&& !onstack
)
919 /* Free the temporary. */
920 tmp
= gfc_conv_descriptor_data_get (desc
);
921 tmp
= gfc_call_free (tmp
);
922 gfc_add_expr_to_block (post
, tmp
);
927 /* Get the scalarizer array dimension corresponding to actual array dimension
930 For example, if SS represents the array ref a(1,:,:,1), it is a
931 bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
932 and 1 for ARRAY_DIM=2.
933 If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
934 scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
936 If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
937 array. If called on the inner ss, the result would be respectively 0,1,2 for
938 ARRAY_DIM=0,1,2. If called on the outer ss, the result would be 0,1
939 for ARRAY_DIM=1,2. */
942 get_scalarizer_dim_for_array_dim (gfc_ss
*ss
, int array_dim
)
949 for (; ss
; ss
= ss
->parent
)
950 for (n
= 0; n
< ss
->dimen
; n
++)
951 if (ss
->dim
[n
] < array_dim
)
954 return array_ref_dim
;
959 innermost_ss (gfc_ss
*ss
)
961 while (ss
->nested_ss
!= NULL
)
969 /* Get the array reference dimension corresponding to the given loop dimension.
970 It is different from the true array dimension given by the dim array in
971 the case of a partial array reference (i.e. a(:,:,1,:) for example)
972 It is different from the loop dimension in the case of a transposed array.
976 get_array_ref_dim_for_loop_dim (gfc_ss
*ss
, int loop_dim
)
978 return get_scalarizer_dim_for_array_dim (innermost_ss (ss
),
983 /* Generate code to create and initialize the descriptor for a temporary
984 array. This is used for both temporaries needed by the scalarizer, and
985 functions returning arrays. Adjusts the loop variables to be
986 zero-based, and calculates the loop bounds for callee allocated arrays.
987 Allocate the array unless it's callee allocated (we have a callee
988 allocated array if 'callee_alloc' is true, or if loop->to[n] is
989 NULL_TREE for any n). Also fills in the descriptor, data and offset
990 fields of info if known. Returns the size of the array, or NULL for a
991 callee allocated array.
993 'eltype' == NULL signals that the temporary should be a class object.
994 The 'initial' expression is used to obtain the size of the dynamic
995 type; otherwise the allocation and initialization proceeds as for any
998 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
999 gfc_trans_allocate_array_storage. */
1002 gfc_trans_create_temp_array (stmtblock_t
* pre
, stmtblock_t
* post
, gfc_ss
* ss
,
1003 tree eltype
, tree initial
, bool dynamic
,
1004 bool dealloc
, bool callee_alloc
, locus
* where
)
1008 gfc_array_info
*info
;
1009 tree from
[GFC_MAX_DIMENSIONS
], to
[GFC_MAX_DIMENSIONS
];
1017 tree class_expr
= NULL_TREE
;
1018 int n
, dim
, tmp_dim
;
1021 /* This signals a class array for which we need the size of the
1022 dynamic type. Generate an eltype and then the class expression. */
1023 if (eltype
== NULL_TREE
&& initial
)
1025 gcc_assert (POINTER_TYPE_P (TREE_TYPE (initial
)));
1026 class_expr
= build_fold_indirect_ref_loc (input_location
, initial
);
1027 eltype
= TREE_TYPE (class_expr
);
1028 eltype
= gfc_get_element_type (eltype
);
1029 /* Obtain the structure (class) expression. */
1030 class_expr
= TREE_OPERAND (class_expr
, 0);
1031 gcc_assert (class_expr
);
1034 memset (from
, 0, sizeof (from
));
1035 memset (to
, 0, sizeof (to
));
1037 info
= &ss
->info
->data
.array
;
1039 gcc_assert (ss
->dimen
> 0);
1040 gcc_assert (ss
->loop
->dimen
== ss
->dimen
);
1042 if (warn_array_temporaries
&& where
)
1043 gfc_warning (OPT_Warray_temporaries
,
1044 "Creating array temporary at %L", where
);
1046 /* Set the lower bound to zero. */
1047 for (s
= ss
; s
; s
= s
->parent
)
1051 total_dim
+= loop
->dimen
;
1052 for (n
= 0; n
< loop
->dimen
; n
++)
1056 /* Callee allocated arrays may not have a known bound yet. */
1058 loop
->to
[n
] = gfc_evaluate_now (
1059 fold_build2_loc (input_location
, MINUS_EXPR
,
1060 gfc_array_index_type
,
1061 loop
->to
[n
], loop
->from
[n
]),
1063 loop
->from
[n
] = gfc_index_zero_node
;
1065 /* We have just changed the loop bounds, we must clear the
1066 corresponding specloop, so that delta calculation is not skipped
1067 later in gfc_set_delta. */
1068 loop
->specloop
[n
] = NULL
;
1070 /* We are constructing the temporary's descriptor based on the loop
1071 dimensions. As the dimensions may be accessed in arbitrary order
1072 (think of transpose) the size taken from the n'th loop may not map
1073 to the n'th dimension of the array. We need to reconstruct loop
1074 infos in the right order before using it to set the descriptor
1076 tmp_dim
= get_scalarizer_dim_for_array_dim (ss
, dim
);
1077 from
[tmp_dim
] = loop
->from
[n
];
1078 to
[tmp_dim
] = loop
->to
[n
];
1080 info
->delta
[dim
] = gfc_index_zero_node
;
1081 info
->start
[dim
] = gfc_index_zero_node
;
1082 info
->end
[dim
] = gfc_index_zero_node
;
1083 info
->stride
[dim
] = gfc_index_one_node
;
1087 /* Initialize the descriptor. */
1089 gfc_get_array_type_bounds (eltype
, total_dim
, 0, from
, to
, 1,
1090 GFC_ARRAY_UNKNOWN
, true);
1091 desc
= gfc_create_var (type
, "atmp");
1092 GFC_DECL_PACKED_ARRAY (desc
) = 1;
1094 info
->descriptor
= desc
;
1095 size
= gfc_index_one_node
;
1097 /* Emit a DECL_EXPR for the variable sized array type in
1098 GFC_TYPE_ARRAY_DATAPTR_TYPE so the gimplification of its type
1099 sizes works correctly. */
1100 tree arraytype
= TREE_TYPE (GFC_TYPE_ARRAY_DATAPTR_TYPE (type
));
1101 if (! TYPE_NAME (arraytype
))
1102 TYPE_NAME (arraytype
) = build_decl (UNKNOWN_LOCATION
, TYPE_DECL
,
1103 NULL_TREE
, arraytype
);
1104 gfc_add_expr_to_block (pre
, build1 (DECL_EXPR
,
1105 arraytype
, TYPE_NAME (arraytype
)));
1107 /* Fill in the array dtype. */
1108 tmp
= gfc_conv_descriptor_dtype (desc
);
1109 gfc_add_modify (pre
, tmp
, gfc_get_dtype (TREE_TYPE (desc
)));
1112 Fill in the bounds and stride. This is a packed array, so:
1115 for (n = 0; n < rank; n++)
1118 delta = ubound[n] + 1 - lbound[n];
1119 size = size * delta;
1121 size = size * sizeof(element);
1124 or_expr
= NULL_TREE
;
1126 /* If there is at least one null loop->to[n], it is a callee allocated
1128 for (n
= 0; n
< total_dim
; n
++)
1129 if (to
[n
] == NULL_TREE
)
1135 if (size
== NULL_TREE
)
1136 for (s
= ss
; s
; s
= s
->parent
)
1137 for (n
= 0; n
< s
->loop
->dimen
; n
++)
1139 dim
= get_scalarizer_dim_for_array_dim (ss
, s
->dim
[n
]);
1141 /* For a callee allocated array express the loop bounds in terms
1142 of the descriptor fields. */
1143 tmp
= fold_build2_loc (input_location
,
1144 MINUS_EXPR
, gfc_array_index_type
,
1145 gfc_conv_descriptor_ubound_get (desc
, gfc_rank_cst
[dim
]),
1146 gfc_conv_descriptor_lbound_get (desc
, gfc_rank_cst
[dim
]));
1147 s
->loop
->to
[n
] = tmp
;
1151 for (n
= 0; n
< total_dim
; n
++)
1153 /* Store the stride and bound components in the descriptor. */
1154 gfc_conv_descriptor_stride_set (pre
, desc
, gfc_rank_cst
[n
], size
);
1156 gfc_conv_descriptor_lbound_set (pre
, desc
, gfc_rank_cst
[n
],
1157 gfc_index_zero_node
);
1159 gfc_conv_descriptor_ubound_set (pre
, desc
, gfc_rank_cst
[n
], to
[n
]);
1161 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
1162 gfc_array_index_type
,
1163 to
[n
], gfc_index_one_node
);
1165 /* Check whether the size for this dimension is negative. */
1166 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
1167 tmp
, gfc_index_zero_node
);
1168 cond
= gfc_evaluate_now (cond
, pre
);
1173 or_expr
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
1174 boolean_type_node
, or_expr
, cond
);
1176 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1177 gfc_array_index_type
, size
, tmp
);
1178 size
= gfc_evaluate_now (size
, pre
);
1182 /* Get the size of the array. */
1183 if (size
&& !callee_alloc
)
1186 /* If or_expr is true, then the extent in at least one
1187 dimension is zero and the size is set to zero. */
1188 size
= fold_build3_loc (input_location
, COND_EXPR
, gfc_array_index_type
,
1189 or_expr
, gfc_index_zero_node
, size
);
1192 if (class_expr
== NULL_TREE
)
1193 elemsize
= fold_convert (gfc_array_index_type
,
1194 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
1196 elemsize
= gfc_class_vtab_size_get (class_expr
);
1198 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
1207 gfc_trans_allocate_array_storage (pre
, post
, info
, size
, nelem
, initial
,
1213 if (ss
->dimen
> ss
->loop
->temp_dim
)
1214 ss
->loop
->temp_dim
= ss
->dimen
;
1220 /* Return the number of iterations in a loop that starts at START,
1221 ends at END, and has step STEP. */
1224 gfc_get_iteration_count (tree start
, tree end
, tree step
)
1229 type
= TREE_TYPE (step
);
1230 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, type
, end
, start
);
1231 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
, type
, tmp
, step
);
1232 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, type
, tmp
,
1233 build_int_cst (type
, 1));
1234 tmp
= fold_build2_loc (input_location
, MAX_EXPR
, type
, tmp
,
1235 build_int_cst (type
, 0));
1236 return fold_convert (gfc_array_index_type
, tmp
);
1240 /* Extend the data in array DESC by EXTRA elements. */
1243 gfc_grow_array (stmtblock_t
* pblock
, tree desc
, tree extra
)
1250 if (integer_zerop (extra
))
1253 ubound
= gfc_conv_descriptor_ubound_get (desc
, gfc_rank_cst
[0]);
1255 /* Add EXTRA to the upper bound. */
1256 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1258 gfc_conv_descriptor_ubound_set (pblock
, desc
, gfc_rank_cst
[0], tmp
);
1260 /* Get the value of the current data pointer. */
1261 arg0
= gfc_conv_descriptor_data_get (desc
);
1263 /* Calculate the new array size. */
1264 size
= TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc
)));
1265 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1266 ubound
, gfc_index_one_node
);
1267 arg1
= fold_build2_loc (input_location
, MULT_EXPR
, size_type_node
,
1268 fold_convert (size_type_node
, tmp
),
1269 fold_convert (size_type_node
, size
));
1271 /* Call the realloc() function. */
1272 tmp
= gfc_call_realloc (pblock
, arg0
, arg1
);
1273 gfc_conv_descriptor_data_set (pblock
, desc
, tmp
);
1277 /* Return true if the bounds of iterator I can only be determined
1281 gfc_iterator_has_dynamic_bounds (gfc_iterator
* i
)
1283 return (i
->start
->expr_type
!= EXPR_CONSTANT
1284 || i
->end
->expr_type
!= EXPR_CONSTANT
1285 || i
->step
->expr_type
!= EXPR_CONSTANT
);
1289 /* Split the size of constructor element EXPR into the sum of two terms,
1290 one of which can be determined at compile time and one of which must
1291 be calculated at run time. Set *SIZE to the former and return true
1292 if the latter might be nonzero. */
1295 gfc_get_array_constructor_element_size (mpz_t
* size
, gfc_expr
* expr
)
1297 if (expr
->expr_type
== EXPR_ARRAY
)
1298 return gfc_get_array_constructor_size (size
, expr
->value
.constructor
);
1299 else if (expr
->rank
> 0)
1301 /* Calculate everything at run time. */
1302 mpz_set_ui (*size
, 0);
1307 /* A single element. */
1308 mpz_set_ui (*size
, 1);
1314 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1315 of array constructor C. */
1318 gfc_get_array_constructor_size (mpz_t
* size
, gfc_constructor_base base
)
1326 mpz_set_ui (*size
, 0);
1331 for (c
= gfc_constructor_first (base
); c
; c
= gfc_constructor_next (c
))
1334 if (i
&& gfc_iterator_has_dynamic_bounds (i
))
1338 dynamic
|= gfc_get_array_constructor_element_size (&len
, c
->expr
);
1341 /* Multiply the static part of the element size by the
1342 number of iterations. */
1343 mpz_sub (val
, i
->end
->value
.integer
, i
->start
->value
.integer
);
1344 mpz_fdiv_q (val
, val
, i
->step
->value
.integer
);
1345 mpz_add_ui (val
, val
, 1);
1346 if (mpz_sgn (val
) > 0)
1347 mpz_mul (len
, len
, val
);
1349 mpz_set_ui (len
, 0);
1351 mpz_add (*size
, *size
, len
);
1360 /* Make sure offset is a variable. */
1363 gfc_put_offset_into_var (stmtblock_t
* pblock
, tree
* poffset
,
1366 /* We should have already created the offset variable. We cannot
1367 create it here because we may be in an inner scope. */
1368 gcc_assert (*offsetvar
!= NULL_TREE
);
1369 gfc_add_modify (pblock
, *offsetvar
, *poffset
);
1370 *poffset
= *offsetvar
;
1371 TREE_USED (*offsetvar
) = 1;
1375 /* Variables needed for bounds-checking. */
1376 static bool first_len
;
1377 static tree first_len_val
;
1378 static bool typespec_chararray_ctor
;
1381 gfc_trans_array_ctor_element (stmtblock_t
* pblock
, tree desc
,
1382 tree offset
, gfc_se
* se
, gfc_expr
* expr
)
1386 gfc_conv_expr (se
, expr
);
1388 /* Store the value. */
1389 tmp
= build_fold_indirect_ref_loc (input_location
,
1390 gfc_conv_descriptor_data_get (desc
));
1391 tmp
= gfc_build_array_ref (tmp
, offset
, NULL
);
1393 if (expr
->ts
.type
== BT_CHARACTER
)
1395 int i
= gfc_validate_kind (BT_CHARACTER
, expr
->ts
.kind
, false);
1398 esize
= size_in_bytes (gfc_get_element_type (TREE_TYPE (desc
)));
1399 esize
= fold_convert (gfc_charlen_type_node
, esize
);
1400 esize
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
,
1401 gfc_charlen_type_node
, esize
,
1402 build_int_cst (gfc_charlen_type_node
,
1403 gfc_character_kinds
[i
].bit_size
/ 8));
1405 gfc_conv_string_parameter (se
);
1406 if (POINTER_TYPE_P (TREE_TYPE (tmp
)))
1408 /* The temporary is an array of pointers. */
1409 se
->expr
= fold_convert (TREE_TYPE (tmp
), se
->expr
);
1410 gfc_add_modify (&se
->pre
, tmp
, se
->expr
);
1414 /* The temporary is an array of string values. */
1415 tmp
= gfc_build_addr_expr (gfc_get_pchar_type (expr
->ts
.kind
), tmp
);
1416 /* We know the temporary and the value will be the same length,
1417 so can use memcpy. */
1418 gfc_trans_string_copy (&se
->pre
, esize
, tmp
, expr
->ts
.kind
,
1419 se
->string_length
, se
->expr
, expr
->ts
.kind
);
1421 if ((gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
) && !typespec_chararray_ctor
)
1425 gfc_add_modify (&se
->pre
, first_len_val
,
1431 /* Verify that all constructor elements are of the same
1433 tree cond
= fold_build2_loc (input_location
, NE_EXPR
,
1434 boolean_type_node
, first_len_val
,
1436 gfc_trans_runtime_check
1437 (true, false, cond
, &se
->pre
, &expr
->where
,
1438 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1439 fold_convert (long_integer_type_node
, first_len_val
),
1440 fold_convert (long_integer_type_node
, se
->string_length
));
1446 /* TODO: Should the frontend already have done this conversion? */
1447 se
->expr
= fold_convert (TREE_TYPE (tmp
), se
->expr
);
1448 gfc_add_modify (&se
->pre
, tmp
, se
->expr
);
1451 gfc_add_block_to_block (pblock
, &se
->pre
);
1452 gfc_add_block_to_block (pblock
, &se
->post
);
1456 /* Add the contents of an array to the constructor. DYNAMIC is as for
1457 gfc_trans_array_constructor_value. */
1460 gfc_trans_array_constructor_subarray (stmtblock_t
* pblock
,
1461 tree type ATTRIBUTE_UNUSED
,
1462 tree desc
, gfc_expr
* expr
,
1463 tree
* poffset
, tree
* offsetvar
,
1474 /* We need this to be a variable so we can increment it. */
1475 gfc_put_offset_into_var (pblock
, poffset
, offsetvar
);
1477 gfc_init_se (&se
, NULL
);
1479 /* Walk the array expression. */
1480 ss
= gfc_walk_expr (expr
);
1481 gcc_assert (ss
!= gfc_ss_terminator
);
1483 /* Initialize the scalarizer. */
1484 gfc_init_loopinfo (&loop
);
1485 gfc_add_ss_to_loop (&loop
, ss
);
1487 /* Initialize the loop. */
1488 gfc_conv_ss_startstride (&loop
);
1489 gfc_conv_loop_setup (&loop
, &expr
->where
);
1491 /* Make sure the constructed array has room for the new data. */
1494 /* Set SIZE to the total number of elements in the subarray. */
1495 size
= gfc_index_one_node
;
1496 for (n
= 0; n
< loop
.dimen
; n
++)
1498 tmp
= gfc_get_iteration_count (loop
.from
[n
], loop
.to
[n
],
1499 gfc_index_one_node
);
1500 size
= fold_build2_loc (input_location
, MULT_EXPR
,
1501 gfc_array_index_type
, size
, tmp
);
1504 /* Grow the constructed array by SIZE elements. */
1505 gfc_grow_array (&loop
.pre
, desc
, size
);
1508 /* Make the loop body. */
1509 gfc_mark_ss_chain_used (ss
, 1);
1510 gfc_start_scalarized_body (&loop
, &body
);
1511 gfc_copy_loopinfo_to_se (&se
, &loop
);
1514 gfc_trans_array_ctor_element (&body
, desc
, *poffset
, &se
, expr
);
1515 gcc_assert (se
.ss
== gfc_ss_terminator
);
1517 /* Increment the offset. */
1518 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
1519 *poffset
, gfc_index_one_node
);
1520 gfc_add_modify (&body
, *poffset
, tmp
);
1522 /* Finish the loop. */
1523 gfc_trans_scalarizing_loops (&loop
, &body
);
1524 gfc_add_block_to_block (&loop
.pre
, &loop
.post
);
1525 tmp
= gfc_finish_block (&loop
.pre
);
1526 gfc_add_expr_to_block (pblock
, tmp
);
1528 gfc_cleanup_loop (&loop
);
1532 /* Assign the values to the elements of an array constructor. DYNAMIC
1533 is true if descriptor DESC only contains enough data for the static
1534 size calculated by gfc_get_array_constructor_size. When true, memory
1535 for the dynamic parts must be allocated using realloc. */
1538 gfc_trans_array_constructor_value (stmtblock_t
* pblock
, tree type
,
1539 tree desc
, gfc_constructor_base base
,
1540 tree
* poffset
, tree
* offsetvar
,
1544 tree start
= NULL_TREE
;
1545 tree end
= NULL_TREE
;
1546 tree step
= NULL_TREE
;
1552 tree shadow_loopvar
= NULL_TREE
;
1553 gfc_saved_var saved_loopvar
;
1556 for (c
= gfc_constructor_first (base
); c
; c
= gfc_constructor_next (c
))
1558 /* If this is an iterator or an array, the offset must be a variable. */
1559 if ((c
->iterator
|| c
->expr
->rank
> 0) && INTEGER_CST_P (*poffset
))
1560 gfc_put_offset_into_var (pblock
, poffset
, offsetvar
);
1562 /* Shadowing the iterator avoids changing its value and saves us from
1563 keeping track of it. Further, it makes sure that there's always a
1564 backend-decl for the symbol, even if there wasn't one before,
1565 e.g. in the case of an iterator that appears in a specification
1566 expression in an interface mapping. */
1572 /* Evaluate loop bounds before substituting the loop variable
1573 in case they depend on it. Such a case is invalid, but it is
1574 not more expensive to do the right thing here.
1576 gfc_init_se (&se
, NULL
);
1577 gfc_conv_expr_val (&se
, c
->iterator
->start
);
1578 gfc_add_block_to_block (pblock
, &se
.pre
);
1579 start
= gfc_evaluate_now (se
.expr
, pblock
);
1581 gfc_init_se (&se
, NULL
);
1582 gfc_conv_expr_val (&se
, c
->iterator
->end
);
1583 gfc_add_block_to_block (pblock
, &se
.pre
);
1584 end
= gfc_evaluate_now (se
.expr
, pblock
);
1586 gfc_init_se (&se
, NULL
);
1587 gfc_conv_expr_val (&se
, c
->iterator
->step
);
1588 gfc_add_block_to_block (pblock
, &se
.pre
);
1589 step
= gfc_evaluate_now (se
.expr
, pblock
);
1591 sym
= c
->iterator
->var
->symtree
->n
.sym
;
1592 type
= gfc_typenode_for_spec (&sym
->ts
);
1594 shadow_loopvar
= gfc_create_var (type
, "shadow_loopvar");
1595 gfc_shadow_sym (sym
, shadow_loopvar
, &saved_loopvar
);
1598 gfc_start_block (&body
);
1600 if (c
->expr
->expr_type
== EXPR_ARRAY
)
1602 /* Array constructors can be nested. */
1603 gfc_trans_array_constructor_value (&body
, type
, desc
,
1604 c
->expr
->value
.constructor
,
1605 poffset
, offsetvar
, dynamic
);
1607 else if (c
->expr
->rank
> 0)
1609 gfc_trans_array_constructor_subarray (&body
, type
, desc
, c
->expr
,
1610 poffset
, offsetvar
, dynamic
);
1614 /* This code really upsets the gimplifier so don't bother for now. */
1621 while (p
&& !(p
->iterator
|| p
->expr
->expr_type
!= EXPR_CONSTANT
))
1623 p
= gfc_constructor_next (p
);
1628 /* Scalar values. */
1629 gfc_init_se (&se
, NULL
);
1630 gfc_trans_array_ctor_element (&body
, desc
, *poffset
,
1633 *poffset
= fold_build2_loc (input_location
, PLUS_EXPR
,
1634 gfc_array_index_type
,
1635 *poffset
, gfc_index_one_node
);
1639 /* Collect multiple scalar constants into a constructor. */
1640 vec
<constructor_elt
, va_gc
> *v
= NULL
;
1644 HOST_WIDE_INT idx
= 0;
1647 /* Count the number of consecutive scalar constants. */
1648 while (p
&& !(p
->iterator
1649 || p
->expr
->expr_type
!= EXPR_CONSTANT
))
1651 gfc_init_se (&se
, NULL
);
1652 gfc_conv_constant (&se
, p
->expr
);
1654 if (c
->expr
->ts
.type
!= BT_CHARACTER
)
1655 se
.expr
= fold_convert (type
, se
.expr
);
1656 /* For constant character array constructors we build
1657 an array of pointers. */
1658 else if (POINTER_TYPE_P (type
))
1659 se
.expr
= gfc_build_addr_expr
1660 (gfc_get_pchar_type (p
->expr
->ts
.kind
),
1663 CONSTRUCTOR_APPEND_ELT (v
,
1664 build_int_cst (gfc_array_index_type
,
1668 p
= gfc_constructor_next (p
);
1671 bound
= size_int (n
- 1);
1672 /* Create an array type to hold them. */
1673 tmptype
= build_range_type (gfc_array_index_type
,
1674 gfc_index_zero_node
, bound
);
1675 tmptype
= build_array_type (type
, tmptype
);
1677 init
= build_constructor (tmptype
, v
);
1678 TREE_CONSTANT (init
) = 1;
1679 TREE_STATIC (init
) = 1;
1680 /* Create a static variable to hold the data. */
1681 tmp
= gfc_create_var (tmptype
, "data");
1682 TREE_STATIC (tmp
) = 1;
1683 TREE_CONSTANT (tmp
) = 1;
1684 TREE_READONLY (tmp
) = 1;
1685 DECL_INITIAL (tmp
) = init
;
1688 /* Use BUILTIN_MEMCPY to assign the values. */
1689 tmp
= gfc_conv_descriptor_data_get (desc
);
1690 tmp
= build_fold_indirect_ref_loc (input_location
,
1692 tmp
= gfc_build_array_ref (tmp
, *poffset
, NULL
);
1693 tmp
= gfc_build_addr_expr (NULL_TREE
, tmp
);
1694 init
= gfc_build_addr_expr (NULL_TREE
, init
);
1696 size
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type
));
1697 bound
= build_int_cst (size_type_node
, n
* size
);
1698 tmp
= build_call_expr_loc (input_location
,
1699 builtin_decl_explicit (BUILT_IN_MEMCPY
),
1700 3, tmp
, init
, bound
);
1701 gfc_add_expr_to_block (&body
, tmp
);
1703 *poffset
= fold_build2_loc (input_location
, PLUS_EXPR
,
1704 gfc_array_index_type
, *poffset
,
1705 build_int_cst (gfc_array_index_type
, n
));
1707 if (!INTEGER_CST_P (*poffset
))
1709 gfc_add_modify (&body
, *offsetvar
, *poffset
);
1710 *poffset
= *offsetvar
;
1714 /* The frontend should already have done any expansions
1718 /* Pass the code as is. */
1719 tmp
= gfc_finish_block (&body
);
1720 gfc_add_expr_to_block (pblock
, tmp
);
1724 /* Build the implied do-loop. */
1725 stmtblock_t implied_do_block
;
1731 loopbody
= gfc_finish_block (&body
);
1733 /* Create a new block that holds the implied-do loop. A temporary
1734 loop-variable is used. */
1735 gfc_start_block(&implied_do_block
);
1737 /* Initialize the loop. */
1738 gfc_add_modify (&implied_do_block
, shadow_loopvar
, start
);
1740 /* If this array expands dynamically, and the number of iterations
1741 is not constant, we won't have allocated space for the static
1742 part of C->EXPR's size. Do that now. */
1743 if (dynamic
&& gfc_iterator_has_dynamic_bounds (c
->iterator
))
1745 /* Get the number of iterations. */
1746 tmp
= gfc_get_iteration_count (shadow_loopvar
, end
, step
);
1748 /* Get the static part of C->EXPR's size. */
1749 gfc_get_array_constructor_element_size (&size
, c
->expr
);
1750 tmp2
= gfc_conv_mpz_to_tree (size
, gfc_index_integer_kind
);
1752 /* Grow the array by TMP * TMP2 elements. */
1753 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
1754 gfc_array_index_type
, tmp
, tmp2
);
1755 gfc_grow_array (&implied_do_block
, desc
, tmp
);
1758 /* Generate the loop body. */
1759 exit_label
= gfc_build_label_decl (NULL_TREE
);
1760 gfc_start_block (&body
);
1762 /* Generate the exit condition. Depending on the sign of
1763 the step variable we have to generate the correct
1765 tmp
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
1766 step
, build_int_cst (TREE_TYPE (step
), 0));
1767 cond
= fold_build3_loc (input_location
, COND_EXPR
,
1768 boolean_type_node
, tmp
,
1769 fold_build2_loc (input_location
, GT_EXPR
,
1770 boolean_type_node
, shadow_loopvar
, end
),
1771 fold_build2_loc (input_location
, LT_EXPR
,
1772 boolean_type_node
, shadow_loopvar
, end
));
1773 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1774 TREE_USED (exit_label
) = 1;
1775 tmp
= build3_v (COND_EXPR
, cond
, tmp
,
1776 build_empty_stmt (input_location
));
1777 gfc_add_expr_to_block (&body
, tmp
);
1779 /* The main loop body. */
1780 gfc_add_expr_to_block (&body
, loopbody
);
1782 /* Increase loop variable by step. */
1783 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
1784 TREE_TYPE (shadow_loopvar
), shadow_loopvar
,
1786 gfc_add_modify (&body
, shadow_loopvar
, tmp
);
1788 /* Finish the loop. */
1789 tmp
= gfc_finish_block (&body
);
1790 tmp
= build1_v (LOOP_EXPR
, tmp
);
1791 gfc_add_expr_to_block (&implied_do_block
, tmp
);
1793 /* Add the exit label. */
1794 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1795 gfc_add_expr_to_block (&implied_do_block
, tmp
);
1797 /* Finish the implied-do loop. */
1798 tmp
= gfc_finish_block(&implied_do_block
);
1799 gfc_add_expr_to_block(pblock
, tmp
);
1801 gfc_restore_sym (c
->iterator
->var
->symtree
->n
.sym
, &saved_loopvar
);
1808 /* The array constructor code can create a string length with an operand
1809 in the form of a temporary variable. This variable will retain its
1810 context (current_function_decl). If we store this length tree in a
1811 gfc_charlen structure which is shared by a variable in another
1812 context, the resulting gfc_charlen structure with a variable in a
1813 different context, we could trip the assertion in expand_expr_real_1
1814 when it sees that a variable has been created in one context and
1815 referenced in another.
1817 If this might be the case, we create a new gfc_charlen structure and
1818 link it into the current namespace. */
1821 store_backend_decl (gfc_charlen
**clp
, tree len
, bool force_new_cl
)
1825 gfc_charlen
*new_cl
= gfc_new_charlen (gfc_current_ns
, *clp
);
1828 (*clp
)->backend_decl
= len
;
1831 /* A catch-all to obtain the string length for anything that is not
1832 a substring of non-constant length, a constant, array or variable. */
1835 get_array_ctor_all_strlen (stmtblock_t
*block
, gfc_expr
*e
, tree
*len
)
1839 /* Don't bother if we already know the length is a constant. */
1840 if (*len
&& INTEGER_CST_P (*len
))
1843 if (!e
->ref
&& e
->ts
.u
.cl
&& e
->ts
.u
.cl
->length
1844 && e
->ts
.u
.cl
->length
->expr_type
== EXPR_CONSTANT
)
1847 gfc_conv_const_charlen (e
->ts
.u
.cl
);
1848 *len
= e
->ts
.u
.cl
->backend_decl
;
1852 /* Otherwise, be brutal even if inefficient. */
1853 gfc_init_se (&se
, NULL
);
1855 /* No function call, in case of side effects. */
1856 se
.no_function_call
= 1;
1858 gfc_conv_expr (&se
, e
);
1860 gfc_conv_expr_descriptor (&se
, e
);
1862 /* Fix the value. */
1863 *len
= gfc_evaluate_now (se
.string_length
, &se
.pre
);
1865 gfc_add_block_to_block (block
, &se
.pre
);
1866 gfc_add_block_to_block (block
, &se
.post
);
1868 store_backend_decl (&e
->ts
.u
.cl
, *len
, true);
1873 /* Figure out the string length of a variable reference expression.
1874 Used by get_array_ctor_strlen. */
1877 get_array_ctor_var_strlen (stmtblock_t
*block
, gfc_expr
* expr
, tree
* len
)
1883 /* Don't bother if we already know the length is a constant. */
1884 if (*len
&& INTEGER_CST_P (*len
))
1887 ts
= &expr
->symtree
->n
.sym
->ts
;
1888 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
1893 /* Array references don't change the string length. */
1897 /* Use the length of the component. */
1898 ts
= &ref
->u
.c
.component
->ts
;
1902 if (ref
->u
.ss
.start
->expr_type
!= EXPR_CONSTANT
1903 || ref
->u
.ss
.end
->expr_type
!= EXPR_CONSTANT
)
1905 /* Note that this might evaluate expr. */
1906 get_array_ctor_all_strlen (block
, expr
, len
);
1909 mpz_init_set_ui (char_len
, 1);
1910 mpz_add (char_len
, char_len
, ref
->u
.ss
.end
->value
.integer
);
1911 mpz_sub (char_len
, char_len
, ref
->u
.ss
.start
->value
.integer
);
1912 *len
= gfc_conv_mpz_to_tree (char_len
, gfc_default_integer_kind
);
1913 *len
= convert (gfc_charlen_type_node
, *len
);
1914 mpz_clear (char_len
);
1922 *len
= ts
->u
.cl
->backend_decl
;
1926 /* Figure out the string length of a character array constructor.
1927 If len is NULL, don't calculate the length; this happens for recursive calls
1928 when a sub-array-constructor is an element but not at the first position,
1929 so when we're not interested in the length.
1930 Returns TRUE if all elements are character constants. */
1933 get_array_ctor_strlen (stmtblock_t
*block
, gfc_constructor_base base
, tree
* len
)
1940 if (gfc_constructor_first (base
) == NULL
)
1943 *len
= build_int_cstu (gfc_charlen_type_node
, 0);
1947 /* Loop over all constructor elements to find out is_const, but in len we
1948 want to store the length of the first, not the last, element. We can
1949 of course exit the loop as soon as is_const is found to be false. */
1950 for (c
= gfc_constructor_first (base
);
1951 c
&& is_const
; c
= gfc_constructor_next (c
))
1953 switch (c
->expr
->expr_type
)
1956 if (len
&& !(*len
&& INTEGER_CST_P (*len
)))
1957 *len
= build_int_cstu (gfc_charlen_type_node
,
1958 c
->expr
->value
.character
.length
);
1962 if (!get_array_ctor_strlen (block
, c
->expr
->value
.constructor
, len
))
1969 get_array_ctor_var_strlen (block
, c
->expr
, len
);
1975 get_array_ctor_all_strlen (block
, c
->expr
, len
);
1979 /* After the first iteration, we don't want the length modified. */
1986 /* Check whether the array constructor C consists entirely of constant
1987 elements, and if so returns the number of those elements, otherwise
1988 return zero. Note, an empty or NULL array constructor returns zero. */
1990 unsigned HOST_WIDE_INT
1991 gfc_constant_array_constructor_p (gfc_constructor_base base
)
1993 unsigned HOST_WIDE_INT nelem
= 0;
1995 gfc_constructor
*c
= gfc_constructor_first (base
);
1999 || c
->expr
->rank
> 0
2000 || c
->expr
->expr_type
!= EXPR_CONSTANT
)
2002 c
= gfc_constructor_next (c
);
2009 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
2010 and the tree type of it's elements, TYPE, return a static constant
2011 variable that is compile-time initialized. */
2014 gfc_build_constant_array_constructor (gfc_expr
* expr
, tree type
)
2016 tree tmptype
, init
, tmp
;
2017 HOST_WIDE_INT nelem
;
2022 vec
<constructor_elt
, va_gc
> *v
= NULL
;
2024 /* First traverse the constructor list, converting the constants
2025 to tree to build an initializer. */
2027 c
= gfc_constructor_first (expr
->value
.constructor
);
2030 gfc_init_se (&se
, NULL
);
2031 gfc_conv_constant (&se
, c
->expr
);
2032 if (c
->expr
->ts
.type
!= BT_CHARACTER
)
2033 se
.expr
= fold_convert (type
, se
.expr
);
2034 else if (POINTER_TYPE_P (type
))
2035 se
.expr
= gfc_build_addr_expr (gfc_get_pchar_type (c
->expr
->ts
.kind
),
2037 CONSTRUCTOR_APPEND_ELT (v
, build_int_cst (gfc_array_index_type
, nelem
),
2039 c
= gfc_constructor_next (c
);
2043 /* Next determine the tree type for the array. We use the gfortran
2044 front-end's gfc_get_nodesc_array_type in order to create a suitable
2045 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
2047 memset (&as
, 0, sizeof (gfc_array_spec
));
2049 as
.rank
= expr
->rank
;
2050 as
.type
= AS_EXPLICIT
;
2053 as
.lower
[0] = gfc_get_int_expr (gfc_default_integer_kind
, NULL
, 0);
2054 as
.upper
[0] = gfc_get_int_expr (gfc_default_integer_kind
,
2058 for (i
= 0; i
< expr
->rank
; i
++)
2060 int tmp
= (int) mpz_get_si (expr
->shape
[i
]);
2061 as
.lower
[i
] = gfc_get_int_expr (gfc_default_integer_kind
, NULL
, 0);
2062 as
.upper
[i
] = gfc_get_int_expr (gfc_default_integer_kind
,
2066 tmptype
= gfc_get_nodesc_array_type (type
, &as
, PACKED_STATIC
, true);
2068 /* as is not needed anymore. */
2069 for (i
= 0; i
< as
.rank
+ as
.corank
; i
++)
2071 gfc_free_expr (as
.lower
[i
]);
2072 gfc_free_expr (as
.upper
[i
]);
2075 init
= build_constructor (tmptype
, v
);
2077 TREE_CONSTANT (init
) = 1;
2078 TREE_STATIC (init
) = 1;
2080 tmp
= build_decl (input_location
, VAR_DECL
, create_tmp_var_name ("A"),
2082 DECL_ARTIFICIAL (tmp
) = 1;
2083 DECL_IGNORED_P (tmp
) = 1;
2084 TREE_STATIC (tmp
) = 1;
2085 TREE_CONSTANT (tmp
) = 1;
2086 TREE_READONLY (tmp
) = 1;
2087 DECL_INITIAL (tmp
) = init
;
2094 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
2095 This mostly initializes the scalarizer state info structure with the
2096 appropriate values to directly use the array created by the function
2097 gfc_build_constant_array_constructor. */
2100 trans_constant_array_constructor (gfc_ss
* ss
, tree type
)
2102 gfc_array_info
*info
;
2106 tmp
= gfc_build_constant_array_constructor (ss
->info
->expr
, type
);
2108 info
= &ss
->info
->data
.array
;
2110 info
->descriptor
= tmp
;
2111 info
->data
= gfc_build_addr_expr (NULL_TREE
, tmp
);
2112 info
->offset
= gfc_index_zero_node
;
2114 for (i
= 0; i
< ss
->dimen
; i
++)
2116 info
->delta
[i
] = gfc_index_zero_node
;
2117 info
->start
[i
] = gfc_index_zero_node
;
2118 info
->end
[i
] = gfc_index_zero_node
;
2119 info
->stride
[i
] = gfc_index_one_node
;
2125 get_rank (gfc_loopinfo
*loop
)
2130 for (; loop
; loop
= loop
->parent
)
2131 rank
+= loop
->dimen
;
2137 /* Helper routine of gfc_trans_array_constructor to determine if the
2138 bounds of the loop specified by LOOP are constant and simple enough
2139 to use with trans_constant_array_constructor. Returns the
2140 iteration count of the loop if suitable, and NULL_TREE otherwise. */
2143 constant_array_constructor_loop_size (gfc_loopinfo
* l
)
2146 tree size
= gfc_index_one_node
;
2150 total_dim
= get_rank (l
);
2152 for (loop
= l
; loop
; loop
= loop
->parent
)
2154 for (i
= 0; i
< loop
->dimen
; i
++)
2156 /* If the bounds aren't constant, return NULL_TREE. */
2157 if (!INTEGER_CST_P (loop
->from
[i
]) || !INTEGER_CST_P (loop
->to
[i
]))
2159 if (!integer_zerop (loop
->from
[i
]))
2161 /* Only allow nonzero "from" in one-dimensional arrays. */
2164 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
2165 gfc_array_index_type
,
2166 loop
->to
[i
], loop
->from
[i
]);
2170 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
2171 gfc_array_index_type
, tmp
, gfc_index_one_node
);
2172 size
= fold_build2_loc (input_location
, MULT_EXPR
,
2173 gfc_array_index_type
, size
, tmp
);
2182 get_loop_upper_bound_for_array (gfc_ss
*array
, int array_dim
)
2187 gcc_assert (array
->nested_ss
== NULL
);
2189 for (ss
= array
; ss
; ss
= ss
->parent
)
2190 for (n
= 0; n
< ss
->loop
->dimen
; n
++)
2191 if (array_dim
== get_array_ref_dim_for_loop_dim (ss
, n
))
2192 return &(ss
->loop
->to
[n
]);
2198 static gfc_loopinfo
*
2199 outermost_loop (gfc_loopinfo
* loop
)
2201 while (loop
->parent
!= NULL
)
2202 loop
= loop
->parent
;
2208 /* Array constructors are handled by constructing a temporary, then using that
2209 within the scalarization loop. This is not optimal, but seems by far the
2213 trans_array_constructor (gfc_ss
* ss
, locus
* where
)
2215 gfc_constructor_base c
;
2223 bool old_first_len
, old_typespec_chararray_ctor
;
2224 tree old_first_len_val
;
2225 gfc_loopinfo
*loop
, *outer_loop
;
2226 gfc_ss_info
*ss_info
;
2230 /* Save the old values for nested checking. */
2231 old_first_len
= first_len
;
2232 old_first_len_val
= first_len_val
;
2233 old_typespec_chararray_ctor
= typespec_chararray_ctor
;
2236 outer_loop
= outermost_loop (loop
);
2238 expr
= ss_info
->expr
;
2240 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
2241 typespec was given for the array constructor. */
2242 typespec_chararray_ctor
= (expr
->ts
.u
.cl
2243 && expr
->ts
.u
.cl
->length_from_typespec
);
2245 if ((gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
2246 && expr
->ts
.type
== BT_CHARACTER
&& !typespec_chararray_ctor
)
2248 first_len_val
= gfc_create_var (gfc_charlen_type_node
, "len");
2252 gcc_assert (ss
->dimen
== ss
->loop
->dimen
);
2254 c
= expr
->value
.constructor
;
2255 if (expr
->ts
.type
== BT_CHARACTER
)
2258 bool force_new_cl
= false;
2260 /* get_array_ctor_strlen walks the elements of the constructor, if a
2261 typespec was given, we already know the string length and want the one
2263 if (typespec_chararray_ctor
&& expr
->ts
.u
.cl
->length
2264 && expr
->ts
.u
.cl
->length
->expr_type
!= EXPR_CONSTANT
)
2268 const_string
= false;
2269 gfc_init_se (&length_se
, NULL
);
2270 gfc_conv_expr_type (&length_se
, expr
->ts
.u
.cl
->length
,
2271 gfc_charlen_type_node
);
2272 ss_info
->string_length
= length_se
.expr
;
2273 gfc_add_block_to_block (&outer_loop
->pre
, &length_se
.pre
);
2274 gfc_add_block_to_block (&outer_loop
->post
, &length_se
.post
);
2278 const_string
= get_array_ctor_strlen (&outer_loop
->pre
, c
,
2279 &ss_info
->string_length
);
2280 force_new_cl
= true;
2283 /* Complex character array constructors should have been taken care of
2284 and not end up here. */
2285 gcc_assert (ss_info
->string_length
);
2287 store_backend_decl (&expr
->ts
.u
.cl
, ss_info
->string_length
, force_new_cl
);
2289 type
= gfc_get_character_type_len (expr
->ts
.kind
, ss_info
->string_length
);
2291 type
= build_pointer_type (type
);
2294 type
= gfc_typenode_for_spec (&expr
->ts
);
2296 /* See if the constructor determines the loop bounds. */
2299 loop_ubound0
= get_loop_upper_bound_for_array (ss
, 0);
2301 if (expr
->shape
&& get_rank (loop
) > 1 && *loop_ubound0
== NULL_TREE
)
2303 /* We have a multidimensional parameter. */
2304 for (s
= ss
; s
; s
= s
->parent
)
2307 for (n
= 0; n
< s
->loop
->dimen
; n
++)
2309 s
->loop
->from
[n
] = gfc_index_zero_node
;
2310 s
->loop
->to
[n
] = gfc_conv_mpz_to_tree (expr
->shape
[s
->dim
[n
]],
2311 gfc_index_integer_kind
);
2312 s
->loop
->to
[n
] = fold_build2_loc (input_location
, MINUS_EXPR
,
2313 gfc_array_index_type
,
2315 gfc_index_one_node
);
2320 if (*loop_ubound0
== NULL_TREE
)
2324 /* We should have a 1-dimensional, zero-based loop. */
2325 gcc_assert (loop
->parent
== NULL
&& loop
->nested
== NULL
);
2326 gcc_assert (loop
->dimen
== 1);
2327 gcc_assert (integer_zerop (loop
->from
[0]));
2329 /* Split the constructor size into a static part and a dynamic part.
2330 Allocate the static size up-front and record whether the dynamic
2331 size might be nonzero. */
2333 dynamic
= gfc_get_array_constructor_size (&size
, c
);
2334 mpz_sub_ui (size
, size
, 1);
2335 loop
->to
[0] = gfc_conv_mpz_to_tree (size
, gfc_index_integer_kind
);
2339 /* Special case constant array constructors. */
2342 unsigned HOST_WIDE_INT nelem
= gfc_constant_array_constructor_p (c
);
2345 tree size
= constant_array_constructor_loop_size (loop
);
2346 if (size
&& compare_tree_int (size
, nelem
) == 0)
2348 trans_constant_array_constructor (ss
, type
);
2354 gfc_trans_create_temp_array (&outer_loop
->pre
, &outer_loop
->post
, ss
, type
,
2355 NULL_TREE
, dynamic
, true, false, where
);
2357 desc
= ss_info
->data
.array
.descriptor
;
2358 offset
= gfc_index_zero_node
;
2359 offsetvar
= gfc_create_var_np (gfc_array_index_type
, "offset");
2360 TREE_NO_WARNING (offsetvar
) = 1;
2361 TREE_USED (offsetvar
) = 0;
2362 gfc_trans_array_constructor_value (&outer_loop
->pre
, type
, desc
, c
,
2363 &offset
, &offsetvar
, dynamic
);
2365 /* If the array grows dynamically, the upper bound of the loop variable
2366 is determined by the array's final upper bound. */
2369 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
2370 gfc_array_index_type
,
2371 offsetvar
, gfc_index_one_node
);
2372 tmp
= gfc_evaluate_now (tmp
, &outer_loop
->pre
);
2373 gfc_conv_descriptor_ubound_set (&loop
->pre
, desc
, gfc_rank_cst
[0], tmp
);
2374 if (*loop_ubound0
&& TREE_CODE (*loop_ubound0
) == VAR_DECL
)
2375 gfc_add_modify (&outer_loop
->pre
, *loop_ubound0
, tmp
);
2377 *loop_ubound0
= tmp
;
2380 if (TREE_USED (offsetvar
))
2381 pushdecl (offsetvar
);
2383 gcc_assert (INTEGER_CST_P (offset
));
2386 /* Disable bound checking for now because it's probably broken. */
2387 if (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
2394 /* Restore old values of globals. */
2395 first_len
= old_first_len
;
2396 first_len_val
= old_first_len_val
;
2397 typespec_chararray_ctor
= old_typespec_chararray_ctor
;
2401 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2402 called after evaluating all of INFO's vector dimensions. Go through
2403 each such vector dimension and see if we can now fill in any missing
2407 set_vector_loop_bounds (gfc_ss
* ss
)
2409 gfc_loopinfo
*loop
, *outer_loop
;
2410 gfc_array_info
*info
;
2418 outer_loop
= outermost_loop (ss
->loop
);
2420 info
= &ss
->info
->data
.array
;
2422 for (; ss
; ss
= ss
->parent
)
2426 for (n
= 0; n
< loop
->dimen
; n
++)
2429 if (info
->ref
->u
.ar
.dimen_type
[dim
] != DIMEN_VECTOR
2430 || loop
->to
[n
] != NULL
)
2433 /* Loop variable N indexes vector dimension DIM, and we don't
2434 yet know the upper bound of loop variable N. Set it to the
2435 difference between the vector's upper and lower bounds. */
2436 gcc_assert (loop
->from
[n
] == gfc_index_zero_node
);
2437 gcc_assert (info
->subscript
[dim
]
2438 && info
->subscript
[dim
]->info
->type
== GFC_SS_VECTOR
);
2440 gfc_init_se (&se
, NULL
);
2441 desc
= info
->subscript
[dim
]->info
->data
.array
.descriptor
;
2442 zero
= gfc_rank_cst
[0];
2443 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
2444 gfc_array_index_type
,
2445 gfc_conv_descriptor_ubound_get (desc
, zero
),
2446 gfc_conv_descriptor_lbound_get (desc
, zero
));
2447 tmp
= gfc_evaluate_now (tmp
, &outer_loop
->pre
);
2454 /* Tells whether a scalar argument to an elemental procedure is saved out
2455 of a scalarization loop as a value or as a reference. */
2458 gfc_scalar_elemental_arg_saved_as_reference (gfc_ss_info
* ss_info
)
2460 if (ss_info
->type
!= GFC_SS_REFERENCE
)
2463 /* If the actual argument can be absent (in other words, it can
2464 be a NULL reference), don't try to evaluate it; pass instead
2465 the reference directly. */
2466 if (ss_info
->can_be_null_ref
)
2469 /* If the expression is of polymorphic type, it's actual size is not known,
2470 so we avoid copying it anywhere. */
2471 if (ss_info
->data
.scalar
.dummy_arg
2472 && ss_info
->data
.scalar
.dummy_arg
->ts
.type
== BT_CLASS
2473 && ss_info
->expr
->ts
.type
== BT_CLASS
)
2476 /* If the expression is a data reference of aggregate type,
2477 and the data reference is not used on the left hand side,
2478 avoid a copy by saving a reference to the content. */
2479 if (!ss_info
->data
.scalar
.needs_temporary
2480 && (ss_info
->expr
->ts
.type
== BT_DERIVED
2481 || ss_info
->expr
->ts
.type
== BT_CLASS
)
2482 && gfc_expr_is_variable (ss_info
->expr
))
2485 /* Otherwise the expression is evaluated to a temporary variable before the
2486 scalarization loop. */
2491 /* Add the pre and post chains for all the scalar expressions in a SS chain
2492 to loop. This is called after the loop parameters have been calculated,
2493 but before the actual scalarizing loops. */
2496 gfc_add_loop_ss_code (gfc_loopinfo
* loop
, gfc_ss
* ss
, bool subscript
,
2499 gfc_loopinfo
*nested_loop
, *outer_loop
;
2501 gfc_ss_info
*ss_info
;
2502 gfc_array_info
*info
;
2506 /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
2507 arguments could get evaluated multiple times. */
2508 if (ss
->is_alloc_lhs
)
2511 outer_loop
= outermost_loop (loop
);
2513 /* TODO: This can generate bad code if there are ordering dependencies,
2514 e.g., a callee allocated function and an unknown size constructor. */
2515 gcc_assert (ss
!= NULL
);
2517 for (; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
2521 /* Cross loop arrays are handled from within the most nested loop. */
2522 if (ss
->nested_ss
!= NULL
)
2526 expr
= ss_info
->expr
;
2527 info
= &ss_info
->data
.array
;
2529 switch (ss_info
->type
)
2532 /* Scalar expression. Evaluate this now. This includes elemental
2533 dimension indices, but not array section bounds. */
2534 gfc_init_se (&se
, NULL
);
2535 gfc_conv_expr (&se
, expr
);
2536 gfc_add_block_to_block (&outer_loop
->pre
, &se
.pre
);
2538 if (expr
->ts
.type
!= BT_CHARACTER
2539 && !gfc_is_alloc_class_scalar_function (expr
))
2541 /* Move the evaluation of scalar expressions outside the
2542 scalarization loop, except for WHERE assignments. */
2544 se
.expr
= convert(gfc_array_index_type
, se
.expr
);
2545 if (!ss_info
->where
)
2546 se
.expr
= gfc_evaluate_now (se
.expr
, &outer_loop
->pre
);
2547 gfc_add_block_to_block (&outer_loop
->pre
, &se
.post
);
2550 gfc_add_block_to_block (&outer_loop
->post
, &se
.post
);
2552 ss_info
->data
.scalar
.value
= se
.expr
;
2553 ss_info
->string_length
= se
.string_length
;
2556 case GFC_SS_REFERENCE
:
2557 /* Scalar argument to elemental procedure. */
2558 gfc_init_se (&se
, NULL
);
2559 if (gfc_scalar_elemental_arg_saved_as_reference (ss_info
))
2560 gfc_conv_expr_reference (&se
, expr
);
2563 /* Evaluate the argument outside the loop and pass
2564 a reference to the value. */
2565 gfc_conv_expr (&se
, expr
);
2568 /* Ensure that a pointer to the string is stored. */
2569 if (expr
->ts
.type
== BT_CHARACTER
)
2570 gfc_conv_string_parameter (&se
);
2572 gfc_add_block_to_block (&outer_loop
->pre
, &se
.pre
);
2573 gfc_add_block_to_block (&outer_loop
->post
, &se
.post
);
2574 if (gfc_is_class_scalar_expr (expr
))
2575 /* This is necessary because the dynamic type will always be
2576 large than the declared type. In consequence, assigning
2577 the value to a temporary could segfault.
2578 OOP-TODO: see if this is generally correct or is the value
2579 has to be written to an allocated temporary, whose address
2580 is passed via ss_info. */
2581 ss_info
->data
.scalar
.value
= se
.expr
;
2583 ss_info
->data
.scalar
.value
= gfc_evaluate_now (se
.expr
,
2586 ss_info
->string_length
= se
.string_length
;
2589 case GFC_SS_SECTION
:
2590 /* Add the expressions for scalar and vector subscripts. */
2591 for (n
= 0; n
< GFC_MAX_DIMENSIONS
; n
++)
2592 if (info
->subscript
[n
])
2593 gfc_add_loop_ss_code (loop
, info
->subscript
[n
], true, where
);
2595 set_vector_loop_bounds (ss
);
2599 /* Get the vector's descriptor and store it in SS. */
2600 gfc_init_se (&se
, NULL
);
2601 gfc_conv_expr_descriptor (&se
, expr
);
2602 gfc_add_block_to_block (&outer_loop
->pre
, &se
.pre
);
2603 gfc_add_block_to_block (&outer_loop
->post
, &se
.post
);
2604 info
->descriptor
= se
.expr
;
2607 case GFC_SS_INTRINSIC
:
2608 gfc_add_intrinsic_ss_code (loop
, ss
);
2611 case GFC_SS_FUNCTION
:
2612 /* Array function return value. We call the function and save its
2613 result in a temporary for use inside the loop. */
2614 gfc_init_se (&se
, NULL
);
2617 gfc_conv_expr (&se
, expr
);
2618 gfc_add_block_to_block (&outer_loop
->pre
, &se
.pre
);
2619 gfc_add_block_to_block (&outer_loop
->post
, &se
.post
);
2620 ss_info
->string_length
= se
.string_length
;
2623 case GFC_SS_CONSTRUCTOR
:
2624 if (expr
->ts
.type
== BT_CHARACTER
2625 && ss_info
->string_length
== NULL
2627 && expr
->ts
.u
.cl
->length
2628 && expr
->ts
.u
.cl
->length
->expr_type
== EXPR_CONSTANT
)
2630 gfc_init_se (&se
, NULL
);
2631 gfc_conv_expr_type (&se
, expr
->ts
.u
.cl
->length
,
2632 gfc_charlen_type_node
);
2633 ss_info
->string_length
= se
.expr
;
2634 gfc_add_block_to_block (&outer_loop
->pre
, &se
.pre
);
2635 gfc_add_block_to_block (&outer_loop
->post
, &se
.post
);
2637 trans_array_constructor (ss
, where
);
2641 case GFC_SS_COMPONENT
:
2642 /* Do nothing. These are handled elsewhere. */
2651 for (nested_loop
= loop
->nested
; nested_loop
;
2652 nested_loop
= nested_loop
->next
)
2653 gfc_add_loop_ss_code (nested_loop
, nested_loop
->ss
, subscript
, where
);
2657 /* Translate expressions for the descriptor and data pointer of a SS. */
2661 gfc_conv_ss_descriptor (stmtblock_t
* block
, gfc_ss
* ss
, int base
)
2664 gfc_ss_info
*ss_info
;
2665 gfc_array_info
*info
;
2669 info
= &ss_info
->data
.array
;
2671 /* Get the descriptor for the array to be scalarized. */
2672 gcc_assert (ss_info
->expr
->expr_type
== EXPR_VARIABLE
);
2673 gfc_init_se (&se
, NULL
);
2674 se
.descriptor_only
= 1;
2675 gfc_conv_expr_lhs (&se
, ss_info
->expr
);
2676 gfc_add_block_to_block (block
, &se
.pre
);
2677 info
->descriptor
= se
.expr
;
2678 ss_info
->string_length
= se
.string_length
;
2682 /* Also the data pointer. */
2683 tmp
= gfc_conv_array_data (se
.expr
);
2684 /* If this is a variable or address of a variable we use it directly.
2685 Otherwise we must evaluate it now to avoid breaking dependency
2686 analysis by pulling the expressions for elemental array indices
2689 || (TREE_CODE (tmp
) == ADDR_EXPR
2690 && DECL_P (TREE_OPERAND (tmp
, 0)))))
2691 tmp
= gfc_evaluate_now (tmp
, block
);
2694 tmp
= gfc_conv_array_offset (se
.expr
);
2695 info
->offset
= gfc_evaluate_now (tmp
, block
);
2697 /* Make absolutely sure that the saved_offset is indeed saved
2698 so that the variable is still accessible after the loops
2700 info
->saved_offset
= info
->offset
;
2705 /* Initialize a gfc_loopinfo structure. */
2708 gfc_init_loopinfo (gfc_loopinfo
* loop
)
2712 memset (loop
, 0, sizeof (gfc_loopinfo
));
2713 gfc_init_block (&loop
->pre
);
2714 gfc_init_block (&loop
->post
);
2716 /* Initially scalarize in order and default to no loop reversal. */
2717 for (n
= 0; n
< GFC_MAX_DIMENSIONS
; n
++)
2720 loop
->reverse
[n
] = GFC_INHIBIT_REVERSE
;
2723 loop
->ss
= gfc_ss_terminator
;
2727 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2731 gfc_copy_loopinfo_to_se (gfc_se
* se
, gfc_loopinfo
* loop
)
2737 /* Return an expression for the data pointer of an array. */
2740 gfc_conv_array_data (tree descriptor
)
2744 type
= TREE_TYPE (descriptor
);
2745 if (GFC_ARRAY_TYPE_P (type
))
2747 if (TREE_CODE (type
) == POINTER_TYPE
)
2751 /* Descriptorless arrays. */
2752 return gfc_build_addr_expr (NULL_TREE
, descriptor
);
2756 return gfc_conv_descriptor_data_get (descriptor
);
2760 /* Return an expression for the base offset of an array. */
2763 gfc_conv_array_offset (tree descriptor
)
2767 type
= TREE_TYPE (descriptor
);
2768 if (GFC_ARRAY_TYPE_P (type
))
2769 return GFC_TYPE_ARRAY_OFFSET (type
);
2771 return gfc_conv_descriptor_offset_get (descriptor
);
2775 /* Get an expression for the array stride. */
2778 gfc_conv_array_stride (tree descriptor
, int dim
)
2783 type
= TREE_TYPE (descriptor
);
2785 /* For descriptorless arrays use the array size. */
2786 tmp
= GFC_TYPE_ARRAY_STRIDE (type
, dim
);
2787 if (tmp
!= NULL_TREE
)
2790 tmp
= gfc_conv_descriptor_stride_get (descriptor
, gfc_rank_cst
[dim
]);
2795 /* Like gfc_conv_array_stride, but for the lower bound. */
2798 gfc_conv_array_lbound (tree descriptor
, int dim
)
2803 type
= TREE_TYPE (descriptor
);
2805 tmp
= GFC_TYPE_ARRAY_LBOUND (type
, dim
);
2806 if (tmp
!= NULL_TREE
)
2809 tmp
= gfc_conv_descriptor_lbound_get (descriptor
, gfc_rank_cst
[dim
]);
2814 /* Like gfc_conv_array_stride, but for the upper bound. */
2817 gfc_conv_array_ubound (tree descriptor
, int dim
)
2822 type
= TREE_TYPE (descriptor
);
2824 tmp
= GFC_TYPE_ARRAY_UBOUND (type
, dim
);
2825 if (tmp
!= NULL_TREE
)
2828 /* This should only ever happen when passing an assumed shape array
2829 as an actual parameter. The value will never be used. */
2830 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor
)))
2831 return gfc_index_zero_node
;
2833 tmp
= gfc_conv_descriptor_ubound_get (descriptor
, gfc_rank_cst
[dim
]);
2838 /* Generate code to perform an array index bound check. */
2841 trans_array_bound_check (gfc_se
* se
, gfc_ss
*ss
, tree index
, int n
,
2842 locus
* where
, bool check_upper
)
2845 tree tmp_lo
, tmp_up
;
2848 const char * name
= NULL
;
2850 if (!(gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
))
2853 descriptor
= ss
->info
->data
.array
.descriptor
;
2855 index
= gfc_evaluate_now (index
, &se
->pre
);
2857 /* We find a name for the error message. */
2858 name
= ss
->info
->expr
->symtree
->n
.sym
->name
;
2859 gcc_assert (name
!= NULL
);
2861 if (TREE_CODE (descriptor
) == VAR_DECL
)
2862 name
= IDENTIFIER_POINTER (DECL_NAME (descriptor
));
2864 /* If upper bound is present, include both bounds in the error message. */
2867 tmp_lo
= gfc_conv_array_lbound (descriptor
, n
);
2868 tmp_up
= gfc_conv_array_ubound (descriptor
, n
);
2871 msg
= xasprintf ("Index '%%ld' of dimension %d of array '%s' "
2872 "outside of expected range (%%ld:%%ld)", n
+1, name
);
2874 msg
= xasprintf ("Index '%%ld' of dimension %d "
2875 "outside of expected range (%%ld:%%ld)", n
+1);
2877 fault
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
2879 gfc_trans_runtime_check (true, false, fault
, &se
->pre
, where
, msg
,
2880 fold_convert (long_integer_type_node
, index
),
2881 fold_convert (long_integer_type_node
, tmp_lo
),
2882 fold_convert (long_integer_type_node
, tmp_up
));
2883 fault
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
2885 gfc_trans_runtime_check (true, false, fault
, &se
->pre
, where
, msg
,
2886 fold_convert (long_integer_type_node
, index
),
2887 fold_convert (long_integer_type_node
, tmp_lo
),
2888 fold_convert (long_integer_type_node
, tmp_up
));
2893 tmp_lo
= gfc_conv_array_lbound (descriptor
, n
);
2896 msg
= xasprintf ("Index '%%ld' of dimension %d of array '%s' "
2897 "below lower bound of %%ld", n
+1, name
);
2899 msg
= xasprintf ("Index '%%ld' of dimension %d "
2900 "below lower bound of %%ld", n
+1);
2902 fault
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
2904 gfc_trans_runtime_check (true, false, fault
, &se
->pre
, where
, msg
,
2905 fold_convert (long_integer_type_node
, index
),
2906 fold_convert (long_integer_type_node
, tmp_lo
));
2914 /* Return the offset for an index. Performs bound checking for elemental
2915 dimensions. Single element references are processed separately.
2916 DIM is the array dimension, I is the loop dimension. */
2919 conv_array_index_offset (gfc_se
* se
, gfc_ss
* ss
, int dim
, int i
,
2920 gfc_array_ref
* ar
, tree stride
)
2922 gfc_array_info
*info
;
2927 info
= &ss
->info
->data
.array
;
2929 /* Get the index into the array for this dimension. */
2932 gcc_assert (ar
->type
!= AR_ELEMENT
);
2933 switch (ar
->dimen_type
[dim
])
2935 case DIMEN_THIS_IMAGE
:
2939 /* Elemental dimension. */
2940 gcc_assert (info
->subscript
[dim
]
2941 && info
->subscript
[dim
]->info
->type
== GFC_SS_SCALAR
);
2942 /* We've already translated this value outside the loop. */
2943 index
= info
->subscript
[dim
]->info
->data
.scalar
.value
;
2945 index
= trans_array_bound_check (se
, ss
, index
, dim
, &ar
->where
,
2946 ar
->as
->type
!= AS_ASSUMED_SIZE
2947 || dim
< ar
->dimen
- 1);
2951 gcc_assert (info
&& se
->loop
);
2952 gcc_assert (info
->subscript
[dim
]
2953 && info
->subscript
[dim
]->info
->type
== GFC_SS_VECTOR
);
2954 desc
= info
->subscript
[dim
]->info
->data
.array
.descriptor
;
2956 /* Get a zero-based index into the vector. */
2957 index
= fold_build2_loc (input_location
, MINUS_EXPR
,
2958 gfc_array_index_type
,
2959 se
->loop
->loopvar
[i
], se
->loop
->from
[i
]);
2961 /* Multiply the index by the stride. */
2962 index
= fold_build2_loc (input_location
, MULT_EXPR
,
2963 gfc_array_index_type
,
2964 index
, gfc_conv_array_stride (desc
, 0));
2966 /* Read the vector to get an index into info->descriptor. */
2967 data
= build_fold_indirect_ref_loc (input_location
,
2968 gfc_conv_array_data (desc
));
2969 index
= gfc_build_array_ref (data
, index
, NULL
);
2970 index
= gfc_evaluate_now (index
, &se
->pre
);
2971 index
= fold_convert (gfc_array_index_type
, index
);
2973 /* Do any bounds checking on the final info->descriptor index. */
2974 index
= trans_array_bound_check (se
, ss
, index
, dim
, &ar
->where
,
2975 ar
->as
->type
!= AS_ASSUMED_SIZE
2976 || dim
< ar
->dimen
- 1);
2980 /* Scalarized dimension. */
2981 gcc_assert (info
&& se
->loop
);
2983 /* Multiply the loop variable by the stride and delta. */
2984 index
= se
->loop
->loopvar
[i
];
2985 if (!integer_onep (info
->stride
[dim
]))
2986 index
= fold_build2_loc (input_location
, MULT_EXPR
,
2987 gfc_array_index_type
, index
,
2989 if (!integer_zerop (info
->delta
[dim
]))
2990 index
= fold_build2_loc (input_location
, PLUS_EXPR
,
2991 gfc_array_index_type
, index
,
3001 /* Temporary array or derived type component. */
3002 gcc_assert (se
->loop
);
3003 index
= se
->loop
->loopvar
[se
->loop
->order
[i
]];
3005 /* Pointer functions can have stride[0] different from unity.
3006 Use the stride returned by the function call and stored in
3007 the descriptor for the temporary. */
3008 if (se
->ss
&& se
->ss
->info
->type
== GFC_SS_FUNCTION
3009 && se
->ss
->info
->expr
3010 && se
->ss
->info
->expr
->symtree
3011 && se
->ss
->info
->expr
->symtree
->n
.sym
->result
3012 && se
->ss
->info
->expr
->symtree
->n
.sym
->result
->attr
.pointer
)
3013 stride
= gfc_conv_descriptor_stride_get (info
->descriptor
,
3016 if (info
->delta
[dim
] && !integer_zerop (info
->delta
[dim
]))
3017 index
= fold_build2_loc (input_location
, PLUS_EXPR
,
3018 gfc_array_index_type
, index
, info
->delta
[dim
]);
3021 /* Multiply by the stride. */
3022 if (!integer_onep (stride
))
3023 index
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
3030 /* Build a scalarized array reference using the vptr 'size'. */
3033 build_class_array_ref (gfc_se
*se
, tree base
, tree index
)
3040 gfc_expr
*expr
= se
->ss
->info
->expr
;
3046 || (expr
->ts
.type
!= BT_CLASS
3047 && !gfc_is_alloc_class_array_function (expr
)))
3050 if (expr
->symtree
&& expr
->symtree
->n
.sym
->ts
.type
== BT_CLASS
)
3051 ts
= &expr
->symtree
->n
.sym
->ts
;
3056 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
3058 if (ref
->type
== REF_COMPONENT
3059 && ref
->u
.c
.component
->ts
.type
== BT_CLASS
3060 && ref
->next
&& ref
->next
->type
== REF_COMPONENT
3061 && strcmp (ref
->next
->u
.c
.component
->name
, "_data") == 0
3063 && ref
->next
->next
->type
== REF_ARRAY
3064 && ref
->next
->next
->u
.ar
.type
!= AR_ELEMENT
)
3066 ts
= &ref
->u
.c
.component
->ts
;
3075 if (class_ref
== NULL
&& expr
->symtree
->n
.sym
->attr
.function
3076 && expr
->symtree
->n
.sym
== expr
->symtree
->n
.sym
->result
)
3078 gcc_assert (expr
->symtree
->n
.sym
->backend_decl
== current_function_decl
);
3079 decl
= gfc_get_fake_result_decl (expr
->symtree
->n
.sym
, 0);
3081 else if (gfc_is_alloc_class_array_function (expr
))
3085 for (tmp
= base
; tmp
; tmp
= TREE_OPERAND (tmp
, 0))
3088 type
= TREE_TYPE (tmp
);
3091 if (GFC_CLASS_TYPE_P (type
))
3093 if (type
!= TYPE_CANONICAL (type
))
3094 type
= TYPE_CANONICAL (type
);
3098 if (TREE_CODE (tmp
) == VAR_DECL
)
3102 if (decl
== NULL_TREE
)
3105 else if (class_ref
== NULL
)
3107 decl
= expr
->symtree
->n
.sym
->backend_decl
;
3108 /* For class arrays the tree containing the class is stored in
3109 GFC_DECL_SAVED_DESCRIPTOR of the sym's backend_decl.
3110 For all others it's sym's backend_decl directly. */
3111 if (DECL_LANG_SPECIFIC (decl
) && GFC_DECL_SAVED_DESCRIPTOR (decl
))
3112 decl
= GFC_DECL_SAVED_DESCRIPTOR (decl
);
3116 /* Remove everything after the last class reference, convert the
3117 expression and then recover its tailend once more. */
3119 ref
= class_ref
->next
;
3120 class_ref
->next
= NULL
;
3121 gfc_init_se (&tmpse
, NULL
);
3122 gfc_conv_expr (&tmpse
, expr
);
3124 class_ref
->next
= ref
;
3127 if (POINTER_TYPE_P (TREE_TYPE (decl
)))
3128 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
3130 if (!GFC_CLASS_TYPE_P (TREE_TYPE (decl
)))
3133 size
= gfc_class_vtab_size_get (decl
);
3135 /* Build the address of the element. */
3136 type
= TREE_TYPE (TREE_TYPE (base
));
3137 size
= fold_convert (TREE_TYPE (index
), size
);
3138 offset
= fold_build2_loc (input_location
, MULT_EXPR
,
3139 gfc_array_index_type
,
3141 tmp
= gfc_build_addr_expr (pvoid_type_node
, base
);
3142 tmp
= fold_build_pointer_plus_loc (input_location
, tmp
, offset
);
3143 tmp
= fold_convert (build_pointer_type (type
), tmp
);
3145 /* Return the element in the se expression. */
3146 se
->expr
= build_fold_indirect_ref_loc (input_location
, tmp
);
3151 /* Build a scalarized reference to an array. */
3154 gfc_conv_scalarized_array_ref (gfc_se
* se
, gfc_array_ref
* ar
)
3156 gfc_array_info
*info
;
3157 tree decl
= NULL_TREE
;
3165 expr
= ss
->info
->expr
;
3166 info
= &ss
->info
->data
.array
;
3168 n
= se
->loop
->order
[0];
3172 index
= conv_array_index_offset (se
, ss
, ss
->dim
[n
], n
, ar
, info
->stride0
);
3173 /* Add the offset for this dimension to the stored offset for all other
3175 if (info
->offset
&& !integer_zerop (info
->offset
))
3176 index
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3177 index
, info
->offset
);
3179 if (expr
&& (is_subref_array (expr
)
3180 || (expr
->ts
.deferred
&& (expr
->expr_type
== EXPR_VARIABLE
3181 || expr
->expr_type
== EXPR_FUNCTION
))))
3182 decl
= expr
->symtree
->n
.sym
->backend_decl
;
3184 tmp
= build_fold_indirect_ref_loc (input_location
, info
->data
);
3186 /* Use the vptr 'size' field to access a class the element of a class
3188 if (build_class_array_ref (se
, tmp
, index
))
3191 se
->expr
= gfc_build_array_ref (tmp
, index
, decl
);
3195 /* Translate access of temporary array. */
3198 gfc_conv_tmp_array_ref (gfc_se
* se
)
3200 se
->string_length
= se
->ss
->info
->string_length
;
3201 gfc_conv_scalarized_array_ref (se
, NULL
);
3202 gfc_advance_se_ss_chain (se
);
3205 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
3208 add_to_offset (tree
*cst_offset
, tree
*offset
, tree t
)
3210 if (TREE_CODE (t
) == INTEGER_CST
)
3211 *cst_offset
= int_const_binop (PLUS_EXPR
, *cst_offset
, t
);
3214 if (!integer_zerop (*offset
))
3215 *offset
= fold_build2_loc (input_location
, PLUS_EXPR
,
3216 gfc_array_index_type
, *offset
, t
);
3224 build_array_ref (tree desc
, tree offset
, tree decl
, tree vptr
)
3229 bool classarray
= false;
3231 /* For class arrays the class declaration is stored in the saved
3233 if (INDIRECT_REF_P (desc
)
3234 && DECL_LANG_SPECIFIC (TREE_OPERAND (desc
, 0))
3235 && GFC_DECL_SAVED_DESCRIPTOR (TREE_OPERAND (desc
, 0)))
3236 cdecl = gfc_class_data_get (GFC_DECL_SAVED_DESCRIPTOR (
3237 TREE_OPERAND (desc
, 0)));
3241 /* Class container types do not always have the GFC_CLASS_TYPE_P
3242 but the canonical type does. */
3243 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (cdecl))
3244 && TREE_CODE (cdecl) == COMPONENT_REF
)
3246 type
= TREE_TYPE (TREE_OPERAND (cdecl, 0));
3247 if (TYPE_CANONICAL (type
)
3248 && GFC_CLASS_TYPE_P (TYPE_CANONICAL (type
)))
3250 type
= TREE_TYPE (desc
);
3257 /* Class array references need special treatment because the assigned
3258 type size needs to be used to point to the element. */
3261 type
= gfc_get_element_type (type
);
3262 tmp
= TREE_OPERAND (cdecl, 0);
3263 tmp
= gfc_get_class_array_ref (offset
, tmp
, NULL_TREE
);
3264 tmp
= fold_convert (build_pointer_type (type
), tmp
);
3265 tmp
= build_fold_indirect_ref_loc (input_location
, tmp
);
3269 tmp
= gfc_conv_array_data (desc
);
3270 tmp
= build_fold_indirect_ref_loc (input_location
, tmp
);
3271 tmp
= gfc_build_array_ref (tmp
, offset
, decl
, vptr
);
3276 /* Build an array reference. se->expr already holds the array descriptor.
3277 This should be either a variable, indirect variable reference or component
3278 reference. For arrays which do not have a descriptor, se->expr will be
3280 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
3283 gfc_conv_array_ref (gfc_se
* se
, gfc_array_ref
* ar
, gfc_expr
*expr
,
3287 tree offset
, cst_offset
;
3292 gfc_symbol
* sym
= expr
->symtree
->n
.sym
;
3293 char *var_name
= NULL
;
3297 gcc_assert (ar
->codimen
);
3299 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se
->expr
)))
3300 se
->expr
= build_fold_indirect_ref (gfc_conv_array_data (se
->expr
));
3303 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se
->expr
))
3304 && TREE_CODE (TREE_TYPE (se
->expr
)) == POINTER_TYPE
)
3305 se
->expr
= build_fold_indirect_ref_loc (input_location
, se
->expr
);
3307 /* Use the actual tree type and not the wrapped coarray. */
3308 if (!se
->want_pointer
)
3309 se
->expr
= fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se
->expr
)),
3316 /* Handle scalarized references separately. */
3317 if (ar
->type
!= AR_ELEMENT
)
3319 gfc_conv_scalarized_array_ref (se
, ar
);
3320 gfc_advance_se_ss_chain (se
);
3324 if (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
3329 len
= strlen (sym
->name
) + 1;
3330 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
3332 if (ref
->type
== REF_ARRAY
&& &ref
->u
.ar
== ar
)
3334 if (ref
->type
== REF_COMPONENT
)
3335 len
+= 1 + strlen (ref
->u
.c
.component
->name
);
3338 var_name
= XALLOCAVEC (char, len
);
3339 strcpy (var_name
, sym
->name
);
3341 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
3343 if (ref
->type
== REF_ARRAY
&& &ref
->u
.ar
== ar
)
3345 if (ref
->type
== REF_COMPONENT
)
3347 strcat (var_name
, "%%");
3348 strcat (var_name
, ref
->u
.c
.component
->name
);
3353 cst_offset
= offset
= gfc_index_zero_node
;
3354 add_to_offset (&cst_offset
, &offset
, gfc_conv_array_offset (se
->expr
));
3356 /* Calculate the offsets from all the dimensions. Make sure to associate
3357 the final offset so that we form a chain of loop invariant summands. */
3358 for (n
= ar
->dimen
- 1; n
>= 0; n
--)
3360 /* Calculate the index for this dimension. */
3361 gfc_init_se (&indexse
, se
);
3362 gfc_conv_expr_type (&indexse
, ar
->start
[n
], gfc_array_index_type
);
3363 gfc_add_block_to_block (&se
->pre
, &indexse
.pre
);
3365 if (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
3367 /* Check array bounds. */
3371 /* Evaluate the indexse.expr only once. */
3372 indexse
.expr
= save_expr (indexse
.expr
);
3375 tmp
= gfc_conv_array_lbound (se
->expr
, n
);
3376 if (sym
->attr
.temporary
)
3378 gfc_init_se (&tmpse
, se
);
3379 gfc_conv_expr_type (&tmpse
, ar
->as
->lower
[n
],
3380 gfc_array_index_type
);
3381 gfc_add_block_to_block (&se
->pre
, &tmpse
.pre
);
3385 cond
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
3387 msg
= xasprintf ("Index '%%ld' of dimension %d of array '%s' "
3388 "below lower bound of %%ld", n
+1, var_name
);
3389 gfc_trans_runtime_check (true, false, cond
, &se
->pre
, where
, msg
,
3390 fold_convert (long_integer_type_node
,
3392 fold_convert (long_integer_type_node
, tmp
));
3395 /* Upper bound, but not for the last dimension of assumed-size
3397 if (n
< ar
->dimen
- 1 || ar
->as
->type
!= AS_ASSUMED_SIZE
)
3399 tmp
= gfc_conv_array_ubound (se
->expr
, n
);
3400 if (sym
->attr
.temporary
)
3402 gfc_init_se (&tmpse
, se
);
3403 gfc_conv_expr_type (&tmpse
, ar
->as
->upper
[n
],
3404 gfc_array_index_type
);
3405 gfc_add_block_to_block (&se
->pre
, &tmpse
.pre
);
3409 cond
= fold_build2_loc (input_location
, GT_EXPR
,
3410 boolean_type_node
, indexse
.expr
, tmp
);
3411 msg
= xasprintf ("Index '%%ld' of dimension %d of array '%s' "
3412 "above upper bound of %%ld", n
+1, var_name
);
3413 gfc_trans_runtime_check (true, false, cond
, &se
->pre
, where
, msg
,
3414 fold_convert (long_integer_type_node
,
3416 fold_convert (long_integer_type_node
, tmp
));
3421 /* Multiply the index by the stride. */
3422 stride
= gfc_conv_array_stride (se
->expr
, n
);
3423 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
3424 indexse
.expr
, stride
);
3426 /* And add it to the total. */
3427 add_to_offset (&cst_offset
, &offset
, tmp
);
3430 if (!integer_zerop (cst_offset
))
3431 offset
= fold_build2_loc (input_location
, PLUS_EXPR
,
3432 gfc_array_index_type
, offset
, cst_offset
);
3434 se
->expr
= build_array_ref (se
->expr
, offset
, sym
->ts
.type
== BT_CLASS
?
3435 NULL_TREE
: sym
->backend_decl
, se
->class_vptr
);
3439 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
3440 LOOP_DIM dimension (if any) to array's offset. */
3443 add_array_offset (stmtblock_t
*pblock
, gfc_loopinfo
*loop
, gfc_ss
*ss
,
3444 gfc_array_ref
*ar
, int array_dim
, int loop_dim
)
3447 gfc_array_info
*info
;
3450 info
= &ss
->info
->data
.array
;
3452 gfc_init_se (&se
, NULL
);
3454 se
.expr
= info
->descriptor
;
3455 stride
= gfc_conv_array_stride (info
->descriptor
, array_dim
);
3456 index
= conv_array_index_offset (&se
, ss
, array_dim
, loop_dim
, ar
, stride
);
3457 gfc_add_block_to_block (pblock
, &se
.pre
);
3459 info
->offset
= fold_build2_loc (input_location
, PLUS_EXPR
,
3460 gfc_array_index_type
,
3461 info
->offset
, index
);
3462 info
->offset
= gfc_evaluate_now (info
->offset
, pblock
);
3466 /* Generate the code to be executed immediately before entering a
3467 scalarization loop. */
3470 gfc_trans_preloop_setup (gfc_loopinfo
* loop
, int dim
, int flag
,
3471 stmtblock_t
* pblock
)
3474 gfc_ss_info
*ss_info
;
3475 gfc_array_info
*info
;
3476 gfc_ss_type ss_type
;
3478 gfc_loopinfo
*ploop
;
3482 /* This code will be executed before entering the scalarization loop
3483 for this dimension. */
3484 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
3488 if ((ss_info
->useflags
& flag
) == 0)
3491 ss_type
= ss_info
->type
;
3492 if (ss_type
!= GFC_SS_SECTION
3493 && ss_type
!= GFC_SS_FUNCTION
3494 && ss_type
!= GFC_SS_CONSTRUCTOR
3495 && ss_type
!= GFC_SS_COMPONENT
)
3498 info
= &ss_info
->data
.array
;
3500 gcc_assert (dim
< ss
->dimen
);
3501 gcc_assert (ss
->dimen
== loop
->dimen
);
3504 ar
= &info
->ref
->u
.ar
;
3508 if (dim
== loop
->dimen
- 1 && loop
->parent
!= NULL
)
3510 /* If we are in the outermost dimension of this loop, the previous
3511 dimension shall be in the parent loop. */
3512 gcc_assert (ss
->parent
!= NULL
);
3515 ploop
= loop
->parent
;
3517 /* ss and ss->parent are about the same array. */
3518 gcc_assert (ss_info
== pss
->info
);
3526 if (dim
== loop
->dimen
- 1)
3531 /* For the time being, there is no loop reordering. */
3532 gcc_assert (i
== ploop
->order
[i
]);
3533 i
= ploop
->order
[i
];
3535 if (dim
== loop
->dimen
- 1 && loop
->parent
== NULL
)
3537 stride
= gfc_conv_array_stride (info
->descriptor
,
3538 innermost_ss (ss
)->dim
[i
]);
3540 /* Calculate the stride of the innermost loop. Hopefully this will
3541 allow the backend optimizers to do their stuff more effectively.
3543 info
->stride0
= gfc_evaluate_now (stride
, pblock
);
3545 /* For the outermost loop calculate the offset due to any
3546 elemental dimensions. It will have been initialized with the
3547 base offset of the array. */
3550 for (i
= 0; i
< ar
->dimen
; i
++)
3552 if (ar
->dimen_type
[i
] != DIMEN_ELEMENT
)
3555 add_array_offset (pblock
, loop
, ss
, ar
, i
, /* unused */ -1);
3560 /* Add the offset for the previous loop dimension. */
3561 add_array_offset (pblock
, ploop
, ss
, ar
, pss
->dim
[i
], i
);
3563 /* Remember this offset for the second loop. */
3564 if (dim
== loop
->temp_dim
- 1 && loop
->parent
== NULL
)
3565 info
->saved_offset
= info
->offset
;
3570 /* Start a scalarized expression. Creates a scope and declares loop
3574 gfc_start_scalarized_body (gfc_loopinfo
* loop
, stmtblock_t
* pbody
)
3580 gcc_assert (!loop
->array_parameter
);
3582 for (dim
= loop
->dimen
- 1; dim
>= 0; dim
--)
3584 n
= loop
->order
[dim
];
3586 gfc_start_block (&loop
->code
[n
]);
3588 /* Create the loop variable. */
3589 loop
->loopvar
[n
] = gfc_create_var (gfc_array_index_type
, "S");
3591 if (dim
< loop
->temp_dim
)
3595 /* Calculate values that will be constant within this loop. */
3596 gfc_trans_preloop_setup (loop
, dim
, flags
, &loop
->code
[n
]);
3598 gfc_start_block (pbody
);
3602 /* Generates the actual loop code for a scalarization loop. */
3605 gfc_trans_scalarized_loop_end (gfc_loopinfo
* loop
, int n
,
3606 stmtblock_t
* pbody
)
3617 if ((ompws_flags
& (OMPWS_WORKSHARE_FLAG
| OMPWS_SCALARIZER_WS
3618 | OMPWS_SCALARIZER_BODY
))
3619 == (OMPWS_WORKSHARE_FLAG
| OMPWS_SCALARIZER_WS
)
3620 && n
== loop
->dimen
- 1)
3622 /* We create an OMP_FOR construct for the outermost scalarized loop. */
3623 init
= make_tree_vec (1);
3624 cond
= make_tree_vec (1);
3625 incr
= make_tree_vec (1);
3627 /* Cycle statement is implemented with a goto. Exit statement must not
3628 be present for this loop. */
3629 exit_label
= gfc_build_label_decl (NULL_TREE
);
3630 TREE_USED (exit_label
) = 1;
3632 /* Label for cycle statements (if needed). */
3633 tmp
= build1_v (LABEL_EXPR
, exit_label
);
3634 gfc_add_expr_to_block (pbody
, tmp
);
3636 stmt
= make_node (OMP_FOR
);
3638 TREE_TYPE (stmt
) = void_type_node
;
3639 OMP_FOR_BODY (stmt
) = loopbody
= gfc_finish_block (pbody
);
3641 OMP_FOR_CLAUSES (stmt
) = build_omp_clause (input_location
,
3642 OMP_CLAUSE_SCHEDULE
);
3643 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt
))
3644 = OMP_CLAUSE_SCHEDULE_STATIC
;
3645 if (ompws_flags
& OMPWS_NOWAIT
)
3646 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt
))
3647 = build_omp_clause (input_location
, OMP_CLAUSE_NOWAIT
);
3649 /* Initialize the loopvar. */
3650 TREE_VEC_ELT (init
, 0) = build2_v (MODIFY_EXPR
, loop
->loopvar
[n
],
3652 OMP_FOR_INIT (stmt
) = init
;
3653 /* The exit condition. */
3654 TREE_VEC_ELT (cond
, 0) = build2_loc (input_location
, LE_EXPR
,
3656 loop
->loopvar
[n
], loop
->to
[n
]);
3657 SET_EXPR_LOCATION (TREE_VEC_ELT (cond
, 0), input_location
);
3658 OMP_FOR_COND (stmt
) = cond
;
3659 /* Increment the loopvar. */
3660 tmp
= build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3661 loop
->loopvar
[n
], gfc_index_one_node
);
3662 TREE_VEC_ELT (incr
, 0) = fold_build2_loc (input_location
, MODIFY_EXPR
,
3663 void_type_node
, loop
->loopvar
[n
], tmp
);
3664 OMP_FOR_INCR (stmt
) = incr
;
3666 ompws_flags
&= ~OMPWS_CURR_SINGLEUNIT
;
3667 gfc_add_expr_to_block (&loop
->code
[n
], stmt
);
3671 bool reverse_loop
= (loop
->reverse
[n
] == GFC_REVERSE_SET
)
3672 && (loop
->temp_ss
== NULL
);
3674 loopbody
= gfc_finish_block (pbody
);
3677 std::swap (loop
->from
[n
], loop
->to
[n
]);
3679 /* Initialize the loopvar. */
3680 if (loop
->loopvar
[n
] != loop
->from
[n
])
3681 gfc_add_modify (&loop
->code
[n
], loop
->loopvar
[n
], loop
->from
[n
]);
3683 exit_label
= gfc_build_label_decl (NULL_TREE
);
3685 /* Generate the loop body. */
3686 gfc_init_block (&block
);
3688 /* The exit condition. */
3689 cond
= fold_build2_loc (input_location
, reverse_loop
? LT_EXPR
: GT_EXPR
,
3690 boolean_type_node
, loop
->loopvar
[n
], loop
->to
[n
]);
3691 tmp
= build1_v (GOTO_EXPR
, exit_label
);
3692 TREE_USED (exit_label
) = 1;
3693 tmp
= build3_v (COND_EXPR
, cond
, tmp
, build_empty_stmt (input_location
));
3694 gfc_add_expr_to_block (&block
, tmp
);
3696 /* The main body. */
3697 gfc_add_expr_to_block (&block
, loopbody
);
3699 /* Increment the loopvar. */
3700 tmp
= fold_build2_loc (input_location
,
3701 reverse_loop
? MINUS_EXPR
: PLUS_EXPR
,
3702 gfc_array_index_type
, loop
->loopvar
[n
],
3703 gfc_index_one_node
);
3705 gfc_add_modify (&block
, loop
->loopvar
[n
], tmp
);
3707 /* Build the loop. */
3708 tmp
= gfc_finish_block (&block
);
3709 tmp
= build1_v (LOOP_EXPR
, tmp
);
3710 gfc_add_expr_to_block (&loop
->code
[n
], tmp
);
3712 /* Add the exit label. */
3713 tmp
= build1_v (LABEL_EXPR
, exit_label
);
3714 gfc_add_expr_to_block (&loop
->code
[n
], tmp
);
3720 /* Finishes and generates the loops for a scalarized expression. */
3723 gfc_trans_scalarizing_loops (gfc_loopinfo
* loop
, stmtblock_t
* body
)
3728 stmtblock_t
*pblock
;
3732 /* Generate the loops. */
3733 for (dim
= 0; dim
< loop
->dimen
; dim
++)
3735 n
= loop
->order
[dim
];
3736 gfc_trans_scalarized_loop_end (loop
, n
, pblock
);
3737 loop
->loopvar
[n
] = NULL_TREE
;
3738 pblock
= &loop
->code
[n
];
3741 tmp
= gfc_finish_block (pblock
);
3742 gfc_add_expr_to_block (&loop
->pre
, tmp
);
3744 /* Clear all the used flags. */
3745 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
3746 if (ss
->parent
== NULL
)
3747 ss
->info
->useflags
= 0;
3751 /* Finish the main body of a scalarized expression, and start the secondary
3755 gfc_trans_scalarized_loop_boundary (gfc_loopinfo
* loop
, stmtblock_t
* body
)
3759 stmtblock_t
*pblock
;
3763 /* We finish as many loops as are used by the temporary. */
3764 for (dim
= 0; dim
< loop
->temp_dim
- 1; dim
++)
3766 n
= loop
->order
[dim
];
3767 gfc_trans_scalarized_loop_end (loop
, n
, pblock
);
3768 loop
->loopvar
[n
] = NULL_TREE
;
3769 pblock
= &loop
->code
[n
];
3772 /* We don't want to finish the outermost loop entirely. */
3773 n
= loop
->order
[loop
->temp_dim
- 1];
3774 gfc_trans_scalarized_loop_end (loop
, n
, pblock
);
3776 /* Restore the initial offsets. */
3777 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
3779 gfc_ss_type ss_type
;
3780 gfc_ss_info
*ss_info
;
3784 if ((ss_info
->useflags
& 2) == 0)
3787 ss_type
= ss_info
->type
;
3788 if (ss_type
!= GFC_SS_SECTION
3789 && ss_type
!= GFC_SS_FUNCTION
3790 && ss_type
!= GFC_SS_CONSTRUCTOR
3791 && ss_type
!= GFC_SS_COMPONENT
)
3794 ss_info
->data
.array
.offset
= ss_info
->data
.array
.saved_offset
;
3797 /* Restart all the inner loops we just finished. */
3798 for (dim
= loop
->temp_dim
- 2; dim
>= 0; dim
--)
3800 n
= loop
->order
[dim
];
3802 gfc_start_block (&loop
->code
[n
]);
3804 loop
->loopvar
[n
] = gfc_create_var (gfc_array_index_type
, "Q");
3806 gfc_trans_preloop_setup (loop
, dim
, 2, &loop
->code
[n
]);
3809 /* Start a block for the secondary copying code. */
3810 gfc_start_block (body
);
3814 /* Precalculate (either lower or upper) bound of an array section.
3815 BLOCK: Block in which the (pre)calculation code will go.
3816 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3817 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3818 DESC: Array descriptor from which the bound will be picked if unspecified
3819 (either lower or upper bound according to LBOUND). */
3822 evaluate_bound (stmtblock_t
*block
, tree
*bounds
, gfc_expr
** values
,
3823 tree desc
, int dim
, bool lbound
, bool deferred
)
3826 gfc_expr
* input_val
= values
[dim
];
3827 tree
*output
= &bounds
[dim
];
3832 /* Specified section bound. */
3833 gfc_init_se (&se
, NULL
);
3834 gfc_conv_expr_type (&se
, input_val
, gfc_array_index_type
);
3835 gfc_add_block_to_block (block
, &se
.pre
);
3838 else if (deferred
&& GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc
)))
3840 /* The gfc_conv_array_lbound () routine returns a constant zero for
3841 deferred length arrays, which in the scalarizer wreaks havoc, when
3842 copying to a (newly allocated) one-based array.
3843 Keep returning the actual result in sync for both bounds. */
3844 *output
= lbound
? gfc_conv_descriptor_lbound_get (desc
,
3846 gfc_conv_descriptor_ubound_get (desc
,
3851 /* No specific bound specified so use the bound of the array. */
3852 *output
= lbound
? gfc_conv_array_lbound (desc
, dim
) :
3853 gfc_conv_array_ubound (desc
, dim
);
3855 *output
= gfc_evaluate_now (*output
, block
);
3859 /* Calculate the lower bound of an array section. */
3862 gfc_conv_section_startstride (stmtblock_t
* block
, gfc_ss
* ss
, int dim
)
3864 gfc_expr
*stride
= NULL
;
3867 gfc_array_info
*info
;
3870 gcc_assert (ss
->info
->type
== GFC_SS_SECTION
);
3872 info
= &ss
->info
->data
.array
;
3873 ar
= &info
->ref
->u
.ar
;
3875 if (ar
->dimen_type
[dim
] == DIMEN_VECTOR
)
3877 /* We use a zero-based index to access the vector. */
3878 info
->start
[dim
] = gfc_index_zero_node
;
3879 info
->end
[dim
] = NULL
;
3880 info
->stride
[dim
] = gfc_index_one_node
;
3884 gcc_assert (ar
->dimen_type
[dim
] == DIMEN_RANGE
3885 || ar
->dimen_type
[dim
] == DIMEN_THIS_IMAGE
);
3886 desc
= info
->descriptor
;
3887 stride
= ar
->stride
[dim
];
3890 /* Calculate the start of the range. For vector subscripts this will
3891 be the range of the vector. */
3892 evaluate_bound (block
, info
->start
, ar
->start
, desc
, dim
, true,
3893 ar
->as
->type
== AS_DEFERRED
);
3895 /* Similarly calculate the end. Although this is not used in the
3896 scalarizer, it is needed when checking bounds and where the end
3897 is an expression with side-effects. */
3898 evaluate_bound (block
, info
->end
, ar
->end
, desc
, dim
, false,
3899 ar
->as
->type
== AS_DEFERRED
);
3902 /* Calculate the stride. */
3904 info
->stride
[dim
] = gfc_index_one_node
;
3907 gfc_init_se (&se
, NULL
);
3908 gfc_conv_expr_type (&se
, stride
, gfc_array_index_type
);
3909 gfc_add_block_to_block (block
, &se
.pre
);
3910 info
->stride
[dim
] = gfc_evaluate_now (se
.expr
, block
);
3915 /* Calculates the range start and stride for a SS chain. Also gets the
3916 descriptor and data pointer. The range of vector subscripts is the size
3917 of the vector. Array bounds are also checked. */
3920 gfc_conv_ss_startstride (gfc_loopinfo
* loop
)
3927 gfc_loopinfo
* const outer_loop
= outermost_loop (loop
);
3930 /* Determine the rank of the loop. */
3931 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
3933 switch (ss
->info
->type
)
3935 case GFC_SS_SECTION
:
3936 case GFC_SS_CONSTRUCTOR
:
3937 case GFC_SS_FUNCTION
:
3938 case GFC_SS_COMPONENT
:
3939 loop
->dimen
= ss
->dimen
;
3942 /* As usual, lbound and ubound are exceptions!. */
3943 case GFC_SS_INTRINSIC
:
3944 switch (ss
->info
->expr
->value
.function
.isym
->id
)
3946 case GFC_ISYM_LBOUND
:
3947 case GFC_ISYM_UBOUND
:
3948 case GFC_ISYM_LCOBOUND
:
3949 case GFC_ISYM_UCOBOUND
:
3950 case GFC_ISYM_THIS_IMAGE
:
3951 loop
->dimen
= ss
->dimen
;
3963 /* We should have determined the rank of the expression by now. If
3964 not, that's bad news. */
3968 /* Loop over all the SS in the chain. */
3969 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
3971 gfc_ss_info
*ss_info
;
3972 gfc_array_info
*info
;
3976 expr
= ss_info
->expr
;
3977 info
= &ss_info
->data
.array
;
3979 if (expr
&& expr
->shape
&& !info
->shape
)
3980 info
->shape
= expr
->shape
;
3982 switch (ss_info
->type
)
3984 case GFC_SS_SECTION
:
3985 /* Get the descriptor for the array. If it is a cross loops array,
3986 we got the descriptor already in the outermost loop. */
3987 if (ss
->parent
== NULL
)
3988 gfc_conv_ss_descriptor (&outer_loop
->pre
, ss
,
3989 !loop
->array_parameter
);
3991 for (n
= 0; n
< ss
->dimen
; n
++)
3992 gfc_conv_section_startstride (&outer_loop
->pre
, ss
, ss
->dim
[n
]);
3995 case GFC_SS_INTRINSIC
:
3996 switch (expr
->value
.function
.isym
->id
)
3998 /* Fall through to supply start and stride. */
3999 case GFC_ISYM_LBOUND
:
4000 case GFC_ISYM_UBOUND
:
4004 /* This is the variant without DIM=... */
4005 gcc_assert (expr
->value
.function
.actual
->next
->expr
== NULL
);
4007 arg
= expr
->value
.function
.actual
->expr
;
4008 if (arg
->rank
== -1)
4013 /* The rank (hence the return value's shape) is unknown,
4014 we have to retrieve it. */
4015 gfc_init_se (&se
, NULL
);
4016 se
.descriptor_only
= 1;
4017 gfc_conv_expr (&se
, arg
);
4018 /* This is a bare variable, so there is no preliminary
4020 gcc_assert (se
.pre
.head
== NULL_TREE
4021 && se
.post
.head
== NULL_TREE
);
4022 rank
= gfc_conv_descriptor_rank (se
.expr
);
4023 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
4024 gfc_array_index_type
,
4025 fold_convert (gfc_array_index_type
,
4027 gfc_index_one_node
);
4028 info
->end
[0] = gfc_evaluate_now (tmp
, &outer_loop
->pre
);
4029 info
->start
[0] = gfc_index_zero_node
;
4030 info
->stride
[0] = gfc_index_one_node
;
4033 /* Otherwise fall through GFC_SS_FUNCTION. */
4035 case GFC_ISYM_LCOBOUND
:
4036 case GFC_ISYM_UCOBOUND
:
4037 case GFC_ISYM_THIS_IMAGE
:
4044 case GFC_SS_CONSTRUCTOR
:
4045 case GFC_SS_FUNCTION
:
4046 for (n
= 0; n
< ss
->dimen
; n
++)
4048 int dim
= ss
->dim
[n
];
4050 info
->start
[dim
] = gfc_index_zero_node
;
4051 info
->end
[dim
] = gfc_index_zero_node
;
4052 info
->stride
[dim
] = gfc_index_one_node
;
4061 /* The rest is just runtime bound checking. */
4062 if (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
4065 tree lbound
, ubound
;
4067 tree size
[GFC_MAX_DIMENSIONS
];
4068 tree stride_pos
, stride_neg
, non_zerosized
, tmp2
, tmp3
;
4069 gfc_array_info
*info
;
4073 gfc_start_block (&block
);
4075 for (n
= 0; n
< loop
->dimen
; n
++)
4076 size
[n
] = NULL_TREE
;
4078 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
4081 gfc_ss_info
*ss_info
;
4084 const char *expr_name
;
4087 if (ss_info
->type
!= GFC_SS_SECTION
)
4090 /* Catch allocatable lhs in f2003. */
4091 if (flag_realloc_lhs
&& ss
->is_alloc_lhs
)
4094 expr
= ss_info
->expr
;
4095 expr_loc
= &expr
->where
;
4096 expr_name
= expr
->symtree
->name
;
4098 gfc_start_block (&inner
);
4100 /* TODO: range checking for mapped dimensions. */
4101 info
= &ss_info
->data
.array
;
4103 /* This code only checks ranges. Elemental and vector
4104 dimensions are checked later. */
4105 for (n
= 0; n
< loop
->dimen
; n
++)
4110 if (info
->ref
->u
.ar
.dimen_type
[dim
] != DIMEN_RANGE
)
4113 if (dim
== info
->ref
->u
.ar
.dimen
- 1
4114 && info
->ref
->u
.ar
.as
->type
== AS_ASSUMED_SIZE
)
4115 check_upper
= false;
4119 /* Zero stride is not allowed. */
4120 tmp
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
4121 info
->stride
[dim
], gfc_index_zero_node
);
4122 msg
= xasprintf ("Zero stride is not allowed, for dimension %d "
4123 "of array '%s'", dim
+ 1, expr_name
);
4124 gfc_trans_runtime_check (true, false, tmp
, &inner
,
4128 desc
= info
->descriptor
;
4130 /* This is the run-time equivalent of resolve.c's
4131 check_dimension(). The logical is more readable there
4132 than it is here, with all the trees. */
4133 lbound
= gfc_conv_array_lbound (desc
, dim
);
4134 end
= info
->end
[dim
];
4136 ubound
= gfc_conv_array_ubound (desc
, dim
);
4140 /* non_zerosized is true when the selected range is not
4142 stride_pos
= fold_build2_loc (input_location
, GT_EXPR
,
4143 boolean_type_node
, info
->stride
[dim
],
4144 gfc_index_zero_node
);
4145 tmp
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
4146 info
->start
[dim
], end
);
4147 stride_pos
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
4148 boolean_type_node
, stride_pos
, tmp
);
4150 stride_neg
= fold_build2_loc (input_location
, LT_EXPR
,
4152 info
->stride
[dim
], gfc_index_zero_node
);
4153 tmp
= fold_build2_loc (input_location
, GE_EXPR
, boolean_type_node
,
4154 info
->start
[dim
], end
);
4155 stride_neg
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
4158 non_zerosized
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
4160 stride_pos
, stride_neg
);
4162 /* Check the start of the range against the lower and upper
4163 bounds of the array, if the range is not empty.
4164 If upper bound is present, include both bounds in the
4168 tmp
= fold_build2_loc (input_location
, LT_EXPR
,
4170 info
->start
[dim
], lbound
);
4171 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
4173 non_zerosized
, tmp
);
4174 tmp2
= fold_build2_loc (input_location
, GT_EXPR
,
4176 info
->start
[dim
], ubound
);
4177 tmp2
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
4179 non_zerosized
, tmp2
);
4180 msg
= xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4181 "outside of expected range (%%ld:%%ld)",
4182 dim
+ 1, expr_name
);
4183 gfc_trans_runtime_check (true, false, tmp
, &inner
,
4185 fold_convert (long_integer_type_node
, info
->start
[dim
]),
4186 fold_convert (long_integer_type_node
, lbound
),
4187 fold_convert (long_integer_type_node
, ubound
));
4188 gfc_trans_runtime_check (true, false, tmp2
, &inner
,
4190 fold_convert (long_integer_type_node
, info
->start
[dim
]),
4191 fold_convert (long_integer_type_node
, lbound
),
4192 fold_convert (long_integer_type_node
, ubound
));
4197 tmp
= fold_build2_loc (input_location
, LT_EXPR
,
4199 info
->start
[dim
], lbound
);
4200 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
4201 boolean_type_node
, non_zerosized
, tmp
);
4202 msg
= xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4203 "below lower bound of %%ld",
4204 dim
+ 1, expr_name
);
4205 gfc_trans_runtime_check (true, false, tmp
, &inner
,
4207 fold_convert (long_integer_type_node
, info
->start
[dim
]),
4208 fold_convert (long_integer_type_node
, lbound
));
4212 /* Compute the last element of the range, which is not
4213 necessarily "end" (think 0:5:3, which doesn't contain 5)
4214 and check it against both lower and upper bounds. */
4216 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
4217 gfc_array_index_type
, end
,
4219 tmp
= fold_build2_loc (input_location
, TRUNC_MOD_EXPR
,
4220 gfc_array_index_type
, tmp
,
4222 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
4223 gfc_array_index_type
, end
, tmp
);
4224 tmp2
= fold_build2_loc (input_location
, LT_EXPR
,
4225 boolean_type_node
, tmp
, lbound
);
4226 tmp2
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
4227 boolean_type_node
, non_zerosized
, tmp2
);
4230 tmp3
= fold_build2_loc (input_location
, GT_EXPR
,
4231 boolean_type_node
, tmp
, ubound
);
4232 tmp3
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
4233 boolean_type_node
, non_zerosized
, tmp3
);
4234 msg
= xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4235 "outside of expected range (%%ld:%%ld)",
4236 dim
+ 1, expr_name
);
4237 gfc_trans_runtime_check (true, false, tmp2
, &inner
,
4239 fold_convert (long_integer_type_node
, tmp
),
4240 fold_convert (long_integer_type_node
, ubound
),
4241 fold_convert (long_integer_type_node
, lbound
));
4242 gfc_trans_runtime_check (true, false, tmp3
, &inner
,
4244 fold_convert (long_integer_type_node
, tmp
),
4245 fold_convert (long_integer_type_node
, ubound
),
4246 fold_convert (long_integer_type_node
, lbound
));
4251 msg
= xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4252 "below lower bound of %%ld",
4253 dim
+ 1, expr_name
);
4254 gfc_trans_runtime_check (true, false, tmp2
, &inner
,
4256 fold_convert (long_integer_type_node
, tmp
),
4257 fold_convert (long_integer_type_node
, lbound
));
4261 /* Check the section sizes match. */
4262 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
4263 gfc_array_index_type
, end
,
4265 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
,
4266 gfc_array_index_type
, tmp
,
4268 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4269 gfc_array_index_type
,
4270 gfc_index_one_node
, tmp
);
4271 tmp
= fold_build2_loc (input_location
, MAX_EXPR
,
4272 gfc_array_index_type
, tmp
,
4273 build_int_cst (gfc_array_index_type
, 0));
4274 /* We remember the size of the first section, and check all the
4275 others against this. */
4278 tmp3
= fold_build2_loc (input_location
, NE_EXPR
,
4279 boolean_type_node
, tmp
, size
[n
]);
4280 msg
= xasprintf ("Array bound mismatch for dimension %d "
4281 "of array '%s' (%%ld/%%ld)",
4282 dim
+ 1, expr_name
);
4284 gfc_trans_runtime_check (true, false, tmp3
, &inner
,
4286 fold_convert (long_integer_type_node
, tmp
),
4287 fold_convert (long_integer_type_node
, size
[n
]));
4292 size
[n
] = gfc_evaluate_now (tmp
, &inner
);
4295 tmp
= gfc_finish_block (&inner
);
4297 /* For optional arguments, only check bounds if the argument is
4299 if (expr
->symtree
->n
.sym
->attr
.optional
4300 || expr
->symtree
->n
.sym
->attr
.not_always_present
)
4301 tmp
= build3_v (COND_EXPR
,
4302 gfc_conv_expr_present (expr
->symtree
->n
.sym
),
4303 tmp
, build_empty_stmt (input_location
));
4305 gfc_add_expr_to_block (&block
, tmp
);
4309 tmp
= gfc_finish_block (&block
);
4310 gfc_add_expr_to_block (&outer_loop
->pre
, tmp
);
4313 for (loop
= loop
->nested
; loop
; loop
= loop
->next
)
4314 gfc_conv_ss_startstride (loop
);
4317 /* Return true if both symbols could refer to the same data object. Does
4318 not take account of aliasing due to equivalence statements. */
4321 symbols_could_alias (gfc_symbol
*lsym
, gfc_symbol
*rsym
, bool lsym_pointer
,
4322 bool lsym_target
, bool rsym_pointer
, bool rsym_target
)
4324 /* Aliasing isn't possible if the symbols have different base types. */
4325 if (gfc_compare_types (&lsym
->ts
, &rsym
->ts
) == 0)
4328 /* Pointers can point to other pointers and target objects. */
4330 if ((lsym_pointer
&& (rsym_pointer
|| rsym_target
))
4331 || (rsym_pointer
&& (lsym_pointer
|| lsym_target
)))
4334 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
4335 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
4337 if (lsym_target
&& rsym_target
4338 && ((lsym
->attr
.dummy
&& !lsym
->attr
.contiguous
4339 && (!lsym
->attr
.dimension
|| lsym
->as
->type
== AS_ASSUMED_SHAPE
))
4340 || (rsym
->attr
.dummy
&& !rsym
->attr
.contiguous
4341 && (!rsym
->attr
.dimension
4342 || rsym
->as
->type
== AS_ASSUMED_SHAPE
))))
4349 /* Return true if the two SS could be aliased, i.e. both point to the same data
4351 /* TODO: resolve aliases based on frontend expressions. */
4354 gfc_could_be_alias (gfc_ss
* lss
, gfc_ss
* rss
)
4358 gfc_expr
*lexpr
, *rexpr
;
4361 bool lsym_pointer
, lsym_target
, rsym_pointer
, rsym_target
;
4363 lexpr
= lss
->info
->expr
;
4364 rexpr
= rss
->info
->expr
;
4366 lsym
= lexpr
->symtree
->n
.sym
;
4367 rsym
= rexpr
->symtree
->n
.sym
;
4369 lsym_pointer
= lsym
->attr
.pointer
;
4370 lsym_target
= lsym
->attr
.target
;
4371 rsym_pointer
= rsym
->attr
.pointer
;
4372 rsym_target
= rsym
->attr
.target
;
4374 if (symbols_could_alias (lsym
, rsym
, lsym_pointer
, lsym_target
,
4375 rsym_pointer
, rsym_target
))
4378 if (rsym
->ts
.type
!= BT_DERIVED
&& rsym
->ts
.type
!= BT_CLASS
4379 && lsym
->ts
.type
!= BT_DERIVED
&& lsym
->ts
.type
!= BT_CLASS
)
4382 /* For derived types we must check all the component types. We can ignore
4383 array references as these will have the same base type as the previous
4385 for (lref
= lexpr
->ref
; lref
!= lss
->info
->data
.array
.ref
; lref
= lref
->next
)
4387 if (lref
->type
!= REF_COMPONENT
)
4390 lsym_pointer
= lsym_pointer
|| lref
->u
.c
.sym
->attr
.pointer
;
4391 lsym_target
= lsym_target
|| lref
->u
.c
.sym
->attr
.target
;
4393 if (symbols_could_alias (lref
->u
.c
.sym
, rsym
, lsym_pointer
, lsym_target
,
4394 rsym_pointer
, rsym_target
))
4397 if ((lsym_pointer
&& (rsym_pointer
|| rsym_target
))
4398 || (rsym_pointer
&& (lsym_pointer
|| lsym_target
)))
4400 if (gfc_compare_types (&lref
->u
.c
.component
->ts
,
4405 for (rref
= rexpr
->ref
; rref
!= rss
->info
->data
.array
.ref
;
4408 if (rref
->type
!= REF_COMPONENT
)
4411 rsym_pointer
= rsym_pointer
|| rref
->u
.c
.sym
->attr
.pointer
;
4412 rsym_target
= lsym_target
|| rref
->u
.c
.sym
->attr
.target
;
4414 if (symbols_could_alias (lref
->u
.c
.sym
, rref
->u
.c
.sym
,
4415 lsym_pointer
, lsym_target
,
4416 rsym_pointer
, rsym_target
))
4419 if ((lsym_pointer
&& (rsym_pointer
|| rsym_target
))
4420 || (rsym_pointer
&& (lsym_pointer
|| lsym_target
)))
4422 if (gfc_compare_types (&lref
->u
.c
.component
->ts
,
4423 &rref
->u
.c
.sym
->ts
))
4425 if (gfc_compare_types (&lref
->u
.c
.sym
->ts
,
4426 &rref
->u
.c
.component
->ts
))
4428 if (gfc_compare_types (&lref
->u
.c
.component
->ts
,
4429 &rref
->u
.c
.component
->ts
))
4435 lsym_pointer
= lsym
->attr
.pointer
;
4436 lsym_target
= lsym
->attr
.target
;
4437 lsym_pointer
= lsym
->attr
.pointer
;
4438 lsym_target
= lsym
->attr
.target
;
4440 for (rref
= rexpr
->ref
; rref
!= rss
->info
->data
.array
.ref
; rref
= rref
->next
)
4442 if (rref
->type
!= REF_COMPONENT
)
4445 rsym_pointer
= rsym_pointer
|| rref
->u
.c
.sym
->attr
.pointer
;
4446 rsym_target
= lsym_target
|| rref
->u
.c
.sym
->attr
.target
;
4448 if (symbols_could_alias (rref
->u
.c
.sym
, lsym
,
4449 lsym_pointer
, lsym_target
,
4450 rsym_pointer
, rsym_target
))
4453 if ((lsym_pointer
&& (rsym_pointer
|| rsym_target
))
4454 || (rsym_pointer
&& (lsym_pointer
|| lsym_target
)))
4456 if (gfc_compare_types (&lsym
->ts
, &rref
->u
.c
.component
->ts
))
4465 /* Resolve array data dependencies. Creates a temporary if required. */
4466 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
4470 gfc_conv_resolve_dependencies (gfc_loopinfo
* loop
, gfc_ss
* dest
,
4476 gfc_ss_info
*ss_info
;
4477 gfc_expr
*dest_expr
;
4482 loop
->temp_ss
= NULL
;
4483 dest_expr
= dest
->info
->expr
;
4485 for (ss
= rss
; ss
!= gfc_ss_terminator
; ss
= ss
->next
)
4488 ss_expr
= ss_info
->expr
;
4490 if (ss_info
->array_outer_dependency
)
4496 if (ss_info
->type
!= GFC_SS_SECTION
)
4498 if (flag_realloc_lhs
4499 && dest_expr
!= ss_expr
4500 && gfc_is_reallocatable_lhs (dest_expr
)
4502 nDepend
= gfc_check_dependency (dest_expr
, ss_expr
, true);
4504 /* Check for cases like c(:)(1:2) = c(2)(2:3) */
4505 if (!nDepend
&& dest_expr
->rank
> 0
4506 && dest_expr
->ts
.type
== BT_CHARACTER
4507 && ss_expr
->expr_type
== EXPR_VARIABLE
)
4509 nDepend
= gfc_check_dependency (dest_expr
, ss_expr
, false);
4511 if (ss_info
->type
== GFC_SS_REFERENCE
4512 && gfc_check_dependency (dest_expr
, ss_expr
, false))
4513 ss_info
->data
.scalar
.needs_temporary
= 1;
4518 if (dest_expr
->symtree
->n
.sym
!= ss_expr
->symtree
->n
.sym
)
4520 if (gfc_could_be_alias (dest
, ss
)
4521 || gfc_are_equivalenced_arrays (dest_expr
, ss_expr
))
4529 lref
= dest_expr
->ref
;
4530 rref
= ss_expr
->ref
;
4532 nDepend
= gfc_dep_resolver (lref
, rref
, &loop
->reverse
[0]);
4537 for (i
= 0; i
< dest
->dimen
; i
++)
4538 for (j
= 0; j
< ss
->dimen
; j
++)
4540 && dest
->dim
[i
] == ss
->dim
[j
])
4542 /* If we don't access array elements in the same order,
4543 there is a dependency. */
4548 /* TODO : loop shifting. */
4551 /* Mark the dimensions for LOOP SHIFTING */
4552 for (n
= 0; n
< loop
->dimen
; n
++)
4554 int dim
= dest
->data
.info
.dim
[n
];
4556 if (lref
->u
.ar
.dimen_type
[dim
] == DIMEN_VECTOR
)
4558 else if (! gfc_is_same_range (&lref
->u
.ar
,
4559 &rref
->u
.ar
, dim
, 0))
4563 /* Put all the dimensions with dependencies in the
4566 for (n
= 0; n
< loop
->dimen
; n
++)
4568 gcc_assert (loop
->order
[n
] == n
);
4570 loop
->order
[dim
++] = n
;
4572 for (n
= 0; n
< loop
->dimen
; n
++)
4575 loop
->order
[dim
++] = n
;
4578 gcc_assert (dim
== loop
->dimen
);
4589 tree base_type
= gfc_typenode_for_spec (&dest_expr
->ts
);
4590 if (GFC_ARRAY_TYPE_P (base_type
)
4591 || GFC_DESCRIPTOR_TYPE_P (base_type
))
4592 base_type
= gfc_get_element_type (base_type
);
4593 loop
->temp_ss
= gfc_get_temp_ss (base_type
, dest
->info
->string_length
,
4595 gfc_add_ss_to_loop (loop
, loop
->temp_ss
);
4598 loop
->temp_ss
= NULL
;
4602 /* Browse through each array's information from the scalarizer and set the loop
4603 bounds according to the "best" one (per dimension), i.e. the one which
4604 provides the most information (constant bounds, shape, etc.). */
4607 set_loop_bounds (gfc_loopinfo
*loop
)
4609 int n
, dim
, spec_dim
;
4610 gfc_array_info
*info
;
4611 gfc_array_info
*specinfo
;
4615 bool dynamic
[GFC_MAX_DIMENSIONS
];
4618 bool nonoptional_arr
;
4620 gfc_loopinfo
* const outer_loop
= outermost_loop (loop
);
4622 loopspec
= loop
->specloop
;
4625 for (n
= 0; n
< loop
->dimen
; n
++)
4630 /* If there are both optional and nonoptional array arguments, scalarize
4631 over the nonoptional; otherwise, it does not matter as then all
4632 (optional) arrays have to be present per F2008, 125.2.12p3(6). */
4634 nonoptional_arr
= false;
4636 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
4637 if (ss
->info
->type
!= GFC_SS_SCALAR
&& ss
->info
->type
!= GFC_SS_TEMP
4638 && ss
->info
->type
!= GFC_SS_REFERENCE
&& !ss
->info
->can_be_null_ref
)
4640 nonoptional_arr
= true;
4644 /* We use one SS term, and use that to determine the bounds of the
4645 loop for this dimension. We try to pick the simplest term. */
4646 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
4648 gfc_ss_type ss_type
;
4650 ss_type
= ss
->info
->type
;
4651 if (ss_type
== GFC_SS_SCALAR
4652 || ss_type
== GFC_SS_TEMP
4653 || ss_type
== GFC_SS_REFERENCE
4654 || (ss
->info
->can_be_null_ref
&& nonoptional_arr
))
4657 info
= &ss
->info
->data
.array
;
4660 if (loopspec
[n
] != NULL
)
4662 specinfo
= &loopspec
[n
]->info
->data
.array
;
4663 spec_dim
= loopspec
[n
]->dim
[n
];
4667 /* Silence uninitialized warnings. */
4674 gcc_assert (info
->shape
[dim
]);
4675 /* The frontend has worked out the size for us. */
4678 || !integer_zerop (specinfo
->start
[spec_dim
]))
4679 /* Prefer zero-based descriptors if possible. */
4684 if (ss_type
== GFC_SS_CONSTRUCTOR
)
4686 gfc_constructor_base base
;
4687 /* An unknown size constructor will always be rank one.
4688 Higher rank constructors will either have known shape,
4689 or still be wrapped in a call to reshape. */
4690 gcc_assert (loop
->dimen
== 1);
4692 /* Always prefer to use the constructor bounds if the size
4693 can be determined at compile time. Prefer not to otherwise,
4694 since the general case involves realloc, and it's better to
4695 avoid that overhead if possible. */
4696 base
= ss
->info
->expr
->value
.constructor
;
4697 dynamic
[n
] = gfc_get_array_constructor_size (&i
, base
);
4698 if (!dynamic
[n
] || !loopspec
[n
])
4703 /* Avoid using an allocatable lhs in an assignment, since
4704 there might be a reallocation coming. */
4705 if (loopspec
[n
] && ss
->is_alloc_lhs
)
4710 /* Criteria for choosing a loop specifier (most important first):
4711 doesn't need realloc
4717 else if (loopspec
[n
]->info
->type
== GFC_SS_CONSTRUCTOR
&& dynamic
[n
])
4719 else if (integer_onep (info
->stride
[dim
])
4720 && !integer_onep (specinfo
->stride
[spec_dim
]))
4722 else if (INTEGER_CST_P (info
->stride
[dim
])
4723 && !INTEGER_CST_P (specinfo
->stride
[spec_dim
]))
4725 else if (INTEGER_CST_P (info
->start
[dim
])
4726 && !INTEGER_CST_P (specinfo
->start
[spec_dim
])
4727 && integer_onep (info
->stride
[dim
])
4728 == integer_onep (specinfo
->stride
[spec_dim
])
4729 && INTEGER_CST_P (info
->stride
[dim
])
4730 == INTEGER_CST_P (specinfo
->stride
[spec_dim
]))
4732 /* We don't work out the upper bound.
4733 else if (INTEGER_CST_P (info->finish[n])
4734 && ! INTEGER_CST_P (specinfo->finish[n]))
4735 loopspec[n] = ss; */
4738 /* We should have found the scalarization loop specifier. If not,
4740 gcc_assert (loopspec
[n
]);
4742 info
= &loopspec
[n
]->info
->data
.array
;
4743 dim
= loopspec
[n
]->dim
[n
];
4745 /* Set the extents of this range. */
4746 cshape
= info
->shape
;
4747 if (cshape
&& INTEGER_CST_P (info
->start
[dim
])
4748 && INTEGER_CST_P (info
->stride
[dim
]))
4750 loop
->from
[n
] = info
->start
[dim
];
4751 mpz_set (i
, cshape
[get_array_ref_dim_for_loop_dim (loopspec
[n
], n
)]);
4752 mpz_sub_ui (i
, i
, 1);
4753 /* To = from + (size - 1) * stride. */
4754 tmp
= gfc_conv_mpz_to_tree (i
, gfc_index_integer_kind
);
4755 if (!integer_onep (info
->stride
[dim
]))
4756 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
4757 gfc_array_index_type
, tmp
,
4759 loop
->to
[n
] = fold_build2_loc (input_location
, PLUS_EXPR
,
4760 gfc_array_index_type
,
4761 loop
->from
[n
], tmp
);
4765 loop
->from
[n
] = info
->start
[dim
];
4766 switch (loopspec
[n
]->info
->type
)
4768 case GFC_SS_CONSTRUCTOR
:
4769 /* The upper bound is calculated when we expand the
4771 gcc_assert (loop
->to
[n
] == NULL_TREE
);
4774 case GFC_SS_SECTION
:
4775 /* Use the end expression if it exists and is not constant,
4776 so that it is only evaluated once. */
4777 loop
->to
[n
] = info
->end
[dim
];
4780 case GFC_SS_FUNCTION
:
4781 /* The loop bound will be set when we generate the call. */
4782 gcc_assert (loop
->to
[n
] == NULL_TREE
);
4785 case GFC_SS_INTRINSIC
:
4787 gfc_expr
*expr
= loopspec
[n
]->info
->expr
;
4789 /* The {l,u}bound of an assumed rank. */
4790 gcc_assert ((expr
->value
.function
.isym
->id
== GFC_ISYM_LBOUND
4791 || expr
->value
.function
.isym
->id
== GFC_ISYM_UBOUND
)
4792 && expr
->value
.function
.actual
->next
->expr
== NULL
4793 && expr
->value
.function
.actual
->expr
->rank
== -1);
4795 loop
->to
[n
] = info
->end
[dim
];
4804 /* Transform everything so we have a simple incrementing variable. */
4805 if (integer_onep (info
->stride
[dim
]))
4806 info
->delta
[dim
] = gfc_index_zero_node
;
4809 /* Set the delta for this section. */
4810 info
->delta
[dim
] = gfc_evaluate_now (loop
->from
[n
], &outer_loop
->pre
);
4811 /* Number of iterations is (end - start + step) / step.
4812 with start = 0, this simplifies to
4814 for (i = 0; i<=last; i++){...}; */
4815 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
4816 gfc_array_index_type
, loop
->to
[n
],
4818 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
,
4819 gfc_array_index_type
, tmp
, info
->stride
[dim
]);
4820 tmp
= fold_build2_loc (input_location
, MAX_EXPR
, gfc_array_index_type
,
4821 tmp
, build_int_cst (gfc_array_index_type
, -1));
4822 loop
->to
[n
] = gfc_evaluate_now (tmp
, &outer_loop
->pre
);
4823 /* Make the loop variable start at 0. */
4824 loop
->from
[n
] = gfc_index_zero_node
;
4829 for (loop
= loop
->nested
; loop
; loop
= loop
->next
)
4830 set_loop_bounds (loop
);
4834 /* Initialize the scalarization loop. Creates the loop variables. Determines
4835 the range of the loop variables. Creates a temporary if required.
4836 Also generates code for scalar expressions which have been
4837 moved outside the loop. */
4840 gfc_conv_loop_setup (gfc_loopinfo
* loop
, locus
* where
)
4845 set_loop_bounds (loop
);
4847 /* Add all the scalar code that can be taken out of the loops.
4848 This may include calculating the loop bounds, so do it before
4849 allocating the temporary. */
4850 gfc_add_loop_ss_code (loop
, loop
->ss
, false, where
);
4852 tmp_ss
= loop
->temp_ss
;
4853 /* If we want a temporary then create it. */
4856 gfc_ss_info
*tmp_ss_info
;
4858 tmp_ss_info
= tmp_ss
->info
;
4859 gcc_assert (tmp_ss_info
->type
== GFC_SS_TEMP
);
4860 gcc_assert (loop
->parent
== NULL
);
4862 /* Make absolutely sure that this is a complete type. */
4863 if (tmp_ss_info
->string_length
)
4864 tmp_ss_info
->data
.temp
.type
4865 = gfc_get_character_type_len_for_eltype
4866 (TREE_TYPE (tmp_ss_info
->data
.temp
.type
),
4867 tmp_ss_info
->string_length
);
4869 tmp
= tmp_ss_info
->data
.temp
.type
;
4870 memset (&tmp_ss_info
->data
.array
, 0, sizeof (gfc_array_info
));
4871 tmp_ss_info
->type
= GFC_SS_SECTION
;
4873 gcc_assert (tmp_ss
->dimen
!= 0);
4875 gfc_trans_create_temp_array (&loop
->pre
, &loop
->post
, tmp_ss
, tmp
,
4876 NULL_TREE
, false, true, false, where
);
4879 /* For array parameters we don't have loop variables, so don't calculate the
4881 if (!loop
->array_parameter
)
4882 gfc_set_delta (loop
);
4886 /* Calculates how to transform from loop variables to array indices for each
4887 array: once loop bounds are chosen, sets the difference (DELTA field) between
4888 loop bounds and array reference bounds, for each array info. */
4891 gfc_set_delta (gfc_loopinfo
*loop
)
4893 gfc_ss
*ss
, **loopspec
;
4894 gfc_array_info
*info
;
4898 gfc_loopinfo
* const outer_loop
= outermost_loop (loop
);
4900 loopspec
= loop
->specloop
;
4902 /* Calculate the translation from loop variables to array indices. */
4903 for (ss
= loop
->ss
; ss
!= gfc_ss_terminator
; ss
= ss
->loop_chain
)
4905 gfc_ss_type ss_type
;
4907 ss_type
= ss
->info
->type
;
4908 if (ss_type
!= GFC_SS_SECTION
4909 && ss_type
!= GFC_SS_COMPONENT
4910 && ss_type
!= GFC_SS_CONSTRUCTOR
)
4913 info
= &ss
->info
->data
.array
;
4915 for (n
= 0; n
< ss
->dimen
; n
++)
4917 /* If we are specifying the range the delta is already set. */
4918 if (loopspec
[n
] != ss
)
4922 /* Calculate the offset relative to the loop variable.
4923 First multiply by the stride. */
4924 tmp
= loop
->from
[n
];
4925 if (!integer_onep (info
->stride
[dim
]))
4926 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
4927 gfc_array_index_type
,
4928 tmp
, info
->stride
[dim
]);
4930 /* Then subtract this from our starting value. */
4931 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
4932 gfc_array_index_type
,
4933 info
->start
[dim
], tmp
);
4935 info
->delta
[dim
] = gfc_evaluate_now (tmp
, &outer_loop
->pre
);
4940 for (loop
= loop
->nested
; loop
; loop
= loop
->next
)
4941 gfc_set_delta (loop
);
4945 /* Calculate the size of a given array dimension from the bounds. This
4946 is simply (ubound - lbound + 1) if this expression is positive
4947 or 0 if it is negative (pick either one if it is zero). Optionally
4948 (if or_expr is present) OR the (expression != 0) condition to it. */
4951 gfc_conv_array_extent_dim (tree lbound
, tree ubound
, tree
* or_expr
)
4956 /* Calculate (ubound - lbound + 1). */
4957 res
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
4959 res
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
, res
,
4960 gfc_index_one_node
);
4962 /* Check whether the size for this dimension is negative. */
4963 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
, res
,
4964 gfc_index_zero_node
);
4965 res
= fold_build3_loc (input_location
, COND_EXPR
, gfc_array_index_type
, cond
,
4966 gfc_index_zero_node
, res
);
4968 /* Build OR expression. */
4970 *or_expr
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
4971 boolean_type_node
, *or_expr
, cond
);
4977 /* For an array descriptor, get the total number of elements. This is just
4978 the product of the extents along from_dim to to_dim. */
4981 gfc_conv_descriptor_size_1 (tree desc
, int from_dim
, int to_dim
)
4986 res
= gfc_index_one_node
;
4988 for (dim
= from_dim
; dim
< to_dim
; ++dim
)
4994 lbound
= gfc_conv_descriptor_lbound_get (desc
, gfc_rank_cst
[dim
]);
4995 ubound
= gfc_conv_descriptor_ubound_get (desc
, gfc_rank_cst
[dim
]);
4997 extent
= gfc_conv_array_extent_dim (lbound
, ubound
, NULL
);
4998 res
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
5006 /* Full size of an array. */
5009 gfc_conv_descriptor_size (tree desc
, int rank
)
5011 return gfc_conv_descriptor_size_1 (desc
, 0, rank
);
5015 /* Size of a coarray for all dimensions but the last. */
5018 gfc_conv_descriptor_cosize (tree desc
, int rank
, int corank
)
5020 return gfc_conv_descriptor_size_1 (desc
, rank
, rank
+ corank
- 1);
5024 /* Fills in an array descriptor, and returns the size of the array.
5025 The size will be a simple_val, ie a variable or a constant. Also
5026 calculates the offset of the base. The pointer argument overflow,
5027 which should be of integer type, will increase in value if overflow
5028 occurs during the size calculation. Returns the size of the array.
5032 for (n = 0; n < rank; n++)
5034 a.lbound[n] = specified_lower_bound;
5035 offset = offset + a.lbond[n] * stride;
5037 a.ubound[n] = specified_upper_bound;
5038 a.stride[n] = stride;
5039 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
5040 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
5041 stride = stride * size;
5043 for (n = rank; n < rank+corank; n++)
5044 (Set lcobound/ucobound as above.)
5045 element_size = sizeof (array element);
5048 stride = (size_t) stride;
5049 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
5050 stride = stride * element_size;
5056 gfc_array_init_size (tree descriptor
, int rank
, int corank
, tree
* poffset
,
5057 gfc_expr
** lower
, gfc_expr
** upper
, stmtblock_t
* pblock
,
5058 stmtblock_t
* descriptor_block
, tree
* overflow
,
5059 tree expr3_elem_size
, tree
*nelems
, gfc_expr
*expr3
,
5060 tree expr3_desc
, bool e3_is_array_constr
, gfc_expr
*expr
)
5073 stmtblock_t thenblock
;
5074 stmtblock_t elseblock
;
5079 type
= TREE_TYPE (descriptor
);
5081 stride
= gfc_index_one_node
;
5082 offset
= gfc_index_zero_node
;
5084 /* Set the dtype. */
5085 if (expr
->ts
.type
== BT_CHARACTER
&& expr
->ts
.deferred
5086 && TREE_CODE (expr
->ts
.u
.cl
->backend_decl
) == VAR_DECL
)
5088 type
= gfc_typenode_for_spec (&expr
->ts
);
5089 tmp
= gfc_conv_descriptor_dtype (descriptor
);
5090 gfc_add_modify (descriptor_block
, tmp
,
5091 gfc_get_dtype_rank_type (rank
, type
));
5095 tmp
= gfc_conv_descriptor_dtype (descriptor
);
5096 gfc_add_modify (descriptor_block
, tmp
, gfc_get_dtype (type
));
5099 or_expr
= boolean_false_node
;
5101 for (n
= 0; n
< rank
; n
++)
5106 /* We have 3 possibilities for determining the size of the array:
5107 lower == NULL => lbound = 1, ubound = upper[n]
5108 upper[n] = NULL => lbound = 1, ubound = lower[n]
5109 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
5112 /* Set lower bound. */
5113 gfc_init_se (&se
, NULL
);
5114 if (expr3_desc
!= NULL_TREE
)
5116 if (e3_is_array_constr
)
5117 /* The lbound of a constant array [] starts at zero, but when
5118 allocating it, the standard expects the array to start at
5120 se
.expr
= gfc_index_one_node
;
5122 se
.expr
= gfc_conv_descriptor_lbound_get (expr3_desc
,
5125 else if (lower
== NULL
)
5126 se
.expr
= gfc_index_one_node
;
5129 gcc_assert (lower
[n
]);
5132 gfc_conv_expr_type (&se
, lower
[n
], gfc_array_index_type
);
5133 gfc_add_block_to_block (pblock
, &se
.pre
);
5137 se
.expr
= gfc_index_one_node
;
5141 gfc_conv_descriptor_lbound_set (descriptor_block
, descriptor
,
5142 gfc_rank_cst
[n
], se
.expr
);
5143 conv_lbound
= se
.expr
;
5145 /* Work out the offset for this component. */
5146 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
5148 offset
= fold_build2_loc (input_location
, MINUS_EXPR
,
5149 gfc_array_index_type
, offset
, tmp
);
5151 /* Set upper bound. */
5152 gfc_init_se (&se
, NULL
);
5153 if (expr3_desc
!= NULL_TREE
)
5155 if (e3_is_array_constr
)
5157 /* The lbound of a constant array [] starts at zero, but when
5158 allocating it, the standard expects the array to start at
5159 one. Therefore fix the upper bound to be
5160 (desc.ubound - desc.lbound)+ 1. */
5161 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
5162 gfc_array_index_type
,
5163 gfc_conv_descriptor_ubound_get (
5164 expr3_desc
, gfc_rank_cst
[n
]),
5165 gfc_conv_descriptor_lbound_get (
5166 expr3_desc
, gfc_rank_cst
[n
]));
5167 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
5168 gfc_array_index_type
, tmp
,
5169 gfc_index_one_node
);
5170 se
.expr
= gfc_evaluate_now (tmp
, pblock
);
5173 se
.expr
= gfc_conv_descriptor_ubound_get (expr3_desc
,
5178 gcc_assert (ubound
);
5179 gfc_conv_expr_type (&se
, ubound
, gfc_array_index_type
);
5180 gfc_add_block_to_block (pblock
, &se
.pre
);
5181 if (ubound
->expr_type
== EXPR_FUNCTION
)
5182 se
.expr
= gfc_evaluate_now (se
.expr
, pblock
);
5184 gfc_conv_descriptor_ubound_set (descriptor_block
, descriptor
,
5185 gfc_rank_cst
[n
], se
.expr
);
5186 conv_ubound
= se
.expr
;
5188 /* Store the stride. */
5189 gfc_conv_descriptor_stride_set (descriptor_block
, descriptor
,
5190 gfc_rank_cst
[n
], stride
);
5192 /* Calculate size and check whether extent is negative. */
5193 size
= gfc_conv_array_extent_dim (conv_lbound
, conv_ubound
, &or_expr
);
5194 size
= gfc_evaluate_now (size
, pblock
);
5196 /* Check whether multiplying the stride by the number of
5197 elements in this dimension would overflow. We must also check
5198 whether the current dimension has zero size in order to avoid
5201 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
,
5202 gfc_array_index_type
,
5203 fold_convert (gfc_array_index_type
,
5204 TYPE_MAX_VALUE (gfc_array_index_type
)),
5206 cond
= gfc_unlikely (fold_build2_loc (input_location
, LT_EXPR
,
5207 boolean_type_node
, tmp
, stride
),
5208 PRED_FORTRAN_OVERFLOW
);
5209 tmp
= fold_build3_loc (input_location
, COND_EXPR
, integer_type_node
, cond
,
5210 integer_one_node
, integer_zero_node
);
5211 cond
= gfc_unlikely (fold_build2_loc (input_location
, EQ_EXPR
,
5212 boolean_type_node
, size
,
5213 gfc_index_zero_node
),
5214 PRED_FORTRAN_SIZE_ZERO
);
5215 tmp
= fold_build3_loc (input_location
, COND_EXPR
, integer_type_node
, cond
,
5216 integer_zero_node
, tmp
);
5217 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, integer_type_node
,
5219 *overflow
= gfc_evaluate_now (tmp
, pblock
);
5221 /* Multiply the stride by the number of elements in this dimension. */
5222 stride
= fold_build2_loc (input_location
, MULT_EXPR
,
5223 gfc_array_index_type
, stride
, size
);
5224 stride
= gfc_evaluate_now (stride
, pblock
);
5227 for (n
= rank
; n
< rank
+ corank
; n
++)
5231 /* Set lower bound. */
5232 gfc_init_se (&se
, NULL
);
5233 if (lower
== NULL
|| lower
[n
] == NULL
)
5235 gcc_assert (n
== rank
+ corank
- 1);
5236 se
.expr
= gfc_index_one_node
;
5240 if (ubound
|| n
== rank
+ corank
- 1)
5242 gfc_conv_expr_type (&se
, lower
[n
], gfc_array_index_type
);
5243 gfc_add_block_to_block (pblock
, &se
.pre
);
5247 se
.expr
= gfc_index_one_node
;
5251 gfc_conv_descriptor_lbound_set (descriptor_block
, descriptor
,
5252 gfc_rank_cst
[n
], se
.expr
);
5254 if (n
< rank
+ corank
- 1)
5256 gfc_init_se (&se
, NULL
);
5257 gcc_assert (ubound
);
5258 gfc_conv_expr_type (&se
, ubound
, gfc_array_index_type
);
5259 gfc_add_block_to_block (pblock
, &se
.pre
);
5260 gfc_conv_descriptor_ubound_set (descriptor_block
, descriptor
,
5261 gfc_rank_cst
[n
], se
.expr
);
5265 /* The stride is the number of elements in the array, so multiply by the
5266 size of an element to get the total size. Obviously, if there is a
5267 SOURCE expression (expr3) we must use its element size. */
5268 if (expr3_elem_size
!= NULL_TREE
)
5269 tmp
= expr3_elem_size
;
5270 else if (expr3
!= NULL
)
5272 if (expr3
->ts
.type
== BT_CLASS
)
5275 gfc_expr
*sz
= gfc_copy_expr (expr3
);
5276 gfc_add_vptr_component (sz
);
5277 gfc_add_size_component (sz
);
5278 gfc_init_se (&se_sz
, NULL
);
5279 gfc_conv_expr (&se_sz
, sz
);
5285 tmp
= gfc_typenode_for_spec (&expr3
->ts
);
5286 tmp
= TYPE_SIZE_UNIT (tmp
);
5290 tmp
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
5292 /* Convert to size_t. */
5293 element_size
= fold_convert (size_type_node
, tmp
);
5296 return element_size
;
5298 *nelems
= gfc_evaluate_now (stride
, pblock
);
5299 stride
= fold_convert (size_type_node
, stride
);
5301 /* First check for overflow. Since an array of type character can
5302 have zero element_size, we must check for that before
5304 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
,
5306 TYPE_MAX_VALUE (size_type_node
), element_size
);
5307 cond
= gfc_unlikely (fold_build2_loc (input_location
, LT_EXPR
,
5308 boolean_type_node
, tmp
, stride
),
5309 PRED_FORTRAN_OVERFLOW
);
5310 tmp
= fold_build3_loc (input_location
, COND_EXPR
, integer_type_node
, cond
,
5311 integer_one_node
, integer_zero_node
);
5312 cond
= gfc_unlikely (fold_build2_loc (input_location
, EQ_EXPR
,
5313 boolean_type_node
, element_size
,
5314 build_int_cst (size_type_node
, 0)),
5315 PRED_FORTRAN_SIZE_ZERO
);
5316 tmp
= fold_build3_loc (input_location
, COND_EXPR
, integer_type_node
, cond
,
5317 integer_zero_node
, tmp
);
5318 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, integer_type_node
,
5320 *overflow
= gfc_evaluate_now (tmp
, pblock
);
5322 size
= fold_build2_loc (input_location
, MULT_EXPR
, size_type_node
,
5323 stride
, element_size
);
5325 if (poffset
!= NULL
)
5327 offset
= gfc_evaluate_now (offset
, pblock
);
5331 if (integer_zerop (or_expr
))
5333 if (integer_onep (or_expr
))
5334 return build_int_cst (size_type_node
, 0);
5336 var
= gfc_create_var (TREE_TYPE (size
), "size");
5337 gfc_start_block (&thenblock
);
5338 gfc_add_modify (&thenblock
, var
, build_int_cst (size_type_node
, 0));
5339 thencase
= gfc_finish_block (&thenblock
);
5341 gfc_start_block (&elseblock
);
5342 gfc_add_modify (&elseblock
, var
, size
);
5343 elsecase
= gfc_finish_block (&elseblock
);
5345 tmp
= gfc_evaluate_now (or_expr
, pblock
);
5346 tmp
= build3_v (COND_EXPR
, tmp
, thencase
, elsecase
);
5347 gfc_add_expr_to_block (pblock
, tmp
);
5353 /* Retrieve the last ref from the chain. This routine is specific to
5354 gfc_array_allocate ()'s needs. */
5357 retrieve_last_ref (gfc_ref
**ref_in
, gfc_ref
**prev_ref_in
)
5359 gfc_ref
*ref
, *prev_ref
;
5362 /* Prevent warnings for uninitialized variables. */
5363 prev_ref
= *prev_ref_in
;
5364 while (ref
&& ref
->next
!= NULL
)
5366 gcc_assert (ref
->type
!= REF_ARRAY
|| ref
->u
.ar
.type
== AR_ELEMENT
5367 || (ref
->u
.ar
.dimen
== 0 && ref
->u
.ar
.codimen
> 0));
5372 if (ref
== NULL
|| ref
->type
!= REF_ARRAY
)
5376 *prev_ref_in
= prev_ref
;
5380 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
5381 the work for an ALLOCATE statement. */
5385 gfc_array_allocate (gfc_se
* se
, gfc_expr
* expr
, tree status
, tree errmsg
,
5386 tree errlen
, tree label_finish
, tree expr3_elem_size
,
5387 tree
*nelems
, gfc_expr
*expr3
, tree e3_arr_desc
,
5388 bool e3_is_array_constr
)
5392 tree offset
= NULL_TREE
;
5393 tree token
= NULL_TREE
;
5396 tree error
= NULL_TREE
;
5397 tree overflow
; /* Boolean storing whether size calculation overflows. */
5398 tree var_overflow
= NULL_TREE
;
5400 tree set_descriptor
;
5401 stmtblock_t set_descriptor_block
;
5402 stmtblock_t elseblock
;
5405 gfc_ref
*ref
, *prev_ref
= NULL
;
5406 bool allocatable
, coarray
, dimension
, alloc_w_e3_arr_spec
= false;
5410 /* Find the last reference in the chain. */
5411 if (!retrieve_last_ref (&ref
, &prev_ref
))
5414 /* Take the allocatable and coarray properties solely from the expr-ref's
5415 attributes and not from source=-expression. */
5418 allocatable
= expr
->symtree
->n
.sym
->attr
.allocatable
;
5419 coarray
= expr
->symtree
->n
.sym
->attr
.codimension
;
5420 dimension
= expr
->symtree
->n
.sym
->attr
.dimension
;
5424 allocatable
= prev_ref
->u
.c
.component
->attr
.allocatable
;
5425 coarray
= prev_ref
->u
.c
.component
->attr
.codimension
;
5426 dimension
= prev_ref
->u
.c
.component
->attr
.dimension
;
5430 gcc_assert (coarray
);
5432 if (ref
->u
.ar
.type
== AR_FULL
&& expr3
!= NULL
)
5434 /* F08:C633: Array shape from expr3. */
5437 /* Find the last reference in the chain. */
5438 if (!retrieve_last_ref (&ref
, &prev_ref
))
5440 alloc_w_e3_arr_spec
= true;
5443 /* Figure out the size of the array. */
5444 switch (ref
->u
.ar
.type
)
5450 upper
= ref
->u
.ar
.start
;
5456 lower
= ref
->u
.ar
.start
;
5457 upper
= ref
->u
.ar
.end
;
5461 gcc_assert (ref
->u
.ar
.as
->type
== AS_EXPLICIT
5462 || alloc_w_e3_arr_spec
);
5464 lower
= ref
->u
.ar
.as
->lower
;
5465 upper
= ref
->u
.ar
.as
->upper
;
5473 overflow
= integer_zero_node
;
5475 gfc_init_block (&set_descriptor_block
);
5476 size
= gfc_array_init_size (se
->expr
, alloc_w_e3_arr_spec
? expr
->rank
5477 : ref
->u
.ar
.as
->rank
,
5478 coarray
? ref
->u
.ar
.as
->corank
: 0,
5479 &offset
, lower
, upper
,
5480 &se
->pre
, &set_descriptor_block
, &overflow
,
5481 expr3_elem_size
, nelems
, expr3
, e3_arr_desc
,
5482 e3_is_array_constr
, expr
);
5486 var_overflow
= gfc_create_var (integer_type_node
, "overflow");
5487 gfc_add_modify (&se
->pre
, var_overflow
, overflow
);
5489 if (status
== NULL_TREE
)
5491 /* Generate the block of code handling overflow. */
5492 msg
= gfc_build_addr_expr (pchar_type_node
,
5493 gfc_build_localized_cstring_const
5494 ("Integer overflow when calculating the amount of "
5495 "memory to allocate"));
5496 error
= build_call_expr_loc (input_location
,
5497 gfor_fndecl_runtime_error
, 1, msg
);
5501 tree status_type
= TREE_TYPE (status
);
5502 stmtblock_t set_status_block
;
5504 gfc_start_block (&set_status_block
);
5505 gfc_add_modify (&set_status_block
, status
,
5506 build_int_cst (status_type
, LIBERROR_ALLOCATION
));
5507 error
= gfc_finish_block (&set_status_block
);
5511 gfc_start_block (&elseblock
);
5513 /* Allocate memory to store the data. */
5514 if (POINTER_TYPE_P (TREE_TYPE (se
->expr
)))
5515 se
->expr
= build_fold_indirect_ref_loc (input_location
, se
->expr
);
5517 pointer
= gfc_conv_descriptor_data_get (se
->expr
);
5518 STRIP_NOPS (pointer
);
5520 if (coarray
&& flag_coarray
== GFC_FCOARRAY_LIB
)
5521 token
= gfc_build_addr_expr (NULL_TREE
,
5522 gfc_conv_descriptor_token (se
->expr
));
5524 /* The allocatable variant takes the old pointer as first argument. */
5526 gfc_allocate_allocatable (&elseblock
, pointer
, size
, token
,
5527 status
, errmsg
, errlen
, label_finish
, expr
);
5529 gfc_allocate_using_malloc (&elseblock
, pointer
, size
, status
);
5533 cond
= gfc_unlikely (fold_build2_loc (input_location
, NE_EXPR
,
5534 boolean_type_node
, var_overflow
, integer_zero_node
),
5535 PRED_FORTRAN_OVERFLOW
);
5536 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
, cond
,
5537 error
, gfc_finish_block (&elseblock
));
5540 tmp
= gfc_finish_block (&elseblock
);
5542 gfc_add_expr_to_block (&se
->pre
, tmp
);
5544 /* Update the array descriptors. */
5546 gfc_conv_descriptor_offset_set (&set_descriptor_block
, se
->expr
, offset
);
5548 set_descriptor
= gfc_finish_block (&set_descriptor_block
);
5549 if (status
!= NULL_TREE
)
5551 cond
= fold_build2_loc (input_location
, EQ_EXPR
,
5552 boolean_type_node
, status
,
5553 build_int_cst (TREE_TYPE (status
), 0));
5554 gfc_add_expr_to_block (&se
->pre
,
5555 fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5558 build_empty_stmt (input_location
)));
5561 gfc_add_expr_to_block (&se
->pre
, set_descriptor
);
5563 if (expr
->ts
.type
== BT_DERIVED
&& expr
->ts
.u
.derived
->attr
.alloc_comp
5566 tmp
= gfc_nullify_alloc_comp (expr
->ts
.u
.derived
, se
->expr
,
5567 ref
->u
.ar
.as
->rank
);
5568 gfc_add_expr_to_block (&se
->pre
, tmp
);
5575 /* Deallocate an array variable. Also used when an allocated variable goes
5580 gfc_array_deallocate (tree descriptor
, tree pstat
, tree errmsg
, tree errlen
,
5581 tree label_finish
, gfc_expr
* expr
)
5586 bool coarray
= gfc_is_coarray (expr
);
5588 gfc_start_block (&block
);
5590 /* Get a pointer to the data. */
5591 var
= gfc_conv_descriptor_data_get (descriptor
);
5594 /* Parameter is the address of the data component. */
5595 tmp
= gfc_deallocate_with_status (coarray
? descriptor
: var
, pstat
, errmsg
,
5596 errlen
, label_finish
, false, expr
, coarray
);
5597 gfc_add_expr_to_block (&block
, tmp
);
5599 /* Zero the data pointer; only for coarrays an error can occur and then
5600 the allocation status may not be changed. */
5601 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
5602 var
, build_int_cst (TREE_TYPE (var
), 0));
5603 if (pstat
!= NULL_TREE
&& coarray
&& flag_coarray
== GFC_FCOARRAY_LIB
)
5606 tree stat
= build_fold_indirect_ref_loc (input_location
, pstat
);
5608 cond
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
5609 stat
, build_int_cst (TREE_TYPE (stat
), 0));
5610 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5611 cond
, tmp
, build_empty_stmt (input_location
));
5614 gfc_add_expr_to_block (&block
, tmp
);
5616 return gfc_finish_block (&block
);
5620 /* Create an array constructor from an initialization expression.
5621 We assume the frontend already did any expansions and conversions. */
5624 gfc_conv_array_initializer (tree type
, gfc_expr
* expr
)
5631 vec
<constructor_elt
, va_gc
> *v
= NULL
;
5633 if (expr
->expr_type
== EXPR_VARIABLE
5634 && expr
->symtree
->n
.sym
->attr
.flavor
== FL_PARAMETER
5635 && expr
->symtree
->n
.sym
->value
)
5636 expr
= expr
->symtree
->n
.sym
->value
;
5638 switch (expr
->expr_type
)
5641 case EXPR_STRUCTURE
:
5642 /* A single scalar or derived type value. Create an array with all
5643 elements equal to that value. */
5644 gfc_init_se (&se
, NULL
);
5646 if (expr
->expr_type
== EXPR_CONSTANT
)
5647 gfc_conv_constant (&se
, expr
);
5649 gfc_conv_structure (&se
, expr
, 1);
5651 wtmp
= wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (type
))) + 1;
5652 /* This will probably eat buckets of memory for large arrays. */
5655 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, se
.expr
);
5661 /* Create a vector of all the elements. */
5662 for (c
= gfc_constructor_first (expr
->value
.constructor
);
5663 c
; c
= gfc_constructor_next (c
))
5667 /* Problems occur when we get something like
5668 integer :: a(lots) = (/(i, i=1, lots)/) */
5669 gfc_fatal_error ("The number of elements in the array "
5670 "constructor at %L requires an increase of "
5671 "the allowed %d upper limit. See "
5672 "%<-fmax-array-constructor%> option",
5673 &expr
->where
, flag_max_array_constructor
);
5676 if (mpz_cmp_si (c
->offset
, 0) != 0)
5677 index
= gfc_conv_mpz_to_tree (c
->offset
, gfc_index_integer_kind
);
5681 if (mpz_cmp_si (c
->repeat
, 1) > 0)
5687 mpz_add (maxval
, c
->offset
, c
->repeat
);
5688 mpz_sub_ui (maxval
, maxval
, 1);
5689 tmp2
= gfc_conv_mpz_to_tree (maxval
, gfc_index_integer_kind
);
5690 if (mpz_cmp_si (c
->offset
, 0) != 0)
5692 mpz_add_ui (maxval
, c
->offset
, 1);
5693 tmp1
= gfc_conv_mpz_to_tree (maxval
, gfc_index_integer_kind
);
5696 tmp1
= gfc_conv_mpz_to_tree (c
->offset
, gfc_index_integer_kind
);
5698 range
= fold_build2 (RANGE_EXPR
, gfc_array_index_type
, tmp1
, tmp2
);
5704 gfc_init_se (&se
, NULL
);
5705 switch (c
->expr
->expr_type
)
5708 gfc_conv_constant (&se
, c
->expr
);
5711 case EXPR_STRUCTURE
:
5712 gfc_conv_structure (&se
, c
->expr
, 1);
5716 /* Catch those occasional beasts that do not simplify
5717 for one reason or another, assuming that if they are
5718 standard defying the frontend will catch them. */
5719 gfc_conv_expr (&se
, c
->expr
);
5723 if (range
== NULL_TREE
)
5724 CONSTRUCTOR_APPEND_ELT (v
, index
, se
.expr
);
5727 if (index
!= NULL_TREE
)
5728 CONSTRUCTOR_APPEND_ELT (v
, index
, se
.expr
);
5729 CONSTRUCTOR_APPEND_ELT (v
, range
, se
.expr
);
5735 return gfc_build_null_descriptor (type
);
5741 /* Create a constructor from the list of elements. */
5742 tmp
= build_constructor (type
, v
);
5743 TREE_CONSTANT (tmp
) = 1;
5748 /* Generate code to evaluate non-constant coarray cobounds. */
5751 gfc_trans_array_cobounds (tree type
, stmtblock_t
* pblock
,
5752 const gfc_symbol
*sym
)
5760 as
= IS_CLASS_ARRAY (sym
) ? CLASS_DATA (sym
)->as
: sym
->as
;
5762 for (dim
= as
->rank
; dim
< as
->rank
+ as
->corank
; dim
++)
5764 /* Evaluate non-constant array bound expressions. */
5765 lbound
= GFC_TYPE_ARRAY_LBOUND (type
, dim
);
5766 if (as
->lower
[dim
] && !INTEGER_CST_P (lbound
))
5768 gfc_init_se (&se
, NULL
);
5769 gfc_conv_expr_type (&se
, as
->lower
[dim
], gfc_array_index_type
);
5770 gfc_add_block_to_block (pblock
, &se
.pre
);
5771 gfc_add_modify (pblock
, lbound
, se
.expr
);
5773 ubound
= GFC_TYPE_ARRAY_UBOUND (type
, dim
);
5774 if (as
->upper
[dim
] && !INTEGER_CST_P (ubound
))
5776 gfc_init_se (&se
, NULL
);
5777 gfc_conv_expr_type (&se
, as
->upper
[dim
], gfc_array_index_type
);
5778 gfc_add_block_to_block (pblock
, &se
.pre
);
5779 gfc_add_modify (pblock
, ubound
, se
.expr
);
5785 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
5786 returns the size (in elements) of the array. */
5789 gfc_trans_array_bounds (tree type
, gfc_symbol
* sym
, tree
* poffset
,
5790 stmtblock_t
* pblock
)
5803 as
= IS_CLASS_ARRAY (sym
) ? CLASS_DATA (sym
)->as
: sym
->as
;
5805 size
= gfc_index_one_node
;
5806 offset
= gfc_index_zero_node
;
5807 for (dim
= 0; dim
< as
->rank
; dim
++)
5809 /* Evaluate non-constant array bound expressions. */
5810 lbound
= GFC_TYPE_ARRAY_LBOUND (type
, dim
);
5811 if (as
->lower
[dim
] && !INTEGER_CST_P (lbound
))
5813 gfc_init_se (&se
, NULL
);
5814 gfc_conv_expr_type (&se
, as
->lower
[dim
], gfc_array_index_type
);
5815 gfc_add_block_to_block (pblock
, &se
.pre
);
5816 gfc_add_modify (pblock
, lbound
, se
.expr
);
5818 ubound
= GFC_TYPE_ARRAY_UBOUND (type
, dim
);
5819 if (as
->upper
[dim
] && !INTEGER_CST_P (ubound
))
5821 gfc_init_se (&se
, NULL
);
5822 gfc_conv_expr_type (&se
, as
->upper
[dim
], gfc_array_index_type
);
5823 gfc_add_block_to_block (pblock
, &se
.pre
);
5824 gfc_add_modify (pblock
, ubound
, se
.expr
);
5826 /* The offset of this dimension. offset = offset - lbound * stride. */
5827 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
5829 offset
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
5832 /* The size of this dimension, and the stride of the next. */
5833 if (dim
+ 1 < as
->rank
)
5834 stride
= GFC_TYPE_ARRAY_STRIDE (type
, dim
+ 1);
5836 stride
= GFC_TYPE_ARRAY_SIZE (type
);
5838 if (ubound
!= NULL_TREE
&& !(stride
&& INTEGER_CST_P (stride
)))
5840 /* Calculate stride = size * (ubound + 1 - lbound). */
5841 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
5842 gfc_array_index_type
,
5843 gfc_index_one_node
, lbound
);
5844 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
5845 gfc_array_index_type
, ubound
, tmp
);
5846 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
5847 gfc_array_index_type
, size
, tmp
);
5849 gfc_add_modify (pblock
, stride
, tmp
);
5851 stride
= gfc_evaluate_now (tmp
, pblock
);
5853 /* Make sure that negative size arrays are translated
5854 to being zero size. */
5855 tmp
= fold_build2_loc (input_location
, GE_EXPR
, boolean_type_node
,
5856 stride
, gfc_index_zero_node
);
5857 tmp
= fold_build3_loc (input_location
, COND_EXPR
,
5858 gfc_array_index_type
, tmp
,
5859 stride
, gfc_index_zero_node
);
5860 gfc_add_modify (pblock
, stride
, tmp
);
5866 gfc_trans_array_cobounds (type
, pblock
, sym
);
5867 gfc_trans_vla_type_sizes (sym
, pblock
);
5874 /* Generate code to initialize/allocate an array variable. */
5877 gfc_trans_auto_array_allocation (tree decl
, gfc_symbol
* sym
,
5878 gfc_wrapped_block
* block
)
5882 tree tmp
= NULL_TREE
;
5889 gcc_assert (!(sym
->attr
.pointer
|| sym
->attr
.allocatable
));
5891 /* Do nothing for USEd variables. */
5892 if (sym
->attr
.use_assoc
)
5895 type
= TREE_TYPE (decl
);
5896 gcc_assert (GFC_ARRAY_TYPE_P (type
));
5897 onstack
= TREE_CODE (type
) != POINTER_TYPE
;
5899 gfc_init_block (&init
);
5901 /* Evaluate character string length. */
5902 if (sym
->ts
.type
== BT_CHARACTER
5903 && onstack
&& !INTEGER_CST_P (sym
->ts
.u
.cl
->backend_decl
))
5905 gfc_conv_string_length (sym
->ts
.u
.cl
, NULL
, &init
);
5907 gfc_trans_vla_type_sizes (sym
, &init
);
5909 /* Emit a DECL_EXPR for this variable, which will cause the
5910 gimplifier to allocate storage, and all that good stuff. */
5911 tmp
= fold_build1_loc (input_location
, DECL_EXPR
, TREE_TYPE (decl
), decl
);
5912 gfc_add_expr_to_block (&init
, tmp
);
5917 gfc_add_init_cleanup (block
, gfc_finish_block (&init
), NULL_TREE
);
5921 type
= TREE_TYPE (type
);
5923 gcc_assert (!sym
->attr
.use_assoc
);
5924 gcc_assert (!TREE_STATIC (decl
));
5925 gcc_assert (!sym
->module
);
5927 if (sym
->ts
.type
== BT_CHARACTER
5928 && !INTEGER_CST_P (sym
->ts
.u
.cl
->backend_decl
))
5929 gfc_conv_string_length (sym
->ts
.u
.cl
, NULL
, &init
);
5931 size
= gfc_trans_array_bounds (type
, sym
, &offset
, &init
);
5933 /* Don't actually allocate space for Cray Pointees. */
5934 if (sym
->attr
.cray_pointee
)
5936 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type
)) == VAR_DECL
)
5937 gfc_add_modify (&init
, GFC_TYPE_ARRAY_OFFSET (type
), offset
);
5939 gfc_add_init_cleanup (block
, gfc_finish_block (&init
), NULL_TREE
);
5943 if (flag_stack_arrays
)
5945 gcc_assert (TREE_CODE (TREE_TYPE (decl
)) == POINTER_TYPE
);
5946 space
= build_decl (sym
->declared_at
.lb
->location
,
5947 VAR_DECL
, create_tmp_var_name ("A"),
5948 TREE_TYPE (TREE_TYPE (decl
)));
5949 gfc_trans_vla_type_sizes (sym
, &init
);
5953 /* The size is the number of elements in the array, so multiply by the
5954 size of an element to get the total size. */
5955 tmp
= TYPE_SIZE_UNIT (gfc_get_element_type (type
));
5956 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
5957 size
, fold_convert (gfc_array_index_type
, tmp
));
5959 /* Allocate memory to hold the data. */
5960 tmp
= gfc_call_malloc (&init
, TREE_TYPE (decl
), size
);
5961 gfc_add_modify (&init
, decl
, tmp
);
5963 /* Free the temporary. */
5964 tmp
= gfc_call_free (decl
);
5968 /* Set offset of the array. */
5969 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type
)) == VAR_DECL
)
5970 gfc_add_modify (&init
, GFC_TYPE_ARRAY_OFFSET (type
), offset
);
5972 /* Automatic arrays should not have initializers. */
5973 gcc_assert (!sym
->value
);
5975 inittree
= gfc_finish_block (&init
);
5982 /* Don't create new scope, emit the DECL_EXPR in exactly the scope
5983 where also space is located. */
5984 gfc_init_block (&init
);
5985 tmp
= fold_build1_loc (input_location
, DECL_EXPR
,
5986 TREE_TYPE (space
), space
);
5987 gfc_add_expr_to_block (&init
, tmp
);
5988 addr
= fold_build1_loc (sym
->declared_at
.lb
->location
,
5989 ADDR_EXPR
, TREE_TYPE (decl
), space
);
5990 gfc_add_modify (&init
, decl
, addr
);
5991 gfc_add_init_cleanup (block
, gfc_finish_block (&init
), NULL_TREE
);
5994 gfc_add_init_cleanup (block
, inittree
, tmp
);
5998 /* Generate entry and exit code for g77 calling convention arrays. */
/* NOTE(review): this chunk is a mangled extraction.  The original
   "static void" line, the local declarations (parm, type, offset, tmp,
   stmt, loc, init) and all braces were dropped, and each statement is
   split across several lines.  All tokens below are kept byte-identical
   to the extraction; only comments are added.  */
6001 gfc_trans_g77_array (gfc_symbol
* sym
, gfc_wrapped_block
* block
)
/* Point diagnostics at the dummy's declaration while emitting its
   initialization; the previous location is restored below.  */
6011 gfc_save_backend_locus (&loc
);
6012 gfc_set_backend_locus (&sym
->declared_at
);
6014 /* Descriptor type. */
6015 parm
= sym
->backend_decl
;
6016 type
= TREE_TYPE (parm
);
/* g77-style (no-descriptor) arrays must use the bare array type.  */
6017 gcc_assert (GFC_ARRAY_TYPE_P (type
));
6019 gfc_start_block (&init
);
/* Deferred character length lives in a VAR_DECL that must be computed
   before the bounds can be evaluated.  */
6021 if (sym
->ts
.type
== BT_CHARACTER
6022 && TREE_CODE (sym
->ts
.u
.cl
->backend_decl
) == VAR_DECL
)
6023 gfc_conv_string_length (sym
->ts
.u
.cl
, NULL
, &init
);
6025 /* Evaluate the bounds of the array. */
6026 gfc_trans_array_bounds (type
, sym
, &offset
, &init
);
6028 /* Set the offset. */
6029 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type
)) == VAR_DECL
)
6030 gfc_add_modify (&init
, GFC_TYPE_ARRAY_OFFSET (type
), offset
);
6032 /* Set the pointer itself if we aren't using the parameter directly. */
6033 if (TREE_CODE (parm
) != PARM_DECL
)
6035 tmp
= convert (TREE_TYPE (parm
), GFC_DECL_SAVED_DESCRIPTOR (parm
));
6036 gfc_add_modify (&init
, parm
, tmp
);
6038 stmt
= gfc_finish_block (&init
);
6040 gfc_restore_backend_locus (&loc
);
6042 /* Add the initialization code to the start of the function. */
/* For OPTIONAL (or entry-master not-always-present) dummies, guard the
   whole init block behind a presence test so absent arguments are never
   dereferenced.  */
6044 if (sym
->attr
.optional
|| sym
->attr
.not_always_present
)
6046 tmp
= gfc_conv_expr_present (sym
);
6047 stmt
= build3_v (COND_EXPR
, tmp
, stmt
, build_empty_stmt (input_location
));
/* No cleanup is needed for g77 arrays, hence NULL_TREE.  */
6050 gfc_add_init_cleanup (block
, stmt
, NULL_TREE
);
6054 /* Modify the descriptor of an array parameter so that it has the
6055 correct lower bound. Also move the upper bound accordingly.
6056 If the array is not packed, it will be copied into a temporary.
6057 For each dimension we set the new lower and upper bounds. Then we copy the
6058 stride and calculate the offset for this dimension. We also work out
6059 what the stride of a packed array would be, and see it the two match.
6060 If the array need repacking, we set the stride to the values we just
6061 calculated, recalculate the offset and copy the array data.
6062 Code is also added to copy the data back at the end of the function.
/* NOTE(review): mangled extraction -- the return-type line, most local
   declarations, braces, "else" lines and several operands (e.g. at
   original lines 6254, 6282, 6335) were dropped.  Tokens below are kept
   byte-identical; only comments are added.  */
6066 gfc_trans_dummy_array_bias (gfc_symbol
* sym
, tree tmpdesc
,
6067 gfc_wrapped_block
* block
)
6074 tree stmtInit
, stmtCleanup
;
6081 tree stride
, stride2
;
6091 bool is_classarray
= IS_CLASS_ARRAY (sym
);
6093 /* Do nothing for pointer and allocatable arrays. */
6094 if ((sym
->ts
.type
!= BT_CLASS
&& sym
->attr
.pointer
)
6095 || (sym
->ts
.type
== BT_CLASS
&& CLASS_DATA (sym
)->attr
.class_pointer
)
6096 || sym
->attr
.allocatable
6097 || (is_classarray
&& CLASS_DATA (sym
)->attr
.allocatable
))
/* Descriptorless dummies take the simpler g77 path instead.  */
6100 if (!is_classarray
&& sym
->attr
.dummy
&& gfc_is_nodesc_array (sym
))
6102 gfc_trans_g77_array (sym
, block
);
6106 gfc_save_backend_locus (&loc
);
6107 gfc_set_backend_locus (&sym
->declared_at
);
6109 /* Descriptor type. */
6110 type
= TREE_TYPE (tmpdesc
);
6111 gcc_assert (GFC_ARRAY_TYPE_P (type
));
6112 dumdesc
= GFC_DECL_SAVED_DESCRIPTOR (tmpdesc
);
6114 /* For a class array the dummy array descriptor is in the _class
6116 dumdesc
= gfc_class_data_get (dumdesc
);
6118 dumdesc
= build_fold_indirect_ref_loc (input_location
, dumdesc
);
6119 as
= IS_CLASS_ARRAY (sym
) ? CLASS_DATA (sym
)->as
: sym
->as
;
6120 gfc_start_block (&init
);
/* Deferred character length must be evaluated before the bounds.  */
6122 if (sym
->ts
.type
== BT_CHARACTER
6123 && TREE_CODE (sym
->ts
.u
.cl
->backend_decl
) == VAR_DECL
)
6124 gfc_conv_string_length (sym
->ts
.u
.cl
, NULL
, &init
);
/* Runtime extent checks apply only to explicit-shape dummies when
   -fcheck=bounds is on.  */
6126 checkparm
= (as
->type
== AS_EXPLICIT
6127 && (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
));
/* no_repack: the decl is marked neither fully nor partially packed, so
   the incoming data is used in place.  */
6129 no_repack
= !(GFC_DECL_PACKED_ARRAY (tmpdesc
)
6130 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc
));
6132 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc
))
6134 /* For non-constant shape arrays we only check if the first dimension
6135 is contiguous. Repacking higher dimensions wouldn't gain us
6136 anything as we still don't know the array stride. */
6137 partial
= gfc_create_var (boolean_type_node
, "partial");
6138 TREE_USED (partial
) = 1;
/* partial := (stride(dim 0) == 1), i.e. first dimension contiguous.  */
6139 tmp
= gfc_conv_descriptor_stride_get (dumdesc
, gfc_rank_cst
[0]);
6140 tmp
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
, tmp
,
6141 gfc_index_one_node
);
6142 gfc_add_modify (&init
, partial
, tmp
);
6145 partial
= NULL_TREE
;
6147 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
6148 here, however I think it does the right thing. */
6151 /* Set the first stride. */
6152 stride
= gfc_conv_descriptor_stride_get (dumdesc
, gfc_rank_cst
[0]);
6153 stride
= gfc_evaluate_now (stride
, &init
);
/* A zero incoming stride is normalized to one before being stored.  */
6155 tmp
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
6156 stride
, gfc_index_zero_node
);
6157 tmp
= fold_build3_loc (input_location
, COND_EXPR
, gfc_array_index_type
,
6158 tmp
, gfc_index_one_node
, stride
);
6159 stride
= GFC_TYPE_ARRAY_STRIDE (type
, 0);
6160 gfc_add_modify (&init
, stride
, tmp
);
6162 /* Allow the user to disable array repacking. */
6163 stmt_unpacked
= NULL_TREE
;
6167 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type
, 0)));
6168 /* A library call to repack the array if necessary. */
/* stmt_unpacked = internal_pack (saved descriptor): copies a
   non-contiguous actual argument into a packed temporary.  */
6169 tmp
= GFC_DECL_SAVED_DESCRIPTOR (tmpdesc
);
6170 stmt_unpacked
= build_call_expr_loc (input_location
,
6171 gfor_fndecl_in_pack
, 1, tmp
);
6173 stride
= gfc_index_one_node
;
6175 if (warn_array_temporaries
)
6176 gfc_warning (OPT_Warray_temporaries
,
6177 "Creating array temporary at %L", &loc
);
6180 /* This is for the case where the array data is used directly without
6181 calling the repack function. */
6182 if (no_repack
|| partial
!= NULL_TREE
)
6183 stmt_packed
= gfc_conv_descriptor_data_get (dumdesc
);
6185 stmt_packed
= NULL_TREE
;
6187 /* Assign the data pointer. */
/* When both candidates exist, choose at run time based on "partial"
   (first dimension contiguous => use the data in place).  */
6188 if (stmt_packed
!= NULL_TREE
&& stmt_unpacked
!= NULL_TREE
)
6190 /* Don't repack unknown shape arrays when the first stride is 1. */
6191 tmp
= fold_build3_loc (input_location
, COND_EXPR
, TREE_TYPE (stmt_packed
),
6192 partial
, stmt_packed
, stmt_unpacked
);
6195 tmp
= stmt_packed
!= NULL_TREE
? stmt_packed
: stmt_unpacked
;
6196 gfc_add_modify (&init
, tmpdesc
, fold_convert (type
, tmp
));
6198 offset
= gfc_index_zero_node
;
6199 size
= gfc_index_one_node
;
6201 /* Evaluate the bounds of the array. */
6202 for (n
= 0; n
< as
->rank
; n
++)
/* Actual-argument bounds are only fetched when needed for checking or
   for an assumed upper bound.  */
6204 if (checkparm
|| !as
->upper
[n
])
6206 /* Get the bounds of the actual parameter. */
6207 dubound
= gfc_conv_descriptor_ubound_get (dumdesc
, gfc_rank_cst
[n
]);
6208 dlbound
= gfc_conv_descriptor_lbound_get (dumdesc
, gfc_rank_cst
[n
]);
6212 dubound
= NULL_TREE
;
6213 dlbound
= NULL_TREE
;
6216 lbound
= GFC_TYPE_ARRAY_LBOUND (type
, n
);
6217 if (!INTEGER_CST_P (lbound
))
6219 gfc_init_se (&se
, NULL
);
6220 gfc_conv_expr_type (&se
, as
->lower
[n
],
6221 gfc_array_index_type
);
6222 gfc_add_block_to_block (&init
, &se
.pre
);
6223 gfc_add_modify (&init
, lbound
, se
.expr
);
6226 ubound
= GFC_TYPE_ARRAY_UBOUND (type
, n
);
6227 /* Set the desired upper bound. */
6230 /* We know what we want the upper bound to be. */
6231 if (!INTEGER_CST_P (ubound
))
6233 gfc_init_se (&se
, NULL
);
6234 gfc_conv_expr_type (&se
, as
->upper
[n
],
6235 gfc_array_index_type
);
6236 gfc_add_block_to_block (&init
, &se
.pre
);
6237 gfc_add_modify (&init
, ubound
, se
.expr
);
6240 /* Check the sizes match. */
6243 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
/* temp = declared extent; stride2 = actual extent (second operand of
   the MINUS at original line 6254 was lost in extraction --
   presumably dlbound; verify against upstream trans-array.c).  */
6247 temp
= fold_build2_loc (input_location
, MINUS_EXPR
,
6248 gfc_array_index_type
, ubound
, lbound
);
6249 temp
= fold_build2_loc (input_location
, PLUS_EXPR
,
6250 gfc_array_index_type
,
6251 gfc_index_one_node
, temp
);
6252 stride2
= fold_build2_loc (input_location
, MINUS_EXPR
,
6253 gfc_array_index_type
, dubound
,
6255 stride2
= fold_build2_loc (input_location
, PLUS_EXPR
,
6256 gfc_array_index_type
,
6257 gfc_index_one_node
, stride2
);
6258 tmp
= fold_build2_loc (input_location
, NE_EXPR
,
6259 gfc_array_index_type
, temp
, stride2
);
/* "%%ld" keeps a literal %ld in MSG for the runtime check to expand
   with the two long values passed below.  */
6260 msg
= xasprintf ("Dimension %d of array '%s' has extent "
6261 "%%ld instead of %%ld", n
+1, sym
->name
);
6263 gfc_trans_runtime_check (true, false, tmp
, &init
, &loc
, msg
,
6264 fold_convert (long_integer_type_node
, temp
),
6265 fold_convert (long_integer_type_node
, stride2
));
6272 /* For assumed shape arrays move the upper bound by the same amount
6273 as the lower bound. */
6274 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
6275 gfc_array_index_type
, dubound
, dlbound
);
6276 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
6277 gfc_array_index_type
, tmp
, lbound
);
6278 gfc_add_modify (&init
, ubound
, tmp
);
6280 /* The offset of this dimension. offset = offset - lbound * stride. */
/* The MULT operands at original line 6282 were lost in extraction --
   presumably lbound and stride per the comment above.  */
6281 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
6283 offset
= fold_build2_loc (input_location
, MINUS_EXPR
,
6284 gfc_array_index_type
, offset
, tmp
);
6286 /* The size of this dimension, and the stride of the next. */
6287 if (n
+ 1 < as
->rank
)
6289 stride
= GFC_TYPE_ARRAY_STRIDE (type
, n
+ 1);
6291 if (no_repack
|| partial
!= NULL_TREE
)
6293 gfc_conv_descriptor_stride_get (dumdesc
, gfc_rank_cst
[n
+1]);
6295 /* Figure out the stride if not a known constant. */
6296 if (!INTEGER_CST_P (stride
))
6299 stmt_packed
= NULL_TREE
;
6302 /* Calculate stride = size * (ubound + 1 - lbound). */
6303 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
6304 gfc_array_index_type
,
6305 gfc_index_one_node
, lbound
);
6306 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
6307 gfc_array_index_type
, ubound
, tmp
);
6308 size
= fold_build2_loc (input_location
, MULT_EXPR
,
6309 gfc_array_index_type
, size
, tmp
);
6313 /* Assign the stride. */
/* Same packed/unpacked runtime selection as for the data pointer,
   note the operand order is swapped relative to line 6191.  */
6314 if (stmt_packed
!= NULL_TREE
&& stmt_unpacked
!= NULL_TREE
)
6315 tmp
= fold_build3_loc (input_location
, COND_EXPR
,
6316 gfc_array_index_type
, partial
,
6317 stmt_unpacked
, stmt_packed
);
6319 tmp
= (stmt_packed
!= NULL_TREE
) ? stmt_packed
: stmt_unpacked
;
6320 gfc_add_modify (&init
, stride
, tmp
);
/* Last dimension: update the total array size instead of a stride.  */
6325 stride
= GFC_TYPE_ARRAY_SIZE (type
);
6327 if (stride
&& !INTEGER_CST_P (stride
))
6329 /* Calculate size = stride * (ubound + 1 - lbound). */
/* One PLUS operand at original line 6335 was lost in extraction --
   presumably ubound, mirroring lines 6306-6307.  */
6330 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
6331 gfc_array_index_type
,
6332 gfc_index_one_node
, lbound
);
6333 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
6334 gfc_array_index_type
,
6336 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
6337 gfc_array_index_type
,
6338 GFC_TYPE_ARRAY_STRIDE (type
, n
), tmp
);
6339 gfc_add_modify (&init
, stride
, tmp
);
6344 gfc_trans_array_cobounds (type
, &init
, sym
);
6346 /* Set the offset. */
6347 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type
)) == VAR_DECL
)
6348 gfc_add_modify (&init
, GFC_TYPE_ARRAY_OFFSET (type
), offset
);
6350 gfc_trans_vla_type_sizes (sym
, &init
);
6352 stmtInit
= gfc_finish_block (&init
);
6354 /* Only do the entry/initialization code if the arg is present. */
6355 dumdesc
= GFC_DECL_SAVED_DESCRIPTOR (tmpdesc
);
6356 optional_arg
= (sym
->attr
.optional
6357 || (sym
->ns
->proc_name
->attr
.entry_master
6358 && sym
->attr
.dummy
));
6361 tmp
= gfc_conv_expr_present (sym
);
6362 stmtInit
= build3_v (COND_EXPR
, tmp
, stmtInit
,
6363 build_empty_stmt (input_location
));
6368 stmtCleanup
= NULL_TREE
;
/* Cleanup sequence: copy the packed temporary back (skipped for
   INTENT(IN) dummies) and free it.  */
6371 stmtblock_t cleanup
;
6372 gfc_start_block (&cleanup
);
6374 if (sym
->attr
.intent
!= INTENT_IN
)
6376 /* Copy the data back. */
6377 tmp
= build_call_expr_loc (input_location
,
6378 gfor_fndecl_in_unpack
, 2, dumdesc
, tmpdesc
);
6379 gfc_add_expr_to_block (&cleanup
, tmp
);
6382 /* Free the temporary. */
6383 tmp
= gfc_call_free (tmpdesc
);
6384 gfc_add_expr_to_block (&cleanup
, tmp
);
6386 stmtCleanup
= gfc_finish_block (&cleanup
);
6388 /* Only do the cleanup if the array was repacked. */
6390 /* For a class array the dummy array descriptor is in the _class
6392 tmp
= gfc_class_data_get (dumdesc
);
6394 tmp
= build_fold_indirect_ref_loc (input_location
, dumdesc
);
6395 tmp
= gfc_conv_descriptor_data_get (tmp
);
/* The second NE_EXPR operand at original line 6397 was lost in
   extraction -- presumably tmpdesc (repacked iff pointers differ).  */
6396 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
6398 stmtCleanup
= build3_v (COND_EXPR
, tmp
, stmtCleanup
,
6399 build_empty_stmt (input_location
));
/* Guard cleanup behind a presence test for optional dummies, mirroring
   the guarded init above.  */
6403 tmp
= gfc_conv_expr_present (sym
);
6404 stmtCleanup
= build3_v (COND_EXPR
, tmp
, stmtCleanup
,
6405 build_empty_stmt (input_location
));
6409 /* We don't need to free any memory allocated by internal_pack as it will
6410 be freed at the end of the function by pop_context. */
6411 gfc_add_init_cleanup (block
, stmtInit
, stmtCleanup
);
6413 gfc_restore_backend_locus (&loc
);
6417 /* Calculate the overall offset, including subreferences. */
/* NOTE(review): mangled extraction -- return type, local declarations,
   braces, case labels and some operands (e.g. at original line 6508)
   were dropped.  Tokens below are kept byte-identical; only comments
   are added.  The visible logic: index DESC's data at OFFSET, walk the
   remaining component/substring/element refs of EXPR, then store the
   resulting element address into PARM's data pointer.  */
6419 gfc_get_dataptr_offset (stmtblock_t
*block
, tree parm
, tree desc
, tree offset
,
6420 bool subref
, gfc_expr
*expr
)
6430 /* If offset is NULL and this is not a subreferenced array, there is
6432 if (offset
== NULL_TREE
)
6435 offset
= gfc_index_zero_node
;
6440 tmp
= build_array_ref (desc
, offset
, NULL
, NULL
);
6442 /* Offset the data pointer for pointer assignments from arrays with
6443 subreferences; e.g. my_integer => my_type(:)%integer_component. */
6446 /* Go past the array reference. */
6447 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
6448 if (ref
->type
== REF_ARRAY
&&
6449 ref
->u
.ar
.type
!= AR_ELEMENT
)
6455 /* Calculate the offset for each subsequent subreference. */
6456 for (; ref
; ref
= ref
->next
)
/* Component reference: descend into the backend FIELD_DECL.  */
6461 field
= ref
->u
.c
.component
->backend_decl
;
6462 gcc_assert (field
&& TREE_CODE (field
) == FIELD_DECL
);
6463 tmp
= fold_build3_loc (input_location
, COMPONENT_REF
,
6465 tmp
, field
, NULL_TREE
);
/* Substring reference: index the character array at its start.  */
6469 gcc_assert (TREE_CODE (TREE_TYPE (tmp
)) == ARRAY_TYPE
);
6470 gfc_init_se (&start
, NULL
);
6471 gfc_conv_expr_type (&start
, ref
->u
.ss
.start
, gfc_charlen_type_node
);
6472 gfc_add_block_to_block (block
, &start
.pre
);
6473 tmp
= gfc_build_array_ref (tmp
, start
.expr
, NULL
);
/* Element array reference: linearize the subscripts by hand.  */
6477 gcc_assert (TREE_CODE (TREE_TYPE (tmp
)) == ARRAY_TYPE
6478 && ref
->u
.ar
.type
== AR_ELEMENT
);
6480 /* TODO - Add bounds checking. */
6481 stride
= gfc_index_one_node
;
6482 index
= gfc_index_zero_node
;
6483 for (n
= 0; n
< ref
->u
.ar
.dimen
; n
++)
6488 /* Update the index. */
/* index += (start[n] - lower[n]) * stride, column-major
   accumulation.  */
6489 gfc_init_se (&start
, NULL
);
6490 gfc_conv_expr_type (&start
, ref
->u
.ar
.start
[n
], gfc_array_index_type
);
6491 itmp
= gfc_evaluate_now (start
.expr
, block
);
6492 gfc_init_se (&start
, NULL
);
6493 gfc_conv_expr_type (&start
, ref
->u
.ar
.as
->lower
[n
], gfc_array_index_type
);
6494 jtmp
= gfc_evaluate_now (start
.expr
, block
);
6495 itmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
6496 gfc_array_index_type
, itmp
, jtmp
);
6497 itmp
= fold_build2_loc (input_location
, MULT_EXPR
,
6498 gfc_array_index_type
, itmp
, stride
);
6499 index
= fold_build2_loc (input_location
, PLUS_EXPR
,
6500 gfc_array_index_type
, itmp
, index
);
6501 index
= gfc_evaluate_now (index
, block
);
6503 /* Update the stride. */
/* stride *= extent of dimension n; the MINUS_EXPR's second operand
   at original line 6508 was lost in extraction -- presumably jtmp
   (the lower bound); verify against upstream trans-array.c.  */
6504 gfc_init_se (&start
, NULL
);
6505 gfc_conv_expr_type (&start
, ref
->u
.ar
.as
->upper
[n
], gfc_array_index_type
);
6506 itmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
6507 gfc_array_index_type
, start
.expr
,
6509 itmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
6510 gfc_array_index_type
,
6511 gfc_index_one_node
, itmp
);
6512 stride
= fold_build2_loc (input_location
, MULT_EXPR
,
6513 gfc_array_index_type
, stride
, itmp
);
6514 stride
= gfc_evaluate_now (stride
, block
);
6517 /* Apply the index to obtain the array element. */
6518 tmp
= gfc_build_array_ref (tmp
, index
, NULL
);
6528 /* Set the target data pointer. */
6529 offset
= gfc_build_addr_expr (gfc_array_dataptr_type (desc
), tmp
);
6530 gfc_conv_descriptor_data_set (block
, parm
, offset
);
6534 /* gfc_conv_expr_descriptor needs the string length an expression
6535 so that the size of the temporary can be obtained. This is done
6536 by adding up the string lengths of all the elements in the
6537 expression. Function with non-constant expressions have their
6538 string lengths mapped onto the actual arguments using the
6539 interface mapping machinery in trans-expr.c. */
/* NOTE(review): mangled extraction -- return type, switch case labels,
   braces and several declarations were dropped.  Tokens below are kept
   byte-identical; only comments are added.  Visible behavior: compute
   expr->ts.u.cl->backend_decl for EXPR, recursing through operators,
   summing lengths for concatenation, and mapping dummy->actual
   arguments for non-intrinsic function calls.  */
6541 get_array_charlen (gfc_expr
*expr
, gfc_se
*se
)
6543 gfc_interface_mapping mapping
;
6544 gfc_formal_arglist
*formal
;
6545 gfc_actual_arglist
*arg
;
/* Fast path: a constant length only needs its backend decl built
   once.  */
6548 if (expr
->ts
.u
.cl
->length
6549 && gfc_is_constant_expr (expr
->ts
.u
.cl
->length
))
6551 if (!expr
->ts
.u
.cl
->backend_decl
)
6552 gfc_conv_string_length (expr
->ts
.u
.cl
, expr
, &se
->pre
);
6556 switch (expr
->expr_type
)
/* Operator case: recurse into the first operand.  */
6559 get_array_charlen (expr
->value
.op
.op1
, se
);
6561 /* For parentheses the expression ts.u.cl is identical. */
6562 if (expr
->value
.op
.op
== INTRINSIC_PARENTHESES
)
6565 expr
->ts
.u
.cl
->backend_decl
=
6566 gfc_create_var (gfc_charlen_type_node
, "sln");
6568 if (expr
->value
.op
.op2
)
6570 get_array_charlen (expr
->value
.op
.op2
, se
);
/* Two character operands means concatenation is the only
   possibility here.  */
6572 gcc_assert (expr
->value
.op
.op
== INTRINSIC_CONCAT
);
6574 /* Add the string lengths and assign them to the expression
6575 string length backend declaration. */
6576 gfc_add_modify (&se
->pre
, expr
->ts
.u
.cl
->backend_decl
,
6577 fold_build2_loc (input_location
, PLUS_EXPR
,
6578 gfc_charlen_type_node
,
6579 expr
->value
.op
.op1
->ts
.u
.cl
->backend_decl
,
6580 expr
->value
.op
.op2
->ts
.u
.cl
->backend_decl
));
/* Unary operator: length equals the operand's length.  */
6583 gfc_add_modify (&se
->pre
, expr
->ts
.u
.cl
->backend_decl
,
6584 expr
->value
.op
.op1
->ts
.u
.cl
->backend_decl
);
/* Function case: intrinsics and constant lengths are handled
   directly ...  */
6588 if (expr
->value
.function
.esym
== NULL
6589 || expr
->ts
.u
.cl
->length
->expr_type
== EXPR_CONSTANT
)
6591 gfc_conv_string_length (expr
->ts
.u
.cl
, expr
, &se
->pre
);
6595 /* Map expressions involving the dummy arguments onto the actual
6596 argument expressions. */
6597 gfc_init_interface_mapping (&mapping
);
6598 formal
= gfc_sym_get_dummy_args (expr
->symtree
->n
.sym
);
6599 arg
= expr
->value
.function
.actual
;
6601 /* Set se = NULL in the calls to the interface mapping, to suppress any
6603 for (; arg
!= NULL
; arg
= arg
->next
, formal
= formal
? formal
->next
: NULL
)
6608 gfc_add_interface_mapping (&mapping
, formal
->sym
, NULL
, arg
->expr
);
6611 gfc_init_se (&tse
, NULL
);
6613 /* Build the expression for the character length and convert it. */
6614 gfc_apply_interface_mapping (&mapping
, &tse
, expr
->ts
.u
.cl
->length
);
6616 gfc_add_block_to_block (&se
->pre
, &tse
.pre
);
6617 gfc_add_block_to_block (&se
->post
, &tse
.post
);
6618 tse
.expr
= fold_convert (gfc_charlen_type_node
, tse
.expr
);
/* Clamp the mapped length to zero; a negative character length means
   an empty string.  */
6619 tse
.expr
= fold_build2_loc (input_location
, MAX_EXPR
,
6620 gfc_charlen_type_node
, tse
.expr
,
6621 build_int_cst (gfc_charlen_type_node
, 0));
6622 expr
->ts
.u
.cl
->backend_decl
= tse
.expr
;
6623 gfc_free_interface_mapping (&mapping
);
/* Default case: fall back to the generic length conversion.  */
6627 gfc_conv_string_length (expr
->ts
.u
.cl
, expr
, &se
->pre
);
6633 /* Helper function to check dimensions. */
6635 transposed_dims (gfc_ss
*ss
)
6639 for (n
= 0; n
< ss
->dimen
; n
++)
6640 if (ss
->dim
[n
] != n
)
6646 /* Convert the last ref of a scalar coarray from an AR_ELEMENT to an
6647 AR_FULL, suitable for the scalarizer. */
/* NOTE(review): mangled extraction -- return type, declarations, loop
   headers and braces were dropped.  Tokens below are kept
   byte-identical; only comments are added.  */
6650 walk_coarray (gfc_expr
*e
)
/* Only meaningful for coarrays; corank must be positive.  */
6654 gcc_assert (gfc_get_corank (e
) > 0);
6656 ss
= gfc_walk_expr (e
);
6658 /* Fix scalar coarray. */
/* An empty SS chain means E walked as a scalar: locate the ref that
   carries the codimensions and rebuild the chain from it.  */
6659 if (ss
== gfc_ss_terminator
)
6666 if (ref
->type
== REF_ARRAY
6667 && ref
->u
.ar
.codimen
> 0)
6673 gcc_assert (ref
!= NULL
);
/* Promote an elemental ref to a section so the scalarizer accepts
   it.  */
6674 if (ref
->u
.ar
.type
== AR_ELEMENT
)
6675 ref
->u
.ar
.type
= AR_SECTION
;
6676 ss
= gfc_reverse_ss (gfc_walk_array_ref (ss
, e
, ref
));
6683 /* Convert an array for passing as an actual argument. Expressions and
6684 vector subscripts are evaluated and stored in a temporary, which is then
6685 passed. For whole arrays the descriptor is passed. For array sections
6686 a modified copy of the descriptor is passed, but using the original data.
6688 This function is also used for array pointer assignments, and there
6691 - se->want_pointer && !se->direct_byref
6692 EXPR is an actual argument. On exit, se->expr contains a
6693 pointer to the array descriptor.
6695 - !se->want_pointer && !se->direct_byref
6696 EXPR is an actual argument to an intrinsic function or the
6697 left-hand side of a pointer assignment. On exit, se->expr
6698 contains the descriptor for EXPR.
6700 - !se->want_pointer && se->direct_byref
6701 EXPR is the right-hand side of a pointer assignment and
6702 se->expr is the descriptor for the previously-evaluated
6703 left-hand side. The function creates an assignment from
6707 The se->force_tmp flag disables the non-copying descriptor optimization
6708 that is used for transpose. It may be used in cases where there is an
6709 alias between the transpose argument and another argument in the same
6713 gfc_conv_expr_descriptor (gfc_se
*se
, gfc_expr
*expr
)
6716 gfc_ss_type ss_type
;
6717 gfc_ss_info
*ss_info
;
6719 gfc_array_info
*info
;
6728 bool subref_array_target
= false;
6729 gfc_expr
*arg
, *ss_expr
;
6731 if (se
->want_coarray
)
6732 ss
= walk_coarray (expr
);
6734 ss
= gfc_walk_expr (expr
);
6736 gcc_assert (ss
!= NULL
);
6737 gcc_assert (ss
!= gfc_ss_terminator
);
6740 ss_type
= ss_info
->type
;
6741 ss_expr
= ss_info
->expr
;
6743 /* Special case: TRANSPOSE which needs no temporary. */
6744 while (expr
->expr_type
== EXPR_FUNCTION
&& expr
->value
.function
.isym
6745 && NULL
!= (arg
= gfc_get_noncopying_intrinsic_argument (expr
)))
6747 /* This is a call to transpose which has already been handled by the
6748 scalarizer, so that we just need to get its argument's descriptor. */
6749 gcc_assert (expr
->value
.function
.isym
->id
== GFC_ISYM_TRANSPOSE
);
6750 expr
= expr
->value
.function
.actual
->expr
;
6753 /* Special case things we know we can pass easily. */
6754 switch (expr
->expr_type
)
6757 /* If we have a linear array section, we can pass it directly.
6758 Otherwise we need to copy it into a temporary. */
6760 gcc_assert (ss_type
== GFC_SS_SECTION
);
6761 gcc_assert (ss_expr
== expr
);
6762 info
= &ss_info
->data
.array
;
6764 /* Get the descriptor for the array. */
6765 gfc_conv_ss_descriptor (&se
->pre
, ss
, 0);
6766 desc
= info
->descriptor
;
6768 subref_array_target
= se
->direct_byref
&& is_subref_array (expr
);
6769 need_tmp
= gfc_ref_needs_temporary_p (expr
->ref
)
6770 && !subref_array_target
;
6777 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc
)))
6779 /* Create a new descriptor if the array doesn't have one. */
6782 else if (info
->ref
->u
.ar
.type
== AR_FULL
|| se
->descriptor_only
)
6784 else if (se
->direct_byref
)
6787 full
= gfc_full_array_ref_p (info
->ref
, NULL
);
6789 if (full
&& !transposed_dims (ss
))
6791 if (se
->direct_byref
&& !se
->byref_noassign
)
6793 /* Copy the descriptor for pointer assignments. */
6794 gfc_add_modify (&se
->pre
, se
->expr
, desc
);
6796 /* Add any offsets from subreferences. */
6797 gfc_get_dataptr_offset (&se
->pre
, se
->expr
, desc
, NULL_TREE
,
6798 subref_array_target
, expr
);
6800 else if (se
->want_pointer
)
6802 /* We pass full arrays directly. This means that pointers and
6803 allocatable arrays should also work. */
6804 se
->expr
= gfc_build_addr_expr (NULL_TREE
, desc
);
6811 if (expr
->ts
.type
== BT_CHARACTER
)
6812 se
->string_length
= gfc_get_expr_charlen (expr
);
6814 gfc_free_ss_chain (ss
);
6820 /* A transformational function return value will be a temporary
6821 array descriptor. We still need to go through the scalarizer
6822 to create the descriptor. Elemental functions are handled as
6823 arbitrary expressions, i.e. copy to a temporary. */
6825 if (se
->direct_byref
)
6827 gcc_assert (ss_type
== GFC_SS_FUNCTION
&& ss_expr
== expr
);
6829 /* For pointer assignments pass the descriptor directly. */
6833 gcc_assert (se
->ss
== ss
);
6834 se
->expr
= gfc_build_addr_expr (NULL_TREE
, se
->expr
);
6835 gfc_conv_expr (se
, expr
);
6836 gfc_free_ss_chain (ss
);
6840 if (ss_expr
!= expr
|| ss_type
!= GFC_SS_FUNCTION
)
6842 if (ss_expr
!= expr
)
6843 /* Elemental function. */
6844 gcc_assert ((expr
->value
.function
.esym
!= NULL
6845 && expr
->value
.function
.esym
->attr
.elemental
)
6846 || (expr
->value
.function
.isym
!= NULL
6847 && expr
->value
.function
.isym
->elemental
)
6848 || gfc_inline_intrinsic_function_p (expr
));
6850 gcc_assert (ss_type
== GFC_SS_INTRINSIC
);
6853 if (expr
->ts
.type
== BT_CHARACTER
6854 && expr
->ts
.u
.cl
->length
->expr_type
!= EXPR_CONSTANT
)
6855 get_array_charlen (expr
, se
);
6861 /* Transformational function. */
6862 info
= &ss_info
->data
.array
;
6868 /* Constant array constructors don't need a temporary. */
6869 if (ss_type
== GFC_SS_CONSTRUCTOR
6870 && expr
->ts
.type
!= BT_CHARACTER
6871 && gfc_constant_array_constructor_p (expr
->value
.constructor
))
6874 info
= &ss_info
->data
.array
;
6884 /* Something complicated. Copy it into a temporary. */
6890 /* If we are creating a temporary, we don't need to bother about aliases
6895 gfc_init_loopinfo (&loop
);
6897 /* Associate the SS with the loop. */
6898 gfc_add_ss_to_loop (&loop
, ss
);
6900 /* Tell the scalarizer not to bother creating loop variables, etc. */
6902 loop
.array_parameter
= 1;
6904 /* The right-hand side of a pointer assignment mustn't use a temporary. */
6905 gcc_assert (!se
->direct_byref
);
6907 /* Setup the scalarizing loops and bounds. */
6908 gfc_conv_ss_startstride (&loop
);
6912 if (expr
->ts
.type
== BT_CHARACTER
&& !expr
->ts
.u
.cl
->backend_decl
)
6913 get_array_charlen (expr
, se
);
6915 /* Tell the scalarizer to make a temporary. */
6916 loop
.temp_ss
= gfc_get_temp_ss (gfc_typenode_for_spec (&expr
->ts
),
6917 ((expr
->ts
.type
== BT_CHARACTER
)
6918 ? expr
->ts
.u
.cl
->backend_decl
6922 se
->string_length
= loop
.temp_ss
->info
->string_length
;
6923 gcc_assert (loop
.temp_ss
->dimen
== loop
.dimen
);
6924 gfc_add_ss_to_loop (&loop
, loop
.temp_ss
);
6927 gfc_conv_loop_setup (&loop
, & expr
->where
);
6931 /* Copy into a temporary and pass that. We don't need to copy the data
6932 back because expressions and vector subscripts must be INTENT_IN. */
6933 /* TODO: Optimize passing function return values. */
6937 /* Start the copying loops. */
6938 gfc_mark_ss_chain_used (loop
.temp_ss
, 1);
6939 gfc_mark_ss_chain_used (ss
, 1);
6940 gfc_start_scalarized_body (&loop
, &block
);
6942 /* Copy each data element. */
6943 gfc_init_se (&lse
, NULL
);
6944 gfc_copy_loopinfo_to_se (&lse
, &loop
);
6945 gfc_init_se (&rse
, NULL
);
6946 gfc_copy_loopinfo_to_se (&rse
, &loop
);
6948 lse
.ss
= loop
.temp_ss
;
6951 gfc_conv_scalarized_array_ref (&lse
, NULL
);
6952 if (expr
->ts
.type
== BT_CHARACTER
)
6954 gfc_conv_expr (&rse
, expr
);
6955 if (POINTER_TYPE_P (TREE_TYPE (rse
.expr
)))
6956 rse
.expr
= build_fold_indirect_ref_loc (input_location
,
6960 gfc_conv_expr_val (&rse
, expr
);
6962 gfc_add_block_to_block (&block
, &rse
.pre
);
6963 gfc_add_block_to_block (&block
, &lse
.pre
);
6965 lse
.string_length
= rse
.string_length
;
6966 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr
->ts
,
6967 expr
->expr_type
== EXPR_VARIABLE
6968 || expr
->expr_type
== EXPR_ARRAY
, false);
6969 gfc_add_expr_to_block (&block
, tmp
);
6971 /* Finish the copying loops. */
6972 gfc_trans_scalarizing_loops (&loop
, &block
);
6974 desc
= loop
.temp_ss
->info
->data
.array
.descriptor
;
6976 else if (expr
->expr_type
== EXPR_FUNCTION
&& !transposed_dims (ss
))
6978 desc
= info
->descriptor
;
6979 se
->string_length
= ss_info
->string_length
;
6983 /* We pass sections without copying to a temporary. Make a new
6984 descriptor and point it at the section we want. The loop variable
6985 limits will be the limits of the section.
6986 A function may decide to repack the array to speed up access, but
6987 we're not bothered about that here. */
6988 int dim
, ndim
, codim
;
6995 bool onebased
= false, rank_remap
;
6997 ndim
= info
->ref
? info
->ref
->u
.ar
.dimen
: ss
->dimen
;
6998 rank_remap
= ss
->dimen
< ndim
;
7000 if (se
->want_coarray
)
7002 gfc_array_ref
*ar
= &info
->ref
->u
.ar
;
7004 codim
= gfc_get_corank (expr
);
7005 for (n
= 0; n
< codim
- 1; n
++)
7007 /* Make sure we are not lost somehow. */
7008 gcc_assert (ar
->dimen_type
[n
+ ndim
] == DIMEN_THIS_IMAGE
);
7010 /* Make sure the call to gfc_conv_section_startstride won't
7011 generate unnecessary code to calculate stride. */
7012 gcc_assert (ar
->stride
[n
+ ndim
] == NULL
);
7014 gfc_conv_section_startstride (&loop
.pre
, ss
, n
+ ndim
);
7015 loop
.from
[n
+ loop
.dimen
] = info
->start
[n
+ ndim
];
7016 loop
.to
[n
+ loop
.dimen
] = info
->end
[n
+ ndim
];
7019 gcc_assert (n
== codim
- 1);
7020 evaluate_bound (&loop
.pre
, info
->start
, ar
->start
,
7021 info
->descriptor
, n
+ ndim
, true,
7022 ar
->as
->type
== AS_DEFERRED
);
7023 loop
.from
[n
+ loop
.dimen
] = info
->start
[n
+ ndim
];
7028 /* Set the string_length for a character array. */
7029 if (expr
->ts
.type
== BT_CHARACTER
)
7030 se
->string_length
= gfc_get_expr_charlen (expr
);
7032 /* If we have an array section or are assigning make sure that
7033 the lower bound is 1. References to the full
7034 array should otherwise keep the original bounds. */
7035 if ((!info
->ref
|| info
->ref
->u
.ar
.type
!= AR_FULL
) && !se
->want_pointer
)
7036 for (dim
= 0; dim
< loop
.dimen
; dim
++)
7037 if (!integer_onep (loop
.from
[dim
]))
7039 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
7040 gfc_array_index_type
, gfc_index_one_node
,
7042 loop
.to
[dim
] = fold_build2_loc (input_location
, PLUS_EXPR
,
7043 gfc_array_index_type
,
7045 loop
.from
[dim
] = gfc_index_one_node
;
7048 desc
= info
->descriptor
;
7049 if (se
->direct_byref
&& !se
->byref_noassign
)
7051 /* For pointer assignments we fill in the destination. */
7053 parmtype
= TREE_TYPE (parm
);
7057 /* Otherwise make a new one. */
7058 parmtype
= gfc_get_element_type (TREE_TYPE (desc
));
7059 parmtype
= gfc_get_array_type_bounds (parmtype
, loop
.dimen
, codim
,
7060 loop
.from
, loop
.to
, 0,
7061 GFC_ARRAY_UNKNOWN
, false);
7062 parm
= gfc_create_var (parmtype
, "parm");
7065 offset
= gfc_index_zero_node
;
7067 /* The following can be somewhat confusing. We have two
7068 descriptors, a new one and the original array.
7069 {parm, parmtype, dim} refer to the new one.
7070 {desc, type, n, loop} refer to the original, which maybe
7071 a descriptorless array.
7072 The bounds of the scalarization are the bounds of the section.
7073 We don't have to worry about numeric overflows when calculating
7074 the offsets because all elements are within the array data. */
7076 /* Set the dtype. */
7077 tmp
= gfc_conv_descriptor_dtype (parm
);
7078 gfc_add_modify (&loop
.pre
, tmp
, gfc_get_dtype (parmtype
));
7080 /* Set offset for assignments to pointer only to zero if it is not
7082 if ((se
->direct_byref
|| se
->use_offset
)
7083 && ((info
->ref
&& info
->ref
->u
.ar
.type
!= AR_FULL
)
7084 || (expr
->expr_type
== EXPR_ARRAY
&& se
->use_offset
)))
7085 base
= gfc_index_zero_node
;
7086 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc
)))
7087 base
= gfc_evaluate_now (gfc_conv_array_offset (desc
), &loop
.pre
);
7091 for (n
= 0; n
< ndim
; n
++)
7093 stride
= gfc_conv_array_stride (desc
, n
);
7095 /* Work out the offset. */
7097 && info
->ref
->u
.ar
.dimen_type
[n
] == DIMEN_ELEMENT
)
7099 gcc_assert (info
->subscript
[n
]
7100 && info
->subscript
[n
]->info
->type
== GFC_SS_SCALAR
);
7101 start
= info
->subscript
[n
]->info
->data
.scalar
.value
;
7105 /* Evaluate and remember the start of the section. */
7106 start
= info
->start
[n
];
7107 stride
= gfc_evaluate_now (stride
, &loop
.pre
);
7110 tmp
= gfc_conv_array_lbound (desc
, n
);
7111 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (tmp
),
7113 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, TREE_TYPE (tmp
),
7115 offset
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (tmp
),
7119 && info
->ref
->u
.ar
.dimen_type
[n
] == DIMEN_ELEMENT
)
7121 /* For elemental dimensions, we only need the offset. */
7125 /* Vector subscripts need copying and are handled elsewhere. */
7127 gcc_assert (info
->ref
->u
.ar
.dimen_type
[n
] == DIMEN_RANGE
);
7129 /* look for the corresponding scalarizer dimension: dim. */
7130 for (dim
= 0; dim
< ndim
; dim
++)
7131 if (ss
->dim
[dim
] == n
)
7134 /* loop exited early: the DIM being looked for has been found. */
7135 gcc_assert (dim
< ndim
);
7137 /* Set the new lower bound. */
7138 from
= loop
.from
[dim
];
7141 onebased
= integer_onep (from
);
7142 gfc_conv_descriptor_lbound_set (&loop
.pre
, parm
,
7143 gfc_rank_cst
[dim
], from
);
7145 /* Set the new upper bound. */
7146 gfc_conv_descriptor_ubound_set (&loop
.pre
, parm
,
7147 gfc_rank_cst
[dim
], to
);
7149 /* Multiply the stride by the section stride to get the
7151 stride
= fold_build2_loc (input_location
, MULT_EXPR
,
7152 gfc_array_index_type
,
7153 stride
, info
->stride
[n
]);
7155 if ((se
->direct_byref
|| se
->use_offset
)
7156 && ((info
->ref
&& info
->ref
->u
.ar
.type
!= AR_FULL
)
7157 || (expr
->expr_type
== EXPR_ARRAY
&& se
->use_offset
)))
7159 base
= fold_build2_loc (input_location
, MINUS_EXPR
,
7160 TREE_TYPE (base
), base
, stride
);
7162 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc
)) || se
->use_offset
)
7165 tmp
= gfc_conv_array_lbound (desc
, n
);
7166 toonebased
= integer_onep (tmp
);
7167 // lb(arr) - from (- start + 1)
7168 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
7169 TREE_TYPE (base
), tmp
, from
);
7170 if (onebased
&& toonebased
)
7172 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
7173 TREE_TYPE (base
), tmp
, start
);
7174 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
7175 TREE_TYPE (base
), tmp
,
7176 gfc_index_one_node
);
7178 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
7179 TREE_TYPE (base
), tmp
,
7180 gfc_conv_array_stride (desc
, n
));
7181 base
= fold_build2_loc (input_location
, PLUS_EXPR
,
7182 TREE_TYPE (base
), tmp
, base
);
7185 /* Store the new stride. */
7186 gfc_conv_descriptor_stride_set (&loop
.pre
, parm
,
7187 gfc_rank_cst
[dim
], stride
);
7190 for (n
= loop
.dimen
; n
< loop
.dimen
+ codim
; n
++)
7192 from
= loop
.from
[n
];
7194 gfc_conv_descriptor_lbound_set (&loop
.pre
, parm
,
7195 gfc_rank_cst
[n
], from
);
7196 if (n
< loop
.dimen
+ codim
- 1)
7197 gfc_conv_descriptor_ubound_set (&loop
.pre
, parm
,
7198 gfc_rank_cst
[n
], to
);
7201 if (se
->data_not_needed
)
7202 gfc_conv_descriptor_data_set (&loop
.pre
, parm
,
7203 gfc_index_zero_node
);
7205 /* Point the data pointer at the 1st element in the section. */
7206 gfc_get_dataptr_offset (&loop
.pre
, parm
, desc
, offset
,
7207 subref_array_target
, expr
);
7209 /* Force the offset to be -1, when the lower bound of the highest
7210 dimension is one and the symbol is present and is not a
7211 pointer/allocatable or associated. */
7212 if (((se
->direct_byref
|| GFC_ARRAY_TYPE_P (TREE_TYPE (desc
)))
7213 && !se
->data_not_needed
)
7214 || (se
->use_offset
&& base
!= NULL_TREE
))
7216 /* Set the offset depending on base. */
7217 tmp
= rank_remap
&& !se
->direct_byref
?
7218 fold_build2_loc (input_location
, PLUS_EXPR
,
7219 gfc_array_index_type
, base
,
7222 gfc_conv_descriptor_offset_set (&loop
.pre
, parm
, tmp
);
7224 else if (onebased
&& (!rank_remap
|| se
->use_offset
)
7226 && !(expr
->symtree
->n
.sym
&& expr
->symtree
->n
.sym
->ts
.type
== BT_CLASS
7227 && !CLASS_DATA (expr
->symtree
->n
.sym
)->attr
.class_pointer
)
7228 && !expr
->symtree
->n
.sym
->attr
.allocatable
7229 && !expr
->symtree
->n
.sym
->attr
.pointer
7230 && !expr
->symtree
->n
.sym
->attr
.host_assoc
7231 && !expr
->symtree
->n
.sym
->attr
.use_assoc
)
7233 /* Set the offset to -1. */
7235 mpz_init_set_si (minus_one
, -1);
7236 tmp
= gfc_conv_mpz_to_tree (minus_one
, gfc_index_integer_kind
);
7237 gfc_conv_descriptor_offset_set (&loop
.pre
, parm
, tmp
);
7241 /* Only the callee knows what the correct offset it, so just set
7243 gfc_conv_descriptor_offset_set (&loop
.pre
, parm
, gfc_index_zero_node
);
7248 /* For class arrays add the class tree into the saved descriptor to
7249 enable getting of _vptr and the like. */
7250 if (expr
->expr_type
== EXPR_VARIABLE
&& VAR_P (desc
)
7251 && IS_CLASS_ARRAY (expr
->symtree
->n
.sym
))
7253 gfc_allocate_lang_decl (desc
);
7254 GFC_DECL_SAVED_DESCRIPTOR (desc
) =
7255 DECL_LANG_SPECIFIC (expr
->symtree
->n
.sym
->backend_decl
) ?
7256 GFC_DECL_SAVED_DESCRIPTOR (expr
->symtree
->n
.sym
->backend_decl
)
7257 : expr
->symtree
->n
.sym
->backend_decl
;
7259 if (!se
->direct_byref
|| se
->byref_noassign
)
7261 /* Get a pointer to the new descriptor. */
7262 if (se
->want_pointer
)
7263 se
->expr
= gfc_build_addr_expr (NULL_TREE
, desc
);
7268 gfc_add_block_to_block (&se
->pre
, &loop
.pre
);
7269 gfc_add_block_to_block (&se
->post
, &loop
.post
);
7271 /* Cleanup the scalarizer. */
7272 gfc_cleanup_loop (&loop
);
7275 /* Helper function for gfc_conv_array_parameter if array size needs to be
7279 array_parameter_size (tree desc
, gfc_expr
*expr
, tree
*size
)
7282 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc
)))
7283 *size
= GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc
));
7284 else if (expr
->rank
> 1)
7285 *size
= build_call_expr_loc (input_location
,
7286 gfor_fndecl_size0
, 1,
7287 gfc_build_addr_expr (NULL
, desc
));
7290 tree ubound
= gfc_conv_descriptor_ubound_get (desc
, gfc_index_zero_node
);
7291 tree lbound
= gfc_conv_descriptor_lbound_get (desc
, gfc_index_zero_node
);
7293 *size
= fold_build2_loc (input_location
, MINUS_EXPR
,
7294 gfc_array_index_type
, ubound
, lbound
);
7295 *size
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
7296 *size
, gfc_index_one_node
);
7297 *size
= fold_build2_loc (input_location
, MAX_EXPR
, gfc_array_index_type
,
7298 *size
, gfc_index_zero_node
);
7300 elem
= TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc
)));
7301 *size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
7302 *size
, fold_convert (gfc_array_index_type
, elem
));
7305 /* Convert an array for passing as an actual parameter. */
7306 /* TODO: Optimize passing g77 arrays. */
7309 gfc_conv_array_parameter (gfc_se
* se
, gfc_expr
* expr
, bool g77
,
7310 const gfc_symbol
*fsym
, const char *proc_name
,
7315 tree tmp
= NULL_TREE
;
7317 tree parent
= DECL_CONTEXT (current_function_decl
);
7318 bool full_array_var
;
7319 bool this_array_result
;
7322 bool array_constructor
;
7323 bool good_allocatable
;
7324 bool ultimate_ptr_comp
;
7325 bool ultimate_alloc_comp
;
7330 ultimate_ptr_comp
= false;
7331 ultimate_alloc_comp
= false;
7333 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
7335 if (ref
->next
== NULL
)
7338 if (ref
->type
== REF_COMPONENT
)
7340 ultimate_ptr_comp
= ref
->u
.c
.component
->attr
.pointer
;
7341 ultimate_alloc_comp
= ref
->u
.c
.component
->attr
.allocatable
;
7345 full_array_var
= false;
7348 if (expr
->expr_type
== EXPR_VARIABLE
&& ref
&& !ultimate_ptr_comp
)
7349 full_array_var
= gfc_full_array_ref_p (ref
, &contiguous
);
7351 sym
= full_array_var
? expr
->symtree
->n
.sym
: NULL
;
7353 /* The symbol should have an array specification. */
7354 gcc_assert (!sym
|| sym
->as
|| ref
->u
.ar
.as
);
7356 if (expr
->expr_type
== EXPR_ARRAY
&& expr
->ts
.type
== BT_CHARACTER
)
7358 get_array_ctor_strlen (&se
->pre
, expr
->value
.constructor
, &tmp
);
7359 expr
->ts
.u
.cl
->backend_decl
= tmp
;
7360 se
->string_length
= tmp
;
7363 /* Is this the result of the enclosing procedure? */
7364 this_array_result
= (full_array_var
&& sym
->attr
.flavor
== FL_PROCEDURE
);
7365 if (this_array_result
7366 && (sym
->backend_decl
!= current_function_decl
)
7367 && (sym
->backend_decl
!= parent
))
7368 this_array_result
= false;
7370 /* Passing address of the array if it is not pointer or assumed-shape. */
7371 if (full_array_var
&& g77
&& !this_array_result
7372 && sym
->ts
.type
!= BT_DERIVED
&& sym
->ts
.type
!= BT_CLASS
)
7374 tmp
= gfc_get_symbol_decl (sym
);
7376 if (sym
->ts
.type
== BT_CHARACTER
)
7377 se
->string_length
= sym
->ts
.u
.cl
->backend_decl
;
7379 if (!sym
->attr
.pointer
7381 && sym
->as
->type
!= AS_ASSUMED_SHAPE
7382 && sym
->as
->type
!= AS_DEFERRED
7383 && sym
->as
->type
!= AS_ASSUMED_RANK
7384 && !sym
->attr
.allocatable
)
7386 /* Some variables are declared directly, others are declared as
7387 pointers and allocated on the heap. */
7388 if (sym
->attr
.dummy
|| POINTER_TYPE_P (TREE_TYPE (tmp
)))
7391 se
->expr
= gfc_build_addr_expr (NULL_TREE
, tmp
);
7393 array_parameter_size (tmp
, expr
, size
);
7397 if (sym
->attr
.allocatable
)
7399 if (sym
->attr
.dummy
|| sym
->attr
.result
)
7401 gfc_conv_expr_descriptor (se
, expr
);
7405 array_parameter_size (tmp
, expr
, size
);
7406 se
->expr
= gfc_conv_array_data (tmp
);
7411 /* A convenient reduction in scope. */
7412 contiguous
= g77
&& !this_array_result
&& contiguous
;
7414 /* There is no need to pack and unpack the array, if it is contiguous
7415 and not a deferred- or assumed-shape array, or if it is simply
7417 no_pack
= ((sym
&& sym
->as
7418 && !sym
->attr
.pointer
7419 && sym
->as
->type
!= AS_DEFERRED
7420 && sym
->as
->type
!= AS_ASSUMED_RANK
7421 && sym
->as
->type
!= AS_ASSUMED_SHAPE
)
7423 (ref
&& ref
->u
.ar
.as
7424 && ref
->u
.ar
.as
->type
!= AS_DEFERRED
7425 && ref
->u
.ar
.as
->type
!= AS_ASSUMED_RANK
7426 && ref
->u
.ar
.as
->type
!= AS_ASSUMED_SHAPE
)
7428 gfc_is_simply_contiguous (expr
, false, true));
7430 no_pack
= contiguous
&& no_pack
;
7432 /* Array constructors are always contiguous and do not need packing. */
7433 array_constructor
= g77
&& !this_array_result
&& expr
->expr_type
== EXPR_ARRAY
;
7435 /* Same is true of contiguous sections from allocatable variables. */
7436 good_allocatable
= contiguous
7438 && expr
->symtree
->n
.sym
->attr
.allocatable
;
7440 /* Or ultimate allocatable components. */
7441 ultimate_alloc_comp
= contiguous
&& ultimate_alloc_comp
;
7443 if (no_pack
|| array_constructor
|| good_allocatable
|| ultimate_alloc_comp
)
7445 gfc_conv_expr_descriptor (se
, expr
);
7446 /* Deallocate the allocatable components of structures that are
7448 if ((expr
->ts
.type
== BT_DERIVED
|| expr
->ts
.type
== BT_CLASS
)
7449 && expr
->ts
.u
.derived
->attr
.alloc_comp
7450 && expr
->expr_type
!= EXPR_VARIABLE
)
7452 tmp
= gfc_deallocate_alloc_comp (expr
->ts
.u
.derived
, se
->expr
, expr
->rank
);
7454 /* The components shall be deallocated before their containing entity. */
7455 gfc_prepend_expr_to_block (&se
->post
, tmp
);
7457 if (expr
->ts
.type
== BT_CHARACTER
)
7458 se
->string_length
= expr
->ts
.u
.cl
->backend_decl
;
7460 array_parameter_size (se
->expr
, expr
, size
);
7461 se
->expr
= gfc_conv_array_data (se
->expr
);
7465 if (this_array_result
)
7467 /* Result of the enclosing function. */
7468 gfc_conv_expr_descriptor (se
, expr
);
7470 array_parameter_size (se
->expr
, expr
, size
);
7471 se
->expr
= gfc_build_addr_expr (NULL_TREE
, se
->expr
);
7473 if (g77
&& TREE_TYPE (TREE_TYPE (se
->expr
)) != NULL_TREE
7474 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se
->expr
))))
7475 se
->expr
= gfc_conv_array_data (build_fold_indirect_ref_loc (input_location
,
7482 /* Every other type of array. */
7483 se
->want_pointer
= 1;
7484 gfc_conv_expr_descriptor (se
, expr
);
7486 array_parameter_size (build_fold_indirect_ref_loc (input_location
,
7491 /* Deallocate the allocatable components of structures that are
7492 not variable, for descriptorless arguments.
7493 Arguments with a descriptor are handled in gfc_conv_procedure_call. */
7494 if (g77
&& (expr
->ts
.type
== BT_DERIVED
|| expr
->ts
.type
== BT_CLASS
)
7495 && expr
->ts
.u
.derived
->attr
.alloc_comp
7496 && expr
->expr_type
!= EXPR_VARIABLE
)
7498 tmp
= build_fold_indirect_ref_loc (input_location
, se
->expr
);
7499 tmp
= gfc_deallocate_alloc_comp (expr
->ts
.u
.derived
, tmp
, expr
->rank
);
7501 /* The components shall be deallocated before their containing entity. */
7502 gfc_prepend_expr_to_block (&se
->post
, tmp
);
7505 if (g77
|| (fsym
&& fsym
->attr
.contiguous
7506 && !gfc_is_simply_contiguous (expr
, false, true)))
7508 tree origptr
= NULL_TREE
;
7512 /* For contiguous arrays, save the original value of the descriptor. */
7515 origptr
= gfc_create_var (pvoid_type_node
, "origptr");
7516 tmp
= build_fold_indirect_ref_loc (input_location
, desc
);
7517 tmp
= gfc_conv_array_data (tmp
);
7518 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
,
7519 TREE_TYPE (origptr
), origptr
,
7520 fold_convert (TREE_TYPE (origptr
), tmp
));
7521 gfc_add_expr_to_block (&se
->pre
, tmp
);
7524 /* Repack the array. */
7525 if (warn_array_temporaries
)
7528 gfc_warning (OPT_Warray_temporaries
,
7529 "Creating array temporary at %L for argument %qs",
7530 &expr
->where
, fsym
->name
);
7532 gfc_warning (OPT_Warray_temporaries
,
7533 "Creating array temporary at %L", &expr
->where
);
7536 ptr
= build_call_expr_loc (input_location
,
7537 gfor_fndecl_in_pack
, 1, desc
);
7539 if (fsym
&& fsym
->attr
.optional
&& sym
&& sym
->attr
.optional
)
7541 tmp
= gfc_conv_expr_present (sym
);
7542 ptr
= build3_loc (input_location
, COND_EXPR
, TREE_TYPE (se
->expr
),
7543 tmp
, fold_convert (TREE_TYPE (se
->expr
), ptr
),
7544 fold_convert (TREE_TYPE (se
->expr
), null_pointer_node
));
7547 ptr
= gfc_evaluate_now (ptr
, &se
->pre
);
7549 /* Use the packed data for the actual argument, except for contiguous arrays,
7550 where the descriptor's data component is set. */
7555 tmp
= build_fold_indirect_ref_loc (input_location
, desc
);
7557 gfc_ss
* ss
= gfc_walk_expr (expr
);
7558 if (!transposed_dims (ss
))
7559 gfc_conv_descriptor_data_set (&se
->pre
, tmp
, ptr
);
7562 tree old_field
, new_field
;
7564 /* The original descriptor has transposed dims so we can't reuse
7565 it directly; we have to create a new one. */
7566 tree old_desc
= tmp
;
7567 tree new_desc
= gfc_create_var (TREE_TYPE (old_desc
), "arg_desc");
7569 old_field
= gfc_conv_descriptor_dtype (old_desc
);
7570 new_field
= gfc_conv_descriptor_dtype (new_desc
);
7571 gfc_add_modify (&se
->pre
, new_field
, old_field
);
7573 old_field
= gfc_conv_descriptor_offset (old_desc
);
7574 new_field
= gfc_conv_descriptor_offset (new_desc
);
7575 gfc_add_modify (&se
->pre
, new_field
, old_field
);
7577 for (int i
= 0; i
< expr
->rank
; i
++)
7579 old_field
= gfc_conv_descriptor_dimension (old_desc
,
7580 gfc_rank_cst
[get_array_ref_dim_for_loop_dim (ss
, i
)]);
7581 new_field
= gfc_conv_descriptor_dimension (new_desc
,
7583 gfc_add_modify (&se
->pre
, new_field
, old_field
);
7586 if (flag_coarray
== GFC_FCOARRAY_LIB
7587 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (old_desc
))
7588 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (old_desc
))
7589 == GFC_ARRAY_ALLOCATABLE
)
7591 old_field
= gfc_conv_descriptor_token (old_desc
);
7592 new_field
= gfc_conv_descriptor_token (new_desc
);
7593 gfc_add_modify (&se
->pre
, new_field
, old_field
);
7596 gfc_conv_descriptor_data_set (&se
->pre
, new_desc
, ptr
);
7597 se
->expr
= gfc_build_addr_expr (NULL_TREE
, new_desc
);
7602 if (gfc_option
.rtcheck
& GFC_RTCHECK_ARRAY_TEMPS
)
7606 if (fsym
&& proc_name
)
7607 msg
= xasprintf ("An array temporary was created for argument "
7608 "'%s' of procedure '%s'", fsym
->name
, proc_name
);
7610 msg
= xasprintf ("An array temporary was created");
7612 tmp
= build_fold_indirect_ref_loc (input_location
,
7614 tmp
= gfc_conv_array_data (tmp
);
7615 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
7616 fold_convert (TREE_TYPE (tmp
), ptr
), tmp
);
7618 if (fsym
&& fsym
->attr
.optional
&& sym
&& sym
->attr
.optional
)
7619 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
7621 gfc_conv_expr_present (sym
), tmp
);
7623 gfc_trans_runtime_check (false, true, tmp
, &se
->pre
,
7628 gfc_start_block (&block
);
7630 /* Copy the data back. */
7631 if (fsym
== NULL
|| fsym
->attr
.intent
!= INTENT_IN
)
7633 tmp
= build_call_expr_loc (input_location
,
7634 gfor_fndecl_in_unpack
, 2, desc
, ptr
);
7635 gfc_add_expr_to_block (&block
, tmp
);
7638 /* Free the temporary. */
7639 tmp
= gfc_call_free (ptr
);
7640 gfc_add_expr_to_block (&block
, tmp
);
7642 stmt
= gfc_finish_block (&block
);
7644 gfc_init_block (&block
);
7645 /* Only if it was repacked. This code needs to be executed before the
7646 loop cleanup code. */
7647 tmp
= build_fold_indirect_ref_loc (input_location
,
7649 tmp
= gfc_conv_array_data (tmp
);
7650 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
7651 fold_convert (TREE_TYPE (tmp
), ptr
), tmp
);
7653 if (fsym
&& fsym
->attr
.optional
&& sym
&& sym
->attr
.optional
)
7654 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
7656 gfc_conv_expr_present (sym
), tmp
);
7658 tmp
= build3_v (COND_EXPR
, tmp
, stmt
, build_empty_stmt (input_location
));
7660 gfc_add_expr_to_block (&block
, tmp
);
7661 gfc_add_block_to_block (&block
, &se
->post
);
7663 gfc_init_block (&se
->post
);
7665 /* Reset the descriptor pointer. */
7668 tmp
= build_fold_indirect_ref_loc (input_location
, desc
);
7669 gfc_conv_descriptor_data_set (&se
->post
, tmp
, origptr
);
7672 gfc_add_block_to_block (&se
->post
, &block
);
7677 /* Generate code to deallocate an array, if it is allocated. */
7680 gfc_trans_dealloc_allocated (tree descriptor
, bool coarray
, gfc_expr
*expr
)
7686 gfc_start_block (&block
);
7688 var
= gfc_conv_descriptor_data_get (descriptor
);
7691 /* Call array_deallocate with an int * present in the second argument.
7692 Although it is ignored here, it's presence ensures that arrays that
7693 are already deallocated are ignored. */
7694 tmp
= gfc_deallocate_with_status (coarray
? descriptor
: var
, NULL_TREE
,
7695 NULL_TREE
, NULL_TREE
, NULL_TREE
, true,
7697 gfc_add_expr_to_block (&block
, tmp
);
7699 /* Zero the data pointer. */
7700 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
7701 var
, build_int_cst (TREE_TYPE (var
), 0));
7702 gfc_add_expr_to_block (&block
, tmp
);
7704 return gfc_finish_block (&block
);
7708 /* This helper function calculates the size in words of a full array. */
7711 gfc_full_array_size (stmtblock_t
*block
, tree decl
, int rank
)
7716 idx
= gfc_rank_cst
[rank
- 1];
7717 nelems
= gfc_conv_descriptor_ubound_get (decl
, idx
);
7718 tmp
= gfc_conv_descriptor_lbound_get (decl
, idx
);
7719 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
7721 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
7722 tmp
, gfc_index_one_node
);
7723 tmp
= gfc_evaluate_now (tmp
, block
);
7725 nelems
= gfc_conv_descriptor_stride_get (decl
, idx
);
7726 tmp
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
7728 return gfc_evaluate_now (tmp
, block
);
7732 /* Allocate dest to the same size as src, and copy src -> dest.
7733 If no_malloc is set, only the copy is done. */
7736 duplicate_allocatable (tree dest
, tree src
, tree type
, int rank
,
7737 bool no_malloc
, bool no_memcpy
, tree str_sz
,
7738 tree add_when_allocated
)
7747 /* If the source is null, set the destination to null. Then,
7748 allocate memory to the destination. */
7749 gfc_init_block (&block
);
7751 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest
)))
7753 tmp
= null_pointer_node
;
7754 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, type
, dest
, tmp
);
7755 gfc_add_expr_to_block (&block
, tmp
);
7756 null_data
= gfc_finish_block (&block
);
7758 gfc_init_block (&block
);
7759 if (str_sz
!= NULL_TREE
)
7762 size
= TYPE_SIZE_UNIT (TREE_TYPE (type
));
7766 tmp
= gfc_call_malloc (&block
, type
, size
);
7767 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
7768 dest
, fold_convert (type
, tmp
));
7769 gfc_add_expr_to_block (&block
, tmp
);
7774 tmp
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
7775 tmp
= build_call_expr_loc (input_location
, tmp
, 3, dest
, src
,
7776 fold_convert (size_type_node
, size
));
7777 gfc_add_expr_to_block (&block
, tmp
);
7782 gfc_conv_descriptor_data_set (&block
, dest
, null_pointer_node
);
7783 null_data
= gfc_finish_block (&block
);
7785 gfc_init_block (&block
);
7787 nelems
= gfc_full_array_size (&block
, src
, rank
);
7789 nelems
= gfc_index_one_node
;
7791 if (str_sz
!= NULL_TREE
)
7792 tmp
= fold_convert (gfc_array_index_type
, str_sz
);
7794 tmp
= fold_convert (gfc_array_index_type
,
7795 TYPE_SIZE_UNIT (gfc_get_element_type (type
)));
7796 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
7800 tmp
= TREE_TYPE (gfc_conv_descriptor_data_get (src
));
7801 tmp
= gfc_call_malloc (&block
, tmp
, size
);
7802 gfc_conv_descriptor_data_set (&block
, dest
, tmp
);
7805 /* We know the temporary and the value will be the same length,
7806 so can use memcpy. */
7809 tmp
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
7810 tmp
= build_call_expr_loc (input_location
, tmp
, 3,
7811 gfc_conv_descriptor_data_get (dest
),
7812 gfc_conv_descriptor_data_get (src
),
7813 fold_convert (size_type_node
, size
));
7814 gfc_add_expr_to_block (&block
, tmp
);
7818 gfc_add_expr_to_block (&block
, add_when_allocated
);
7819 tmp
= gfc_finish_block (&block
);
7821 /* Null the destination if the source is null; otherwise do
7822 the allocate and copy. */
7823 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (src
)))
7826 null_cond
= gfc_conv_descriptor_data_get (src
);
7828 null_cond
= convert (pvoid_type_node
, null_cond
);
7829 null_cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
7830 null_cond
, null_pointer_node
);
7831 return build3_v (COND_EXPR
, null_cond
, tmp
, null_data
);
7835 /* Allocate dest to the same size as src, and copy data src -> dest. */
7838 gfc_duplicate_allocatable (tree dest
, tree src
, tree type
, int rank
,
7839 tree add_when_allocated
)
7841 return duplicate_allocatable (dest
, src
, type
, rank
, false, false,
7842 NULL_TREE
, add_when_allocated
);
7846 /* Copy data src -> dest. */
7849 gfc_copy_allocatable_data (tree dest
, tree src
, tree type
, int rank
)
7851 return duplicate_allocatable (dest
, src
, type
, rank
, true, false,
7852 NULL_TREE
, NULL_TREE
);
7855 /* Allocate dest to the same size as src, but don't copy anything. */
7858 gfc_duplicate_allocatable_nocopy (tree dest
, tree src
, tree type
, int rank
)
7860 return duplicate_allocatable (dest
, src
, type
, rank
, false, true,
7861 NULL_TREE
, NULL_TREE
);
7865 /* Recursively traverse an object of derived type, generating code to
7866 deallocate, nullify or copy allocatable components. This is the work horse
7867 function for the functions named in this enum. */
7869 enum {DEALLOCATE_ALLOC_COMP
= 1, DEALLOCATE_ALLOC_COMP_NO_CAF
,
7870 NULLIFY_ALLOC_COMP
, COPY_ALLOC_COMP
, COPY_ONLY_ALLOC_COMP
,
7871 COPY_ALLOC_COMP_CAF
};
7874 structure_alloc_comps (gfc_symbol
* der_type
, tree decl
,
7875 tree dest
, int rank
, int purpose
)
7879 stmtblock_t fnblock
;
7880 stmtblock_t loopbody
;
7881 stmtblock_t tmpblock
;
7892 tree null_cond
= NULL_TREE
;
7893 tree add_when_allocated
;
7894 bool called_dealloc_with_status
;
7896 gfc_init_block (&fnblock
);
7898 decl_type
= TREE_TYPE (decl
);
7900 if ((POINTER_TYPE_P (decl_type
))
7901 || (TREE_CODE (decl_type
) == REFERENCE_TYPE
&& rank
== 0))
7903 decl
= build_fold_indirect_ref_loc (input_location
, decl
);
7904 /* Deref dest in sync with decl, but only when it is not NULL. */
7906 dest
= build_fold_indirect_ref_loc (input_location
, dest
);
7909 /* Just in case it gets dereferenced. */
7910 decl_type
= TREE_TYPE (decl
);
7912 /* If this is an array of derived types with allocatable components
7913 build a loop and recursively call this function. */
7914 if (TREE_CODE (decl_type
) == ARRAY_TYPE
7915 || (GFC_DESCRIPTOR_TYPE_P (decl_type
) && rank
!= 0))
7917 tmp
= gfc_conv_array_data (decl
);
7918 var
= build_fold_indirect_ref_loc (input_location
, tmp
);
7920 /* Get the number of elements - 1 and set the counter. */
7921 if (GFC_DESCRIPTOR_TYPE_P (decl_type
))
7923 /* Use the descriptor for an allocatable array. Since this
7924 is a full array reference, we only need the descriptor
7925 information from dimension = rank. */
7926 tmp
= gfc_full_array_size (&fnblock
, decl
, rank
);
7927 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
7928 gfc_array_index_type
, tmp
,
7929 gfc_index_one_node
);
7931 null_cond
= gfc_conv_descriptor_data_get (decl
);
7932 null_cond
= fold_build2_loc (input_location
, NE_EXPR
,
7933 boolean_type_node
, null_cond
,
7934 build_int_cst (TREE_TYPE (null_cond
), 0));
7938 /* Otherwise use the TYPE_DOMAIN information. */
7939 tmp
= array_type_nelts (decl_type
);
7940 tmp
= fold_convert (gfc_array_index_type
, tmp
);
7943 /* Remember that this is, in fact, the no. of elements - 1. */
7944 nelems
= gfc_evaluate_now (tmp
, &fnblock
);
7945 index
= gfc_create_var (gfc_array_index_type
, "S");
7947 /* Build the body of the loop. */
7948 gfc_init_block (&loopbody
);
7950 vref
= gfc_build_array_ref (var
, index
, NULL
);
7952 if (purpose
== COPY_ALLOC_COMP
|| purpose
== COPY_ONLY_ALLOC_COMP
)
7954 tmp
= build_fold_indirect_ref_loc (input_location
,
7955 gfc_conv_array_data (dest
));
7956 dref
= gfc_build_array_ref (tmp
, index
, NULL
);
7957 tmp
= structure_alloc_comps (der_type
, vref
, dref
, rank
,
7961 tmp
= structure_alloc_comps (der_type
, vref
, NULL_TREE
, rank
, purpose
);
7963 gfc_add_expr_to_block (&loopbody
, tmp
);
7965 /* Build the loop and return. */
7966 gfc_init_loopinfo (&loop
);
7968 loop
.from
[0] = gfc_index_zero_node
;
7969 loop
.loopvar
[0] = index
;
7970 loop
.to
[0] = nelems
;
7971 gfc_trans_scalarizing_loops (&loop
, &loopbody
);
7972 gfc_add_block_to_block (&fnblock
, &loop
.pre
);
7974 tmp
= gfc_finish_block (&fnblock
);
7975 /* When copying allocateable components, the above implements the
7976 deep copy. Nevertheless is a deep copy only allowed, when the current
7977 component is allocated, for which code will be generated in
7978 gfc_duplicate_allocatable (), where the deep copy code is just added
7979 into the if's body, by adding tmp (the deep copy code) as last
7980 argument to gfc_duplicate_allocatable (). */
7981 if (purpose
== COPY_ALLOC_COMP
7982 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest
)))
7983 tmp
= gfc_duplicate_allocatable (dest
, decl
, decl_type
, rank
,
7985 else if (null_cond
!= NULL_TREE
)
7986 tmp
= build3_v (COND_EXPR
, null_cond
, tmp
,
7987 build_empty_stmt (input_location
));
7992 /* Otherwise, act on the components or recursively call self to
7993 act on a chain of components. */
7994 for (c
= der_type
->components
; c
; c
= c
->next
)
7996 bool cmp_has_alloc_comps
= (c
->ts
.type
== BT_DERIVED
7997 || c
->ts
.type
== BT_CLASS
)
7998 && c
->ts
.u
.derived
->attr
.alloc_comp
;
7999 cdecl = c
->backend_decl
;
8000 ctype
= TREE_TYPE (cdecl);
8004 case DEALLOCATE_ALLOC_COMP
:
8005 case DEALLOCATE_ALLOC_COMP_NO_CAF
:
8007 /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
8008 (i.e. this function) so generate all the calls and suppress the
8009 recursion from here, if necessary. */
8010 called_dealloc_with_status
= false;
8011 gfc_init_block (&tmpblock
);
8013 if ((c
->ts
.type
== BT_DERIVED
&& !c
->attr
.pointer
)
8014 || (c
->ts
.type
== BT_CLASS
&& !CLASS_DATA (c
)->attr
.class_pointer
))
8016 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8017 decl
, cdecl, NULL_TREE
);
8019 /* The finalizer frees allocatable components. */
8020 called_dealloc_with_status
8021 = gfc_add_comp_finalizer_call (&tmpblock
, comp
, c
,
8022 purpose
== DEALLOCATE_ALLOC_COMP
);
8027 if (c
->attr
.allocatable
&& !c
->attr
.proc_pointer
8028 && (c
->attr
.dimension
8029 || (c
->attr
.codimension
8030 && purpose
!= DEALLOCATE_ALLOC_COMP_NO_CAF
)))
8032 if (comp
== NULL_TREE
)
8033 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8034 decl
, cdecl, NULL_TREE
);
8035 tmp
= gfc_trans_dealloc_allocated (comp
, c
->attr
.codimension
, NULL
);
8036 gfc_add_expr_to_block (&tmpblock
, tmp
);
8038 else if (c
->attr
.allocatable
&& !c
->attr
.codimension
)
8040 /* Allocatable scalar components. */
8041 if (comp
== NULL_TREE
)
8042 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8043 decl
, cdecl, NULL_TREE
);
8045 tmp
= gfc_deallocate_scalar_with_status (comp
, NULL
, true, NULL
,
8047 gfc_add_expr_to_block (&tmpblock
, tmp
);
8048 called_dealloc_with_status
= true;
8050 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
,
8051 void_type_node
, comp
,
8052 build_int_cst (TREE_TYPE (comp
), 0));
8053 gfc_add_expr_to_block (&tmpblock
, tmp
);
8055 else if (c
->ts
.type
== BT_CLASS
&& CLASS_DATA (c
)->attr
.allocatable
8056 && (!CLASS_DATA (c
)->attr
.codimension
8057 || purpose
!= DEALLOCATE_ALLOC_COMP_NO_CAF
))
8059 /* Allocatable CLASS components. */
8061 /* Add reference to '_data' component. */
8062 tmp
= CLASS_DATA (c
)->backend_decl
;
8063 comp
= fold_build3_loc (input_location
, COMPONENT_REF
,
8064 TREE_TYPE (tmp
), comp
, tmp
, NULL_TREE
);
8066 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp
)))
8067 tmp
= gfc_trans_dealloc_allocated (comp
,
8068 CLASS_DATA (c
)->attr
.codimension
, NULL
);
8071 tmp
= gfc_deallocate_scalar_with_status (comp
, NULL_TREE
, true, NULL
,
8072 CLASS_DATA (c
)->ts
);
8073 gfc_add_expr_to_block (&tmpblock
, tmp
);
8074 called_dealloc_with_status
= true;
8076 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
,
8077 void_type_node
, comp
,
8078 build_int_cst (TREE_TYPE (comp
), 0));
8080 gfc_add_expr_to_block (&tmpblock
, tmp
);
8082 /* Finally, reset the vptr to the declared type vtable and, if
8083 necessary reset the _len field.
8085 First recover the reference to the component and obtain
8087 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8088 decl
, cdecl, NULL_TREE
);
8089 tmp
= gfc_class_vptr_get (comp
);
8091 if (UNLIMITED_POLY (c
))
8093 /* Both vptr and _len field should be nulled. */
8094 gfc_add_modify (&tmpblock
, tmp
,
8095 build_int_cst (TREE_TYPE (tmp
), 0));
8096 tmp
= gfc_class_len_get (comp
);
8097 gfc_add_modify (&tmpblock
, tmp
,
8098 build_int_cst (TREE_TYPE (tmp
), 0));
8102 /* Build the vtable address and set the vptr with it. */
8105 vtable
= gfc_find_derived_vtab (c
->ts
.u
.derived
);
8106 vtab
= vtable
->backend_decl
;
8107 if (vtab
== NULL_TREE
)
8108 vtab
= gfc_get_symbol_decl (vtable
);
8109 vtab
= gfc_build_addr_expr (NULL
, vtab
);
8110 vtab
= fold_convert (TREE_TYPE (tmp
), vtab
);
8111 gfc_add_modify (&tmpblock
, tmp
, vtab
);
8115 if (cmp_has_alloc_comps
8116 && !c
->attr
.pointer
&& !c
->attr
.proc_pointer
8117 && !called_dealloc_with_status
)
8119 /* Do not deallocate the components of ultimate pointer
8120 components or iteratively call self if call has been made
8121 to gfc_trans_dealloc_allocated */
8122 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8123 decl
, cdecl, NULL_TREE
);
8124 rank
= c
->as
? c
->as
->rank
: 0;
8125 tmp
= structure_alloc_comps (c
->ts
.u
.derived
, comp
, NULL_TREE
,
8127 gfc_add_expr_to_block (&fnblock
, tmp
);
8130 /* Now add the deallocation of this component. */
8131 gfc_add_block_to_block (&fnblock
, &tmpblock
);
8134 case NULLIFY_ALLOC_COMP
:
8135 if (c
->attr
.pointer
|| c
->attr
.proc_pointer
)
8137 else if (c
->attr
.allocatable
8138 && (c
->attr
.dimension
|| c
->attr
.codimension
))
8140 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8141 decl
, cdecl, NULL_TREE
);
8142 gfc_conv_descriptor_data_set (&fnblock
, comp
, null_pointer_node
);
8144 else if (c
->attr
.allocatable
)
8146 /* Allocatable scalar components. */
8147 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8148 decl
, cdecl, NULL_TREE
);
8149 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
,
8150 void_type_node
, comp
,
8151 build_int_cst (TREE_TYPE (comp
), 0));
8152 gfc_add_expr_to_block (&fnblock
, tmp
);
8153 if (gfc_deferred_strlen (c
, &comp
))
8155 comp
= fold_build3_loc (input_location
, COMPONENT_REF
,
8157 decl
, comp
, NULL_TREE
);
8158 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
,
8159 TREE_TYPE (comp
), comp
,
8160 build_int_cst (TREE_TYPE (comp
), 0));
8161 gfc_add_expr_to_block (&fnblock
, tmp
);
8164 else if (c
->ts
.type
== BT_CLASS
&& CLASS_DATA (c
)->attr
.allocatable
)
8166 /* Allocatable CLASS components. */
8167 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8168 decl
, cdecl, NULL_TREE
);
8169 /* Add reference to '_data' component. */
8170 tmp
= CLASS_DATA (c
)->backend_decl
;
8171 comp
= fold_build3_loc (input_location
, COMPONENT_REF
,
8172 TREE_TYPE (tmp
), comp
, tmp
, NULL_TREE
);
8173 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp
)))
8174 gfc_conv_descriptor_data_set (&fnblock
, comp
, null_pointer_node
);
8177 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
,
8178 void_type_node
, comp
,
8179 build_int_cst (TREE_TYPE (comp
), 0));
8180 gfc_add_expr_to_block (&fnblock
, tmp
);
8183 else if (cmp_has_alloc_comps
)
8185 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
,
8186 decl
, cdecl, NULL_TREE
);
8187 rank
= c
->as
? c
->as
->rank
: 0;
8188 tmp
= structure_alloc_comps (c
->ts
.u
.derived
, comp
, NULL_TREE
,
8190 gfc_add_expr_to_block (&fnblock
, tmp
);
8194 case COPY_ALLOC_COMP_CAF
:
8195 if (!c
->attr
.codimension
8196 && (c
->ts
.type
!= BT_CLASS
|| CLASS_DATA (c
)->attr
.coarray_comp
)
8197 && (c
->ts
.type
!= BT_DERIVED
8198 || !c
->ts
.u
.derived
->attr
.coarray_comp
))
8201 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
, decl
,
8203 dcmp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
, dest
,
8206 if (c
->attr
.codimension
)
8208 if (c
->ts
.type
== BT_CLASS
)
8210 comp
= gfc_class_data_get (comp
);
8211 dcmp
= gfc_class_data_get (dcmp
);
8213 gfc_conv_descriptor_data_set (&fnblock
, dcmp
,
8214 gfc_conv_descriptor_data_get (comp
));
8218 tmp
= structure_alloc_comps (c
->ts
.u
.derived
, comp
, dcmp
,
8220 gfc_add_expr_to_block (&fnblock
, tmp
);
8225 case COPY_ALLOC_COMP
:
8226 if (c
->attr
.pointer
)
8229 /* We need source and destination components. */
8230 comp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
, decl
,
8232 dcmp
= fold_build3_loc (input_location
, COMPONENT_REF
, ctype
, dest
,
8234 dcmp
= fold_convert (TREE_TYPE (comp
), dcmp
);
8236 if (c
->ts
.type
== BT_CLASS
&& CLASS_DATA (c
)->attr
.allocatable
)
8244 dst_data
= gfc_class_data_get (dcmp
);
8245 src_data
= gfc_class_data_get (comp
);
8246 size
= fold_convert (size_type_node
,
8247 gfc_class_vtab_size_get (comp
));
8249 if (CLASS_DATA (c
)->attr
.dimension
)
8251 nelems
= gfc_conv_descriptor_size (src_data
,
8252 CLASS_DATA (c
)->as
->rank
);
8253 size
= fold_build2_loc (input_location
, MULT_EXPR
,
8254 size_type_node
, size
,
8255 fold_convert (size_type_node
,
8259 nelems
= build_int_cst (size_type_node
, 1);
8261 if (CLASS_DATA (c
)->attr
.dimension
8262 || CLASS_DATA (c
)->attr
.codimension
)
8264 src_data
= gfc_conv_descriptor_data_get (src_data
);
8265 dst_data
= gfc_conv_descriptor_data_get (dst_data
);
8268 gfc_init_block (&tmpblock
);
8270 /* Coarray component have to have the same allocation status and
8271 shape/type-parameter/effective-type on the LHS and RHS of an
8272 intrinsic assignment. Hence, we did not deallocated them - and
8273 do not allocate them here. */
8274 if (!CLASS_DATA (c
)->attr
.codimension
)
8276 ftn_tree
= builtin_decl_explicit (BUILT_IN_MALLOC
);
8277 tmp
= build_call_expr_loc (input_location
, ftn_tree
, 1, size
);
8278 gfc_add_modify (&tmpblock
, dst_data
,
8279 fold_convert (TREE_TYPE (dst_data
), tmp
));
8282 tmp
= gfc_copy_class_to_class (comp
, dcmp
, nelems
,
8283 UNLIMITED_POLY (c
));
8284 gfc_add_expr_to_block (&tmpblock
, tmp
);
8285 tmp
= gfc_finish_block (&tmpblock
);
8287 gfc_init_block (&tmpblock
);
8288 gfc_add_modify (&tmpblock
, dst_data
,
8289 fold_convert (TREE_TYPE (dst_data
),
8290 null_pointer_node
));
8291 null_data
= gfc_finish_block (&tmpblock
);
8293 null_cond
= fold_build2_loc (input_location
, NE_EXPR
,
8294 boolean_type_node
, src_data
,
8297 gfc_add_expr_to_block (&fnblock
, build3_v (COND_EXPR
, null_cond
,
8302 /* To implement guarded deep copy, i.e., deep copy only allocatable
8303 components that are really allocated, the deep copy code has to
8304 be generated first and then added to the if-block in
8305 gfc_duplicate_allocatable (). */
8306 if (cmp_has_alloc_comps
8307 && !c
->attr
.proc_pointer
)
8309 rank
= c
->as
? c
->as
->rank
: 0;
8310 tmp
= fold_convert (TREE_TYPE (dcmp
), comp
);
8311 gfc_add_modify (&fnblock
, dcmp
, tmp
);
8312 add_when_allocated
= structure_alloc_comps (c
->ts
.u
.derived
,
8317 add_when_allocated
= NULL_TREE
;
8319 if (gfc_deferred_strlen (c
, &tmp
))
8323 tmp
= fold_build3_loc (input_location
, COMPONENT_REF
,
8325 decl
, len
, NULL_TREE
);
8326 len
= fold_build3_loc (input_location
, COMPONENT_REF
,
8328 dest
, len
, NULL_TREE
);
8329 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
,
8330 TREE_TYPE (len
), len
, tmp
);
8331 gfc_add_expr_to_block (&fnblock
, tmp
);
8332 size
= size_of_string_in_bytes (c
->ts
.kind
, len
);
8333 /* This component can not have allocatable components,
8334 therefore add_when_allocated of duplicate_allocatable ()
8336 tmp
= duplicate_allocatable (dcmp
, comp
, ctype
, rank
,
8337 false, false, size
, NULL_TREE
);
8338 gfc_add_expr_to_block (&fnblock
, tmp
);
8340 else if (c
->attr
.allocatable
&& !c
->attr
.proc_pointer
8341 && (!(cmp_has_alloc_comps
&& c
->as
)
8342 || c
->attr
.codimension
))
8344 rank
= c
->as
? c
->as
->rank
: 0;
8345 if (c
->attr
.codimension
)
8346 tmp
= gfc_copy_allocatable_data (dcmp
, comp
, ctype
, rank
);
8348 tmp
= gfc_duplicate_allocatable (dcmp
, comp
, ctype
, rank
,
8349 add_when_allocated
);
8350 gfc_add_expr_to_block (&fnblock
, tmp
);
8353 if (cmp_has_alloc_comps
)
8354 gfc_add_expr_to_block (&fnblock
, add_when_allocated
);
8364 return gfc_finish_block (&fnblock
);
8367 /* Recursively traverse an object of derived type, generating code to
8368 nullify allocatable components. */
8371 gfc_nullify_alloc_comp (gfc_symbol
* der_type
, tree decl
, int rank
)
8373 return structure_alloc_comps (der_type
, decl
, NULL_TREE
, rank
,
8374 NULLIFY_ALLOC_COMP
);
8378 /* Recursively traverse an object of derived type, generating code to
8379 deallocate allocatable components. */
8382 gfc_deallocate_alloc_comp (gfc_symbol
* der_type
, tree decl
, int rank
)
8384 return structure_alloc_comps (der_type
, decl
, NULL_TREE
, rank
,
8385 DEALLOCATE_ALLOC_COMP
);
8389 /* Recursively traverse an object of derived type, generating code to
8390 deallocate allocatable components. But do not deallocate coarrays.
8391 To be used for intrinsic assignment, which may not change the allocation
8392 status of coarrays. */
8395 gfc_deallocate_alloc_comp_no_caf (gfc_symbol
* der_type
, tree decl
, int rank
)
8397 return structure_alloc_comps (der_type
, decl
, NULL_TREE
, rank
,
8398 DEALLOCATE_ALLOC_COMP_NO_CAF
);
8403 gfc_reassign_alloc_comp_caf (gfc_symbol
*der_type
, tree decl
, tree dest
)
8405 return structure_alloc_comps (der_type
, decl
, dest
, 0, COPY_ALLOC_COMP_CAF
);
8409 /* Recursively traverse an object of derived type, generating code to
8410 copy it and its allocatable components. */
8413 gfc_copy_alloc_comp (gfc_symbol
* der_type
, tree decl
, tree dest
, int rank
)
8415 return structure_alloc_comps (der_type
, decl
, dest
, rank
, COPY_ALLOC_COMP
);
8419 /* Recursively traverse an object of derived type, generating code to
8420 copy only its allocatable components. */
8423 gfc_copy_only_alloc_comp (gfc_symbol
* der_type
, tree decl
, tree dest
, int rank
)
8425 return structure_alloc_comps (der_type
, decl
, dest
, rank
, COPY_ONLY_ALLOC_COMP
);
8429 /* Returns the value of LBOUND for an expression. This could be broken out
8430 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
8431 called by gfc_alloc_allocatable_for_assignment. */
8433 get_std_lbound (gfc_expr
*expr
, tree desc
, int dim
, bool assumed_size
)
8438 tree cond
, cond1
, cond3
, cond4
;
8442 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc
)))
8444 tmp
= gfc_rank_cst
[dim
];
8445 lbound
= gfc_conv_descriptor_lbound_get (desc
, tmp
);
8446 ubound
= gfc_conv_descriptor_ubound_get (desc
, tmp
);
8447 stride
= gfc_conv_descriptor_stride_get (desc
, tmp
);
8448 cond1
= fold_build2_loc (input_location
, GE_EXPR
, boolean_type_node
,
8450 cond3
= fold_build2_loc (input_location
, GE_EXPR
, boolean_type_node
,
8451 stride
, gfc_index_zero_node
);
8452 cond3
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
8453 boolean_type_node
, cond3
, cond1
);
8454 cond4
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
8455 stride
, gfc_index_zero_node
);
8457 cond
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
8458 tmp
, build_int_cst (gfc_array_index_type
,
8461 cond
= boolean_false_node
;
8463 cond1
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
8464 boolean_type_node
, cond3
, cond4
);
8465 cond
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
8466 boolean_type_node
, cond
, cond1
);
8468 return fold_build3_loc (input_location
, COND_EXPR
,
8469 gfc_array_index_type
, cond
,
8470 lbound
, gfc_index_one_node
);
8473 if (expr
->expr_type
== EXPR_FUNCTION
)
8475 /* A conversion function, so use the argument. */
8476 gcc_assert (expr
->value
.function
.isym
8477 && expr
->value
.function
.isym
->conversion
);
8478 expr
= expr
->value
.function
.actual
->expr
;
8481 if (expr
->expr_type
== EXPR_VARIABLE
)
8483 tmp
= TREE_TYPE (expr
->symtree
->n
.sym
->backend_decl
);
8484 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
8486 if (ref
->type
== REF_COMPONENT
8487 && ref
->u
.c
.component
->as
8489 && ref
->next
->u
.ar
.type
== AR_FULL
)
8490 tmp
= TREE_TYPE (ref
->u
.c
.component
->backend_decl
);
8492 return GFC_TYPE_ARRAY_LBOUND(tmp
, dim
);
8495 return gfc_index_one_node
;
8499 /* Returns true if an expression represents an lhs that can be reallocated
8503 gfc_is_reallocatable_lhs (gfc_expr
*expr
)
8510 /* An allocatable variable. */
8511 if (expr
->symtree
->n
.sym
->attr
.allocatable
8513 && expr
->ref
->type
== REF_ARRAY
8514 && expr
->ref
->u
.ar
.type
== AR_FULL
)
8517 /* All that can be left are allocatable components. */
8518 if ((expr
->symtree
->n
.sym
->ts
.type
!= BT_DERIVED
8519 && expr
->symtree
->n
.sym
->ts
.type
!= BT_CLASS
)
8520 || !expr
->symtree
->n
.sym
->ts
.u
.derived
->attr
.alloc_comp
)
8523 /* Find a component ref followed by an array reference. */
8524 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
8526 && ref
->type
== REF_COMPONENT
8527 && ref
->next
->type
== REF_ARRAY
8528 && !ref
->next
->next
)
8534 /* Return true if valid reallocatable lhs. */
8535 if (ref
->u
.c
.component
->attr
.allocatable
8536 && ref
->next
->u
.ar
.type
== AR_FULL
)
8544 concat_str_length (gfc_expr
* expr
)
8551 type
= gfc_typenode_for_spec (&expr
->value
.op
.op1
->ts
);
8552 len1
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8553 if (len1
== NULL_TREE
)
8555 if (expr
->value
.op
.op1
->expr_type
== EXPR_OP
)
8556 len1
= concat_str_length (expr
->value
.op
.op1
);
8557 else if (expr
->value
.op
.op1
->expr_type
== EXPR_CONSTANT
)
8558 len1
= build_int_cst (gfc_charlen_type_node
,
8559 expr
->value
.op
.op1
->value
.character
.length
);
8560 else if (expr
->value
.op
.op1
->ts
.u
.cl
->length
)
8562 gfc_init_se (&se
, NULL
);
8563 gfc_conv_expr (&se
, expr
->value
.op
.op1
->ts
.u
.cl
->length
);
8569 gfc_init_se (&se
, NULL
);
8570 se
.want_pointer
= 1;
8571 se
.descriptor_only
= 1;
8572 gfc_conv_expr (&se
, expr
->value
.op
.op1
);
8573 len1
= se
.string_length
;
8577 type
= gfc_typenode_for_spec (&expr
->value
.op
.op2
->ts
);
8578 len2
= TYPE_MAX_VALUE (TYPE_DOMAIN (type
));
8579 if (len2
== NULL_TREE
)
8581 if (expr
->value
.op
.op2
->expr_type
== EXPR_OP
)
8582 len2
= concat_str_length (expr
->value
.op
.op2
);
8583 else if (expr
->value
.op
.op2
->expr_type
== EXPR_CONSTANT
)
8584 len2
= build_int_cst (gfc_charlen_type_node
,
8585 expr
->value
.op
.op2
->value
.character
.length
);
8586 else if (expr
->value
.op
.op2
->ts
.u
.cl
->length
)
8588 gfc_init_se (&se
, NULL
);
8589 gfc_conv_expr (&se
, expr
->value
.op
.op2
->ts
.u
.cl
->length
);
8595 gfc_init_se (&se
, NULL
);
8596 se
.want_pointer
= 1;
8597 se
.descriptor_only
= 1;
8598 gfc_conv_expr (&se
, expr
->value
.op
.op2
);
8599 len2
= se
.string_length
;
8603 gcc_assert(len1
&& len2
);
8604 len1
= fold_convert (gfc_charlen_type_node
, len1
);
8605 len2
= fold_convert (gfc_charlen_type_node
, len2
);
8607 return fold_build2_loc (input_location
, PLUS_EXPR
,
8608 gfc_charlen_type_node
, len1
, len2
);
8612 /* Allocate the lhs of an assignment to an allocatable array, otherwise
8616 gfc_alloc_allocatable_for_assignment (gfc_loopinfo
*loop
,
8620 stmtblock_t realloc_block
;
8621 stmtblock_t alloc_block
;
8625 gfc_array_info
*linfo
;
8647 gfc_array_spec
* as
;
8649 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
8650 Find the lhs expression in the loop chain and set expr1 and
8651 expr2 accordingly. */
8652 if (expr1
->expr_type
== EXPR_FUNCTION
&& expr2
== NULL
)
8655 /* Find the ss for the lhs. */
8657 for (; lss
&& lss
!= gfc_ss_terminator
; lss
= lss
->loop_chain
)
8658 if (lss
->info
->expr
&& lss
->info
->expr
->expr_type
== EXPR_VARIABLE
)
8660 if (lss
== gfc_ss_terminator
)
8662 expr1
= lss
->info
->expr
;
8665 /* Bail out if this is not a valid allocate on assignment. */
8666 if (!gfc_is_reallocatable_lhs (expr1
)
8667 || (expr2
&& !expr2
->rank
))
8670 /* Find the ss for the lhs. */
8672 for (; lss
&& lss
!= gfc_ss_terminator
; lss
= lss
->loop_chain
)
8673 if (lss
->info
->expr
== expr1
)
8676 if (lss
== gfc_ss_terminator
)
8679 linfo
= &lss
->info
->data
.array
;
8681 /* Find an ss for the rhs. For operator expressions, we see the
8682 ss's for the operands. Any one of these will do. */
8684 for (; rss
&& rss
!= gfc_ss_terminator
; rss
= rss
->loop_chain
)
8685 if (rss
->info
->expr
!= expr1
&& rss
!= loop
->temp_ss
)
8688 if (expr2
&& rss
== gfc_ss_terminator
)
8691 gfc_start_block (&fblock
);
8693 /* Since the lhs is allocatable, this must be a descriptor type.
8694 Get the data and array size. */
8695 desc
= linfo
->descriptor
;
8696 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc
)));
8697 array1
= gfc_conv_descriptor_data_get (desc
);
8699 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
8700 deallocated if expr is an array of different shape or any of the
8701 corresponding length type parameter values of variable and expr
8702 differ." This assures F95 compatibility. */
8703 jump_label1
= gfc_build_label_decl (NULL_TREE
);
8704 jump_label2
= gfc_build_label_decl (NULL_TREE
);
8706 /* Allocate if data is NULL. */
8707 cond_null
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
,
8708 array1
, build_int_cst (TREE_TYPE (array1
), 0));
8710 if (expr1
->ts
.deferred
)
8711 cond_null
= gfc_evaluate_now (boolean_true_node
, &fblock
);
8713 cond_null
= gfc_evaluate_now (cond_null
, &fblock
);
8715 tmp
= build3_v (COND_EXPR
, cond_null
,
8716 build1_v (GOTO_EXPR
, jump_label1
),
8717 build_empty_stmt (input_location
));
8718 gfc_add_expr_to_block (&fblock
, tmp
);
8720 /* Get arrayspec if expr is a full array. */
8721 if (expr2
&& expr2
->expr_type
== EXPR_FUNCTION
8722 && expr2
->value
.function
.isym
8723 && expr2
->value
.function
.isym
->conversion
)
8725 /* For conversion functions, take the arg. */
8726 gfc_expr
*arg
= expr2
->value
.function
.actual
->expr
;
8727 as
= gfc_get_full_arrayspec_from_expr (arg
);
8730 as
= gfc_get_full_arrayspec_from_expr (expr2
);
8734 /* If the lhs shape is not the same as the rhs jump to setting the
8735 bounds and doing the reallocation....... */
8736 for (n
= 0; n
< expr1
->rank
; n
++)
8738 /* Check the shape. */
8739 lbound
= gfc_conv_descriptor_lbound_get (desc
, gfc_rank_cst
[n
]);
8740 ubound
= gfc_conv_descriptor_ubound_get (desc
, gfc_rank_cst
[n
]);
8741 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
8742 gfc_array_index_type
,
8743 loop
->to
[n
], loop
->from
[n
]);
8744 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
8745 gfc_array_index_type
,
8747 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
8748 gfc_array_index_type
,
8750 cond
= fold_build2_loc (input_location
, NE_EXPR
,
8752 tmp
, gfc_index_zero_node
);
8753 tmp
= build3_v (COND_EXPR
, cond
,
8754 build1_v (GOTO_EXPR
, jump_label1
),
8755 build_empty_stmt (input_location
));
8756 gfc_add_expr_to_block (&fblock
, tmp
);
8759 /* ....else jump past the (re)alloc code. */
8760 tmp
= build1_v (GOTO_EXPR
, jump_label2
);
8761 gfc_add_expr_to_block (&fblock
, tmp
);
8763 /* Add the label to start automatic (re)allocation. */
8764 tmp
= build1_v (LABEL_EXPR
, jump_label1
);
8765 gfc_add_expr_to_block (&fblock
, tmp
);
8767 /* If the lhs has not been allocated, its bounds will not have been
8768 initialized and so its size is set to zero. */
8769 size1
= gfc_create_var (gfc_array_index_type
, NULL
);
8770 gfc_init_block (&alloc_block
);
8771 gfc_add_modify (&alloc_block
, size1
, gfc_index_zero_node
);
8772 gfc_init_block (&realloc_block
);
8773 gfc_add_modify (&realloc_block
, size1
,
8774 gfc_conv_descriptor_size (desc
, expr1
->rank
));
8775 tmp
= build3_v (COND_EXPR
, cond_null
,
8776 gfc_finish_block (&alloc_block
),
8777 gfc_finish_block (&realloc_block
));
8778 gfc_add_expr_to_block (&fblock
, tmp
);
8780 /* Get the rhs size and fix it. */
8782 desc2
= rss
->info
->data
.array
.descriptor
;
8786 size2
= gfc_index_one_node
;
8787 for (n
= 0; n
< expr2
->rank
; n
++)
8789 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
8790 gfc_array_index_type
,
8791 loop
->to
[n
], loop
->from
[n
]);
8792 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
8793 gfc_array_index_type
,
8794 tmp
, gfc_index_one_node
);
8795 size2
= fold_build2_loc (input_location
, MULT_EXPR
,
8796 gfc_array_index_type
,
8799 size2
= gfc_evaluate_now (size2
, &fblock
);
8801 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
8804 /* If the lhs is deferred length, assume that the element size
8805 changes and force a reallocation. */
8806 if (expr1
->ts
.deferred
)
8807 neq_size
= gfc_evaluate_now (boolean_true_node
, &fblock
);
8809 neq_size
= gfc_evaluate_now (cond
, &fblock
);
8811 /* Deallocation of allocatable components will have to occur on
8812 reallocation. Fix the old descriptor now. */
8813 if ((expr1
->ts
.type
== BT_DERIVED
)
8814 && expr1
->ts
.u
.derived
->attr
.alloc_comp
)
8815 old_desc
= gfc_evaluate_now (desc
, &fblock
);
8817 old_desc
= NULL_TREE
;
8819 /* Now modify the lhs descriptor and the associated scalarizer
8820 variables. F2003 7.4.1.3: "If variable is or becomes an
8821 unallocated allocatable variable, then it is allocated with each
8822 deferred type parameter equal to the corresponding type parameters
8823 of expr , with the shape of expr , and with each lower bound equal
8824 to the corresponding element of LBOUND(expr)."
8825 Reuse size1 to keep a dimension-by-dimension track of the
8826 stride of the new array. */
8827 size1
= gfc_index_one_node
;
8828 offset
= gfc_index_zero_node
;
8830 for (n
= 0; n
< expr2
->rank
; n
++)
8832 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
8833 gfc_array_index_type
,
8834 loop
->to
[n
], loop
->from
[n
]);
8835 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
8836 gfc_array_index_type
,
8837 tmp
, gfc_index_one_node
);
8839 lbound
= gfc_index_one_node
;
8844 lbd
= get_std_lbound (expr2
, desc2
, n
,
8845 as
->type
== AS_ASSUMED_SIZE
);
8846 ubound
= fold_build2_loc (input_location
,
8848 gfc_array_index_type
,
8850 ubound
= fold_build2_loc (input_location
,
8852 gfc_array_index_type
,
8857 gfc_conv_descriptor_lbound_set (&fblock
, desc
,
8860 gfc_conv_descriptor_ubound_set (&fblock
, desc
,
8863 gfc_conv_descriptor_stride_set (&fblock
, desc
,
8866 lbound
= gfc_conv_descriptor_lbound_get (desc
,
8868 tmp2
= fold_build2_loc (input_location
, MULT_EXPR
,
8869 gfc_array_index_type
,
8871 offset
= fold_build2_loc (input_location
, MINUS_EXPR
,
8872 gfc_array_index_type
,
8874 size1
= fold_build2_loc (input_location
, MULT_EXPR
,
8875 gfc_array_index_type
,
8879 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
8880 the array offset is saved and the info.offset is used for a
8881 running offset. Use the saved_offset instead. */
8882 tmp
= gfc_conv_descriptor_offset (desc
);
8883 gfc_add_modify (&fblock
, tmp
, offset
);
8884 if (linfo
->saved_offset
8885 && TREE_CODE (linfo
->saved_offset
) == VAR_DECL
)
8886 gfc_add_modify (&fblock
, linfo
->saved_offset
, tmp
);
8888 /* Now set the deltas for the lhs. */
8889 for (n
= 0; n
< expr1
->rank
; n
++)
8891 tmp
= gfc_conv_descriptor_lbound_get (desc
, gfc_rank_cst
[n
]);
8893 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
8894 gfc_array_index_type
, tmp
,
8896 if (linfo
->delta
[dim
]
8897 && TREE_CODE (linfo
->delta
[dim
]) == VAR_DECL
)
8898 gfc_add_modify (&fblock
, linfo
->delta
[dim
], tmp
);
8901 /* Get the new lhs size in bytes. */
8902 if (expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.deferred
)
8904 if (expr2
->ts
.deferred
)
8906 if (TREE_CODE (expr2
->ts
.u
.cl
->backend_decl
) == VAR_DECL
)
8907 tmp
= expr2
->ts
.u
.cl
->backend_decl
;
8909 tmp
= rss
->info
->string_length
;
8913 tmp
= expr2
->ts
.u
.cl
->backend_decl
;
8914 if (!tmp
&& expr2
->expr_type
== EXPR_OP
8915 && expr2
->value
.op
.op
== INTRINSIC_CONCAT
)
8917 tmp
= concat_str_length (expr2
);
8918 expr2
->ts
.u
.cl
->backend_decl
= gfc_evaluate_now (tmp
, &fblock
);
8920 tmp
= fold_convert (TREE_TYPE (expr1
->ts
.u
.cl
->backend_decl
), tmp
);
8923 if (expr1
->ts
.u
.cl
->backend_decl
8924 && TREE_CODE (expr1
->ts
.u
.cl
->backend_decl
) == VAR_DECL
)
8925 gfc_add_modify (&fblock
, expr1
->ts
.u
.cl
->backend_decl
, tmp
);
8927 gfc_add_modify (&fblock
, lss
->info
->string_length
, tmp
);
8929 else if (expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.u
.cl
->backend_decl
)
8931 tmp
= TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1
->ts
)));
8932 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
8933 gfc_array_index_type
, tmp
,
8934 expr1
->ts
.u
.cl
->backend_decl
);
8937 tmp
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1
->ts
));
8938 tmp
= fold_convert (gfc_array_index_type
, tmp
);
8939 size2
= fold_build2_loc (input_location
, MULT_EXPR
,
8940 gfc_array_index_type
,
8942 size2
= fold_convert (size_type_node
, size2
);
8943 size2
= fold_build2_loc (input_location
, MAX_EXPR
, size_type_node
,
8944 size2
, size_one_node
);
8945 size2
= gfc_evaluate_now (size2
, &fblock
);
8947 /* For deferred character length, the 'size' field of the dtype might
8948 have changed so set the dtype. */
8949 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc
))
8950 && expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.deferred
)
8953 tmp
= gfc_conv_descriptor_dtype (desc
);
8954 if (expr2
->ts
.u
.cl
->backend_decl
)
8955 type
= gfc_typenode_for_spec (&expr2
->ts
);
8957 type
= gfc_typenode_for_spec (&expr1
->ts
);
8959 gfc_add_modify (&fblock
, tmp
,
8960 gfc_get_dtype_rank_type (expr1
->rank
,type
));
8963 /* Realloc expression. Note that the scalarizer uses desc.data
8964 in the array reference - (*desc.data)[<element>]. */
8965 gfc_init_block (&realloc_block
);
8967 if ((expr1
->ts
.type
== BT_DERIVED
)
8968 && expr1
->ts
.u
.derived
->attr
.alloc_comp
)
8970 tmp
= gfc_deallocate_alloc_comp_no_caf (expr1
->ts
.u
.derived
, old_desc
,
8972 gfc_add_expr_to_block (&realloc_block
, tmp
);
8975 tmp
= build_call_expr_loc (input_location
,
8976 builtin_decl_explicit (BUILT_IN_REALLOC
), 2,
8977 fold_convert (pvoid_type_node
, array1
),
8979 gfc_conv_descriptor_data_set (&realloc_block
,
8982 if ((expr1
->ts
.type
== BT_DERIVED
)
8983 && expr1
->ts
.u
.derived
->attr
.alloc_comp
)
8985 tmp
= gfc_nullify_alloc_comp (expr1
->ts
.u
.derived
, desc
,
8987 gfc_add_expr_to_block (&realloc_block
, tmp
);
8990 realloc_expr
= gfc_finish_block (&realloc_block
);
8992 /* Only reallocate if sizes are different. */
8993 tmp
= build3_v (COND_EXPR
, neq_size
, realloc_expr
,
8994 build_empty_stmt (input_location
));
8998 /* Malloc expression. */
8999 gfc_init_block (&alloc_block
);
9000 tmp
= build_call_expr_loc (input_location
,
9001 builtin_decl_explicit (BUILT_IN_MALLOC
),
9003 gfc_conv_descriptor_data_set (&alloc_block
,
9006 /* We already set the dtype in the case of deferred character
9008 if (!(GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc
))
9009 && expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.deferred
))
9011 tmp
= gfc_conv_descriptor_dtype (desc
);
9012 gfc_add_modify (&alloc_block
, tmp
, gfc_get_dtype (TREE_TYPE (desc
)));
9015 if ((expr1
->ts
.type
== BT_DERIVED
)
9016 && expr1
->ts
.u
.derived
->attr
.alloc_comp
)
9018 tmp
= gfc_nullify_alloc_comp (expr1
->ts
.u
.derived
, desc
,
9020 gfc_add_expr_to_block (&alloc_block
, tmp
);
9022 alloc_expr
= gfc_finish_block (&alloc_block
);
9024 /* Malloc if not allocated; realloc otherwise. */
9025 tmp
= build_int_cst (TREE_TYPE (array1
), 0);
9026 cond
= fold_build2_loc (input_location
, EQ_EXPR
,
9029 tmp
= build3_v (COND_EXPR
, cond
, alloc_expr
, realloc_expr
);
9030 gfc_add_expr_to_block (&fblock
, tmp
);
9032 /* Make sure that the scalarizer data pointer is updated. */
9034 && TREE_CODE (linfo
->data
) == VAR_DECL
)
9036 tmp
= gfc_conv_descriptor_data_get (desc
);
9037 gfc_add_modify (&fblock
, linfo
->data
, tmp
);
9040 /* Add the exit label. */
9041 tmp
= build1_v (LABEL_EXPR
, jump_label2
);
9042 gfc_add_expr_to_block (&fblock
, tmp
);
9044 return gfc_finish_block (&fblock
);
9048 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
9049 Do likewise, recursively if necessary, with the allocatable components of
9053 gfc_trans_deferred_array (gfc_symbol
* sym
, gfc_wrapped_block
* block
)
9059 stmtblock_t cleanup
;
9062 bool sym_has_alloc_comp
, has_finalizer
;
9064 sym_has_alloc_comp
= (sym
->ts
.type
== BT_DERIVED
9065 || sym
->ts
.type
== BT_CLASS
)
9066 && sym
->ts
.u
.derived
->attr
.alloc_comp
;
9067 has_finalizer
= sym
->ts
.type
== BT_CLASS
|| sym
->ts
.type
== BT_DERIVED
9068 ? gfc_is_finalizable (sym
->ts
.u
.derived
, NULL
) : false;
9070 /* Make sure the frontend gets these right. */
9071 gcc_assert (sym
->attr
.pointer
|| sym
->attr
.allocatable
|| sym_has_alloc_comp
9074 gfc_save_backend_locus (&loc
);
9075 gfc_set_backend_locus (&sym
->declared_at
);
9076 gfc_init_block (&init
);
9078 gcc_assert (TREE_CODE (sym
->backend_decl
) == VAR_DECL
9079 || TREE_CODE (sym
->backend_decl
) == PARM_DECL
);
9081 if (sym
->ts
.type
== BT_CHARACTER
9082 && !INTEGER_CST_P (sym
->ts
.u
.cl
->backend_decl
))
9084 gfc_conv_string_length (sym
->ts
.u
.cl
, NULL
, &init
);
9085 gfc_trans_vla_type_sizes (sym
, &init
);
9088 /* Dummy, use associated and result variables don't need anything special. */
9089 if (sym
->attr
.dummy
|| sym
->attr
.use_assoc
|| sym
->attr
.result
)
9091 gfc_add_init_cleanup (block
, gfc_finish_block (&init
), NULL_TREE
);
9092 gfc_restore_backend_locus (&loc
);
9096 descriptor
= sym
->backend_decl
;
9098 /* Although static, derived types with default initializers and
9099 allocatable components must not be nulled wholesale; instead they
9100 are treated component by component. */
9101 if (TREE_STATIC (descriptor
) && !sym_has_alloc_comp
&& !has_finalizer
)
9103 /* SAVEd variables are not freed on exit. */
9104 gfc_trans_static_array_pointer (sym
);
9106 gfc_add_init_cleanup (block
, gfc_finish_block (&init
), NULL_TREE
);
9107 gfc_restore_backend_locus (&loc
);
9111 /* Get the descriptor type. */
9112 type
= TREE_TYPE (sym
->backend_decl
);
9114 if ((sym_has_alloc_comp
|| (has_finalizer
&& sym
->ts
.type
!= BT_CLASS
))
9115 && !(sym
->attr
.pointer
|| sym
->attr
.allocatable
))
9118 && !(TREE_STATIC (sym
->backend_decl
) && sym
->attr
.is_main_program
))
9120 if (sym
->value
== NULL
9121 || !gfc_has_default_initializer (sym
->ts
.u
.derived
))
9123 rank
= sym
->as
? sym
->as
->rank
: 0;
9124 tmp
= gfc_nullify_alloc_comp (sym
->ts
.u
.derived
,
9126 gfc_add_expr_to_block (&init
, tmp
);
9129 gfc_init_default_dt (sym
, &init
, false);
9132 else if (!GFC_DESCRIPTOR_TYPE_P (type
))
9134 /* If the backend_decl is not a descriptor, we must have a pointer
9136 descriptor
= build_fold_indirect_ref_loc (input_location
,
9138 type
= TREE_TYPE (descriptor
);
9141 /* NULLIFY the data pointer, for non-saved allocatables. */
9142 if (GFC_DESCRIPTOR_TYPE_P (type
) && !sym
->attr
.save
&& sym
->attr
.allocatable
)
9143 gfc_conv_descriptor_data_set (&init
, descriptor
, null_pointer_node
);
9145 gfc_restore_backend_locus (&loc
);
9146 gfc_init_block (&cleanup
);
9148 /* Allocatable arrays need to be freed when they go out of scope.
9149 The allocatable components of pointers must not be touched. */
9150 if (!sym
->attr
.allocatable
&& has_finalizer
&& sym
->ts
.type
!= BT_CLASS
9151 && !sym
->attr
.pointer
&& !sym
->attr
.artificial
&& !sym
->attr
.save
9152 && !sym
->ns
->proc_name
->attr
.is_main_program
)
9155 sym
->attr
.referenced
= 1;
9156 e
= gfc_lval_expr_from_sym (sym
);
9157 gfc_add_finalizer_call (&cleanup
, e
);
9160 else if ((!sym
->attr
.allocatable
|| !has_finalizer
)
9161 && sym_has_alloc_comp
&& !(sym
->attr
.function
|| sym
->attr
.result
)
9162 && !sym
->attr
.pointer
&& !sym
->attr
.save
9163 && !sym
->ns
->proc_name
->attr
.is_main_program
)
9166 rank
= sym
->as
? sym
->as
->rank
: 0;
9167 tmp
= gfc_deallocate_alloc_comp (sym
->ts
.u
.derived
, descriptor
, rank
);
9168 gfc_add_expr_to_block (&cleanup
, tmp
);
9171 if (sym
->attr
.allocatable
&& (sym
->attr
.dimension
|| sym
->attr
.codimension
)
9172 && !sym
->attr
.save
&& !sym
->attr
.result
9173 && !sym
->ns
->proc_name
->attr
.is_main_program
)
9176 e
= has_finalizer
? gfc_lval_expr_from_sym (sym
) : NULL
;
9177 tmp
= gfc_trans_dealloc_allocated (sym
->backend_decl
,
9178 sym
->attr
.codimension
, e
);
9181 gfc_add_expr_to_block (&cleanup
, tmp
);
9184 gfc_add_init_cleanup (block
, gfc_finish_block (&init
),
9185 gfc_finish_block (&cleanup
));
9188 /************ Expression Walking Functions ******************/
9190 /* Walk a variable reference.
9192 Possible extension - multiple component subscripts.
9193 x(:,:) = foo%a(:)%b(:)
9195 forall (i=..., j=...)
9196 x(i,j) = foo%a(j)%b(i)
9198 This adds a fair amount of complexity because you need to deal with more
9199 than one ref. Maybe handle in a similar manner to vector subscripts.
9200 Maybe not worth the effort. */
9204 gfc_walk_variable_expr (gfc_ss
* ss
, gfc_expr
* expr
)
9208 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
9209 if (ref
->type
== REF_ARRAY
&& ref
->u
.ar
.type
!= AR_ELEMENT
)
9212 return gfc_walk_array_ref (ss
, expr
, ref
);
9217 gfc_walk_array_ref (gfc_ss
* ss
, gfc_expr
* expr
, gfc_ref
* ref
)
9223 for (; ref
; ref
= ref
->next
)
9225 if (ref
->type
== REF_SUBSTRING
)
9227 ss
= gfc_get_scalar_ss (ss
, ref
->u
.ss
.start
);
9228 ss
= gfc_get_scalar_ss (ss
, ref
->u
.ss
.end
);
9231 /* We're only interested in array sections from now on. */
9232 if (ref
->type
!= REF_ARRAY
)
9240 for (n
= ar
->dimen
- 1; n
>= 0; n
--)
9241 ss
= gfc_get_scalar_ss (ss
, ar
->start
[n
]);
9245 newss
= gfc_get_array_ss (ss
, expr
, ar
->as
->rank
, GFC_SS_SECTION
);
9246 newss
->info
->data
.array
.ref
= ref
;
9248 /* Make sure array is the same as array(:,:), this way
9249 we don't need to special case all the time. */
9250 ar
->dimen
= ar
->as
->rank
;
9251 for (n
= 0; n
< ar
->dimen
; n
++)
9253 ar
->dimen_type
[n
] = DIMEN_RANGE
;
9255 gcc_assert (ar
->start
[n
] == NULL
);
9256 gcc_assert (ar
->end
[n
] == NULL
);
9257 gcc_assert (ar
->stride
[n
] == NULL
);
9263 newss
= gfc_get_array_ss (ss
, expr
, 0, GFC_SS_SECTION
);
9264 newss
->info
->data
.array
.ref
= ref
;
9266 /* We add SS chains for all the subscripts in the section. */
9267 for (n
= 0; n
< ar
->dimen
; n
++)
9271 switch (ar
->dimen_type
[n
])
9274 /* Add SS for elemental (scalar) subscripts. */
9275 gcc_assert (ar
->start
[n
]);
9276 indexss
= gfc_get_scalar_ss (gfc_ss_terminator
, ar
->start
[n
]);
9277 indexss
->loop_chain
= gfc_ss_terminator
;
9278 newss
->info
->data
.array
.subscript
[n
] = indexss
;
9282 /* We don't add anything for sections, just remember this
9283 dimension for later. */
9284 newss
->dim
[newss
->dimen
] = n
;
9289 /* Create a GFC_SS_VECTOR index in which we can store
9290 the vector's descriptor. */
9291 indexss
= gfc_get_array_ss (gfc_ss_terminator
, ar
->start
[n
],
9293 indexss
->loop_chain
= gfc_ss_terminator
;
9294 newss
->info
->data
.array
.subscript
[n
] = indexss
;
9295 newss
->dim
[newss
->dimen
] = n
;
9300 /* We should know what sort of section it is by now. */
9304 /* We should have at least one non-elemental dimension,
9305 unless we are creating a descriptor for a (scalar) coarray. */
9306 gcc_assert (newss
->dimen
> 0
9307 || newss
->info
->data
.array
.ref
->u
.ar
.as
->corank
> 0);
9312 /* We should know what sort of section it is by now. */
9321 /* Walk an expression operator. If only one operand of a binary expression is
9322 scalar, we must also add the scalar term to the SS chain. */
/* Walk both operands of an expression operator node (unary or binary).
   If exactly one operand of a binary operation is scalar, a scalar SS
   entry is created for it so that both terms appear in the
   scalarization chain.  Returns the (possibly new) head of the chain.
   NOTE(review): this extracted listing omits several original source
   lines (visible as gaps in the embedded line numbers); the comments
   below describe only what is visible here.  */
9325 gfc_walk_op_expr (gfc_ss
* ss
, gfc_expr
* expr
)
/* Walk the first operand; HEAD now carries any array terms of op1.  */
9330 head
= gfc_walk_subexpr (ss
, expr
->value
.op
.op1
);
/* A unary operator has no second operand.  */
9331 if (expr
->value
.op
.op2
== NULL
)
/* Walk the second operand onto the chain built from the first.  */
9334 head2
= gfc_walk_subexpr (head
, expr
->value
.op
.op2
);
9336 /* All operands are scalar. Pass back and let the caller deal with it. */
9340 /* All operands require scalarization. */
/* HEAD != SS means op1 added terms; for a binary op, HEAD2 != HEAD
   means op2 added terms as well.  */
9341 if (head
!= ss
&& (expr
->value
.op
.op2
== NULL
|| head2
!= head
))
9344 /* One of the operands needs scalarization, the other is scalar.
9345 Create a gfc_ss for the scalar expression. */
9348 /* First operand is scalar. We build the chain in reverse order, so
9349 add the scalar SS after the second operand. */
/* Find the entry whose next pointer is the original chain tail SS.  */
9351 while (head
&& head
->next
!= ss
)
9353 /* Check we haven't somehow broken the chain. */
/* Splice a scalar SS for op1 in just before the old chain.  */
9355 head
->next
= gfc_get_scalar_ss (ss
, expr
->value
.op
.op1
);
9357 else /* head2 == head */
9359 gcc_assert (head2
== head
);
9360 /* Second operand is scalar. */
/* Prepend a scalar SS for op2 to the chain built from op1.  */
9361 head2
= gfc_get_scalar_ss (head2
, expr
->value
.op
.op2
);
9368 /* Reverse a SS chain. */
/* Reverse an SS chain, returning the new head.  The walkers build
   chains in reverse order, so this restores left-to-right order before
   the chain is handed to the scalarizer.
   NOTE(review): the loop body performing the actual pointer reversal
   and the final return are among the lines omitted from this extracted
   listing.  */
9371 gfc_reverse_ss (gfc_ss
* ss
)
/* A null chain is a caller bug; the terminator marks a valid end.  */
9376 gcc_assert (ss
!= NULL
);
/* HEAD accumulates the reversed chain, seeded with the terminator.  */
9378 head
= gfc_ss_terminator
;
9379 while (ss
!= gfc_ss_terminator
)
9382 /* Check we didn't somehow break the chain. */
9383 gcc_assert (next
!= NULL
);
9393 /* Given an expression referring to a procedure, return the symbol of its
9394 interface. We can't get the procedure symbol directly as we have to handle
9395 the case of (deferred) type-bound procedures. */
/* Given an expression referring to a procedure, return the symbol of
   its interface (NULL when PROCEDURE_REF is NULL).  Handles both plain
   procedure references and (deferred) type-bound procedure pointers.
   NOTE(review): the return statements are among the lines omitted from
   this extracted listing.  */
9398 gfc_get_proc_ifc_for_expr (gfc_expr
*procedure_ref
)
9403 if (procedure_ref
== NULL
)
9406 /* Normal procedure case. */
/* Prefer the explicit external symbol of a function call...  */
9407 if (procedure_ref
->expr_type
== EXPR_FUNCTION
9408 && procedure_ref
->value
.function
.esym
)
9409 sym
= procedure_ref
->value
.function
.esym
;
/* ...otherwise fall back to the symtree's symbol.  */
9411 sym
= procedure_ref
->symtree
->n
.sym
;
9413 /* Typebound procedure case. */
/* The last procedure-pointer component in the reference chain wins:
   SYM is replaced by that component's interface symbol.  */
9414 for (ref
= procedure_ref
->ref
; ref
; ref
= ref
->next
)
9416 if (ref
->type
== REF_COMPONENT
9417 && ref
->u
.c
.component
->attr
.proc_pointer
)
9418 sym
= ref
->u
.c
.component
->ts
.interface
;
9427 /* Walk the arguments of an elemental function.
9428 PROC_IFC is used to check whether an argument is permitted to be absent. If
9429 it is NULL, we don't do the check and the argument is assumed to be present.
/* Walk the actual arguments of an elemental procedure.  Array
   arguments are walked normally; scalar arguments get a scalar SS
   entry of kind TYPE (GFC_SS_SCALAR or GFC_SS_REFERENCE).  PROC_IFC,
   when non-NULL, supplies the formal argument list so optional dummies
   can be flagged as possibly-null references.  If every argument is
   scalar, the built chain is freed and SS is passed back unchanged.
   NOTE(review): several original lines are omitted from this extracted
   listing; the comments below cover only what is visible.  */
9433 gfc_walk_elemental_function_args (gfc_ss
* ss
, gfc_actual_arglist
*arg
,
9434 gfc_symbol
*proc_ifc
, gfc_ss_type type
)
9436 gfc_formal_arglist
*dummy_arg
;
/* Build the argument chain starting from the empty (terminator) chain.  */
9442 head
= gfc_ss_terminator
;
/* Formal arguments are only available when an interface is known.  */
9446 dummy_arg
= gfc_sym_get_dummy_args (proc_ifc
);
9451 for (; arg
; arg
= arg
->next
)
/* Skip absent arguments and NULL() actual arguments.  */
9453 if (!arg
->expr
|| arg
->expr
->expr_type
== EXPR_NULL
)
9456 newss
= gfc_walk_subexpr (head
, arg
->expr
);
9459 /* Scalar argument. */
9460 gcc_assert (type
== GFC_SS_SCALAR
|| type
== GFC_SS_REFERENCE
);
9461 newss
= gfc_get_scalar_ss (head
, arg
->expr
);
9462 newss
->info
->type
= type
;
/* Remember which dummy this scalar actual corresponds to.  */
9464 newss
->info
->data
.scalar
.dummy_arg
= dummy_arg
->sym
;
/* An optional dummy passed an optional/allocatable/pointer variable
   may receive a null reference at run time; record that so the
   generated code can cope.  */
9469 if (dummy_arg
!= NULL
9470 && dummy_arg
->sym
->attr
.optional
9471 && arg
->expr
->expr_type
== EXPR_VARIABLE
9472 && (gfc_expr_attr (arg
->expr
).optional
9473 || gfc_expr_attr (arg
->expr
).allocatable
9474 || gfc_expr_attr (arg
->expr
).pointer
))
9475 newss
->info
->can_be_null_ref
= true;
/* Advance TAIL to the last real entry of the chain.  */
9481 while (tail
->next
!= gfc_ss_terminator
)
/* Step to the next formal argument in lockstep with the actuals.  */
9486 if (dummy_arg
!= NULL
)
9487 dummy_arg
= dummy_arg
->next
;
9492 /* If all the arguments are scalar we don't need the argument SS. */
9493 gfc_free_ss_chain (head
);
9498 /* Add it onto the existing chain. */
9504 /* Walk a function call. Scalar functions are passed back, and taken out of
9505 scalarization loops. For elemental functions we walk their arguments.
9506 The result of functions returning arrays is stored in a temporary outside
9507 the loop, so that the function is only called once. Hence we do not need
9508 to walk their arguments. */
/* Walk a function call expression.  Intrinsics are delegated to
   gfc_walk_intrinsic_function; functions returning arrays get a
   GFC_SS_FUNCTION (or class-array) entry; elemental functions have
   their arguments walked; a scalar function is passed back unchanged
   for the caller to handle.
   NOTE(review): several original lines are omitted from this extracted
   listing (visible as gaps in the embedded numbering); comments cover
   only what is visible.  */
9511 gfc_walk_function_expr (gfc_ss
* ss
, gfc_expr
* expr
)
9513 gfc_intrinsic_sym
*isym
;
9515 gfc_component
*comp
= NULL
;
9517 isym
= expr
->value
.function
.isym
;
9519 /* Handle intrinsic functions separately. */
9521 return gfc_walk_intrinsic_function (ss
, expr
, isym
);
/* Use the explicit external symbol when there is one...  */
9523 sym
= expr
->value
.function
.esym
;
/* ...otherwise the symtree's symbol.  */
9525 sym
= expr
->symtree
->n
.sym
;
/* Functions returning an allocatable class array: the rank comes from
   the declared result's array spec.  */
9527 if (gfc_is_alloc_class_array_function (expr
))
9528 return gfc_get_array_ss (ss
, expr
,
9529 CLASS_DATA (expr
->value
.function
.esym
->result
)->as
->rank
,
9532 /* A function that returns arrays. */
9533 comp
= gfc_get_proc_ptr_comp (expr
);
9534 if ((!comp
&& gfc_return_by_reference (sym
) && sym
->result
->attr
.dimension
)
9535 || (comp
&& comp
->attr
.dimension
))
9536 return gfc_get_array_ss (ss
, expr
, expr
->rank
, GFC_SS_FUNCTION
);
9538 /* Walk the parameters of an elemental function. For now we always pass
9540 if (sym
->attr
.elemental
|| (comp
&& comp
->attr
.elemental
))
9542 gfc_ss
*old_ss
= ss
;
9544 ss
= gfc_walk_elemental_function_args (old_ss
,
9545 expr
->value
.function
.actual
,
9546 gfc_get_proc_ifc_for_expr (expr
),
/* Conservatively assume an outer array dependency unless the callee is
   a locally declared, non-pointer procedure known to be free of one.
   NOTE(review): the first part of this condition is among the omitted
   lines.  */
9550 || sym
->attr
.proc_pointer
9551 || sym
->attr
.if_source
!= IFSRC_DECL
9552 || sym
->attr
.array_outer_dependency
))
9553 ss
->info
->array_outer_dependency
= 1;
9556 /* Scalar functions are OK as these are evaluated outside the scalarization
9557 loop. Pass back and let the caller deal with it. */
9562 /* An array temporary is constructed for array constructors. */
9565 gfc_walk_array_constructor (gfc_ss
* ss
, gfc_expr
* expr
)
9567 return gfc_get_array_ss (ss
, expr
, expr
->rank
, GFC_SS_CONSTRUCTOR
);
9571 /* Walk an expression. Add walked expressions to the head of the SS chain.
9572 A wholly scalar expression will not be added. */
/* Walk an arbitrary expression, dispatching on its expression type.
   Array terms are added to the head of the SS chain; wholly scalar
   expression kinds (constants, structures, substrings) are passed back
   unchanged for the caller to deal with.
   NOTE(review): the case labels for variables/operators/functions and
   the returns are among the lines omitted from this extracted
   listing.  */
9575 gfc_walk_subexpr (gfc_ss
* ss
, gfc_expr
* expr
)
9579 switch (expr
->expr_type
)
/* Variable reference: array sections/vector subscripts handled here.  */
9582 head
= gfc_walk_variable_expr (ss
, expr
);
/* Operator node: walk both operands.  */
9586 head
= gfc_walk_op_expr (ss
, expr
);
/* Function call: intrinsics, array-valued and elemental functions.  */
9590 head
= gfc_walk_function_expr (ss
, expr
);
9595 case EXPR_STRUCTURE
:
9596 /* Pass back and let the caller deal with it. */
/* Array constructor: always built in a temporary.  */
9600 head
= gfc_walk_array_constructor (ss
, expr
);
9603 case EXPR_SUBSTRING
:
9604 /* Pass back and let the caller deal with it. */
/* Any other expression type here indicates a front-end bug.  */
9608 gfc_internal_error ("bad expression type during walk (%d)",
9615 /* Entry point for expression walking.
9616 A return value equal to the passed chain means this is
9617 a scalar expression. It is up to the caller to take whatever action is
9618 necessary to translate these. */
/* Entry point for expression walking: build the SS chain for EXPR and
   reverse it into evaluation order.  A result equal to the chain
   passed in (gfc_ss_terminator) means EXPR is wholly scalar and the
   caller must handle it itself.  */
9621 gfc_walk_expr (gfc_expr
* expr
)
/* Walk starting from the empty chain...  */
9625 res
= gfc_walk_subexpr (gfc_ss_terminator
, expr
);
/* ...then undo the reverse order in which the chain was built.  */
9626 return gfc_reverse_ss (res
);