1 /* Array translation routines
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* trans-array.c-- Various array related code, including scalarization,
23 allocation, initialization and other support routines. */
25 /* How the scalarizer works.
26 In gfortran, array expressions use the same core routines as scalar
27 expressions.
28 First, a Scalarization State (SS) chain is built. This is done by walking
29 the expression tree, and building a linear list of the terms in the
30 expression. As the tree is walked, scalar subexpressions are translated.
32 The scalarization parameters are stored in a gfc_loopinfo structure.
33 First the start and stride of each term is calculated by
34 gfc_conv_ss_startstride. During this process the expressions for the array
35 descriptors and data pointers are also translated.
37 If the expression is an assignment, we must then resolve any dependencies.
38 In Fortran all the rhs values of an assignment must be evaluated before
39 any assignments take place. This can require a temporary array to store the
40 values. We also require a temporary when we are passing array expressions
41 or vector subscripts as procedure parameters.
43 Array sections are passed without copying to a temporary. These use the
44 scalarizer to determine the shape of the section. The flag
45 loop->array_parameter tells the scalarizer that the actual values and loop
46 variables will not be required.
48 The function gfc_conv_loop_setup generates the scalarization setup code.
49 It determines the range of the scalarizing loop variables. If a temporary
50 is required, this is created and initialized. Code for scalar expressions
51 taken outside the loop is also generated at this time. Next the offset and
52 scaling required to translate from loop variables to array indices for each
53 term is calculated.
55 A call to gfc_start_scalarized_body marks the start of the scalarized
56 expression. This creates a scope and declares the loop variables. Before
57 calling this, gfc_mark_ss_chain_used must be used to indicate which terms
58 will be used inside this loop.
60 The scalar gfc_conv_* functions are then used to build the main body of the
61 scalarization loop. Scalarization loop variables and precalculated scalar
62 values are automatically substituted. Note that gfc_advance_se_ss_chain
63 must be used, rather than changing the se->ss directly.
65 For assignment expressions requiring a temporary two sub loops are
66 generated. The first stores the result of the expression in the temporary,
67 the second copies it to the result. A call to
68 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
69 the start of the copying loop. The temporary may be less than full rank.
71 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
72 loops. The loops are added to the pre chain of the loopinfo. The post
73 chain may still contain cleanup code.
75 After the loop code has been added into its parent scope gfc_cleanup_loop
76 is called to free all the SS allocated by the scalarizer. */
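/* Illustrative sketch only (not part of this file's code): the steps above in
   rough call order, loosely following what gfc_trans_array_constructor_subarray
   later in this file does for a simple array expression EXPR; temporaries,
   dependency handling and error paths are omitted.

     gfc_se se;
     gfc_ss *ss;
     gfc_loopinfo loop;
     stmtblock_t body;

     gfc_init_se (&se, NULL);
     ss = gfc_walk_expr (expr);                  Build the SS chain.
     gfc_init_loopinfo (&loop);
     gfc_add_ss_to_loop (&loop, ss);             Associate the chain with a loop.
     gfc_conv_ss_startstride (&loop);            Starts, strides, descriptors.
     gfc_conv_loop_setup (&loop, &expr->where);  Loop bounds, temporaries.
     gfc_mark_ss_chain_used (ss, 1);             Terms used in the main body.
     gfc_start_scalarized_body (&loop, &body);
     gfc_copy_loopinfo_to_se (&se, &loop);
     se.ss = ss;
     gfc_conv_expr (&se, expr);                  Scalar code for one element.
     gfc_add_block_to_block (&body, &se.pre);
     gfc_add_block_to_block (&body, &se.post);
     gfc_trans_scalarizing_loops (&loop, &body);
     gfc_cleanup_loop (&loop);  */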
78 #include "config.h"
79 #include "system.h"
80 #include "coretypes.h"
81 #include "gfortran.h"
82 #include "alias.h"
83 #include "tree.h"
84 #include "options.h"
85 #include "fold-const.h"
86 #include "gimple-expr.h"
87 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
88 #include "flags.h"
89 #include "constructor.h"
90 #include "trans.h"
91 #include "trans-stmt.h"
92 #include "trans-types.h"
93 #include "trans-array.h"
94 #include "trans-const.h"
95 #include "dependency.h"
97 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
99 /* The contents of this structure aren't actually used, just the address. */
100 static gfc_ss gfc_ss_terminator_var;
101 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
104 static tree
105 gfc_array_dataptr_type (tree desc)
107 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
111 /* Build expressions to access the members of an array descriptor.
112 It's surprisingly easy to mess up here, so never access
113 an array descriptor by "brute force", always use these
114 functions. This also avoids problems if we change the format
115 of an array descriptor.
117 To understand these magic numbers, look at the comments
118 before gfc_build_array_type() in trans-types.c.
120 The code within these defines should be the only code which knows the format
121 of an array descriptor.
123 Any code just needing to obtain the bounds of an array should use
124 gfc_conv_array_* rather than the following functions, as these will return
125 known constant values, and work with arrays which do not have descriptors.
127 Don't forget to #undef these! */
129 #define DATA_FIELD 0
130 #define OFFSET_FIELD 1
131 #define DTYPE_FIELD 2
132 #define DIMENSION_FIELD 3
133 #define CAF_TOKEN_FIELD 4
135 #define STRIDE_SUBFIELD 0
136 #define LBOUND_SUBFIELD 1
137 #define UBOUND_SUBFIELD 2
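/* Illustrative sketch only, mirroring the field indices above; the
   authoritative layout is the one described before gfc_build_array_type ()
   in trans-types.c, as noted in the comment above.  A rank-R descriptor
   looks roughly like:

     struct descriptor
     {
       void *data;                            DATA_FIELD
       index_type offset;                     OFFSET_FIELD
       index_type dtype;                      DTYPE_FIELD
       struct
       {
         index_type stride, lbound, ubound;   STRIDE/LBOUND/UBOUND_SUBFIELD
       } dim[R];                              DIMENSION_FIELD
       void *caf_token;                       CAF_TOKEN_FIELD (-fcoarray=lib)
     };

   Element (i1, ..., iR) lives at data[offset + i1*stride1 + ... + iR*strideR],
   which is why shifting an lbound (see gfc_conv_shift_descriptor_lbound below)
   must also adjust offset.  */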
139 /* This provides READ-ONLY access to the data field. The field itself
140 doesn't have the proper type. */
142 tree
143 gfc_conv_descriptor_data_get (tree desc)
145 tree field, type, t;
147 type = TREE_TYPE (desc);
148 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
150 field = TYPE_FIELDS (type);
151 gcc_assert (DATA_FIELD == 0);
153 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
154 field, NULL_TREE);
155 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
157 return t;
160 /* This provides WRITE access to the data field.  The field itself does
162 not have the proper pointer type, so the value being stored is converted
164 to the field's type first. */
168 void
169 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
171 tree field, type, t;
173 type = TREE_TYPE (desc);
174 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
176 field = TYPE_FIELDS (type);
177 gcc_assert (DATA_FIELD == 0);
179 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
180 field, NULL_TREE);
181 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
185 /* This provides address access to the data field. This should only be
186 used by array allocation, passing this on to the runtime. */
188 tree
189 gfc_conv_descriptor_data_addr (tree desc)
191 tree field, type, t;
193 type = TREE_TYPE (desc);
194 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
196 field = TYPE_FIELDS (type);
197 gcc_assert (DATA_FIELD == 0);
199 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
200 field, NULL_TREE);
201 return gfc_build_addr_expr (NULL_TREE, t);
204 static tree
205 gfc_conv_descriptor_offset (tree desc)
207 tree type;
208 tree field;
210 type = TREE_TYPE (desc);
211 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
213 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
214 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
216 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
217 desc, field, NULL_TREE);
220 tree
221 gfc_conv_descriptor_offset_get (tree desc)
223 return gfc_conv_descriptor_offset (desc);
226 void
227 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
228 tree value)
230 tree t = gfc_conv_descriptor_offset (desc);
231 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
235 tree
236 gfc_conv_descriptor_dtype (tree desc)
238 tree field;
239 tree type;
241 type = TREE_TYPE (desc);
242 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
244 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
245 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
247 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
248 desc, field, NULL_TREE);
252 tree
253 gfc_conv_descriptor_rank (tree desc)
255 tree tmp;
256 tree dtype;
258 dtype = gfc_conv_descriptor_dtype (desc);
259 tmp = build_int_cst (TREE_TYPE (dtype), GFC_DTYPE_RANK_MASK);
260 tmp = fold_build2_loc (input_location, BIT_AND_EXPR, TREE_TYPE (dtype),
261 dtype, tmp);
262 return fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp);
266 tree
267 gfc_get_descriptor_dimension (tree desc)
269 tree type, field;
271 type = TREE_TYPE (desc);
272 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
274 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
275 gcc_assert (field != NULL_TREE
276 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
277 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
279 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
280 desc, field, NULL_TREE);
284 static tree
285 gfc_conv_descriptor_dimension (tree desc, tree dim)
287 tree tmp;
289 tmp = gfc_get_descriptor_dimension (desc);
291 return gfc_build_array_ref (tmp, dim, NULL);
295 tree
296 gfc_conv_descriptor_token (tree desc)
298 tree type;
299 tree field;
301 type = TREE_TYPE (desc);
302 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
303 gcc_assert (flag_coarray == GFC_FCOARRAY_LIB);
304 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
306 /* Should be a restricted pointer - except in the finalization wrapper. */
307 gcc_assert (field != NULL_TREE
308 && (TREE_TYPE (field) == prvoid_type_node
309 || TREE_TYPE (field) == pvoid_type_node));
311 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
312 desc, field, NULL_TREE);
316 static tree
317 gfc_conv_descriptor_stride (tree desc, tree dim)
319 tree tmp;
320 tree field;
322 tmp = gfc_conv_descriptor_dimension (desc, dim);
323 field = TYPE_FIELDS (TREE_TYPE (tmp));
324 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
325 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
327 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
328 tmp, field, NULL_TREE);
329 return tmp;
332 tree
333 gfc_conv_descriptor_stride_get (tree desc, tree dim)
335 tree type = TREE_TYPE (desc);
336 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
337 if (integer_zerop (dim)
338 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
339 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
340 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_RANK_CONT
341 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
342 return gfc_index_one_node;
344 return gfc_conv_descriptor_stride (desc, dim);
347 void
348 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
349 tree dim, tree value)
351 tree t = gfc_conv_descriptor_stride (desc, dim);
352 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
355 static tree
356 gfc_conv_descriptor_lbound (tree desc, tree dim)
358 tree tmp;
359 tree field;
361 tmp = gfc_conv_descriptor_dimension (desc, dim);
362 field = TYPE_FIELDS (TREE_TYPE (tmp));
363 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
364 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
366 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
367 tmp, field, NULL_TREE);
368 return tmp;
371 tree
372 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
374 return gfc_conv_descriptor_lbound (desc, dim);
377 void
378 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
379 tree dim, tree value)
381 tree t = gfc_conv_descriptor_lbound (desc, dim);
382 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
385 static tree
386 gfc_conv_descriptor_ubound (tree desc, tree dim)
388 tree tmp;
389 tree field;
391 tmp = gfc_conv_descriptor_dimension (desc, dim);
392 field = TYPE_FIELDS (TREE_TYPE (tmp));
393 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
394 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
396 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
397 tmp, field, NULL_TREE);
398 return tmp;
401 tree
402 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
404 return gfc_conv_descriptor_ubound (desc, dim);
407 void
408 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
409 tree dim, tree value)
411 tree t = gfc_conv_descriptor_ubound (desc, dim);
412 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
415 /* Build a null array descriptor constructor. */
417 tree
418 gfc_build_null_descriptor (tree type)
420 tree field;
421 tree tmp;
423 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
424 gcc_assert (DATA_FIELD == 0);
425 field = TYPE_FIELDS (type);
427 /* Set a NULL data pointer. */
428 tmp = build_constructor_single (type, field, null_pointer_node);
429 TREE_CONSTANT (tmp) = 1;
430 /* All other fields are ignored. */
432 return tmp;
436 /* Modify a descriptor such that the lbound of a given dimension is the value
437 specified. This also updates ubound and offset accordingly. */
439 void
440 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
441 int dim, tree new_lbound)
443 tree offs, ubound, lbound, stride;
444 tree diff, offs_diff;
446 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
448 offs = gfc_conv_descriptor_offset_get (desc);
449 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
450 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
451 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
453 /* Get the difference (new - old) by which to shift the bounds and offset. */
454 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
455 new_lbound, lbound);
457 /* Shift ubound and offset accordingly. This has to be done before
458 updating the lbound, as they depend on the lbound expression! */
459 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
460 ubound, diff);
461 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
462 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
463 diff, stride);
464 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
465 offs, offs_diff);
466 gfc_conv_descriptor_offset_set (block, desc, offs);
468 /* Finally set lbound to value we want. */
469 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
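/* A worked example (illustrative only): for a dimension currently described by
   lbound = 1, ubound = 10, stride = 1 and offset = -1, shifting the lbound to 0
   gives diff = 0 - 1 = -1, so ubound becomes 10 + (-1) = 9 and offset becomes
   -1 - (-1 * 1) = 0; each element keeps its address data[offset + i * stride]
   under the new index range 0..9.  */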
473 /* Cleanup those #defines. */
475 #undef DATA_FIELD
476 #undef OFFSET_FIELD
477 #undef DTYPE_FIELD
478 #undef DIMENSION_FIELD
479 #undef CAF_TOKEN_FIELD
480 #undef STRIDE_SUBFIELD
481 #undef LBOUND_SUBFIELD
482 #undef UBOUND_SUBFIELD
485 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
486 flags & 1 = Main loop body.
487 flags & 2 = temp copy loop. */
489 void
490 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
492 for (; ss != gfc_ss_terminator; ss = ss->next)
493 ss->info->useflags = flags;
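/* For example (illustrative only): a chain whose terms are needed both in the
   main loop body and in the temporary-copy loop is marked with 3 (1 | 2),
   while a chain only read in the main body is marked with 1.  */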
497 /* Free a gfc_ss chain. */
499 void
500 gfc_free_ss_chain (gfc_ss * ss)
502 gfc_ss *next;
504 while (ss != gfc_ss_terminator)
506 gcc_assert (ss != NULL);
507 next = ss->next;
508 gfc_free_ss (ss);
509 ss = next;
514 static void
515 free_ss_info (gfc_ss_info *ss_info)
517 int n;
519 ss_info->refcount--;
520 if (ss_info->refcount > 0)
521 return;
523 gcc_assert (ss_info->refcount == 0);
525 switch (ss_info->type)
527 case GFC_SS_SECTION:
528 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
529 if (ss_info->data.array.subscript[n])
530 gfc_free_ss_chain (ss_info->data.array.subscript[n]);
531 break;
533 default:
534 break;
537 free (ss_info);
541 /* Free a SS. */
543 void
544 gfc_free_ss (gfc_ss * ss)
546 free_ss_info (ss->info);
547 free (ss);
551 /* Creates and initializes an array type gfc_ss struct. */
553 gfc_ss *
554 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
556 gfc_ss *ss;
557 gfc_ss_info *ss_info;
558 int i;
560 ss_info = gfc_get_ss_info ();
561 ss_info->refcount++;
562 ss_info->type = type;
563 ss_info->expr = expr;
565 ss = gfc_get_ss ();
566 ss->info = ss_info;
567 ss->next = next;
568 ss->dimen = dimen;
569 for (i = 0; i < ss->dimen; i++)
570 ss->dim[i] = i;
572 return ss;
576 /* Creates and initializes a temporary type gfc_ss struct. */
578 gfc_ss *
579 gfc_get_temp_ss (tree type, tree string_length, int dimen)
581 gfc_ss *ss;
582 gfc_ss_info *ss_info;
583 int i;
585 ss_info = gfc_get_ss_info ();
586 ss_info->refcount++;
587 ss_info->type = GFC_SS_TEMP;
588 ss_info->string_length = string_length;
589 ss_info->data.temp.type = type;
591 ss = gfc_get_ss ();
592 ss->info = ss_info;
593 ss->next = gfc_ss_terminator;
594 ss->dimen = dimen;
595 for (i = 0; i < ss->dimen; i++)
596 ss->dim[i] = i;
598 return ss;
602 /* Creates and initializes a scalar type gfc_ss struct. */
604 gfc_ss *
605 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
607 gfc_ss *ss;
608 gfc_ss_info *ss_info;
610 ss_info = gfc_get_ss_info ();
611 ss_info->refcount++;
612 ss_info->type = GFC_SS_SCALAR;
613 ss_info->expr = expr;
615 ss = gfc_get_ss ();
616 ss->info = ss_info;
617 ss->next = next;
619 return ss;
623 /* Free all the SS associated with a loop. */
625 void
626 gfc_cleanup_loop (gfc_loopinfo * loop)
628 gfc_loopinfo *loop_next, **ploop;
629 gfc_ss *ss;
630 gfc_ss *next;
632 ss = loop->ss;
633 while (ss != gfc_ss_terminator)
635 gcc_assert (ss != NULL);
636 next = ss->loop_chain;
637 gfc_free_ss (ss);
638 ss = next;
641 /* Remove reference to self in the parent loop. */
642 if (loop->parent)
643 for (ploop = &loop->parent->nested; *ploop; ploop = &(*ploop)->next)
644 if (*ploop == loop)
646 *ploop = loop->next;
647 break;
650 /* Free non-freed nested loops. */
651 for (loop = loop->nested; loop; loop = loop_next)
653 loop_next = loop->next;
654 gfc_cleanup_loop (loop);
655 free (loop);
660 static void
661 set_ss_loop (gfc_ss *ss, gfc_loopinfo *loop)
663 int n;
665 for (; ss != gfc_ss_terminator; ss = ss->next)
667 ss->loop = loop;
669 if (ss->info->type == GFC_SS_SCALAR
670 || ss->info->type == GFC_SS_REFERENCE
671 || ss->info->type == GFC_SS_TEMP)
672 continue;
674 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
675 if (ss->info->data.array.subscript[n] != NULL)
676 set_ss_loop (ss->info->data.array.subscript[n], loop);
681 /* Associate a SS chain with a loop. */
683 void
684 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
686 gfc_ss *ss;
687 gfc_loopinfo *nested_loop;
689 if (head == gfc_ss_terminator)
690 return;
692 set_ss_loop (head, loop);
694 ss = head;
695 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
697 if (ss->nested_ss)
699 nested_loop = ss->nested_ss->loop;
701 /* More than one ss can belong to the same loop. Hence, we add the
702 loop to the chain only if it is different from the previously
703 added one, to avoid duplicate nested loops. */
704 if (nested_loop != loop->nested)
706 gcc_assert (nested_loop->parent == NULL);
707 nested_loop->parent = loop;
709 gcc_assert (nested_loop->next == NULL);
710 nested_loop->next = loop->nested;
711 loop->nested = nested_loop;
713 else
714 gcc_assert (nested_loop->parent == loop);
717 if (ss->next == gfc_ss_terminator)
718 ss->loop_chain = loop->ss;
719 else
720 ss->loop_chain = ss->next;
722 gcc_assert (ss == gfc_ss_terminator);
723 loop->ss = head;
727 /* Generate an initializer for a static pointer or allocatable array. */
729 void
730 gfc_trans_static_array_pointer (gfc_symbol * sym)
732 tree type;
734 gcc_assert (TREE_STATIC (sym->backend_decl));
735 /* Just zero the data member. */
736 type = TREE_TYPE (sym->backend_decl);
737 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
741 /* If the bounds of SE's loop have not yet been set, see if they can be
742 determined from array spec AS, which is the array spec of a called
743 function. MAPPING maps the callee's dummy arguments to the values
744 that the caller is passing. Add any initialization and finalization
745 code to SE. */
747 void
748 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
749 gfc_se * se, gfc_array_spec * as)
751 int n, dim, total_dim;
752 gfc_se tmpse;
753 gfc_ss *ss;
754 tree lower;
755 tree upper;
756 tree tmp;
758 total_dim = 0;
760 if (!as || as->type != AS_EXPLICIT)
761 return;
763 for (ss = se->ss; ss; ss = ss->parent)
765 total_dim += ss->loop->dimen;
766 for (n = 0; n < ss->loop->dimen; n++)
768 /* The bound is known, nothing to do. */
769 if (ss->loop->to[n] != NULL_TREE)
770 continue;
772 dim = ss->dim[n];
773 gcc_assert (dim < as->rank);
774 gcc_assert (ss->loop->dimen <= as->rank);
776 /* Evaluate the lower bound. */
777 gfc_init_se (&tmpse, NULL);
778 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
779 gfc_add_block_to_block (&se->pre, &tmpse.pre);
780 gfc_add_block_to_block (&se->post, &tmpse.post);
781 lower = fold_convert (gfc_array_index_type, tmpse.expr);
783 /* ...and the upper bound. */
784 gfc_init_se (&tmpse, NULL);
785 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
786 gfc_add_block_to_block (&se->pre, &tmpse.pre);
787 gfc_add_block_to_block (&se->post, &tmpse.post);
788 upper = fold_convert (gfc_array_index_type, tmpse.expr);
790 /* Set the upper bound of the loop to UPPER - LOWER. */
791 tmp = fold_build2_loc (input_location, MINUS_EXPR,
792 gfc_array_index_type, upper, lower);
793 tmp = gfc_evaluate_now (tmp, &se->pre);
794 ss->loop->to[n] = tmp;
798 gcc_assert (total_dim == as->rank);
802 /* Generate code to allocate an array temporary, or create a variable to
803 hold the data. If size is NULL, zero the descriptor so that the
804 callee will allocate the array. If DEALLOC is true, also generate code to
805 free the array afterwards.
807 If INITIAL is not NULL, it is packed using internal_pack and the result used
808 as data instead of allocating a fresh, uninitialized area of memory.
810 Initialization code is added to PRE and finalization code to POST.
811 DYNAMIC is true if the caller may want to extend the array later
812 using realloc. This prevents us from putting the array on the stack. */
814 static void
815 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
816 gfc_array_info * info, tree size, tree nelem,
817 tree initial, bool dynamic, bool dealloc)
819 tree tmp;
820 tree desc;
821 bool onstack;
823 desc = info->descriptor;
824 info->offset = gfc_index_zero_node;
825 if (size == NULL_TREE || integer_zerop (size))
827 /* A callee allocated array. */
828 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
829 onstack = FALSE;
831 else
833 /* Allocate the temporary. */
834 onstack = !dynamic && initial == NULL_TREE
835 && (flag_stack_arrays
836 || gfc_can_put_var_on_stack (size));
838 if (onstack)
840 /* Make a temporary variable to hold the data. */
841 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
842 nelem, gfc_index_one_node);
843 tmp = gfc_evaluate_now (tmp, pre);
844 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
845 tmp);
846 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
847 tmp);
848 tmp = gfc_create_var (tmp, "A");
849 /* If we're here only because of -fstack-arrays we have to
850 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
851 if (!gfc_can_put_var_on_stack (size))
852 gfc_add_expr_to_block (pre,
853 fold_build1_loc (input_location,
854 DECL_EXPR, TREE_TYPE (tmp),
855 tmp));
856 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
857 gfc_conv_descriptor_data_set (pre, desc, tmp);
859 else
861 /* Allocate memory to hold the data or call internal_pack. */
862 if (initial == NULL_TREE)
864 tmp = gfc_call_malloc (pre, NULL, size);
865 tmp = gfc_evaluate_now (tmp, pre);
867 else
869 tree packed;
870 tree source_data;
871 tree was_packed;
872 stmtblock_t do_copying;
874 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
875 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
876 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
877 tmp = gfc_get_element_type (tmp);
878 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
879 packed = gfc_create_var (build_pointer_type (tmp), "data");
881 tmp = build_call_expr_loc (input_location,
882 gfor_fndecl_in_pack, 1, initial);
883 tmp = fold_convert (TREE_TYPE (packed), tmp);
884 gfc_add_modify (pre, packed, tmp);
886 tmp = build_fold_indirect_ref_loc (input_location,
887 initial);
888 source_data = gfc_conv_descriptor_data_get (tmp);
890 /* internal_pack may return source->data without any allocation
891 or copying if it is already packed. If that's the case, we
892 need to allocate and copy manually. */
894 gfc_start_block (&do_copying);
895 tmp = gfc_call_malloc (&do_copying, NULL, size);
896 tmp = fold_convert (TREE_TYPE (packed), tmp);
897 gfc_add_modify (&do_copying, packed, tmp);
898 tmp = gfc_build_memcpy_call (packed, source_data, size);
899 gfc_add_expr_to_block (&do_copying, tmp);
901 was_packed = fold_build2_loc (input_location, EQ_EXPR,
902 boolean_type_node, packed,
903 source_data);
904 tmp = gfc_finish_block (&do_copying);
905 tmp = build3_v (COND_EXPR, was_packed, tmp,
906 build_empty_stmt (input_location));
907 gfc_add_expr_to_block (pre, tmp);
909 tmp = fold_convert (pvoid_type_node, packed);
912 gfc_conv_descriptor_data_set (pre, desc, tmp);
915 info->data = gfc_conv_descriptor_data_get (desc);
917 /* The offset is zero because we create temporaries with a zero
918 lower bound. */
919 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
921 if (dealloc && !onstack)
923 /* Free the temporary. */
924 tmp = gfc_conv_descriptor_data_get (desc);
925 tmp = gfc_call_free (tmp);
926 gfc_add_expr_to_block (post, tmp);
931 /* Get the scalarizer array dimension corresponding to actual array dimension
932 given by ARRAY_DIM.
934 For example, if SS represents the array ref a(1,:,:,1), it is a
935 bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
936 and 1 for ARRAY_DIM=2.
937 If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
938 scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
939 ARRAY_DIM=3.
940 If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
941 array. If called on the inner ss, the result would be respectively 0,1,2 for
942 ARRAY_DIM=0,1,2. If called on the outer ss, the result would be 0,1
943 for ARRAY_DIM=1,2. */
945 static int
946 get_scalarizer_dim_for_array_dim (gfc_ss *ss, int array_dim)
948 int array_ref_dim;
949 int n;
951 array_ref_dim = 0;
953 for (; ss; ss = ss->parent)
954 for (n = 0; n < ss->dimen; n++)
955 if (ss->dim[n] < array_dim)
956 array_ref_dim++;
958 return array_ref_dim;
962 static gfc_ss *
963 innermost_ss (gfc_ss *ss)
965 while (ss->nested_ss != NULL)
966 ss = ss->nested_ss;
968 return ss;
973 /* Get the array reference dimension corresponding to the given loop dimension.
974 It is different from the true array dimension given by the dim array in
975 the case of a partial array reference (e.g. a(:,:,1,:)), and it is
976 different from the loop dimension in the case of a transposed array. */
979 static int
980 get_array_ref_dim_for_loop_dim (gfc_ss *ss, int loop_dim)
982 return get_scalarizer_dim_for_array_dim (innermost_ss (ss),
983 ss->dim[loop_dim]);
987 /* Generate code to create and initialize the descriptor for a temporary
988 array. This is used for both temporaries needed by the scalarizer, and
989 functions returning arrays. Adjusts the loop variables to be
990 zero-based, and calculates the loop bounds for callee allocated arrays.
991 Allocate the array unless it's callee allocated (we have a callee
992 allocated array if 'callee_alloc' is true, or if loop->to[n] is
993 NULL_TREE for any n). Also fills in the descriptor, data and offset
994 fields of info if known. Returns the size of the array, or NULL for a
995 callee allocated array.
997 'eltype' == NULL signals that the temporary should be a class object.
998 The 'initial' expression is used to obtain the size of the dynamic
999 type; otherwise the allocation and initialization proceed as for any
1000 other expression.
1002 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
1003 gfc_trans_allocate_array_storage. */
1005 tree
1006 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post, gfc_ss * ss,
1007 tree eltype, tree initial, bool dynamic,
1008 bool dealloc, bool callee_alloc, locus * where)
1010 gfc_loopinfo *loop;
1011 gfc_ss *s;
1012 gfc_array_info *info;
1013 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
1014 tree type;
1015 tree desc;
1016 tree tmp;
1017 tree size;
1018 tree nelem;
1019 tree cond;
1020 tree or_expr;
1021 tree class_expr = NULL_TREE;
1022 int n, dim, tmp_dim;
1023 int total_dim = 0;
1025 /* This signals a class array for which we need the size of the
1026 dynamic type. Generate an eltype and then the class expression. */
1027 if (eltype == NULL_TREE && initial)
1029 gcc_assert (POINTER_TYPE_P (TREE_TYPE (initial)));
1030 class_expr = build_fold_indirect_ref_loc (input_location, initial);
1031 eltype = TREE_TYPE (class_expr);
1032 eltype = gfc_get_element_type (eltype);
1033 /* Obtain the structure (class) expression. */
1034 class_expr = TREE_OPERAND (class_expr, 0);
1035 gcc_assert (class_expr);
1038 memset (from, 0, sizeof (from));
1039 memset (to, 0, sizeof (to));
1041 info = &ss->info->data.array;
1043 gcc_assert (ss->dimen > 0);
1044 gcc_assert (ss->loop->dimen == ss->dimen);
1046 if (warn_array_temporaries && where)
1047 gfc_warning (OPT_Warray_temporaries,
1048 "Creating array temporary at %L", where);
1050 /* Set the lower bound to zero. */
1051 for (s = ss; s; s = s->parent)
1053 loop = s->loop;
1055 total_dim += loop->dimen;
1056 for (n = 0; n < loop->dimen; n++)
1058 dim = s->dim[n];
1060 /* Callee allocated arrays may not have a known bound yet. */
1061 if (loop->to[n])
1062 loop->to[n] = gfc_evaluate_now (
1063 fold_build2_loc (input_location, MINUS_EXPR,
1064 gfc_array_index_type,
1065 loop->to[n], loop->from[n]),
1066 pre);
1067 loop->from[n] = gfc_index_zero_node;
1069 /* We have just changed the loop bounds, we must clear the
1070 corresponding specloop, so that delta calculation is not skipped
1071 later in gfc_set_delta. */
1072 loop->specloop[n] = NULL;
1074 /* We are constructing the temporary's descriptor based on the loop
1075 dimensions. As the dimensions may be accessed in arbitrary order
1076 (think of transpose) the size taken from the n'th loop may not map
1077 to the n'th dimension of the array. We need to reconstruct loop
1078 infos in the right order before using it to set the descriptor
1079 bounds. */
1080 tmp_dim = get_scalarizer_dim_for_array_dim (ss, dim);
1081 from[tmp_dim] = loop->from[n];
1082 to[tmp_dim] = loop->to[n];
1084 info->delta[dim] = gfc_index_zero_node;
1085 info->start[dim] = gfc_index_zero_node;
1086 info->end[dim] = gfc_index_zero_node;
1087 info->stride[dim] = gfc_index_one_node;
1091 /* Initialize the descriptor. */
1092 type =
1093 gfc_get_array_type_bounds (eltype, total_dim, 0, from, to, 1,
1094 GFC_ARRAY_UNKNOWN, true);
1095 desc = gfc_create_var (type, "atmp");
1096 GFC_DECL_PACKED_ARRAY (desc) = 1;
1098 info->descriptor = desc;
1099 size = gfc_index_one_node;
1101 /* Fill in the array dtype. */
1102 tmp = gfc_conv_descriptor_dtype (desc);
1103 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
1106 /* Fill in the bounds and stride.  This is a packed array, so:
1108 size = 1;
1109 for (n = 0; n < rank; n++)
1111 stride[n] = size;
1112 delta = ubound[n] + 1 - lbound[n];
1113 size = size * delta;
1115 size = size * sizeof(element);  */
1118 or_expr = NULL_TREE;
1120 /* If there is at least one null loop->to[n], it is a callee allocated
1121 array. */
1122 for (n = 0; n < total_dim; n++)
1123 if (to[n] == NULL_TREE)
1125 size = NULL_TREE;
1126 break;
1129 if (size == NULL_TREE)
1130 for (s = ss; s; s = s->parent)
1131 for (n = 0; n < s->loop->dimen; n++)
1133 dim = get_scalarizer_dim_for_array_dim (ss, s->dim[n]);
1135 /* For a callee allocated array express the loop bounds in terms
1136 of the descriptor fields. */
1137 tmp = fold_build2_loc (input_location,
1138 MINUS_EXPR, gfc_array_index_type,
1139 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
1140 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
1141 s->loop->to[n] = tmp;
1143 else
1145 for (n = 0; n < total_dim; n++)
1147 /* Store the stride and bound components in the descriptor. */
1148 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
1150 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
1151 gfc_index_zero_node);
1153 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], to[n]);
1155 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1156 gfc_array_index_type,
1157 to[n], gfc_index_one_node);
1159 /* Check whether the size for this dimension is negative. */
1160 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1161 tmp, gfc_index_zero_node);
1162 cond = gfc_evaluate_now (cond, pre);
1164 if (n == 0)
1165 or_expr = cond;
1166 else
1167 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
1168 boolean_type_node, or_expr, cond);
1170 size = fold_build2_loc (input_location, MULT_EXPR,
1171 gfc_array_index_type, size, tmp);
1172 size = gfc_evaluate_now (size, pre);
1176 /* Get the size of the array. */
1177 if (size && !callee_alloc)
1179 tree elemsize;
1180 /* If or_expr is true, then the extent in at least one
1181 dimension is zero and the size is set to zero. */
1182 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
1183 or_expr, gfc_index_zero_node, size);
1185 nelem = size;
1186 if (class_expr == NULL_TREE)
1187 elemsize = fold_convert (gfc_array_index_type,
1188 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
1189 else
1190 elemsize = gfc_class_vtab_size_get (class_expr);
1192 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1193 size, elemsize);
1195 else
1197 nelem = size;
1198 size = NULL_TREE;
1201 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
1202 dynamic, dealloc);
1204 while (ss->parent)
1205 ss = ss->parent;
1207 if (ss->dimen > ss->loop->temp_dim)
1208 ss->loop->temp_dim = ss->dimen;
1210 return size;
1214 /* Return the number of iterations in a loop that starts at START,
1215 ends at END, and has step STEP. */
1217 static tree
1218 gfc_get_iteration_count (tree start, tree end, tree step)
1220 tree tmp;
1221 tree type;
1223 type = TREE_TYPE (step);
1224 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1225 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1226 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1227 build_int_cst (type, 1));
1228 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1229 build_int_cst (type, 0));
1230 return fold_convert (gfc_array_index_type, tmp);
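/* Worked examples (illustrative only): START = 1, END = 10, STEP = 3 gives
   (10 - 1) / 3 + 1 = 4 iterations (1, 4, 7, 10); START = 5, END = 1, STEP = 1
   gives (1 - 5) / 1 + 1 = -3, clamped to 0 by the MAX_EXPR above.  */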
1234 /* Extend the data in array DESC by EXTRA elements. */
1236 static void
1237 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
1239 tree arg0, arg1;
1240 tree tmp;
1241 tree size;
1242 tree ubound;
1244 if (integer_zerop (extra))
1245 return;
1247 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1249 /* Add EXTRA to the upper bound. */
1250 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1251 ubound, extra);
1252 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1254 /* Get the value of the current data pointer. */
1255 arg0 = gfc_conv_descriptor_data_get (desc);
1257 /* Calculate the new array size. */
1258 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
1259 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1260 ubound, gfc_index_one_node);
1261 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1262 fold_convert (size_type_node, tmp),
1263 fold_convert (size_type_node, size));
1265 /* Call the realloc() function. */
1266 tmp = gfc_call_realloc (pblock, arg0, arg1);
1267 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1271 /* Return true if the bounds of iterator I can only be determined
1272 at run time. */
1274 static inline bool
1275 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1277 return (i->start->expr_type != EXPR_CONSTANT
1278 || i->end->expr_type != EXPR_CONSTANT
1279 || i->step->expr_type != EXPR_CONSTANT);
1283 /* Split the size of constructor element EXPR into the sum of two terms,
1284 one of which can be determined at compile time and one of which must
1285 be calculated at run time. Set *SIZE to the former and return true
1286 if the latter might be nonzero. */
1288 static bool
1289 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1291 if (expr->expr_type == EXPR_ARRAY)
1292 return gfc_get_array_constructor_size (size, expr->value.constructor);
1293 else if (expr->rank > 0)
1295 /* Calculate everything at run time. */
1296 mpz_set_ui (*size, 0);
1297 return true;
1299 else
1301 /* A single element. */
1302 mpz_set_ui (*size, 1);
1303 return false;
1308 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1309 of the array constructor given by BASE. */
1311 static bool
1312 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1314 gfc_constructor *c;
1315 gfc_iterator *i;
1316 mpz_t val;
1317 mpz_t len;
1318 bool dynamic;
1320 mpz_set_ui (*size, 0);
1321 mpz_init (len);
1322 mpz_init (val);
1324 dynamic = false;
1325 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1327 i = c->iterator;
1328 if (i && gfc_iterator_has_dynamic_bounds (i))
1329 dynamic = true;
1330 else
1332 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1333 if (i)
1335 /* Multiply the static part of the element size by the
1336 number of iterations. */
1337 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1338 mpz_fdiv_q (val, val, i->step->value.integer);
1339 mpz_add_ui (val, val, 1);
1340 if (mpz_sgn (val) > 0)
1341 mpz_mul (len, len, val);
1342 else
1343 mpz_set_ui (len, 0);
1345 mpz_add (*size, *size, len);
1348 mpz_clear (len);
1349 mpz_clear (val);
1350 return dynamic;
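/* For example (illustrative only): for the constructor (/ 7, (i, i = 1, n), 8 /)
   with a run-time N, the two literal elements give a static *SIZE of 2, while
   the implied-do with dynamic bounds contributes nothing here and makes the
   function return true, so the remaining space has to be added at run time
   (via gfc_grow_array).  */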
1354 /* Make sure offset is a variable. */
1356 static void
1357 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1358 tree * offsetvar)
1360 /* We should have already created the offset variable. We cannot
1361 create it here because we may be in an inner scope. */
1362 gcc_assert (*offsetvar != NULL_TREE);
1363 gfc_add_modify (pblock, *offsetvar, *poffset);
1364 *poffset = *offsetvar;
1365 TREE_USED (*offsetvar) = 1;
1369 /* Variables needed for bounds-checking. */
1370 static bool first_len;
1371 static tree first_len_val;
1372 static bool typespec_chararray_ctor;
1374 static void
1375 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1376 tree offset, gfc_se * se, gfc_expr * expr)
1378 tree tmp;
1380 gfc_conv_expr (se, expr);
1382 /* Store the value. */
1383 tmp = build_fold_indirect_ref_loc (input_location,
1384 gfc_conv_descriptor_data_get (desc));
1385 tmp = gfc_build_array_ref (tmp, offset, NULL);
1387 if (expr->ts.type == BT_CHARACTER)
1389 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
1390 tree esize;
1392 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1393 esize = fold_convert (gfc_charlen_type_node, esize);
1394 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1395 gfc_charlen_type_node, esize,
1396 build_int_cst (gfc_charlen_type_node,
1397 gfc_character_kinds[i].bit_size / 8));
1399 gfc_conv_string_parameter (se);
1400 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1402 /* The temporary is an array of pointers. */
1403 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1404 gfc_add_modify (&se->pre, tmp, se->expr);
1406 else
1408 /* The temporary is an array of string values. */
1409 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1410 /* We know the temporary and the value will be the same length,
1411 so we can use memcpy. */
1412 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1413 se->string_length, se->expr, expr->ts.kind);
1415 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1417 if (first_len)
1419 gfc_add_modify (&se->pre, first_len_val,
1420 se->string_length);
1421 first_len = false;
1423 else
1425 /* Verify that all constructor elements are of the same
1426 length. */
1427 tree cond = fold_build2_loc (input_location, NE_EXPR,
1428 boolean_type_node, first_len_val,
1429 se->string_length);
1430 gfc_trans_runtime_check
1431 (true, false, cond, &se->pre, &expr->where,
1432 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1433 fold_convert (long_integer_type_node, first_len_val),
1434 fold_convert (long_integer_type_node, se->string_length));
1438 else
1440 /* TODO: Should the frontend already have done this conversion? */
1441 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1442 gfc_add_modify (&se->pre, tmp, se->expr);
1445 gfc_add_block_to_block (pblock, &se->pre);
1446 gfc_add_block_to_block (pblock, &se->post);
1450 /* Add the contents of an array to the constructor. DYNAMIC is as for
1451 gfc_trans_array_constructor_value. */
1453 static void
1454 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1455 tree type ATTRIBUTE_UNUSED,
1456 tree desc, gfc_expr * expr,
1457 tree * poffset, tree * offsetvar,
1458 bool dynamic)
1460 gfc_se se;
1461 gfc_ss *ss;
1462 gfc_loopinfo loop;
1463 stmtblock_t body;
1464 tree tmp;
1465 tree size;
1466 int n;
1468 /* We need this to be a variable so we can increment it. */
1469 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1471 gfc_init_se (&se, NULL);
1473 /* Walk the array expression. */
1474 ss = gfc_walk_expr (expr);
1475 gcc_assert (ss != gfc_ss_terminator);
1477 /* Initialize the scalarizer. */
1478 gfc_init_loopinfo (&loop);
1479 gfc_add_ss_to_loop (&loop, ss);
1481 /* Initialize the loop. */
1482 gfc_conv_ss_startstride (&loop);
1483 gfc_conv_loop_setup (&loop, &expr->where);
1485 /* Make sure the constructed array has room for the new data. */
1486 if (dynamic)
1488 /* Set SIZE to the total number of elements in the subarray. */
1489 size = gfc_index_one_node;
1490 for (n = 0; n < loop.dimen; n++)
1492 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1493 gfc_index_one_node);
1494 size = fold_build2_loc (input_location, MULT_EXPR,
1495 gfc_array_index_type, size, tmp);
1498 /* Grow the constructed array by SIZE elements. */
1499 gfc_grow_array (&loop.pre, desc, size);
1502 /* Make the loop body. */
1503 gfc_mark_ss_chain_used (ss, 1);
1504 gfc_start_scalarized_body (&loop, &body);
1505 gfc_copy_loopinfo_to_se (&se, &loop);
1506 se.ss = ss;
1508 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1509 gcc_assert (se.ss == gfc_ss_terminator);
1511 /* Increment the offset. */
1512 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1513 *poffset, gfc_index_one_node);
1514 gfc_add_modify (&body, *poffset, tmp);
1516 /* Finish the loop. */
1517 gfc_trans_scalarizing_loops (&loop, &body);
1518 gfc_add_block_to_block (&loop.pre, &loop.post);
1519 tmp = gfc_finish_block (&loop.pre);
1520 gfc_add_expr_to_block (pblock, tmp);
1522 gfc_cleanup_loop (&loop);
1526 /* Assign the values to the elements of an array constructor. DYNAMIC
1527 is true if descriptor DESC only contains enough data for the static
1528 size calculated by gfc_get_array_constructor_size. When true, memory
1529 for the dynamic parts must be allocated using realloc. */
1531 static void
1532 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1533 tree desc, gfc_constructor_base base,
1534 tree * poffset, tree * offsetvar,
1535 bool dynamic)
1537 tree tmp;
1538 tree start = NULL_TREE;
1539 tree end = NULL_TREE;
1540 tree step = NULL_TREE;
1541 stmtblock_t body;
1542 gfc_se se;
1543 mpz_t size;
1544 gfc_constructor *c;
1546 tree shadow_loopvar = NULL_TREE;
1547 gfc_saved_var saved_loopvar;
1549 mpz_init (size);
1550 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1552 /* If this is an iterator or an array, the offset must be a variable. */
1553 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1554 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1556 /* Shadowing the iterator avoids changing its value and saves us from
1557 keeping track of it. Further, it makes sure that there's always a
1558 backend-decl for the symbol, even if there wasn't one before,
1559 e.g. in the case of an iterator that appears in a specification
1560 expression in an interface mapping. */
1561 if (c->iterator)
1563 gfc_symbol *sym;
1564 tree type;
1566 /* Evaluate loop bounds before substituting the loop variable
1567 in case they depend on it. Such a case is invalid, but it is
1568 not more expensive to do the right thing here.
1569 See PR 44354. */
1570 gfc_init_se (&se, NULL);
1571 gfc_conv_expr_val (&se, c->iterator->start);
1572 gfc_add_block_to_block (pblock, &se.pre);
1573 start = gfc_evaluate_now (se.expr, pblock);
1575 gfc_init_se (&se, NULL);
1576 gfc_conv_expr_val (&se, c->iterator->end);
1577 gfc_add_block_to_block (pblock, &se.pre);
1578 end = gfc_evaluate_now (se.expr, pblock);
1580 gfc_init_se (&se, NULL);
1581 gfc_conv_expr_val (&se, c->iterator->step);
1582 gfc_add_block_to_block (pblock, &se.pre);
1583 step = gfc_evaluate_now (se.expr, pblock);
1585 sym = c->iterator->var->symtree->n.sym;
1586 type = gfc_typenode_for_spec (&sym->ts);
1588 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1589 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1592 gfc_start_block (&body);
1594 if (c->expr->expr_type == EXPR_ARRAY)
1596 /* Array constructors can be nested. */
1597 gfc_trans_array_constructor_value (&body, type, desc,
1598 c->expr->value.constructor,
1599 poffset, offsetvar, dynamic);
1601 else if (c->expr->rank > 0)
1603 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1604 poffset, offsetvar, dynamic);
1606 else
1608 /* This code really upsets the gimplifier so don't bother for now. */
1609 gfc_constructor *p;
1610 HOST_WIDE_INT n;
1611 HOST_WIDE_INT size;
1613 p = c;
1614 n = 0;
1615 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1617 p = gfc_constructor_next (p);
1618 n++;
1620 if (n < 4)
1622 /* Scalar values. */
1623 gfc_init_se (&se, NULL);
1624 gfc_trans_array_ctor_element (&body, desc, *poffset,
1625 &se, c->expr);
1627 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1628 gfc_array_index_type,
1629 *poffset, gfc_index_one_node);
1631 else
1633 /* Collect multiple scalar constants into a constructor. */
1634 vec<constructor_elt, va_gc> *v = NULL;
1635 tree init;
1636 tree bound;
1637 tree tmptype;
1638 HOST_WIDE_INT idx = 0;
1640 p = c;
1641 /* Count the number of consecutive scalar constants. */
1642 while (p && !(p->iterator
1643 || p->expr->expr_type != EXPR_CONSTANT))
1645 gfc_init_se (&se, NULL);
1646 gfc_conv_constant (&se, p->expr);
1648 if (c->expr->ts.type != BT_CHARACTER)
1649 se.expr = fold_convert (type, se.expr);
1650 /* For constant character array constructors we build
1651 an array of pointers. */
1652 else if (POINTER_TYPE_P (type))
1653 se.expr = gfc_build_addr_expr
1654 (gfc_get_pchar_type (p->expr->ts.kind),
1655 se.expr);
1657 CONSTRUCTOR_APPEND_ELT (v,
1658 build_int_cst (gfc_array_index_type,
1659 idx++),
1660 se.expr);
1661 c = p;
1662 p = gfc_constructor_next (p);
1665 bound = size_int (n - 1);
1666 /* Create an array type to hold them. */
1667 tmptype = build_range_type (gfc_array_index_type,
1668 gfc_index_zero_node, bound);
1669 tmptype = build_array_type (type, tmptype);
1671 init = build_constructor (tmptype, v);
1672 TREE_CONSTANT (init) = 1;
1673 TREE_STATIC (init) = 1;
1674 /* Create a static variable to hold the data. */
1675 tmp = gfc_create_var (tmptype, "data");
1676 TREE_STATIC (tmp) = 1;
1677 TREE_CONSTANT (tmp) = 1;
1678 TREE_READONLY (tmp) = 1;
1679 DECL_INITIAL (tmp) = init;
1680 init = tmp;
1682 /* Use BUILTIN_MEMCPY to assign the values. */
1683 tmp = gfc_conv_descriptor_data_get (desc);
1684 tmp = build_fold_indirect_ref_loc (input_location,
1685 tmp);
1686 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1687 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1688 init = gfc_build_addr_expr (NULL_TREE, init);
1690 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1691 bound = build_int_cst (size_type_node, n * size);
1692 tmp = build_call_expr_loc (input_location,
1693 builtin_decl_explicit (BUILT_IN_MEMCPY),
1694 3, tmp, init, bound);
1695 gfc_add_expr_to_block (&body, tmp);
1697 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1698 gfc_array_index_type, *poffset,
1699 build_int_cst (gfc_array_index_type, n));
1701 if (!INTEGER_CST_P (*poffset))
1703 gfc_add_modify (&body, *offsetvar, *poffset);
1704 *poffset = *offsetvar;
1708 /* The frontend should already have done any expansions
1709 at compile-time. */
1710 if (!c->iterator)
1712 /* Pass the code as is. */
1713 tmp = gfc_finish_block (&body);
1714 gfc_add_expr_to_block (pblock, tmp);
1716 else
1718 /* Build the implied do-loop. */
1719 stmtblock_t implied_do_block;
1720 tree cond;
1721 tree exit_label;
1722 tree loopbody;
1723 tree tmp2;
1725 loopbody = gfc_finish_block (&body);
1727 /* Create a new block that holds the implied-do loop. A temporary
1728 loop-variable is used. */
1729 gfc_start_block(&implied_do_block);
1731 /* Initialize the loop. */
1732 gfc_add_modify (&implied_do_block, shadow_loopvar, start);
1734 /* If this array expands dynamically, and the number of iterations
1735 is not constant, we won't have allocated space for the static
1736 part of C->EXPR's size. Do that now. */
1737 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1739 /* Get the number of iterations. */
1740 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1742 /* Get the static part of C->EXPR's size. */
1743 gfc_get_array_constructor_element_size (&size, c->expr);
1744 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1746 /* Grow the array by TMP * TMP2 elements. */
1747 tmp = fold_build2_loc (input_location, MULT_EXPR,
1748 gfc_array_index_type, tmp, tmp2);
1749 gfc_grow_array (&implied_do_block, desc, tmp);
1752 /* Generate the loop body. */
1753 exit_label = gfc_build_label_decl (NULL_TREE);
1754 gfc_start_block (&body);
1756 /* Generate the exit condition. Depending on the sign of
1757 the step variable we have to generate the correct
1758 comparison. */
1759 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1760 step, build_int_cst (TREE_TYPE (step), 0));
1761 cond = fold_build3_loc (input_location, COND_EXPR,
1762 boolean_type_node, tmp,
1763 fold_build2_loc (input_location, GT_EXPR,
1764 boolean_type_node, shadow_loopvar, end),
1765 fold_build2_loc (input_location, LT_EXPR,
1766 boolean_type_node, shadow_loopvar, end));
1767 tmp = build1_v (GOTO_EXPR, exit_label);
1768 TREE_USED (exit_label) = 1;
1769 tmp = build3_v (COND_EXPR, cond, tmp,
1770 build_empty_stmt (input_location));
1771 gfc_add_expr_to_block (&body, tmp);
1773 /* The main loop body. */
1774 gfc_add_expr_to_block (&body, loopbody);
1776 /* Increase loop variable by step. */
1777 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1778 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1779 step);
1780 gfc_add_modify (&body, shadow_loopvar, tmp);
1782 /* Finish the loop. */
1783 tmp = gfc_finish_block (&body);
1784 tmp = build1_v (LOOP_EXPR, tmp);
1785 gfc_add_expr_to_block (&implied_do_block, tmp);
1787 /* Add the exit label. */
1788 tmp = build1_v (LABEL_EXPR, exit_label);
1789 gfc_add_expr_to_block (&implied_do_block, tmp);
1791 /* Finish the implied-do loop. */
1792 tmp = gfc_finish_block(&implied_do_block);
1793 gfc_add_expr_to_block(pblock, tmp);
1795 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1798 mpz_clear (size);
1802 /* A catch-all to obtain the string length for anything that is not
1803 a substring of non-constant length, a constant, array or variable. */
1805 static void
1806 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1808 gfc_se se;
1810 /* Don't bother if we already know the length is a constant. */
1811 if (*len && INTEGER_CST_P (*len))
1812 return;
1814 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1815 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1817 /* This is easy. */
1818 gfc_conv_const_charlen (e->ts.u.cl);
1819 *len = e->ts.u.cl->backend_decl;
1821 else
1823 /* Otherwise, be brutal even if inefficient. */
1824 gfc_init_se (&se, NULL);
1826 /* No function call, in case of side effects. */
1827 se.no_function_call = 1;
1828 if (e->rank == 0)
1829 gfc_conv_expr (&se, e);
1830 else
1831 gfc_conv_expr_descriptor (&se, e);
1833 /* Fix the value. */
1834 *len = gfc_evaluate_now (se.string_length, &se.pre);
1836 gfc_add_block_to_block (block, &se.pre);
1837 gfc_add_block_to_block (block, &se.post);
1839 e->ts.u.cl->backend_decl = *len;
1844 /* Figure out the string length of a variable reference expression.
1845 Used by get_array_ctor_strlen. */
1847 static void
1848 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1850 gfc_ref *ref;
1851 gfc_typespec *ts;
1852 mpz_t char_len;
1854 /* Don't bother if we already know the length is a constant. */
1855 if (*len && INTEGER_CST_P (*len))
1856 return;
1858 ts = &expr->symtree->n.sym->ts;
1859 for (ref = expr->ref; ref; ref = ref->next)
1861 switch (ref->type)
1863 case REF_ARRAY:
1864 /* Array references don't change the string length. */
1865 break;
1867 case REF_COMPONENT:
1868 /* Use the length of the component. */
1869 ts = &ref->u.c.component->ts;
1870 break;
1872 case REF_SUBSTRING:
1873 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1874 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1876 /* Note that this might evaluate expr. */
1877 get_array_ctor_all_strlen (block, expr, len);
1878 return;
1880 mpz_init_set_ui (char_len, 1);
1881 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1882 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1883 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1884 *len = convert (gfc_charlen_type_node, *len);
1885 mpz_clear (char_len);
1886 return;
1888 default:
1889 gcc_unreachable ();
1893 *len = ts->u.cl->backend_decl;
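/* For example (illustrative only): a reference such as str(3:7) with constant
   substring bounds takes the REF_SUBSTRING branch above and yields
   1 + 7 - 3 = 5, without evaluating the expression.  */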
1897 /* Figure out the string length of a character array constructor.
1898 If len is NULL, don't calculate the length; this happens for recursive calls
1899 when a sub-array-constructor is an element but not at the first position,
1900 in which case we are not interested in the length.
1901 Returns TRUE if all elements are character constants. */
1903 bool
1904 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1906 gfc_constructor *c;
1907 bool is_const;
1909 is_const = TRUE;
1911 if (gfc_constructor_first (base) == NULL)
1913 if (len)
1914 *len = build_int_cstu (gfc_charlen_type_node, 0);
1915 return is_const;
1918 /* Loop over all constructor elements to find out is_const, but in len we
1919 want to store the length of the first, not the last, element. We can
1920 of course exit the loop as soon as is_const is found to be false. */
1921 for (c = gfc_constructor_first (base);
1922 c && is_const; c = gfc_constructor_next (c))
1924 switch (c->expr->expr_type)
1926 case EXPR_CONSTANT:
1927 if (len && !(*len && INTEGER_CST_P (*len)))
1928 *len = build_int_cstu (gfc_charlen_type_node,
1929 c->expr->value.character.length);
1930 break;
1932 case EXPR_ARRAY:
1933 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1934 is_const = false;
1935 break;
1937 case EXPR_VARIABLE:
1938 is_const = false;
1939 if (len)
1940 get_array_ctor_var_strlen (block, c->expr, len);
1941 break;
1943 default:
1944 is_const = false;
1945 if (len)
1946 get_array_ctor_all_strlen (block, c->expr, len);
1947 break;
1950 /* After the first iteration, we don't want the length modified. */
1951 len = NULL;
1954 return is_const;
1957 /* Check whether the array constructor given by BASE consists entirely of
1958 constant elements; if so, return the number of those elements, otherwise
1959 return zero.  Note that an empty or NULL array constructor returns zero. */
1961 unsigned HOST_WIDE_INT
1962 gfc_constant_array_constructor_p (gfc_constructor_base base)
1964 unsigned HOST_WIDE_INT nelem = 0;
1966 gfc_constructor *c = gfc_constructor_first (base);
1967 while (c)
1969 if (c->iterator
1970 || c->expr->rank > 0
1971 || c->expr->expr_type != EXPR_CONSTANT)
1972 return 0;
1973 c = gfc_constructor_next (c);
1974 nelem++;
1976 return nelem;
1980 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1981 and the tree type of its elements, TYPE, return a static constant
1982 variable that is compile-time initialized. */
1984 tree
1985 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1987 tree tmptype, init, tmp;
1988 HOST_WIDE_INT nelem;
1989 gfc_constructor *c;
1990 gfc_array_spec as;
1991 gfc_se se;
1992 int i;
1993 vec<constructor_elt, va_gc> *v = NULL;
1995 /* First traverse the constructor list, converting the constants
1996 to tree to build an initializer. */
1997 nelem = 0;
1998 c = gfc_constructor_first (expr->value.constructor);
1999 while (c)
2001 gfc_init_se (&se, NULL);
2002 gfc_conv_constant (&se, c->expr);
2003 if (c->expr->ts.type != BT_CHARACTER)
2004 se.expr = fold_convert (type, se.expr);
2005 else if (POINTER_TYPE_P (type))
2006 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
2007 se.expr);
2008 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
2009 se.expr);
2010 c = gfc_constructor_next (c);
2011 nelem++;
2014 /* Next determine the tree type for the array. We use the gfortran
2015 front-end's gfc_get_nodesc_array_type in order to create a suitable
2016 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
2018 memset (&as, 0, sizeof (gfc_array_spec));
2020 as.rank = expr->rank;
2021 as.type = AS_EXPLICIT;
2022 if (!expr->shape)
2024 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2025 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
2026 NULL, nelem - 1);
2028 else
2029 for (i = 0; i < expr->rank; i++)
2031 int tmp = (int) mpz_get_si (expr->shape[i]);
2032 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2033 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
2034 NULL, tmp - 1);
2037 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
2039 /* The array spec AS is not needed anymore. */
2040 for (i = 0; i < as.rank + as.corank; i++)
2042 gfc_free_expr (as.lower[i]);
2043 gfc_free_expr (as.upper[i]);
2046 init = build_constructor (tmptype, v);
2048 TREE_CONSTANT (init) = 1;
2049 TREE_STATIC (init) = 1;
2051 tmp = build_decl (input_location, VAR_DECL, create_tmp_var_name ("A"),
2052 tmptype);
2053 DECL_ARTIFICIAL (tmp) = 1;
2054 DECL_IGNORED_P (tmp) = 1;
2055 TREE_STATIC (tmp) = 1;
2056 TREE_CONSTANT (tmp) = 1;
2057 TREE_READONLY (tmp) = 1;
2058 DECL_INITIAL (tmp) = init;
2059 pushdecl (tmp);
2061 return tmp;
2065 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
2066 This mostly initializes the scalarizer state info structure with the
2067 appropriate values to directly use the array created by the function
2068 gfc_build_constant_array_constructor. */
2070 static void
2071 trans_constant_array_constructor (gfc_ss * ss, tree type)
2073 gfc_array_info *info;
2074 tree tmp;
2075 int i;
2077 tmp = gfc_build_constant_array_constructor (ss->info->expr, type);
2079 info = &ss->info->data.array;
2081 info->descriptor = tmp;
2082 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
2083 info->offset = gfc_index_zero_node;
2085 for (i = 0; i < ss->dimen; i++)
2087 info->delta[i] = gfc_index_zero_node;
2088 info->start[i] = gfc_index_zero_node;
2089 info->end[i] = gfc_index_zero_node;
2090 info->stride[i] = gfc_index_one_node;
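/* Return the total rank of the loop nest rooted at LOOP, i.e. the sum of
   the dimensions of LOOP and of all its parent loops.  */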
2095 static int
2096 get_rank (gfc_loopinfo *loop)
2098 int rank;
2100 rank = 0;
2101 for (; loop; loop = loop->parent)
2102 rank += loop->dimen;
2104 return rank;
2108 /* Helper routine of gfc_trans_array_constructor to determine if the
2109 bounds of the loop specified by LOOP are constant and simple enough
2110 to use with trans_constant_array_constructor. Returns the
2111 iteration count of the loop if suitable, and NULL_TREE otherwise. */
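/* For example, a two-dimensional loop with bounds 0..4 and 0..2 gives
   (4 + 1) * (2 + 1) = 15 iterations; a nonzero lower bound is only
   accepted in the one-dimensional case, where the count is
   to - from + 1.  */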
2113 static tree
2114 constant_array_constructor_loop_size (gfc_loopinfo * l)
2116 gfc_loopinfo *loop;
2117 tree size = gfc_index_one_node;
2118 tree tmp;
2119 int i, total_dim;
2121 total_dim = get_rank (l);
2123 for (loop = l; loop; loop = loop->parent)
2125 for (i = 0; i < loop->dimen; i++)
2127 /* If the bounds aren't constant, return NULL_TREE. */
2128 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
2129 return NULL_TREE;
2130 if (!integer_zerop (loop->from[i]))
2132 /* Only allow nonzero "from" in one-dimensional arrays. */
2133 if (total_dim != 1)
2134 return NULL_TREE;
2135 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2136 gfc_array_index_type,
2137 loop->to[i], loop->from[i]);
2139 else
2140 tmp = loop->to[i];
2141 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2142 gfc_array_index_type, tmp, gfc_index_one_node);
2143 size = fold_build2_loc (input_location, MULT_EXPR,
2144 gfc_array_index_type, size, tmp);
2148 return size;
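/* Return a pointer to the loop upper bound (the loop->to entry) of the
   loop dimension that scalarizes dimension ARRAY_DIM of ARRAY, looking
   through ARRAY and its parent SS chain.  */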
2152 static tree *
2153 get_loop_upper_bound_for_array (gfc_ss *array, int array_dim)
2155 gfc_ss *ss;
2156 int n;
2158 gcc_assert (array->nested_ss == NULL);
2160 for (ss = array; ss; ss = ss->parent)
2161 for (n = 0; n < ss->loop->dimen; n++)
2162 if (array_dim == get_array_ref_dim_for_loop_dim (ss, n))
2163 return &(ss->loop->to[n]);
2165 gcc_unreachable ();
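/* Return the outermost loop of the nest containing LOOP by following the
   parent links.  */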
2169 static gfc_loopinfo *
2170 outermost_loop (gfc_loopinfo * loop)
2172 while (loop->parent != NULL)
2173 loop = loop->parent;
2175 return loop;
2179 /* Array constructors are handled by constructing a temporary, then using that
2180 within the scalarization loop. This is not optimal, but seems by far the
2181 simplest method. */
2183 static void
2184 trans_array_constructor (gfc_ss * ss, locus * where)
2186 gfc_constructor_base c;
2187 tree offset;
2188 tree offsetvar;
2189 tree desc;
2190 tree type;
2191 tree tmp;
2192 tree *loop_ubound0;
2193 bool dynamic;
2194 bool old_first_len, old_typespec_chararray_ctor;
2195 tree old_first_len_val;
2196 gfc_loopinfo *loop, *outer_loop;
2197 gfc_ss_info *ss_info;
2198 gfc_expr *expr;
2199 gfc_ss *s;
2201 /* Save the old values for nested checking. */
2202 old_first_len = first_len;
2203 old_first_len_val = first_len_val;
2204 old_typespec_chararray_ctor = typespec_chararray_ctor;
2206 loop = ss->loop;
2207 outer_loop = outermost_loop (loop);
2208 ss_info = ss->info;
2209 expr = ss_info->expr;
2211 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
2212 typespec was given for the array constructor. */
2213 typespec_chararray_ctor = (expr->ts.u.cl
2214 && expr->ts.u.cl->length_from_typespec);
2216 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2217 && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
2219 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
2220 first_len = true;
2223 gcc_assert (ss->dimen == ss->loop->dimen);
2225 c = expr->value.constructor;
2226 if (expr->ts.type == BT_CHARACTER)
2228 bool const_string;
2230 /* get_array_ctor_strlen walks the elements of the constructor; if a
2231 typespec was given, we already know the string length and want the one
2232 specified there. */
2233 if (typespec_chararray_ctor && expr->ts.u.cl->length
2234 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
2236 gfc_se length_se;
2238 const_string = false;
2239 gfc_init_se (&length_se, NULL);
2240 gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
2241 gfc_charlen_type_node);
2242 ss_info->string_length = length_se.expr;
2243 gfc_add_block_to_block (&outer_loop->pre, &length_se.pre);
2244 gfc_add_block_to_block (&outer_loop->post, &length_se.post);
2246 else
2247 const_string = get_array_ctor_strlen (&outer_loop->pre, c,
2248 &ss_info->string_length);
2250 /* Complex character array constructors should have been taken care of
2251 and not end up here. */
2252 gcc_assert (ss_info->string_length);
2254 expr->ts.u.cl->backend_decl = ss_info->string_length;
2256 type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
2257 if (const_string)
2258 type = build_pointer_type (type);
2260 else
2261 type = gfc_typenode_for_spec (&expr->ts);
2263 /* See if the constructor determines the loop bounds. */
2264 dynamic = false;
2266 loop_ubound0 = get_loop_upper_bound_for_array (ss, 0);
2268 if (expr->shape && get_rank (loop) > 1 && *loop_ubound0 == NULL_TREE)
2270 /* We have a multidimensional parameter. */
2271 for (s = ss; s; s = s->parent)
2273 int n;
2274 for (n = 0; n < s->loop->dimen; n++)
2276 s->loop->from[n] = gfc_index_zero_node;
2277 s->loop->to[n] = gfc_conv_mpz_to_tree (expr->shape[s->dim[n]],
2278 gfc_index_integer_kind);
2279 s->loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2280 gfc_array_index_type,
2281 s->loop->to[n],
2282 gfc_index_one_node);
2287 if (*loop_ubound0 == NULL_TREE)
2289 mpz_t size;
2291 /* We should have a 1-dimensional, zero-based loop. */
2292 gcc_assert (loop->parent == NULL && loop->nested == NULL);
2293 gcc_assert (loop->dimen == 1);
2294 gcc_assert (integer_zerop (loop->from[0]));
2296 /* Split the constructor size into a static part and a dynamic part.
2297 Allocate the static size up-front and record whether the dynamic
2298 size might be nonzero. */
2299 mpz_init (size);
2300 dynamic = gfc_get_array_constructor_size (&size, c);
2301 mpz_sub_ui (size, size, 1);
2302 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2303 mpz_clear (size);
2306 /* Special case constant array constructors. */
2307 if (!dynamic)
2309 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2310 if (nelem > 0)
2312 tree size = constant_array_constructor_loop_size (loop);
2313 if (size && compare_tree_int (size, nelem) == 0)
2315 trans_constant_array_constructor (ss, type);
2316 goto finish;
2321 gfc_trans_create_temp_array (&outer_loop->pre, &outer_loop->post, ss, type,
2322 NULL_TREE, dynamic, true, false, where);
2324 desc = ss_info->data.array.descriptor;
2325 offset = gfc_index_zero_node;
2326 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2327 TREE_NO_WARNING (offsetvar) = 1;
2328 TREE_USED (offsetvar) = 0;
2329 gfc_trans_array_constructor_value (&outer_loop->pre, type, desc, c,
2330 &offset, &offsetvar, dynamic);
2332 /* If the array grows dynamically, the upper bound of the loop variable
2333 is determined by the array's final upper bound. */
2334 if (dynamic)
2336 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2337 gfc_array_index_type,
2338 offsetvar, gfc_index_one_node);
2339 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2340 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2341 if (*loop_ubound0 && TREE_CODE (*loop_ubound0) == VAR_DECL)
2342 gfc_add_modify (&outer_loop->pre, *loop_ubound0, tmp);
2343 else
2344 *loop_ubound0 = tmp;
2347 if (TREE_USED (offsetvar))
2348 pushdecl (offsetvar);
2349 else
2350 gcc_assert (INTEGER_CST_P (offset));
2352 #if 0
2353 /* Disable bound checking for now because it's probably broken. */
2354 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2356 gcc_unreachable ();
2358 #endif
2360 finish:
2361 /* Restore old values of globals. */
2362 first_len = old_first_len;
2363 first_len_val = old_first_len_val;
2364 typespec_chararray_ctor = old_typespec_chararray_ctor;
2368 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2369 called after evaluating all of INFO's vector dimensions. Go through
2370 each such vector dimension and see if we can now fill in any missing
2371 loop bounds. */
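/* For example, for A(V) where the vector subscript V has bounds (1:5),
   the missing loop bound becomes ubound - lbound = 4, giving the
   zero-based iterations 0..4.  */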
2373 static void
2374 set_vector_loop_bounds (gfc_ss * ss)
2376 gfc_loopinfo *loop, *outer_loop;
2377 gfc_array_info *info;
2378 gfc_se se;
2379 tree tmp;
2380 tree desc;
2381 tree zero;
2382 int n;
2383 int dim;
2385 outer_loop = outermost_loop (ss->loop);
2387 info = &ss->info->data.array;
2389 for (; ss; ss = ss->parent)
2391 loop = ss->loop;
2393 for (n = 0; n < loop->dimen; n++)
2395 dim = ss->dim[n];
2396 if (info->ref->u.ar.dimen_type[dim] != DIMEN_VECTOR
2397 || loop->to[n] != NULL)
2398 continue;
2400 /* Loop variable N indexes vector dimension DIM, and we don't
2401 yet know the upper bound of loop variable N. Set it to the
2402 difference between the vector's upper and lower bounds. */
2403 gcc_assert (loop->from[n] == gfc_index_zero_node);
2404 gcc_assert (info->subscript[dim]
2405 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2407 gfc_init_se (&se, NULL);
2408 desc = info->subscript[dim]->info->data.array.descriptor;
2409 zero = gfc_rank_cst[0];
2410 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2411 gfc_array_index_type,
2412 gfc_conv_descriptor_ubound_get (desc, zero),
2413 gfc_conv_descriptor_lbound_get (desc, zero));
2414 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2415 loop->to[n] = tmp;
2421 /* Tells whether a scalar argument to an elemental procedure is saved out
2422 of a scalarization loop as a value or as a reference. */
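/* For example, a variable of derived or class type is saved as a
   reference to avoid a copy, whereas a plain numeric scalar expression is
   evaluated into a temporary before the loop.  */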
2424 bool
2425 gfc_scalar_elemental_arg_saved_as_reference (gfc_ss_info * ss_info)
2427 if (ss_info->type != GFC_SS_REFERENCE)
2428 return false;
2430 /* If the actual argument can be absent (in other words, it can
2431 be a NULL reference), don't try to evaluate it; pass instead
2432 the reference directly. */
2433 if (ss_info->can_be_null_ref)
2434 return true;
2436 /* If the expression is of polymorphic type, its actual size is not known,
2437 so we avoid copying it anywhere. */
2438 if (ss_info->data.scalar.dummy_arg
2439 && ss_info->data.scalar.dummy_arg->ts.type == BT_CLASS
2440 && ss_info->expr->ts.type == BT_CLASS)
2441 return true;
2443 /* If the expression is a data reference of aggregate type,
2444 avoid a copy by saving a reference to the content. */
2445 if (ss_info->expr->expr_type == EXPR_VARIABLE
2446 && (ss_info->expr->ts.type == BT_DERIVED
2447 || ss_info->expr->ts.type == BT_CLASS))
2448 return true;
2450 /* Otherwise the expression is evaluated to a temporary variable before the
2451 scalarization loop. */
2452 return false;
2456 /* Add the pre and post chains for all the scalar expressions in a SS chain
2457 to loop. This is called after the loop parameters have been calculated,
2458 but before the actual scalarizing loops. */
2460 static void
2461 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2462 locus * where)
2464 gfc_loopinfo *nested_loop, *outer_loop;
2465 gfc_se se;
2466 gfc_ss_info *ss_info;
2467 gfc_array_info *info;
2468 gfc_expr *expr;
2469 int n;
2471 /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
2472 arguments could get evaluated multiple times. */
2473 if (ss->is_alloc_lhs)
2474 return;
2476 outer_loop = outermost_loop (loop);
2478 /* TODO: This can generate bad code if there are ordering dependencies,
2479 e.g., a callee allocated function and an unknown size constructor. */
2480 gcc_assert (ss != NULL);
2482 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2484 gcc_assert (ss);
2486 /* Cross loop arrays are handled from within the most nested loop. */
2487 if (ss->nested_ss != NULL)
2488 continue;
2490 ss_info = ss->info;
2491 expr = ss_info->expr;
2492 info = &ss_info->data.array;
2494 switch (ss_info->type)
2496 case GFC_SS_SCALAR:
2497 /* Scalar expression. Evaluate this now. This includes elemental
2498 dimension indices, but not array section bounds. */
2499 gfc_init_se (&se, NULL);
2500 gfc_conv_expr (&se, expr);
2501 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2503 if (expr->ts.type != BT_CHARACTER
2504 && !gfc_is_alloc_class_scalar_function (expr))
2506 /* Move the evaluation of scalar expressions outside the
2507 scalarization loop, except for WHERE assignments. */
2508 if (subscript)
2509 se.expr = convert(gfc_array_index_type, se.expr);
2510 if (!ss_info->where)
2511 se.expr = gfc_evaluate_now (se.expr, &outer_loop->pre);
2512 gfc_add_block_to_block (&outer_loop->pre, &se.post);
2514 else
2515 gfc_add_block_to_block (&outer_loop->post, &se.post);
2517 ss_info->data.scalar.value = se.expr;
2518 ss_info->string_length = se.string_length;
2519 break;
2521 case GFC_SS_REFERENCE:
2522 /* Scalar argument to elemental procedure. */
2523 gfc_init_se (&se, NULL);
2524 if (gfc_scalar_elemental_arg_saved_as_reference (ss_info))
2525 gfc_conv_expr_reference (&se, expr);
2526 else
2528 /* Evaluate the argument outside the loop and pass
2529 a reference to the value. */
2530 gfc_conv_expr (&se, expr);
2533 /* Ensure that a pointer to the string is stored. */
2534 if (expr->ts.type == BT_CHARACTER)
2535 gfc_conv_string_parameter (&se);
2537 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2538 gfc_add_block_to_block (&outer_loop->post, &se.post);
2539 if (gfc_is_class_scalar_expr (expr))
2540 /* This is necessary because the dynamic type will always be
2541 larger than the declared type. In consequence, assigning
2542 the value to a temporary could segfault.
2543 OOP-TODO: see if this is generally correct or whether the value
2544 has to be written to an allocated temporary, whose address
2545 is passed via ss_info. */
2546 ss_info->data.scalar.value = se.expr;
2547 else
2548 ss_info->data.scalar.value = gfc_evaluate_now (se.expr,
2549 &outer_loop->pre);
2551 ss_info->string_length = se.string_length;
2552 break;
2554 case GFC_SS_SECTION:
2555 /* Add the expressions for scalar and vector subscripts. */
2556 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2557 if (info->subscript[n])
2558 gfc_add_loop_ss_code (loop, info->subscript[n], true, where);
2560 set_vector_loop_bounds (ss);
2561 break;
2563 case GFC_SS_VECTOR:
2564 /* Get the vector's descriptor and store it in SS. */
2565 gfc_init_se (&se, NULL);
2566 gfc_conv_expr_descriptor (&se, expr);
2567 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2568 gfc_add_block_to_block (&outer_loop->post, &se.post);
2569 info->descriptor = se.expr;
2570 break;
2572 case GFC_SS_INTRINSIC:
2573 gfc_add_intrinsic_ss_code (loop, ss);
2574 break;
2576 case GFC_SS_FUNCTION:
2577 /* Array function return value. We call the function and save its
2578 result in a temporary for use inside the loop. */
2579 gfc_init_se (&se, NULL);
2580 se.loop = loop;
2581 se.ss = ss;
2582 gfc_conv_expr (&se, expr);
2583 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2584 gfc_add_block_to_block (&outer_loop->post, &se.post);
2585 ss_info->string_length = se.string_length;
2586 break;
2588 case GFC_SS_CONSTRUCTOR:
2589 if (expr->ts.type == BT_CHARACTER
2590 && ss_info->string_length == NULL
2591 && expr->ts.u.cl
2592 && expr->ts.u.cl->length)
2594 gfc_init_se (&se, NULL);
2595 gfc_conv_expr_type (&se, expr->ts.u.cl->length,
2596 gfc_charlen_type_node);
2597 ss_info->string_length = se.expr;
2598 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2599 gfc_add_block_to_block (&outer_loop->post, &se.post);
2601 trans_array_constructor (ss, where);
2602 break;
2604 case GFC_SS_TEMP:
2605 case GFC_SS_COMPONENT:
2606 /* Do nothing. These are handled elsewhere. */
2607 break;
2609 default:
2610 gcc_unreachable ();
2614 if (!subscript)
2615 for (nested_loop = loop->nested; nested_loop;
2616 nested_loop = nested_loop->next)
2617 gfc_add_loop_ss_code (nested_loop, nested_loop->ss, subscript, where);
2621 /* Translate expressions for the descriptor and data pointer of a SS. */
2622 /*GCC ARRAYS*/
2624 static void
2625 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2627 gfc_se se;
2628 gfc_ss_info *ss_info;
2629 gfc_array_info *info;
2630 tree tmp;
2632 ss_info = ss->info;
2633 info = &ss_info->data.array;
2635 /* Get the descriptor for the array to be scalarized. */
2636 gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
2637 gfc_init_se (&se, NULL);
2638 se.descriptor_only = 1;
2639 gfc_conv_expr_lhs (&se, ss_info->expr);
2640 gfc_add_block_to_block (block, &se.pre);
2641 info->descriptor = se.expr;
2642 ss_info->string_length = se.string_length;
2644 if (base)
2646 /* Also the data pointer. */
2647 tmp = gfc_conv_array_data (se.expr);
2648 /* If this is a variable or address of a variable we use it directly.
2649 Otherwise we must evaluate it now to avoid breaking dependency
2650 analysis by pulling the expressions for elemental array indices
2651 inside the loop. */
2652 if (!(DECL_P (tmp)
2653 || (TREE_CODE (tmp) == ADDR_EXPR
2654 && DECL_P (TREE_OPERAND (tmp, 0)))))
2655 tmp = gfc_evaluate_now (tmp, block);
2656 info->data = tmp;
2658 tmp = gfc_conv_array_offset (se.expr);
2659 info->offset = gfc_evaluate_now (tmp, block);
2661 /* Make absolutely sure that the saved_offset is indeed saved
2662 so that the variable is still accessible after the loops
2663 are translated. */
2664 info->saved_offset = info->offset;
2669 /* Initialize a gfc_loopinfo structure. */
2671 void
2672 gfc_init_loopinfo (gfc_loopinfo * loop)
2674 int n;
2676 memset (loop, 0, sizeof (gfc_loopinfo));
2677 gfc_init_block (&loop->pre);
2678 gfc_init_block (&loop->post);
2680 /* Initially scalarize in order and default to no loop reversal. */
2681 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2683 loop->order[n] = n;
2684 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2687 loop->ss = gfc_ss_terminator;
2691 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2692 chain. */
2694 void
2695 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2697 se->loop = loop;
2701 /* Return an expression for the data pointer of an array. */
2703 tree
2704 gfc_conv_array_data (tree descriptor)
2706 tree type;
2708 type = TREE_TYPE (descriptor);
2709 if (GFC_ARRAY_TYPE_P (type))
2711 if (TREE_CODE (type) == POINTER_TYPE)
2712 return descriptor;
2713 else
2715 /* Descriptorless arrays. */
2716 return gfc_build_addr_expr (NULL_TREE, descriptor);
2719 else
2720 return gfc_conv_descriptor_data_get (descriptor);
2724 /* Return an expression for the base offset of an array. */
2726 tree
2727 gfc_conv_array_offset (tree descriptor)
2729 tree type;
2731 type = TREE_TYPE (descriptor);
2732 if (GFC_ARRAY_TYPE_P (type))
2733 return GFC_TYPE_ARRAY_OFFSET (type);
2734 else
2735 return gfc_conv_descriptor_offset_get (descriptor);
2739 /* Get an expression for the array stride. */
2741 tree
2742 gfc_conv_array_stride (tree descriptor, int dim)
2744 tree tmp;
2745 tree type;
2747 type = TREE_TYPE (descriptor);
2749 /* For descriptorless arrays use the array size. */
2750 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2751 if (tmp != NULL_TREE)
2752 return tmp;
2754 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2755 return tmp;
2759 /* Like gfc_conv_array_stride, but for the lower bound. */
2761 tree
2762 gfc_conv_array_lbound (tree descriptor, int dim)
2764 tree tmp;
2765 tree type;
2767 type = TREE_TYPE (descriptor);
2769 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2770 if (tmp != NULL_TREE)
2771 return tmp;
2773 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2774 return tmp;
2778 /* Like gfc_conv_array_stride, but for the upper bound. */
2780 tree
2781 gfc_conv_array_ubound (tree descriptor, int dim)
2783 tree tmp;
2784 tree type;
2786 type = TREE_TYPE (descriptor);
2788 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2789 if (tmp != NULL_TREE)
2790 return tmp;
2792 /* This should only ever happen when passing an assumed shape array
2793 as an actual parameter. The value will never be used. */
2794 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2795 return gfc_index_zero_node;
2797 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2798 return tmp;
2802 /* Generate code to perform an array index bound check. */
2804 static tree
2805 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2806 locus * where, bool check_upper)
2808 tree fault;
2809 tree tmp_lo, tmp_up;
2810 tree descriptor;
2811 char *msg;
2812 const char * name = NULL;
2814 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2815 return index;
2817 descriptor = ss->info->data.array.descriptor;
2819 index = gfc_evaluate_now (index, &se->pre);
2821 /* We find a name for the error message. */
2822 name = ss->info->expr->symtree->n.sym->name;
2823 gcc_assert (name != NULL);
2825 if (TREE_CODE (descriptor) == VAR_DECL)
2826 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2828 /* If upper bound is present, include both bounds in the error message. */
2829 if (check_upper)
2831 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2832 tmp_up = gfc_conv_array_ubound (descriptor, n);
2834 if (name)
2835 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
2836 "outside of expected range (%%ld:%%ld)", n+1, name);
2837 else
2838 msg = xasprintf ("Index '%%ld' of dimension %d "
2839 "outside of expected range (%%ld:%%ld)", n+1);
2841 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2842 index, tmp_lo);
2843 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2844 fold_convert (long_integer_type_node, index),
2845 fold_convert (long_integer_type_node, tmp_lo),
2846 fold_convert (long_integer_type_node, tmp_up));
2847 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2848 index, tmp_up);
2849 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2850 fold_convert (long_integer_type_node, index),
2851 fold_convert (long_integer_type_node, tmp_lo),
2852 fold_convert (long_integer_type_node, tmp_up));
2853 free (msg);
2855 else
2857 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2859 if (name)
2860 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
2861 "below lower bound of %%ld", n+1, name);
2862 else
2863 msg = xasprintf ("Index '%%ld' of dimension %d "
2864 "below lower bound of %%ld", n+1);
2866 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2867 index, tmp_lo);
2868 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2869 fold_convert (long_integer_type_node, index),
2870 fold_convert (long_integer_type_node, tmp_lo));
2871 free (msg);
2874 return index;
2878 /* Return the offset for an index. Performs bound checking for elemental
2879 dimensions. Single element references are processed separately.
2880 DIM is the array dimension, I is the loop dimension. */
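/* Roughly speaking, for a scalarized DIMEN_RANGE dimension the zero-based
   loop variable is scaled by the section stride and shifted by the
   precomputed delta, so that successive loop values walk the elements
   selected by the section, e.g. every second element for A(2:10:2).  */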
2882 static tree
2883 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2884 gfc_array_ref * ar, tree stride)
2886 gfc_array_info *info;
2887 tree index;
2888 tree desc;
2889 tree data;
2891 info = &ss->info->data.array;
2893 /* Get the index into the array for this dimension. */
2894 if (ar)
2896 gcc_assert (ar->type != AR_ELEMENT);
2897 switch (ar->dimen_type[dim])
2899 case DIMEN_THIS_IMAGE:
2900 gcc_unreachable ();
2901 break;
2902 case DIMEN_ELEMENT:
2903 /* Elemental dimension. */
2904 gcc_assert (info->subscript[dim]
2905 && info->subscript[dim]->info->type == GFC_SS_SCALAR);
2906 /* We've already translated this value outside the loop. */
2907 index = info->subscript[dim]->info->data.scalar.value;
2909 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2910 ar->as->type != AS_ASSUMED_SIZE
2911 || dim < ar->dimen - 1);
2912 break;
2914 case DIMEN_VECTOR:
2915 gcc_assert (info && se->loop);
2916 gcc_assert (info->subscript[dim]
2917 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2918 desc = info->subscript[dim]->info->data.array.descriptor;
2920 /* Get a zero-based index into the vector. */
2921 index = fold_build2_loc (input_location, MINUS_EXPR,
2922 gfc_array_index_type,
2923 se->loop->loopvar[i], se->loop->from[i]);
2925 /* Multiply the index by the stride. */
2926 index = fold_build2_loc (input_location, MULT_EXPR,
2927 gfc_array_index_type,
2928 index, gfc_conv_array_stride (desc, 0));
2930 /* Read the vector to get an index into info->descriptor. */
2931 data = build_fold_indirect_ref_loc (input_location,
2932 gfc_conv_array_data (desc));
2933 index = gfc_build_array_ref (data, index, NULL);
2934 index = gfc_evaluate_now (index, &se->pre);
2935 index = fold_convert (gfc_array_index_type, index);
2937 /* Do any bounds checking on the final info->descriptor index. */
2938 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2939 ar->as->type != AS_ASSUMED_SIZE
2940 || dim < ar->dimen - 1);
2941 break;
2943 case DIMEN_RANGE:
2944 /* Scalarized dimension. */
2945 gcc_assert (info && se->loop);
2947 /* Multiply the loop variable by the stride and delta. */
2948 index = se->loop->loopvar[i];
2949 if (!integer_onep (info->stride[dim]))
2950 index = fold_build2_loc (input_location, MULT_EXPR,
2951 gfc_array_index_type, index,
2952 info->stride[dim]);
2953 if (!integer_zerop (info->delta[dim]))
2954 index = fold_build2_loc (input_location, PLUS_EXPR,
2955 gfc_array_index_type, index,
2956 info->delta[dim]);
2957 break;
2959 default:
2960 gcc_unreachable ();
2963 else
2965 /* Temporary array or derived type component. */
2966 gcc_assert (se->loop);
2967 index = se->loop->loopvar[se->loop->order[i]];
2969 /* Pointer functions can have stride[0] different from unity.
2970 Use the stride returned by the function call and stored in
2971 the descriptor for the temporary. */
2972 if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
2973 && se->ss->info->expr
2974 && se->ss->info->expr->symtree
2975 && se->ss->info->expr->symtree->n.sym->result
2976 && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
2977 stride = gfc_conv_descriptor_stride_get (info->descriptor,
2978 gfc_rank_cst[dim]);
2980 if (info->delta[dim] && !integer_zerop (info->delta[dim]))
2981 index = fold_build2_loc (input_location, PLUS_EXPR,
2982 gfc_array_index_type, index, info->delta[dim]);
2985 /* Multiply by the stride. */
2986 if (!integer_onep (stride))
2987 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2988 index, stride);
2990 return index;
2994 /* Build a scalarized array reference using the vptr 'size'. */
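/* Because the dynamic type of a class array element is only known at run
   time, the element address is computed as base + index * size, with the
   size taken from the class's vtab (via gfc_class_vtab_size_get).  */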
2996 static bool
2997 build_class_array_ref (gfc_se *se, tree base, tree index)
2999 tree type;
3000 tree size;
3001 tree offset;
3002 tree decl;
3003 tree tmp;
3004 gfc_expr *expr = se->ss->info->expr;
3005 gfc_ref *ref;
3006 gfc_ref *class_ref;
3007 gfc_typespec *ts;
3009 if (expr == NULL
3010 || (expr->ts.type != BT_CLASS
3011 && !gfc_is_alloc_class_array_function (expr)))
3012 return false;
3014 if (expr->symtree && expr->symtree->n.sym->ts.type == BT_CLASS)
3015 ts = &expr->symtree->n.sym->ts;
3016 else
3017 ts = NULL;
3018 class_ref = NULL;
3020 for (ref = expr->ref; ref; ref = ref->next)
3022 if (ref->type == REF_COMPONENT
3023 && ref->u.c.component->ts.type == BT_CLASS
3024 && ref->next && ref->next->type == REF_COMPONENT
3025 && strcmp (ref->next->u.c.component->name, "_data") == 0
3026 && ref->next->next
3027 && ref->next->next->type == REF_ARRAY
3028 && ref->next->next->u.ar.type != AR_ELEMENT)
3030 ts = &ref->u.c.component->ts;
3031 class_ref = ref;
3032 break;
3036 if (ts == NULL)
3037 return false;
3039 if (class_ref == NULL && expr->symtree->n.sym->attr.function
3040 && expr->symtree->n.sym == expr->symtree->n.sym->result)
3042 gcc_assert (expr->symtree->n.sym->backend_decl == current_function_decl);
3043 decl = gfc_get_fake_result_decl (expr->symtree->n.sym, 0);
3045 else if (gfc_is_alloc_class_array_function (expr))
3047 size = NULL_TREE;
3048 decl = NULL_TREE;
3049 for (tmp = base; tmp; tmp = TREE_OPERAND (tmp, 0))
3051 tree type;
3052 type = TREE_TYPE (tmp);
3053 while (type)
3055 if (GFC_CLASS_TYPE_P (type))
3056 decl = tmp;
3057 if (type != TYPE_CANONICAL (type))
3058 type = TYPE_CANONICAL (type);
3059 else
3060 type = NULL_TREE;
3062 if (TREE_CODE (tmp) == VAR_DECL)
3063 break;
3066 if (decl == NULL_TREE)
3067 return false;
3069 else if (class_ref == NULL)
3071 decl = expr->symtree->n.sym->backend_decl;
3072 /* For class arrays the tree containing the class is stored in
3073 GFC_DECL_SAVED_DESCRIPTOR of the sym's backend_decl.
3074 For all others it's sym's backend_decl directly. */
3075 if (DECL_LANG_SPECIFIC (decl) && GFC_DECL_SAVED_DESCRIPTOR (decl))
3076 decl = GFC_DECL_SAVED_DESCRIPTOR (decl);
3078 else
3080 /* Remove everything after the last class reference, convert the
3081 expression, and then recover its tail end once more. */
3082 gfc_se tmpse;
3083 ref = class_ref->next;
3084 class_ref->next = NULL;
3085 gfc_init_se (&tmpse, NULL);
3086 gfc_conv_expr (&tmpse, expr);
3087 decl = tmpse.expr;
3088 class_ref->next = ref;
3091 if (POINTER_TYPE_P (TREE_TYPE (decl)))
3092 decl = build_fold_indirect_ref_loc (input_location, decl);
3094 if (!GFC_CLASS_TYPE_P (TREE_TYPE (decl)))
3095 return false;
3097 size = gfc_class_vtab_size_get (decl);
3099 /* Build the address of the element. */
3100 type = TREE_TYPE (TREE_TYPE (base));
3101 size = fold_convert (TREE_TYPE (index), size);
3102 offset = fold_build2_loc (input_location, MULT_EXPR,
3103 gfc_array_index_type,
3104 index, size);
3105 tmp = gfc_build_addr_expr (pvoid_type_node, base);
3106 tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
3107 tmp = fold_convert (build_pointer_type (type), tmp);
3109 /* Return the element in the se expression. */
3110 se->expr = build_fold_indirect_ref_loc (input_location, tmp);
3111 return true;
3115 /* Build a scalarized reference to an array. */
3117 static void
3118 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
3120 gfc_array_info *info;
3121 tree decl = NULL_TREE;
3122 tree index;
3123 tree tmp;
3124 gfc_ss *ss;
3125 gfc_expr *expr;
3126 int n;
3128 ss = se->ss;
3129 expr = ss->info->expr;
3130 info = &ss->info->data.array;
3131 if (ar)
3132 n = se->loop->order[0];
3133 else
3134 n = 0;
3136 index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
3137 /* Add the offset for this dimension to the stored offset for all other
3138 dimensions. */
3139 if (info->offset && !integer_zerop (info->offset))
3140 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3141 index, info->offset);
3143 if (expr && is_subref_array (expr))
3144 decl = expr->symtree->n.sym->backend_decl;
3146 tmp = build_fold_indirect_ref_loc (input_location, info->data);
3148 /* Use the vptr 'size' field to access an element of a class
3149 array. */
3150 if (build_class_array_ref (se, tmp, index))
3151 return;
3153 se->expr = gfc_build_array_ref (tmp, index, decl);
3157 /* Translate access of temporary array. */
3159 void
3160 gfc_conv_tmp_array_ref (gfc_se * se)
3162 se->string_length = se->ss->info->string_length;
3163 gfc_conv_scalarized_array_ref (se, NULL);
3164 gfc_advance_se_ss_chain (se);
3167 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
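/* Constant terms accumulate in *CST_OFFSET while variable terms are
   chained into *OFFSET; for example, starting from zero and adding 4,
   I*STR and 8 in turn leaves *CST_OFFSET equal to 12 and *OFFSET equal
   to I*STR.  */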
3169 static void
3170 add_to_offset (tree *cst_offset, tree *offset, tree t)
3172 if (TREE_CODE (t) == INTEGER_CST)
3173 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
3174 else
3176 if (!integer_zerop (*offset))
3177 *offset = fold_build2_loc (input_location, PLUS_EXPR,
3178 gfc_array_index_type, *offset, t);
3179 else
3180 *offset = t;
3185 static tree
3186 build_array_ref (tree desc, tree offset, tree decl, tree vptr)
3188 tree tmp;
3189 tree type;
3190 tree cdecl;
3191 bool classarray = false;
3193 /* For class arrays the class declaration is stored in the saved
3194 descriptor. */
3195 if (INDIRECT_REF_P (desc)
3196 && DECL_LANG_SPECIFIC (TREE_OPERAND (desc, 0))
3197 && GFC_DECL_SAVED_DESCRIPTOR (TREE_OPERAND (desc, 0)))
3198 cdecl = gfc_class_data_get (GFC_DECL_SAVED_DESCRIPTOR (
3199 TREE_OPERAND (desc, 0)));
3200 else
3201 cdecl = desc;
3203 /* Class container types are not always marked GFC_CLASS_TYPE_P,
3204 but the canonical type is. */
3205 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (cdecl))
3206 && TREE_CODE (cdecl) == COMPONENT_REF)
3208 type = TREE_TYPE (TREE_OPERAND (cdecl, 0));
3209 if (TYPE_CANONICAL (type)
3210 && GFC_CLASS_TYPE_P (TYPE_CANONICAL (type)))
3212 type = TREE_TYPE (desc);
3213 classarray = true;
3216 else
3217 type = NULL;
3219 /* Class array references need special treatment because the assigned
3220 type size needs to be used to point to the element. */
3221 if (classarray)
3223 type = gfc_get_element_type (type);
3224 tmp = TREE_OPERAND (cdecl, 0);
3225 tmp = gfc_get_class_array_ref (offset, tmp);
3226 tmp = fold_convert (build_pointer_type (type), tmp);
3227 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3228 return tmp;
3231 tmp = gfc_conv_array_data (desc);
3232 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3233 tmp = gfc_build_array_ref (tmp, offset, decl, vptr);
3234 return tmp;
3238 /* Build an array reference. se->expr already holds the array descriptor.
3239 This should be either a variable, indirect variable reference or component
3240 reference. For arrays which do not have a descriptor, se->expr will be
3241 the data pointer.
3242 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
3244 void
3245 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_expr *expr,
3246 locus * where)
3248 int n;
3249 tree offset, cst_offset;
3250 tree tmp;
3251 tree stride;
3252 gfc_se indexse;
3253 gfc_se tmpse;
3254 gfc_symbol * sym = expr->symtree->n.sym;
3255 char *var_name = NULL;
3257 if (ar->dimen == 0)
3259 gcc_assert (ar->codimen);
3261 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
3262 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
3263 else
3265 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
3266 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
3267 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
3269 /* Use the actual tree type and not the wrapped coarray. */
3270 if (!se->want_pointer)
3271 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
3272 se->expr);
3275 return;
3278 /* Handle scalarized references separately. */
3279 if (ar->type != AR_ELEMENT)
3281 gfc_conv_scalarized_array_ref (se, ar);
3282 gfc_advance_se_ss_chain (se);
3283 return;
3286 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3288 size_t len;
3289 gfc_ref *ref;
3291 len = strlen (sym->name) + 1;
3292 for (ref = expr->ref; ref; ref = ref->next)
3294 if (ref->type == REF_ARRAY && &ref->u.ar == ar)
3295 break;
3296 if (ref->type == REF_COMPONENT)
3297 len += 1 + strlen (ref->u.c.component->name);
3300 var_name = XALLOCAVEC (char, len);
3301 strcpy (var_name, sym->name);
3303 for (ref = expr->ref; ref; ref = ref->next)
3305 if (ref->type == REF_ARRAY && &ref->u.ar == ar)
3306 break;
3307 if (ref->type == REF_COMPONENT)
3309 strcat (var_name, "%%");
3310 strcat (var_name, ref->u.c.component->name);
3315 cst_offset = offset = gfc_index_zero_node;
3316 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
3318 /* Calculate the offsets from all the dimensions. Make sure to associate
3319 the final offset so that we form a chain of loop invariant summands. */
3320 for (n = ar->dimen - 1; n >= 0; n--)
3322 /* Calculate the index for this dimension. */
3323 gfc_init_se (&indexse, se);
3324 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
3325 gfc_add_block_to_block (&se->pre, &indexse.pre);
3327 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3329 /* Check array bounds. */
3330 tree cond;
3331 char *msg;
3333 /* Evaluate the indexse.expr only once. */
3334 indexse.expr = save_expr (indexse.expr);
3336 /* Lower bound. */
3337 tmp = gfc_conv_array_lbound (se->expr, n);
3338 if (sym->attr.temporary)
3340 gfc_init_se (&tmpse, se);
3341 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
3342 gfc_array_index_type);
3343 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3344 tmp = tmpse.expr;
3347 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
3348 indexse.expr, tmp);
3349 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
3350 "below lower bound of %%ld", n+1, var_name);
3351 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3352 fold_convert (long_integer_type_node,
3353 indexse.expr),
3354 fold_convert (long_integer_type_node, tmp));
3355 free (msg);
3357 /* Upper bound, but not for the last dimension of assumed-size
3358 arrays. */
3359 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
3361 tmp = gfc_conv_array_ubound (se->expr, n);
3362 if (sym->attr.temporary)
3364 gfc_init_se (&tmpse, se);
3365 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
3366 gfc_array_index_type);
3367 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3368 tmp = tmpse.expr;
3371 cond = fold_build2_loc (input_location, GT_EXPR,
3372 boolean_type_node, indexse.expr, tmp);
3373 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
3374 "above upper bound of %%ld", n+1, var_name);
3375 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3376 fold_convert (long_integer_type_node,
3377 indexse.expr),
3378 fold_convert (long_integer_type_node, tmp));
3379 free (msg);
3383 /* Multiply the index by the stride. */
3384 stride = gfc_conv_array_stride (se->expr, n);
3385 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3386 indexse.expr, stride);
3388 /* And add it to the total. */
3389 add_to_offset (&cst_offset, &offset, tmp);
3392 if (!integer_zerop (cst_offset))
3393 offset = fold_build2_loc (input_location, PLUS_EXPR,
3394 gfc_array_index_type, offset, cst_offset);
3396 se->expr = build_array_ref (se->expr, offset, sym->ts.type == BT_CLASS ?
3397 NULL_TREE : sym->backend_decl, se->class_vptr);
3401 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
3402 LOOP_DIM dimension (if any) to array's offset. */
3404 static void
3405 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
3406 gfc_array_ref *ar, int array_dim, int loop_dim)
3408 gfc_se se;
3409 gfc_array_info *info;
3410 tree stride, index;
3412 info = &ss->info->data.array;
3414 gfc_init_se (&se, NULL);
3415 se.loop = loop;
3416 se.expr = info->descriptor;
3417 stride = gfc_conv_array_stride (info->descriptor, array_dim);
3418 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
3419 gfc_add_block_to_block (pblock, &se.pre);
3421 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
3422 gfc_array_index_type,
3423 info->offset, index);
3424 info->offset = gfc_evaluate_now (info->offset, pblock);
3428 /* Generate the code to be executed immediately before entering a
3429 scalarization loop. */
3431 static void
3432 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
3433 stmtblock_t * pblock)
3435 tree stride;
3436 gfc_ss_info *ss_info;
3437 gfc_array_info *info;
3438 gfc_ss_type ss_type;
3439 gfc_ss *ss, *pss;
3440 gfc_loopinfo *ploop;
3441 gfc_array_ref *ar;
3442 int i;
3444 /* This code will be executed before entering the scalarization loop
3445 for this dimension. */
3446 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3448 ss_info = ss->info;
3450 if ((ss_info->useflags & flag) == 0)
3451 continue;
3453 ss_type = ss_info->type;
3454 if (ss_type != GFC_SS_SECTION
3455 && ss_type != GFC_SS_FUNCTION
3456 && ss_type != GFC_SS_CONSTRUCTOR
3457 && ss_type != GFC_SS_COMPONENT)
3458 continue;
3460 info = &ss_info->data.array;
3462 gcc_assert (dim < ss->dimen);
3463 gcc_assert (ss->dimen == loop->dimen);
3465 if (info->ref)
3466 ar = &info->ref->u.ar;
3467 else
3468 ar = NULL;
3470 if (dim == loop->dimen - 1 && loop->parent != NULL)
3472 /* If we are in the outermost dimension of this loop, the previous
3473 dimension shall be in the parent loop. */
3474 gcc_assert (ss->parent != NULL);
3476 pss = ss->parent;
3477 ploop = loop->parent;
3479 /* SS and SS->parent refer to the same array. */
3480 gcc_assert (ss_info == pss->info);
3482 else
3484 ploop = loop;
3485 pss = ss;
3488 if (dim == loop->dimen - 1)
3489 i = 0;
3490 else
3491 i = dim + 1;
3493 /* For the time being, there is no loop reordering. */
3494 gcc_assert (i == ploop->order[i]);
3495 i = ploop->order[i];
3497 if (dim == loop->dimen - 1 && loop->parent == NULL)
3499 stride = gfc_conv_array_stride (info->descriptor,
3500 innermost_ss (ss)->dim[i]);
3502 /* Calculate the stride of the innermost loop. Hopefully this will
3503 allow the backend optimizers to do their stuff more effectively. */
3505 info->stride0 = gfc_evaluate_now (stride, pblock);
3507 /* For the outermost loop calculate the offset due to any
3508 elemental dimensions. It will have been initialized with the
3509 base offset of the array. */
3510 if (info->ref)
3512 for (i = 0; i < ar->dimen; i++)
3514 if (ar->dimen_type[i] != DIMEN_ELEMENT)
3515 continue;
3517 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
3521 else
3522 /* Add the offset for the previous loop dimension. */
3523 add_array_offset (pblock, ploop, ss, ar, pss->dim[i], i);
3525 /* Remember this offset for the second loop. */
3526 if (dim == loop->temp_dim - 1 && loop->parent == NULL)
3527 info->saved_offset = info->offset;
3532 /* Start a scalarized expression. Creates a scope and declares loop
3533 variables. */
3535 void
3536 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
3538 int dim;
3539 int n;
3540 int flags;
3542 gcc_assert (!loop->array_parameter);
3544 for (dim = loop->dimen - 1; dim >= 0; dim--)
3546 n = loop->order[dim];
3548 gfc_start_block (&loop->code[n]);
3550 /* Create the loop variable. */
3551 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
3553 if (dim < loop->temp_dim)
3554 flags = 3;
3555 else
3556 flags = 1;
3557 /* Calculate values that will be constant within this loop. */
3558 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
3560 gfc_start_block (pbody);
3564 /* Generates the actual loop code for a scalarization loop. */
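/* Apart from the OpenMP workshare case, the generated structure is roughly:
     S = from;
     loop:
       if (S > to) goto exit;
       <body>
       S = S + 1;
     end loop;   (LOOP_EXPR jumps back to the top)
     exit:
   with the comparison and the increment reversed when the loop is run
   backwards.  */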
3566 void
3567 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
3568 stmtblock_t * pbody)
3570 stmtblock_t block;
3571 tree cond;
3572 tree tmp;
3573 tree loopbody;
3574 tree exit_label;
3575 tree stmt;
3576 tree init;
3577 tree incr;
3579 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
3580 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
3581 && n == loop->dimen - 1)
3583 /* We create an OMP_FOR construct for the outermost scalarized loop. */
3584 init = make_tree_vec (1);
3585 cond = make_tree_vec (1);
3586 incr = make_tree_vec (1);
3588 /* Cycle statement is implemented with a goto. Exit statement must not
3589 be present for this loop. */
3590 exit_label = gfc_build_label_decl (NULL_TREE);
3591 TREE_USED (exit_label) = 1;
3593 /* Label for cycle statements (if needed). */
3594 tmp = build1_v (LABEL_EXPR, exit_label);
3595 gfc_add_expr_to_block (pbody, tmp);
3597 stmt = make_node (OMP_FOR);
3599 TREE_TYPE (stmt) = void_type_node;
3600 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
3602 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
3603 OMP_CLAUSE_SCHEDULE);
3604 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
3605 = OMP_CLAUSE_SCHEDULE_STATIC;
3606 if (ompws_flags & OMPWS_NOWAIT)
3607 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3608 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3610 /* Initialize the loopvar. */
3611 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3612 loop->from[n]);
3613 OMP_FOR_INIT (stmt) = init;
3614 /* The exit condition. */
3615 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3616 boolean_type_node,
3617 loop->loopvar[n], loop->to[n]);
3618 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3619 OMP_FOR_COND (stmt) = cond;
3620 /* Increment the loopvar. */
3621 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3622 loop->loopvar[n], gfc_index_one_node);
3623 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3624 void_type_node, loop->loopvar[n], tmp);
3625 OMP_FOR_INCR (stmt) = incr;
3627 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3628 gfc_add_expr_to_block (&loop->code[n], stmt);
3630 else
3632 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3633 && (loop->temp_ss == NULL);
3635 loopbody = gfc_finish_block (pbody);
3637 if (reverse_loop)
3638 std::swap (loop->from[n], loop->to[n]);
3640 /* Initialize the loopvar. */
3641 if (loop->loopvar[n] != loop->from[n])
3642 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3644 exit_label = gfc_build_label_decl (NULL_TREE);
3646 /* Generate the loop body. */
3647 gfc_init_block (&block);
3649 /* The exit condition. */
3650 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3651 boolean_type_node, loop->loopvar[n], loop->to[n]);
3652 tmp = build1_v (GOTO_EXPR, exit_label);
3653 TREE_USED (exit_label) = 1;
3654 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3655 gfc_add_expr_to_block (&block, tmp);
3657 /* The main body. */
3658 gfc_add_expr_to_block (&block, loopbody);
3660 /* Increment the loopvar. */
3661 tmp = fold_build2_loc (input_location,
3662 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3663 gfc_array_index_type, loop->loopvar[n],
3664 gfc_index_one_node);
3666 gfc_add_modify (&block, loop->loopvar[n], tmp);
3668 /* Build the loop. */
3669 tmp = gfc_finish_block (&block);
3670 tmp = build1_v (LOOP_EXPR, tmp);
3671 gfc_add_expr_to_block (&loop->code[n], tmp);
3673 /* Add the exit label. */
3674 tmp = build1_v (LABEL_EXPR, exit_label);
3675 gfc_add_expr_to_block (&loop->code[n], tmp);
3681 /* Finishes and generates the loops for a scalarized expression. */
3683 void
3684 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3686 int dim;
3687 int n;
3688 gfc_ss *ss;
3689 stmtblock_t *pblock;
3690 tree tmp;
3692 pblock = body;
3693 /* Generate the loops. */
3694 for (dim = 0; dim < loop->dimen; dim++)
3696 n = loop->order[dim];
3697 gfc_trans_scalarized_loop_end (loop, n, pblock);
3698 loop->loopvar[n] = NULL_TREE;
3699 pblock = &loop->code[n];
3702 tmp = gfc_finish_block (pblock);
3703 gfc_add_expr_to_block (&loop->pre, tmp);
3705 /* Clear all the used flags. */
3706 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3707 if (ss->parent == NULL)
3708 ss->info->useflags = 0;
3712 /* Finish the main body of a scalarized expression, and start the secondary
3713 copying body. */
3715 void
3716 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3718 int dim;
3719 int n;
3720 stmtblock_t *pblock;
3721 gfc_ss *ss;
3723 pblock = body;
3724 /* We finish as many loops as are used by the temporary. */
3725 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3727 n = loop->order[dim];
3728 gfc_trans_scalarized_loop_end (loop, n, pblock);
3729 loop->loopvar[n] = NULL_TREE;
3730 pblock = &loop->code[n];
3733 /* We don't want to finish the outermost loop entirely. */
3734 n = loop->order[loop->temp_dim - 1];
3735 gfc_trans_scalarized_loop_end (loop, n, pblock);
3737 /* Restore the initial offsets. */
3738 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3740 gfc_ss_type ss_type;
3741 gfc_ss_info *ss_info;
3743 ss_info = ss->info;
3745 if ((ss_info->useflags & 2) == 0)
3746 continue;
3748 ss_type = ss_info->type;
3749 if (ss_type != GFC_SS_SECTION
3750 && ss_type != GFC_SS_FUNCTION
3751 && ss_type != GFC_SS_CONSTRUCTOR
3752 && ss_type != GFC_SS_COMPONENT)
3753 continue;
3755 ss_info->data.array.offset = ss_info->data.array.saved_offset;
3758 /* Restart all the inner loops we just finished. */
3759 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3761 n = loop->order[dim];
3763 gfc_start_block (&loop->code[n]);
3765 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3767 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3770 /* Start a block for the secondary copying code. */
3771 gfc_start_block (body);
3775 /* Precalculate (either lower or upper) bound of an array section.
3776 BLOCK: Block in which the (pre)calculation code will go.
3777 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3778 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3779 DESC: Array descriptor from which the bound will be picked if unspecified
3780 (either lower or upper bound according to LBOUND). */
3782 static void
3783 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3784 tree desc, int dim, bool lbound)
3786 gfc_se se;
3787 gfc_expr * input_val = values[dim];
3788 tree *output = &bounds[dim];
3791 if (input_val)
3793 /* Specified section bound. */
3794 gfc_init_se (&se, NULL);
3795 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3796 gfc_add_block_to_block (block, &se.pre);
3797 *output = se.expr;
3799 else
3801 /* No specific bound specified so use the bound of the array. */
3802 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3803 gfc_conv_array_ubound (desc, dim);
3805 *output = gfc_evaluate_now (*output, block);
3809 /* Calculate the start, end and stride of an array section. */
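/* For example, for A(2:10:2) this records start 2, end 10 and stride 2;
   a vector subscript instead gets a zero-based start, a unit stride and
   no end value.  */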
3811 static void
3812 gfc_conv_section_startstride (stmtblock_t * block, gfc_ss * ss, int dim)
3814 gfc_expr *stride = NULL;
3815 tree desc;
3816 gfc_se se;
3817 gfc_array_info *info;
3818 gfc_array_ref *ar;
3820 gcc_assert (ss->info->type == GFC_SS_SECTION);
3822 info = &ss->info->data.array;
3823 ar = &info->ref->u.ar;
3825 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3827 /* We use a zero-based index to access the vector. */
3828 info->start[dim] = gfc_index_zero_node;
3829 info->end[dim] = NULL;
3830 info->stride[dim] = gfc_index_one_node;
3831 return;
3834 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3835 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3836 desc = info->descriptor;
3837 stride = ar->stride[dim];
3839 /* Calculate the start of the range. For vector subscripts this will
3840 be the range of the vector. */
3841 evaluate_bound (block, info->start, ar->start, desc, dim, true);
3843 /* Similarly calculate the end. Although this is not used in the
3844 scalarizer, it is needed when checking bounds and where the end
3845 is an expression with side-effects. */
3846 evaluate_bound (block, info->end, ar->end, desc, dim, false);
3848 /* Calculate the stride. */
3849 if (stride == NULL)
3850 info->stride[dim] = gfc_index_one_node;
3851 else
3853 gfc_init_se (&se, NULL);
3854 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3855 gfc_add_block_to_block (block, &se.pre);
3856 info->stride[dim] = gfc_evaluate_now (se.expr, block);
3861 /* Calculates the range start and stride for a SS chain. Also gets the
3862 descriptor and data pointer. The range of vector subscripts is the size
3863 of the vector. Array bounds are also checked. */
3865 void
3866 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3868 int n;
3869 tree tmp;
3870 gfc_ss *ss;
3871 tree desc;
3873 gfc_loopinfo * const outer_loop = outermost_loop (loop);
3875 loop->dimen = 0;
3876 /* Determine the rank of the loop. */
3877 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3879 switch (ss->info->type)
3881 case GFC_SS_SECTION:
3882 case GFC_SS_CONSTRUCTOR:
3883 case GFC_SS_FUNCTION:
3884 case GFC_SS_COMPONENT:
3885 loop->dimen = ss->dimen;
3886 goto done;
3888 /* As usual, lbound and ubound are exceptions! */
3889 case GFC_SS_INTRINSIC:
3890 switch (ss->info->expr->value.function.isym->id)
3892 case GFC_ISYM_LBOUND:
3893 case GFC_ISYM_UBOUND:
3894 case GFC_ISYM_LCOBOUND:
3895 case GFC_ISYM_UCOBOUND:
3896 case GFC_ISYM_THIS_IMAGE:
3897 loop->dimen = ss->dimen;
3898 goto done;
3900 default:
3901 break;
3904 default:
3905 break;
3909 /* We should have determined the rank of the expression by now. If
3910 not, that's bad news. */
3911 gcc_unreachable ();
3913 done:
3914 /* Loop over all the SS in the chain. */
3915 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3917 gfc_ss_info *ss_info;
3918 gfc_array_info *info;
3919 gfc_expr *expr;
3921 ss_info = ss->info;
3922 expr = ss_info->expr;
3923 info = &ss_info->data.array;
3925 if (expr && expr->shape && !info->shape)
3926 info->shape = expr->shape;
3928 switch (ss_info->type)
3930 case GFC_SS_SECTION:
3931 /* Get the descriptor for the array. If it is a cross loops array,
3932 we got the descriptor already in the outermost loop. */
3933 if (ss->parent == NULL)
3934 gfc_conv_ss_descriptor (&outer_loop->pre, ss,
3935 !loop->array_parameter);
3937 for (n = 0; n < ss->dimen; n++)
3938 gfc_conv_section_startstride (&outer_loop->pre, ss, ss->dim[n]);
3939 break;
3941 case GFC_SS_INTRINSIC:
3942 switch (expr->value.function.isym->id)
3944 /* Fall through to supply start and stride. */
3945 case GFC_ISYM_LBOUND:
3946 case GFC_ISYM_UBOUND:
3948 gfc_expr *arg;
3950 /* This is the variant without DIM=... */
3951 gcc_assert (expr->value.function.actual->next->expr == NULL);
3953 arg = expr->value.function.actual->expr;
3954 if (arg->rank == -1)
3956 gfc_se se;
3957 tree rank, tmp;
3959 /* The rank (hence the return value's shape) is unknown;
3960 we have to retrieve it. */
3961 gfc_init_se (&se, NULL);
3962 se.descriptor_only = 1;
3963 gfc_conv_expr (&se, arg);
3964 /* This is a bare variable, so there is no preliminary
3965 or cleanup code. */
3966 gcc_assert (se.pre.head == NULL_TREE
3967 && se.post.head == NULL_TREE);
3968 rank = gfc_conv_descriptor_rank (se.expr);
3969 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3970 gfc_array_index_type,
3971 fold_convert (gfc_array_index_type,
3972 rank),
3973 gfc_index_one_node);
3974 info->end[0] = gfc_evaluate_now (tmp, &outer_loop->pre);
3975 info->start[0] = gfc_index_zero_node;
3976 info->stride[0] = gfc_index_one_node;
3977 continue;
3979 /* Otherwise fall through, to be handled like GFC_SS_FUNCTION. */
3981 case GFC_ISYM_LCOBOUND:
3982 case GFC_ISYM_UCOBOUND:
3983 case GFC_ISYM_THIS_IMAGE:
3984 break;
3986 default:
3987 continue;
3990 case GFC_SS_CONSTRUCTOR:
3991 case GFC_SS_FUNCTION:
3992 for (n = 0; n < ss->dimen; n++)
3994 int dim = ss->dim[n];
3996 info->start[dim] = gfc_index_zero_node;
3997 info->end[dim] = gfc_index_zero_node;
3998 info->stride[dim] = gfc_index_one_node;
4000 break;
4002 default:
4003 break;
4007 /* The rest is just runtime bound checking. */
4008 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
4010 stmtblock_t block;
4011 tree lbound, ubound;
4012 tree end;
4013 tree size[GFC_MAX_DIMENSIONS];
4014 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
4015 gfc_array_info *info;
4016 char *msg;
4017 int dim;
4019 gfc_start_block (&block);
4021 for (n = 0; n < loop->dimen; n++)
4022 size[n] = NULL_TREE;
4024 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4026 stmtblock_t inner;
4027 gfc_ss_info *ss_info;
4028 gfc_expr *expr;
4029 locus *expr_loc;
4030 const char *expr_name;
4032 ss_info = ss->info;
4033 if (ss_info->type != GFC_SS_SECTION)
4034 continue;
4036 /* Catch allocatable lhs in f2003. */
4037 if (flag_realloc_lhs && ss->is_alloc_lhs)
4038 continue;
4040 expr = ss_info->expr;
4041 expr_loc = &expr->where;
4042 expr_name = expr->symtree->name;
4044 gfc_start_block (&inner);
4046 /* TODO: range checking for mapped dimensions. */
4047 info = &ss_info->data.array;
4049 /* This code only checks ranges. Elemental and vector
4050 dimensions are checked later. */
4051 for (n = 0; n < loop->dimen; n++)
4053 bool check_upper;
4055 dim = ss->dim[n];
4056 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
4057 continue;
4059 if (dim == info->ref->u.ar.dimen - 1
4060 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
4061 check_upper = false;
4062 else
4063 check_upper = true;
4065 /* Zero stride is not allowed. */
4066 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
4067 info->stride[dim], gfc_index_zero_node);
4068 msg = xasprintf ("Zero stride is not allowed, for dimension %d "
4069 "of array '%s'", dim + 1, expr_name);
4070 gfc_trans_runtime_check (true, false, tmp, &inner,
4071 expr_loc, msg);
4072 free (msg);
4074 desc = info->descriptor;
4076 /* This is the run-time equivalent of resolve.c's
4077 check_dimension(). The logic is more readable there
4078 than it is here, with all the trees. */
4079 lbound = gfc_conv_array_lbound (desc, dim);
4080 end = info->end[dim];
4081 if (check_upper)
4082 ubound = gfc_conv_array_ubound (desc, dim);
4083 else
4084 ubound = NULL;
4086 /* non_zerosized is true when the selected range is not
4087 empty. */
4088 stride_pos = fold_build2_loc (input_location, GT_EXPR,
4089 boolean_type_node, info->stride[dim],
4090 gfc_index_zero_node);
4091 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
4092 info->start[dim], end);
4093 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4094 boolean_type_node, stride_pos, tmp);
4096 stride_neg = fold_build2_loc (input_location, LT_EXPR,
4097 boolean_type_node,
4098 info->stride[dim], gfc_index_zero_node);
4099 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4100 info->start[dim], end);
4101 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4102 boolean_type_node,
4103 stride_neg, tmp);
4104 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4105 boolean_type_node,
4106 stride_pos, stride_neg);
4108 /* Check the start of the range against the lower and upper
4109 bounds of the array, if the range is not empty.
4110 If upper bound is present, include both bounds in the
4111 error message. */
4112 if (check_upper)
4114 tmp = fold_build2_loc (input_location, LT_EXPR,
4115 boolean_type_node,
4116 info->start[dim], lbound);
4117 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4118 boolean_type_node,
4119 non_zerosized, tmp);
4120 tmp2 = fold_build2_loc (input_location, GT_EXPR,
4121 boolean_type_node,
4122 info->start[dim], ubound);
4123 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4124 boolean_type_node,
4125 non_zerosized, tmp2);
4126 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4127 "outside of expected range (%%ld:%%ld)",
4128 dim + 1, expr_name);
4129 gfc_trans_runtime_check (true, false, tmp, &inner,
4130 expr_loc, msg,
4131 fold_convert (long_integer_type_node, info->start[dim]),
4132 fold_convert (long_integer_type_node, lbound),
4133 fold_convert (long_integer_type_node, ubound));
4134 gfc_trans_runtime_check (true, false, tmp2, &inner,
4135 expr_loc, msg,
4136 fold_convert (long_integer_type_node, info->start[dim]),
4137 fold_convert (long_integer_type_node, lbound),
4138 fold_convert (long_integer_type_node, ubound));
4139 free (msg);
4141 else
4143 tmp = fold_build2_loc (input_location, LT_EXPR,
4144 boolean_type_node,
4145 info->start[dim], lbound);
4146 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4147 boolean_type_node, non_zerosized, tmp);
4148 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4149 "below lower bound of %%ld",
4150 dim + 1, expr_name);
4151 gfc_trans_runtime_check (true, false, tmp, &inner,
4152 expr_loc, msg,
4153 fold_convert (long_integer_type_node, info->start[dim]),
4154 fold_convert (long_integer_type_node, lbound));
4155 free (msg);
4158 /* Compute the last element of the range, which is not
4159 necessarily "end" (think 0:5:3, which doesn't contain 5)
4160 and check it against both lower and upper bounds. */
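/* Worked example: for the section 0:5:3 the code below computes
   last = end - ((end - start) mod stride) = 5 - (5 mod 3) = 3, which is
   the last element actually reached (the section contains 0 and 3).  */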
4162 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4163 gfc_array_index_type, end,
4164 info->start[dim]);
4165 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
4166 gfc_array_index_type, tmp,
4167 info->stride[dim]);
4168 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4169 gfc_array_index_type, end, tmp);
4170 tmp2 = fold_build2_loc (input_location, LT_EXPR,
4171 boolean_type_node, tmp, lbound);
4172 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4173 boolean_type_node, non_zerosized, tmp2);
4174 if (check_upper)
4176 tmp3 = fold_build2_loc (input_location, GT_EXPR,
4177 boolean_type_node, tmp, ubound);
4178 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4179 boolean_type_node, non_zerosized, tmp3);
4180 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4181 "outside of expected range (%%ld:%%ld)",
4182 dim + 1, expr_name);
4183 gfc_trans_runtime_check (true, false, tmp2, &inner,
4184 expr_loc, msg,
4185 fold_convert (long_integer_type_node, tmp),
4186 fold_convert (long_integer_type_node, ubound),
4187 fold_convert (long_integer_type_node, lbound));
4188 gfc_trans_runtime_check (true, false, tmp3, &inner,
4189 expr_loc, msg,
4190 fold_convert (long_integer_type_node, tmp),
4191 fold_convert (long_integer_type_node, ubound),
4192 fold_convert (long_integer_type_node, lbound));
4193 free (msg);
4195 else
4197 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4198 "below lower bound of %%ld",
4199 dim + 1, expr_name);
4200 gfc_trans_runtime_check (true, false, tmp2, &inner,
4201 expr_loc, msg,
4202 fold_convert (long_integer_type_node, tmp),
4203 fold_convert (long_integer_type_node, lbound));
4204 free (msg);
4207 /* Check the section sizes match. */
4208 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4209 gfc_array_index_type, end,
4210 info->start[dim]);
4211 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4212 gfc_array_index_type, tmp,
4213 info->stride[dim]);
4214 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4215 gfc_array_index_type,
4216 gfc_index_one_node, tmp);
4217 tmp = fold_build2_loc (input_location, MAX_EXPR,
4218 gfc_array_index_type, tmp,
4219 build_int_cst (gfc_array_index_type, 0));
4220 /* We remember the size of the first section, and check all the
4221 others against this. */
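/* Illustration (hypothetical statement): for  a(1:10:2) = b(1:4)  the left
   section has (10-1)/2 + 1 = 5 elements but the right one only 4, so the
   "Array bound mismatch" check below fires at run time.  */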
4222 if (size[n])
4224 tmp3 = fold_build2_loc (input_location, NE_EXPR,
4225 boolean_type_node, tmp, size[n]);
4226 msg = xasprintf ("Array bound mismatch for dimension %d "
4227 "of array '%s' (%%ld/%%ld)",
4228 dim + 1, expr_name);
4230 gfc_trans_runtime_check (true, false, tmp3, &inner,
4231 expr_loc, msg,
4232 fold_convert (long_integer_type_node, tmp),
4233 fold_convert (long_integer_type_node, size[n]));
4235 free (msg);
4237 else
4238 size[n] = gfc_evaluate_now (tmp, &inner);
4241 tmp = gfc_finish_block (&inner);
4243 /* For optional arguments, only check bounds if the argument is
4244 present. */
4245 if (expr->symtree->n.sym->attr.optional
4246 || expr->symtree->n.sym->attr.not_always_present)
4247 tmp = build3_v (COND_EXPR,
4248 gfc_conv_expr_present (expr->symtree->n.sym),
4249 tmp, build_empty_stmt (input_location));
4251 gfc_add_expr_to_block (&block, tmp);
4255 tmp = gfc_finish_block (&block);
4256 gfc_add_expr_to_block (&outer_loop->pre, tmp);
4259 for (loop = loop->nested; loop; loop = loop->next)
4260 gfc_conv_ss_startstride (loop);
4263 /* Return true if both symbols could refer to the same data object. Does
4264 not take account of aliasing due to equivalence statements. */
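/* Illustration (hypothetical declarations): given
     real, target  :: t(100)
     real, pointer :: p(:)
   P may be associated with part of T, so the two could alias and this
   function returns 1; two arrays of unrelated derived types make it
   return 0 because their base types differ.  */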
4266 static int
4267 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
4268 bool lsym_target, bool rsym_pointer, bool rsym_target)
4270 /* Aliasing isn't possible if the symbols have different base types. */
4271 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
4272 return 0;
4274 /* Pointers can point to other pointers and target objects. */
4276 if ((lsym_pointer && (rsym_pointer || rsym_target))
4277 || (rsym_pointer && (lsym_pointer || lsym_target)))
4278 return 1;
4280 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
4281 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
4282 checked above. */
4283 if (lsym_target && rsym_target
4284 && ((lsym->attr.dummy && !lsym->attr.contiguous
4285 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
4286 || (rsym->attr.dummy && !rsym->attr.contiguous
4287 && (!rsym->attr.dimension
4288 || rsym->as->type == AS_ASSUMED_SHAPE))))
4289 return 1;
4291 return 0;
4295 /* Return true if the two SS could be aliased, i.e. both point to the same data
4296 object. */
4297 /* TODO: resolve aliases based on frontend expressions. */
4299 static int
4300 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
4302 gfc_ref *lref;
4303 gfc_ref *rref;
4304 gfc_expr *lexpr, *rexpr;
4305 gfc_symbol *lsym;
4306 gfc_symbol *rsym;
4307 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
4309 lexpr = lss->info->expr;
4310 rexpr = rss->info->expr;
4312 lsym = lexpr->symtree->n.sym;
4313 rsym = rexpr->symtree->n.sym;
4315 lsym_pointer = lsym->attr.pointer;
4316 lsym_target = lsym->attr.target;
4317 rsym_pointer = rsym->attr.pointer;
4318 rsym_target = rsym->attr.target;
4320 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
4321 rsym_pointer, rsym_target))
4322 return 1;
4324 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
4325 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
4326 return 0;
4328 /* For derived types we must check all the component types. We can ignore
4329 array references as these will have the same base type as the previous
4330 component ref. */
4331 for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next)
4333 if (lref->type != REF_COMPONENT)
4334 continue;
4336 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
4337 lsym_target = lsym_target || lref->u.c.sym->attr.target;
4339 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
4340 rsym_pointer, rsym_target))
4341 return 1;
4343 if ((lsym_pointer && (rsym_pointer || rsym_target))
4344 || (rsym_pointer && (lsym_pointer || lsym_target)))
4346 if (gfc_compare_types (&lref->u.c.component->ts,
4347 &rsym->ts))
4348 return 1;
4351 for (rref = rexpr->ref; rref != rss->info->data.array.ref;
4352 rref = rref->next)
4354 if (rref->type != REF_COMPONENT)
4355 continue;
4357 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4358 rsym_target = rsym_target || rref->u.c.sym->attr.target;
4360 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
4361 lsym_pointer, lsym_target,
4362 rsym_pointer, rsym_target))
4363 return 1;
4365 if ((lsym_pointer && (rsym_pointer || rsym_target))
4366 || (rsym_pointer && (lsym_pointer || lsym_target)))
4368 if (gfc_compare_types (&lref->u.c.component->ts,
4369 &rref->u.c.sym->ts))
4370 return 1;
4371 if (gfc_compare_types (&lref->u.c.sym->ts,
4372 &rref->u.c.component->ts))
4373 return 1;
4374 if (gfc_compare_types (&lref->u.c.component->ts,
4375 &rref->u.c.component->ts))
4376 return 1;
4381 lsym_pointer = lsym->attr.pointer;
4382 lsym_target = lsym->attr.target;
4383 rsym_pointer = rsym->attr.pointer;
4384 rsym_target = rsym->attr.target;
4386 for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next)
4388 if (rref->type != REF_COMPONENT)
4389 break;
4391 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4392 rsym_target = rsym_target || rref->u.c.sym->attr.target;
4394 if (symbols_could_alias (rref->u.c.sym, lsym,
4395 lsym_pointer, lsym_target,
4396 rsym_pointer, rsym_target))
4397 return 1;
4399 if ((lsym_pointer && (rsym_pointer || rsym_target))
4400 || (rsym_pointer && (lsym_pointer || lsym_target)))
4402 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
4403 return 1;
4407 return 0;
4411 /* Resolve array data dependencies. Creates a temporary if required. */
4412 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
4413 dependency.c. */
4415 void
4416 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
4417 gfc_ss * rss)
4419 gfc_ss *ss;
4420 gfc_ref *lref;
4421 gfc_ref *rref;
4422 gfc_expr *dest_expr;
4423 gfc_expr *ss_expr;
4424 int nDepend = 0;
4425 int i, j;
4427 loop->temp_ss = NULL;
4428 dest_expr = dest->info->expr;
4430 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
4432 ss_expr = ss->info->expr;
4434 if (ss->info->array_outer_dependency)
4436 nDepend = 1;
4437 break;
4440 if (ss->info->type != GFC_SS_SECTION)
4442 if (flag_realloc_lhs
4443 && dest_expr != ss_expr
4444 && gfc_is_reallocatable_lhs (dest_expr)
4445 && ss_expr->rank)
4446 nDepend = gfc_check_dependency (dest_expr, ss_expr, true);
4448 /* Check for cases like c(:)(1:2) = c(2)(2:3) */
4449 if (!nDepend && dest_expr->rank > 0
4450 && dest_expr->ts.type == BT_CHARACTER
4451 && ss_expr->expr_type == EXPR_VARIABLE)
4453 nDepend = gfc_check_dependency (dest_expr, ss_expr, false);
4455 continue;
4458 if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym)
4460 if (gfc_could_be_alias (dest, ss)
4461 || gfc_are_equivalenced_arrays (dest_expr, ss_expr))
4463 nDepend = 1;
4464 break;
4467 else
4469 lref = dest_expr->ref;
4470 rref = ss_expr->ref;
4472 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
4474 if (nDepend == 1)
4475 break;
4477 for (i = 0; i < dest->dimen; i++)
4478 for (j = 0; j < ss->dimen; j++)
4479 if (i != j
4480 && dest->dim[i] == ss->dim[j])
4482 /* If we don't access array elements in the same order,
4483 there is a dependency. */
4484 nDepend = 1;
4485 goto temporary;
4487 #if 0
4488 /* TODO : loop shifting. */
4489 if (nDepend == 1)
4491 /* Mark the dimensions for LOOP SHIFTING */
4492 for (n = 0; n < loop->dimen; n++)
4494 int dim = dest->data.info.dim[n];
4496 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
4497 depends[n] = 2;
4498 else if (! gfc_is_same_range (&lref->u.ar,
4499 &rref->u.ar, dim, 0))
4500 depends[n] = 1;
4503 /* Put all the dimensions with dependencies in the
4504 innermost loops. */
4505 dim = 0;
4506 for (n = 0; n < loop->dimen; n++)
4508 gcc_assert (loop->order[n] == n);
4509 if (depends[n])
4510 loop->order[dim++] = n;
4512 for (n = 0; n < loop->dimen; n++)
4514 if (! depends[n])
4515 loop->order[dim++] = n;
4518 gcc_assert (dim == loop->dimen);
4519 break;
4521 #endif
4525 temporary:
4527 if (nDepend == 1)
4529 tree base_type = gfc_typenode_for_spec (&dest_expr->ts);
4530 if (GFC_ARRAY_TYPE_P (base_type)
4531 || GFC_DESCRIPTOR_TYPE_P (base_type))
4532 base_type = gfc_get_element_type (base_type);
4533 loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length,
4534 loop->dimen);
4535 gfc_add_ss_to_loop (loop, loop->temp_ss);
4537 else
4538 loop->temp_ss = NULL;
4542 /* Browse through each array's information from the scalarizer and set the loop
4543 bounds according to the "best" one (per dimension), i.e. the one which
4544 provides the most information (constant bounds, shape, etc.). */
4546 static void
4547 set_loop_bounds (gfc_loopinfo *loop)
4549 int n, dim, spec_dim;
4550 gfc_array_info *info;
4551 gfc_array_info *specinfo;
4552 gfc_ss *ss;
4553 tree tmp;
4554 gfc_ss **loopspec;
4555 bool dynamic[GFC_MAX_DIMENSIONS];
4556 mpz_t *cshape;
4557 mpz_t i;
4558 bool nonoptional_arr;
4560 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4562 loopspec = loop->specloop;
4564 mpz_init (i);
4565 for (n = 0; n < loop->dimen; n++)
4567 loopspec[n] = NULL;
4568 dynamic[n] = false;
4570 /* If there are both optional and nonoptional array arguments, scalarize
4571 over the nonoptional; otherwise, it does not matter as then all
4572 (optional) arrays have to be present per F2008, 12.5.2.12p3(6). */
4574 nonoptional_arr = false;
4576 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4577 if (ss->info->type != GFC_SS_SCALAR && ss->info->type != GFC_SS_TEMP
4578 && ss->info->type != GFC_SS_REFERENCE && !ss->info->can_be_null_ref)
4580 nonoptional_arr = true;
4581 break;
4584 /* We use one SS term, and use that to determine the bounds of the
4585 loop for this dimension. We try to pick the simplest term. */
4586 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4588 gfc_ss_type ss_type;
4590 ss_type = ss->info->type;
4591 if (ss_type == GFC_SS_SCALAR
4592 || ss_type == GFC_SS_TEMP
4593 || ss_type == GFC_SS_REFERENCE
4594 || (ss->info->can_be_null_ref && nonoptional_arr))
4595 continue;
4597 info = &ss->info->data.array;
4598 dim = ss->dim[n];
4600 if (loopspec[n] != NULL)
4602 specinfo = &loopspec[n]->info->data.array;
4603 spec_dim = loopspec[n]->dim[n];
4605 else
4607 /* Silence uninitialized warnings. */
4608 specinfo = NULL;
4609 spec_dim = 0;
4612 if (info->shape)
4614 gcc_assert (info->shape[dim]);
4615 /* The frontend has worked out the size for us. */
4616 if (!loopspec[n]
4617 || !specinfo->shape
4618 || !integer_zerop (specinfo->start[spec_dim]))
4619 /* Prefer zero-based descriptors if possible. */
4620 loopspec[n] = ss;
4621 continue;
4624 if (ss_type == GFC_SS_CONSTRUCTOR)
4626 gfc_constructor_base base;
4627 /* An unknown size constructor will always be rank one.
4628 Higher rank constructors will either have known shape,
4629 or still be wrapped in a call to reshape. */
4630 gcc_assert (loop->dimen == 1);
4632 /* Always prefer to use the constructor bounds if the size
4633 can be determined at compile time. Prefer not to otherwise,
4634 since the general case involves realloc, and it's better to
4635 avoid that overhead if possible. */
4636 base = ss->info->expr->value.constructor;
4637 dynamic[n] = gfc_get_array_constructor_size (&i, base);
4638 if (!dynamic[n] || !loopspec[n])
4639 loopspec[n] = ss;
4640 continue;
4643 /* Avoid using an allocatable lhs in an assignment, since
4644 there might be a reallocation coming. */
4645 if (loopspec[n] && ss->is_alloc_lhs)
4646 continue;
4648 if (!loopspec[n])
4649 loopspec[n] = ss;
4650 /* Criteria for choosing a loop specifier (most important first):
4651 doesn't need realloc
4652 stride of one
4653 known stride
4654 known lower bound
4655 known upper bound.  */
4657 else if (loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
4658 loopspec[n] = ss;
4659 else if (integer_onep (info->stride[dim])
4660 && !integer_onep (specinfo->stride[spec_dim]))
4661 loopspec[n] = ss;
4662 else if (INTEGER_CST_P (info->stride[dim])
4663 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
4664 loopspec[n] = ss;
4665 else if (INTEGER_CST_P (info->start[dim])
4666 && !INTEGER_CST_P (specinfo->start[spec_dim])
4667 && integer_onep (info->stride[dim])
4668 == integer_onep (specinfo->stride[spec_dim])
4669 && INTEGER_CST_P (info->stride[dim])
4670 == INTEGER_CST_P (specinfo->stride[spec_dim]))
4671 loopspec[n] = ss;
4672 /* We don't work out the upper bound.
4673 else if (INTEGER_CST_P (info->finish[n])
4674 && ! INTEGER_CST_P (specinfo->finish[n]))
4675 loopspec[n] = ss; */
4678 /* We should have found the scalarization loop specifier. If not,
4679 that's bad news. */
4680 gcc_assert (loopspec[n]);
4682 info = &loopspec[n]->info->data.array;
4683 dim = loopspec[n]->dim[n];
4685 /* Set the extents of this range. */
4686 cshape = info->shape;
4687 if (cshape && INTEGER_CST_P (info->start[dim])
4688 && INTEGER_CST_P (info->stride[dim]))
4690 loop->from[n] = info->start[dim];
4691 mpz_set (i, cshape[get_array_ref_dim_for_loop_dim (loopspec[n], n)]);
4692 mpz_sub_ui (i, i, 1);
4693 /* To = from + (size - 1) * stride. */
4694 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
4695 if (!integer_onep (info->stride[dim]))
4696 tmp = fold_build2_loc (input_location, MULT_EXPR,
4697 gfc_array_index_type, tmp,
4698 info->stride[dim]);
4699 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
4700 gfc_array_index_type,
4701 loop->from[n], tmp);
4703 else
4705 loop->from[n] = info->start[dim];
4706 switch (loopspec[n]->info->type)
4708 case GFC_SS_CONSTRUCTOR:
4709 /* The upper bound is calculated when we expand the
4710 constructor. */
4711 gcc_assert (loop->to[n] == NULL_TREE);
4712 break;
4714 case GFC_SS_SECTION:
4715 /* Use the end expression if it exists and is not constant,
4716 so that it is only evaluated once. */
4717 loop->to[n] = info->end[dim];
4718 break;
4720 case GFC_SS_FUNCTION:
4721 /* The loop bound will be set when we generate the call. */
4722 gcc_assert (loop->to[n] == NULL_TREE);
4723 break;
4725 case GFC_SS_INTRINSIC:
4727 gfc_expr *expr = loopspec[n]->info->expr;
4729 /* The {l,u}bound of an assumed-rank array. */
4730 gcc_assert ((expr->value.function.isym->id == GFC_ISYM_LBOUND
4731 || expr->value.function.isym->id == GFC_ISYM_UBOUND)
4732 && expr->value.function.actual->next->expr == NULL
4733 && expr->value.function.actual->expr->rank == -1);
4735 loop->to[n] = info->end[dim];
4736 break;
4739 default:
4740 gcc_unreachable ();
4744 /* Transform everything so we have a simple incrementing variable. */
4745 if (integer_onep (info->stride[dim]))
4746 info->delta[dim] = gfc_index_zero_node;
4747 else
4749 /* Set the delta for this section. */
4750 info->delta[dim] = gfc_evaluate_now (loop->from[n], &outer_loop->pre);
4751 /* The number of iterations is (end - start + step) / step.
4752 With start = 0 this simplifies to
4753 last = end / step;
4754 for (i = 0; i <= last; i++) {...};  */
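/* Worked example: for a section 2:10:3 we have from = 2, to = 10 and
   stride = 3; the code below yields to = (10 - 2) / 3 = 2 and from = 0,
   so the loop performs three iterations, one per element 2, 5 and 8.  */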
4755 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4756 gfc_array_index_type, loop->to[n],
4757 loop->from[n]);
4758 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4759 gfc_array_index_type, tmp, info->stride[dim]);
4760 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4761 tmp, build_int_cst (gfc_array_index_type, -1));
4762 loop->to[n] = gfc_evaluate_now (tmp, &outer_loop->pre);
4763 /* Make the loop variable start at 0. */
4764 loop->from[n] = gfc_index_zero_node;
4767 mpz_clear (i);
4769 for (loop = loop->nested; loop; loop = loop->next)
4770 set_loop_bounds (loop);
4774 /* Initialize the scalarization loop. Creates the loop variables. Determines
4775 the range of the loop variables. Creates a temporary if required.
4776 Also generates code for scalar expressions which have been
4777 moved outside the loop. */
4779 void
4780 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
4782 gfc_ss *tmp_ss;
4783 tree tmp;
4785 set_loop_bounds (loop);
4787 /* Add all the scalar code that can be taken out of the loops.
4788 This may include calculating the loop bounds, so do it before
4789 allocating the temporary. */
4790 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4792 tmp_ss = loop->temp_ss;
4793 /* If we want a temporary then create it. */
4794 if (tmp_ss != NULL)
4796 gfc_ss_info *tmp_ss_info;
4798 tmp_ss_info = tmp_ss->info;
4799 gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
4800 gcc_assert (loop->parent == NULL);
4802 /* Make absolutely sure that this is a complete type. */
4803 if (tmp_ss_info->string_length)
4804 tmp_ss_info->data.temp.type
4805 = gfc_get_character_type_len_for_eltype
4806 (TREE_TYPE (tmp_ss_info->data.temp.type),
4807 tmp_ss_info->string_length);
4809 tmp = tmp_ss_info->data.temp.type;
4810 memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
4811 tmp_ss_info->type = GFC_SS_SECTION;
4813 gcc_assert (tmp_ss->dimen != 0);
4815 gfc_trans_create_temp_array (&loop->pre, &loop->post, tmp_ss, tmp,
4816 NULL_TREE, false, true, false, where);
4819 /* For array parameters we don't have loop variables, so don't calculate the
4820 translations. */
4821 if (!loop->array_parameter)
4822 gfc_set_delta (loop);
4826 /* Calculates how to transform from loop variables to array indices for each
4827 array: once loop bounds are chosen, sets the difference (DELTA field) between
4828 loop bounds and array reference bounds, for each array info. */
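/* Illustrative sketch: if the chosen loop starts at 0 and a scalarized
   section starts at array index 4 with stride 2, the code below stores
   delta = 4 - 0 * 2 = 4, and the array index is later rebuilt as
   loop variable * stride + delta.  */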
4830 void
4831 gfc_set_delta (gfc_loopinfo *loop)
4833 gfc_ss *ss, **loopspec;
4834 gfc_array_info *info;
4835 tree tmp;
4836 int n, dim;
4838 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4840 loopspec = loop->specloop;
4842 /* Calculate the translation from loop variables to array indices. */
4843 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4845 gfc_ss_type ss_type;
4847 ss_type = ss->info->type;
4848 if (ss_type != GFC_SS_SECTION
4849 && ss_type != GFC_SS_COMPONENT
4850 && ss_type != GFC_SS_CONSTRUCTOR)
4851 continue;
4853 info = &ss->info->data.array;
4855 for (n = 0; n < ss->dimen; n++)
4857 /* If we are specifying the range the delta is already set. */
4858 if (loopspec[n] != ss)
4860 dim = ss->dim[n];
4862 /* Calculate the offset relative to the loop variable.
4863 First multiply by the stride. */
4864 tmp = loop->from[n];
4865 if (!integer_onep (info->stride[dim]))
4866 tmp = fold_build2_loc (input_location, MULT_EXPR,
4867 gfc_array_index_type,
4868 tmp, info->stride[dim]);
4870 /* Then subtract this from our starting value. */
4871 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4872 gfc_array_index_type,
4873 info->start[dim], tmp);
4875 info->delta[dim] = gfc_evaluate_now (tmp, &outer_loop->pre);
4880 for (loop = loop->nested; loop; loop = loop->next)
4881 gfc_set_delta (loop);
4885 /* Calculate the size of a given array dimension from the bounds. This
4886 is simply (ubound - lbound + 1) if this expression is positive
4887 or 0 if it is negative (pick either one if it is zero). Optionally
4888 (if or_expr is present) OR the (extent <= 0) condition into it. */
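/* Worked example: lbound = 2, ubound = 6 gives an extent of 6 - 2 + 1 = 5;
   lbound = 5, ubound = 2 gives -2, which is clamped to 0 below.  */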
4890 tree
4891 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4893 tree res;
4894 tree cond;
4896 /* Calculate (ubound - lbound + 1). */
4897 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4898 ubound, lbound);
4899 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4900 gfc_index_one_node);
4902 /* Check whether the size for this dimension is negative. */
4903 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4904 gfc_index_zero_node);
4905 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4906 gfc_index_zero_node, res);
4908 /* Build OR expression. */
4909 if (or_expr)
4910 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4911 boolean_type_node, *or_expr, cond);
4913 return res;
4917 /* For an array descriptor, get the total number of elements. This is just
4918 the product of the extents from from_dim up to, but not including, to_dim. */
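/* For illustration: for a descriptor of a 3 x 4 array, from_dim = 0 and
   to_dim = 2 multiply the two extents and give 12; to_dim itself is not
   included in the product.  */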
4920 static tree
4921 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4923 tree res;
4924 int dim;
4926 res = gfc_index_one_node;
4928 for (dim = from_dim; dim < to_dim; ++dim)
4930 tree lbound;
4931 tree ubound;
4932 tree extent;
4934 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4935 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4937 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4938 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4939 res, extent);
4942 return res;
4946 /* Full size of an array. */
4948 tree
4949 gfc_conv_descriptor_size (tree desc, int rank)
4951 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4955 /* Size of a coarray for all dimensions but the last. */
4957 tree
4958 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4960 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4964 /* Fills in an array descriptor, and returns the size of the array.
4965 The size will be a simple_val, i.e. a variable or a constant. Also
4966 calculates the offset of the base. The pointer argument overflow,
4967 which should be of integer type, will increase in value if overflow
4968 occurs during the size calculation. In pseudocode:
4970 stride = 1;
4971 offset = 0;
4972 for (n = 0; n < rank; n++)
4974 a.lbound[n] = specified_lower_bound;
4975 offset = offset - a.lbound[n] * stride;
4976 size = 1 - lbound;
4977 a.ubound[n] = specified_upper_bound;
4978 a.stride[n] = stride;
4979 size = ubound + size >= 0 ? ubound + size : 0; // size = max (ubound + 1 - lbound, 0)
4980 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4981 stride = stride * size;
4983 for (n = rank; n < rank+corank; n++)
4984 (Set lcobound/ucobound as above.)
4985 element_size = sizeof (array element);
4986 if (!rank)
4987 return element_size
4988 stride = (size_t) stride;
4989 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4990 stride = stride * element_size;
4991 return (stride);
4992 } */
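/* Worked example of the calculation above (hypothetical allocation of
   A(2:4,3) with 4-byte elements): dimension 1 gets lbound 2, ubound 4,
   stride 1 and offset 0 - 2*1 = -2; dimension 2 gets lbound 1, ubound 3,
   stride 3 and offset -2 - 1*3 = -5.  The element count is 3*3 = 9, the
   returned size is 9 * 4 = 36 bytes, and element (2,1) is found at
   data[-5 + 2*1 + 1*3] = data[0].  */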
4993 /*GCC ARRAYS*/
4995 static tree
4996 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4997 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
4998 stmtblock_t * descriptor_block, tree * overflow,
4999 tree expr3_elem_size, tree *nelems, gfc_expr *expr3,
5000 tree expr3_desc, bool e3_is_array_constr)
5002 tree type;
5003 tree tmp;
5004 tree size;
5005 tree offset;
5006 tree stride;
5007 tree element_size;
5008 tree or_expr;
5009 tree thencase;
5010 tree elsecase;
5011 tree cond;
5012 tree var;
5013 stmtblock_t thenblock;
5014 stmtblock_t elseblock;
5015 gfc_expr *ubound;
5016 gfc_se se;
5017 int n;
5019 type = TREE_TYPE (descriptor);
5021 stride = gfc_index_one_node;
5022 offset = gfc_index_zero_node;
5024 /* Set the dtype. */
5025 tmp = gfc_conv_descriptor_dtype (descriptor);
5026 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (type));
5028 or_expr = boolean_false_node;
5030 for (n = 0; n < rank; n++)
5032 tree conv_lbound;
5033 tree conv_ubound;
5035 /* We have 3 possibilities for determining the size of the array:
5036 lower == NULL => lbound = 1, ubound = upper[n]
5037 upper[n] = NULL => lbound = 1, ubound = lower[n]
5038 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
5039 ubound = upper[n];
5041 /* Set lower bound. */
5042 gfc_init_se (&se, NULL);
5043 if (expr3_desc != NULL_TREE)
5045 if (e3_is_array_constr)
5046 /* The lbound of a constant array [] starts at zero, but when
5047 allocating it, the standard expects the array to start at
5048 one. */
5049 se.expr = gfc_index_one_node;
5050 else
5051 se.expr = gfc_conv_descriptor_lbound_get (expr3_desc,
5052 gfc_rank_cst[n]);
5054 else if (lower == NULL)
5055 se.expr = gfc_index_one_node;
5056 else
5058 gcc_assert (lower[n]);
5059 if (ubound)
5061 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
5062 gfc_add_block_to_block (pblock, &se.pre);
5064 else
5066 se.expr = gfc_index_one_node;
5067 ubound = lower[n];
5070 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
5071 gfc_rank_cst[n], se.expr);
5072 conv_lbound = se.expr;
5074 /* Work out the offset for this component. */
5075 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5076 se.expr, stride);
5077 offset = fold_build2_loc (input_location, MINUS_EXPR,
5078 gfc_array_index_type, offset, tmp);
5080 /* Set upper bound. */
5081 gfc_init_se (&se, NULL);
5082 if (expr3_desc != NULL_TREE)
5084 if (e3_is_array_constr)
5086 /* The lbound of a constant array [] starts at zero, but when
5087 allocating it, the standard expects the array to start at
5088 one. Therefore fix the upper bound to be
5089 (desc.ubound - desc.lbound)+ 1. */
5090 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5091 gfc_array_index_type,
5092 gfc_conv_descriptor_ubound_get (
5093 expr3_desc, gfc_rank_cst[n]),
5094 gfc_conv_descriptor_lbound_get (
5095 expr3_desc, gfc_rank_cst[n]));
5096 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5097 gfc_array_index_type, tmp,
5098 gfc_index_one_node);
5099 se.expr = gfc_evaluate_now (tmp, pblock);
5101 else
5102 se.expr = gfc_conv_descriptor_ubound_get (expr3_desc,
5103 gfc_rank_cst[n]);
5105 else
5107 gcc_assert (ubound);
5108 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
5109 gfc_add_block_to_block (pblock, &se.pre);
5111 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
5112 gfc_rank_cst[n], se.expr);
5113 conv_ubound = se.expr;
5115 /* Store the stride. */
5116 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
5117 gfc_rank_cst[n], stride);
5119 /* Calculate size and check whether extent is negative. */
5120 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
5121 size = gfc_evaluate_now (size, pblock);
5123 /* Check whether multiplying the stride by the number of
5124 elements in this dimension would overflow. We must also check
5125 whether the current dimension has zero size in order to avoid
5126 division by zero.  */
5128 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
5129 gfc_array_index_type,
5130 fold_convert (gfc_array_index_type,
5131 TYPE_MAX_VALUE (gfc_array_index_type)),
5132 size);
5133 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
5134 boolean_type_node, tmp, stride),
5135 PRED_FORTRAN_OVERFLOW);
5136 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5137 integer_one_node, integer_zero_node);
5138 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5139 boolean_type_node, size,
5140 gfc_index_zero_node),
5141 PRED_FORTRAN_SIZE_ZERO);
5142 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5143 integer_zero_node, tmp);
5144 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5145 *overflow, tmp);
5146 *overflow = gfc_evaluate_now (tmp, pblock);
5148 /* Multiply the stride by the number of elements in this dimension. */
5149 stride = fold_build2_loc (input_location, MULT_EXPR,
5150 gfc_array_index_type, stride, size);
5151 stride = gfc_evaluate_now (stride, pblock);
5154 for (n = rank; n < rank + corank; n++)
5156 ubound = upper[n];
5158 /* Set lower bound. */
5159 gfc_init_se (&se, NULL);
5160 if (lower == NULL || lower[n] == NULL)
5162 gcc_assert (n == rank + corank - 1);
5163 se.expr = gfc_index_one_node;
5165 else
5167 if (ubound || n == rank + corank - 1)
5169 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
5170 gfc_add_block_to_block (pblock, &se.pre);
5172 else
5174 se.expr = gfc_index_one_node;
5175 ubound = lower[n];
5178 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
5179 gfc_rank_cst[n], se.expr);
5181 if (n < rank + corank - 1)
5183 gfc_init_se (&se, NULL);
5184 gcc_assert (ubound);
5185 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
5186 gfc_add_block_to_block (pblock, &se.pre);
5187 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
5188 gfc_rank_cst[n], se.expr);
5192 /* The stride is the number of elements in the array, so multiply by the
5193 size of an element to get the total size. Obviously, if there is a
5194 SOURCE expression (expr3) we must use its element size. */
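/* For illustration (hypothetical statement): for  ALLOCATE (x, SOURCE=y)
   with Y of class type, the element size is loaded below from Y's vtable
   component (y%_vptr%_size); for an intrinsic-typed source it is simply
   the size of that type.  */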
5195 if (expr3_elem_size != NULL_TREE)
5196 tmp = expr3_elem_size;
5197 else if (expr3 != NULL)
5199 if (expr3->ts.type == BT_CLASS)
5201 gfc_se se_sz;
5202 gfc_expr *sz = gfc_copy_expr (expr3);
5203 gfc_add_vptr_component (sz);
5204 gfc_add_size_component (sz);
5205 gfc_init_se (&se_sz, NULL);
5206 gfc_conv_expr (&se_sz, sz);
5207 gfc_free_expr (sz);
5208 tmp = se_sz.expr;
5210 else
5212 tmp = gfc_typenode_for_spec (&expr3->ts);
5213 tmp = TYPE_SIZE_UNIT (tmp);
5216 else
5217 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5219 /* Convert to size_t. */
5220 element_size = fold_convert (size_type_node, tmp);
5222 if (rank == 0)
5223 return element_size;
5225 *nelems = gfc_evaluate_now (stride, pblock);
5226 stride = fold_convert (size_type_node, stride);
5228 /* First check for overflow. Since an array of type character can
5229 have zero element_size, we must check for that before
5230 dividing. */
5231 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
5232 size_type_node,
5233 TYPE_MAX_VALUE (size_type_node), element_size);
5234 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
5235 boolean_type_node, tmp, stride),
5236 PRED_FORTRAN_OVERFLOW);
5237 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5238 integer_one_node, integer_zero_node);
5239 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5240 boolean_type_node, element_size,
5241 build_int_cst (size_type_node, 0)),
5242 PRED_FORTRAN_SIZE_ZERO);
5243 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5244 integer_zero_node, tmp);
5245 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5246 *overflow, tmp);
5247 *overflow = gfc_evaluate_now (tmp, pblock);
5249 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
5250 stride, element_size);
5252 if (poffset != NULL)
5254 offset = gfc_evaluate_now (offset, pblock);
5255 *poffset = offset;
5258 if (integer_zerop (or_expr))
5259 return size;
5260 if (integer_onep (or_expr))
5261 return build_int_cst (size_type_node, 0);
5263 var = gfc_create_var (TREE_TYPE (size), "size");
5264 gfc_start_block (&thenblock);
5265 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
5266 thencase = gfc_finish_block (&thenblock);
5268 gfc_start_block (&elseblock);
5269 gfc_add_modify (&elseblock, var, size);
5270 elsecase = gfc_finish_block (&elseblock);
5272 tmp = gfc_evaluate_now (or_expr, pblock);
5273 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
5274 gfc_add_expr_to_block (pblock, tmp);
5276 return var;
5280 /* Retrieve the last ref from the chain. This routine is specific to
5281 gfc_array_allocate ()'s needs. */
5283 bool
5284 retrieve_last_ref (gfc_ref **ref_in, gfc_ref **prev_ref_in)
5286 gfc_ref *ref, *prev_ref;
5288 ref = *ref_in;
5289 /* Prevent warnings for uninitialized variables. */
5290 prev_ref = *prev_ref_in;
5291 while (ref && ref->next != NULL)
5293 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
5294 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
5295 prev_ref = ref;
5296 ref = ref->next;
5299 if (ref == NULL || ref->type != REF_ARRAY)
5300 return false;
5302 *ref_in = ref;
5303 *prev_ref_in = prev_ref;
5304 return true;
5307 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
5308 the work for an ALLOCATE statement. */
5309 /*GCC ARRAYS*/
5311 bool
5312 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
5313 tree errlen, tree label_finish, tree expr3_elem_size,
5314 tree *nelems, gfc_expr *expr3, tree e3_arr_desc,
5315 bool e3_is_array_constr)
5317 tree tmp;
5318 tree pointer;
5319 tree offset = NULL_TREE;
5320 tree token = NULL_TREE;
5321 tree size;
5322 tree msg;
5323 tree error = NULL_TREE;
5324 tree overflow; /* Boolean storing whether size calculation overflows. */
5325 tree var_overflow = NULL_TREE;
5326 tree cond;
5327 tree set_descriptor;
5328 stmtblock_t set_descriptor_block;
5329 stmtblock_t elseblock;
5330 gfc_expr **lower;
5331 gfc_expr **upper;
5332 gfc_ref *ref, *prev_ref = NULL;
5333 bool allocatable, coarray, dimension, alloc_w_e3_arr_spec = false;
5335 ref = expr->ref;
5337 /* Find the last reference in the chain. */
5338 if (!retrieve_last_ref (&ref, &prev_ref))
5339 return false;
5341 if (ref->u.ar.type == AR_FULL && expr3 != NULL)
5343 /* F08:C633: Array shape from expr3. */
5344 ref = expr3->ref;
5346 /* Find the last reference in the chain. */
5347 if (!retrieve_last_ref (&ref, &prev_ref))
5348 return false;
5349 alloc_w_e3_arr_spec = true;
5352 if (!prev_ref)
5354 allocatable = expr->symtree->n.sym->attr.allocatable;
5355 coarray = expr->symtree->n.sym->attr.codimension;
5356 dimension = expr->symtree->n.sym->attr.dimension;
5358 else
5360 allocatable = prev_ref->u.c.component->attr.allocatable;
5361 coarray = prev_ref->u.c.component->attr.codimension;
5362 dimension = prev_ref->u.c.component->attr.dimension;
5365 if (!dimension)
5366 gcc_assert (coarray);
5368 /* Figure out the size of the array. */
5369 switch (ref->u.ar.type)
5371 case AR_ELEMENT:
5372 if (!coarray)
5374 lower = NULL;
5375 upper = ref->u.ar.start;
5376 break;
5378 /* Fall through. */
5380 case AR_SECTION:
5381 lower = ref->u.ar.start;
5382 upper = ref->u.ar.end;
5383 break;
5385 case AR_FULL:
5386 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT
5387 || alloc_w_e3_arr_spec);
5389 lower = ref->u.ar.as->lower;
5390 upper = ref->u.ar.as->upper;
5391 break;
5393 default:
5394 gcc_unreachable ();
5395 break;
5398 overflow = integer_zero_node;
5400 gfc_init_block (&set_descriptor_block);
5401 size = gfc_array_init_size (se->expr, alloc_w_e3_arr_spec ? expr->rank
5402 : ref->u.ar.as->rank,
5403 ref->u.ar.as->corank, &offset, lower, upper,
5404 &se->pre, &set_descriptor_block, &overflow,
5405 expr3_elem_size, nelems, expr3, e3_arr_desc,
5406 e3_is_array_constr);
5408 if (dimension)
5410 var_overflow = gfc_create_var (integer_type_node, "overflow");
5411 gfc_add_modify (&se->pre, var_overflow, overflow);
5413 if (status == NULL_TREE)
5415 /* Generate the block of code handling overflow. */
5416 msg = gfc_build_addr_expr (pchar_type_node,
5417 gfc_build_localized_cstring_const
5418 ("Integer overflow when calculating the amount of "
5419 "memory to allocate"));
5420 error = build_call_expr_loc (input_location,
5421 gfor_fndecl_runtime_error, 1, msg);
5423 else
5425 tree status_type = TREE_TYPE (status);
5426 stmtblock_t set_status_block;
5428 gfc_start_block (&set_status_block);
5429 gfc_add_modify (&set_status_block, status,
5430 build_int_cst (status_type, LIBERROR_ALLOCATION));
5431 error = gfc_finish_block (&set_status_block);
5435 gfc_start_block (&elseblock);
5437 /* Allocate memory to store the data. */
5438 if (POINTER_TYPE_P (TREE_TYPE (se->expr)))
5439 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
5441 pointer = gfc_conv_descriptor_data_get (se->expr);
5442 STRIP_NOPS (pointer);
5444 if (coarray && flag_coarray == GFC_FCOARRAY_LIB)
5445 token = gfc_build_addr_expr (NULL_TREE,
5446 gfc_conv_descriptor_token (se->expr));
5448 /* The allocatable variant takes the old pointer as first argument. */
5449 if (allocatable)
5450 gfc_allocate_allocatable (&elseblock, pointer, size, token,
5451 status, errmsg, errlen, label_finish, expr);
5452 else
5453 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
5455 if (dimension)
5457 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
5458 boolean_type_node, var_overflow, integer_zero_node),
5459 PRED_FORTRAN_OVERFLOW);
5460 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
5461 error, gfc_finish_block (&elseblock));
5463 else
5464 tmp = gfc_finish_block (&elseblock);
5466 gfc_add_expr_to_block (&se->pre, tmp);
5468 /* Update the array descriptors. */
5469 if (dimension)
5470 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
5472 set_descriptor = gfc_finish_block (&set_descriptor_block);
5473 if (status != NULL_TREE)
5475 cond = fold_build2_loc (input_location, EQ_EXPR,
5476 boolean_type_node, status,
5477 build_int_cst (TREE_TYPE (status), 0));
5478 gfc_add_expr_to_block (&se->pre,
5479 fold_build3_loc (input_location, COND_EXPR, void_type_node,
5480 gfc_likely (cond, PRED_FORTRAN_FAIL_ALLOC),
5481 set_descriptor,
5482 build_empty_stmt (input_location)));
5484 else
5485 gfc_add_expr_to_block (&se->pre, set_descriptor);
5487 if ((expr->ts.type == BT_DERIVED)
5488 && expr->ts.u.derived->attr.alloc_comp)
5490 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
5491 ref->u.ar.as->rank);
5492 gfc_add_expr_to_block (&se->pre, tmp);
5495 return true;
5499 /* Deallocate an array variable. Also used when an allocated variable goes
5500 out of scope. */
5501 /*GCC ARRAYS*/
5503 tree
5504 gfc_array_deallocate (tree descriptor, tree pstat, tree errmsg, tree errlen,
5505 tree label_finish, gfc_expr* expr)
5507 tree var;
5508 tree tmp;
5509 stmtblock_t block;
5510 bool coarray = gfc_is_coarray (expr);
5512 gfc_start_block (&block);
5514 /* Get a pointer to the data. */
5515 var = gfc_conv_descriptor_data_get (descriptor);
5516 STRIP_NOPS (var);
5518 /* Parameter is the address of the data component. */
5519 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, pstat, errmsg,
5520 errlen, label_finish, false, expr, coarray);
5521 gfc_add_expr_to_block (&block, tmp);
5523 /* Zero the data pointer; only for coarrays can an error occur here, in
5524 which case the allocation status must not be changed. */
5525 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5526 var, build_int_cst (TREE_TYPE (var), 0));
5527 if (pstat != NULL_TREE && coarray && flag_coarray == GFC_FCOARRAY_LIB)
5529 tree cond;
5530 tree stat = build_fold_indirect_ref_loc (input_location, pstat);
5532 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5533 stat, build_int_cst (TREE_TYPE (stat), 0));
5534 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5535 cond, tmp, build_empty_stmt (input_location));
5538 gfc_add_expr_to_block (&block, tmp);
5540 return gfc_finish_block (&block);
5544 /* Create an array constructor from an initialization expression.
5545 We assume the frontend already did any expansions and conversions. */
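/* Illustration (hypothetical initializers): for  integer :: c(4) = 7  the
   EXPR_CONSTANT branch below builds the constructor {7, 7, 7, 7}; for
   integer :: a(3) = (/ 1, 2, 3 /)  the EXPR_ARRAY branch appends the three
   elements, using a RANGE_EXPR index when a value is repeated.  */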
5547 tree
5548 gfc_conv_array_initializer (tree type, gfc_expr * expr)
5550 gfc_constructor *c;
5551 tree tmp;
5552 offset_int wtmp;
5553 gfc_se se;
5554 tree index, range;
5555 vec<constructor_elt, va_gc> *v = NULL;
5557 if (expr->expr_type == EXPR_VARIABLE
5558 && expr->symtree->n.sym->attr.flavor == FL_PARAMETER
5559 && expr->symtree->n.sym->value)
5560 expr = expr->symtree->n.sym->value;
5562 switch (expr->expr_type)
5564 case EXPR_CONSTANT:
5565 case EXPR_STRUCTURE:
5566 /* A single scalar or derived type value. Create an array with all
5567 elements equal to that value. */
5568 gfc_init_se (&se, NULL);
5570 if (expr->expr_type == EXPR_CONSTANT)
5571 gfc_conv_constant (&se, expr);
5572 else
5573 gfc_conv_structure (&se, expr, 1);
5575 wtmp = wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
5576 /* This will probably eat buckets of memory for large arrays. */
5577 while (wtmp != 0)
5579 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
5580 wtmp -= 1;
5582 break;
5584 case EXPR_ARRAY:
5585 /* Create a vector of all the elements. */
5586 for (c = gfc_constructor_first (expr->value.constructor);
5587 c; c = gfc_constructor_next (c))
5589 if (c->iterator)
5591 /* Problems occur when we get something like
5592 integer :: a(lots) = (/(i, i=1, lots)/) */
5593 gfc_fatal_error ("The number of elements in the array "
5594 "constructor at %L requires an increase of "
5595 "the allowed %d upper limit. See "
5596 "%<-fmax-array-constructor%> option",
5597 &expr->where, flag_max_array_constructor);
5598 return NULL_TREE;
5600 if (mpz_cmp_si (c->offset, 0) != 0)
5601 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5602 else
5603 index = NULL_TREE;
5605 if (mpz_cmp_si (c->repeat, 1) > 0)
5607 tree tmp1, tmp2;
5608 mpz_t maxval;
5610 mpz_init (maxval);
5611 mpz_add (maxval, c->offset, c->repeat);
5612 mpz_sub_ui (maxval, maxval, 1);
5613 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5614 if (mpz_cmp_si (c->offset, 0) != 0)
5616 mpz_add_ui (maxval, c->offset, 1);
5617 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5619 else
5620 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5622 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
5623 mpz_clear (maxval);
5625 else
5626 range = NULL;
5628 gfc_init_se (&se, NULL);
5629 switch (c->expr->expr_type)
5631 case EXPR_CONSTANT:
5632 gfc_conv_constant (&se, c->expr);
5633 break;
5635 case EXPR_STRUCTURE:
5636 gfc_conv_structure (&se, c->expr, 1);
5637 break;
5639 default:
5640 /* Catch those occasional beasts that do not simplify
5641 for one reason or another, assuming that if they are
5642 standard defying the frontend will catch them. */
5643 gfc_conv_expr (&se, c->expr);
5644 break;
5647 if (range == NULL_TREE)
5648 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5649 else
5651 if (index != NULL_TREE)
5652 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5653 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
5656 break;
5658 case EXPR_NULL:
5659 return gfc_build_null_descriptor (type);
5661 default:
5662 gcc_unreachable ();
5665 /* Create a constructor from the list of elements. */
5666 tmp = build_constructor (type, v);
5667 TREE_CONSTANT (tmp) = 1;
5668 return tmp;
5672 /* Generate code to evaluate non-constant coarray cobounds. */
5674 void
5675 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
5676 const gfc_symbol *sym)
5678 int dim;
5679 tree ubound;
5680 tree lbound;
5681 gfc_se se;
5682 gfc_array_spec *as;
5684 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
5686 for (dim = as->rank; dim < as->rank + as->corank; dim++)
5688 /* Evaluate non-constant array bound expressions. */
5689 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5690 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5692 gfc_init_se (&se, NULL);
5693 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5694 gfc_add_block_to_block (pblock, &se.pre);
5695 gfc_add_modify (pblock, lbound, se.expr);
5697 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5698 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5700 gfc_init_se (&se, NULL);
5701 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5702 gfc_add_block_to_block (pblock, &se.pre);
5703 gfc_add_modify (pblock, ubound, se.expr);
5709 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
5710 returns the size (in elements) of the array. */
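/* Worked example (hypothetical automatic array  real :: a(n,m)  with unit
   lower bounds): the loop below produces a stride of n for the second
   dimension, a total size of n*m elements and offset = -(1 + n), so that
   element (1,1) is found at data[0].  */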
5712 static tree
5713 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
5714 stmtblock_t * pblock)
5716 gfc_array_spec *as;
5717 tree size;
5718 tree stride;
5719 tree offset;
5720 tree ubound;
5721 tree lbound;
5722 tree tmp;
5723 gfc_se se;
5725 int dim;
5727 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
5729 size = gfc_index_one_node;
5730 offset = gfc_index_zero_node;
5731 for (dim = 0; dim < as->rank; dim++)
5733 /* Evaluate non-constant array bound expressions. */
5734 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5735 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5737 gfc_init_se (&se, NULL);
5738 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5739 gfc_add_block_to_block (pblock, &se.pre);
5740 gfc_add_modify (pblock, lbound, se.expr);
5742 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5743 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5745 gfc_init_se (&se, NULL);
5746 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5747 gfc_add_block_to_block (pblock, &se.pre);
5748 gfc_add_modify (pblock, ubound, se.expr);
5750 /* The offset of this dimension. offset = offset - lbound * stride. */
5751 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5752 lbound, size);
5753 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
5754 offset, tmp);
5756 /* The size of this dimension, and the stride of the next. */
5757 if (dim + 1 < as->rank)
5758 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
5759 else
5760 stride = GFC_TYPE_ARRAY_SIZE (type);
5762 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
5764 /* Calculate stride = size * (ubound + 1 - lbound). */
5765 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5766 gfc_array_index_type,
5767 gfc_index_one_node, lbound);
5768 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5769 gfc_array_index_type, ubound, tmp);
5770 tmp = fold_build2_loc (input_location, MULT_EXPR,
5771 gfc_array_index_type, size, tmp);
5772 if (stride)
5773 gfc_add_modify (pblock, stride, tmp);
5774 else
5775 stride = gfc_evaluate_now (tmp, pblock);
5777 /* Make sure that negative size arrays are translated
5778 to being zero size. */
5779 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
5780 stride, gfc_index_zero_node);
5781 tmp = fold_build3_loc (input_location, COND_EXPR,
5782 gfc_array_index_type, tmp,
5783 stride, gfc_index_zero_node);
5784 gfc_add_modify (pblock, stride, tmp);
5787 size = stride;
5790 gfc_trans_array_cobounds (type, pblock, sym);
5791 gfc_trans_vla_type_sizes (sym, pblock);
5793 *poffset = offset;
5794 return size;
5798 /* Generate code to initialize/allocate an array variable. */
5800 void
5801 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
5802 gfc_wrapped_block * block)
5804 stmtblock_t init;
5805 tree type;
5806 tree tmp = NULL_TREE;
5807 tree size;
5808 tree offset;
5809 tree space;
5810 tree inittree;
5811 bool onstack;
5813 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
5815 /* Do nothing for USEd variables. */
5816 if (sym->attr.use_assoc)
5817 return;
5819 type = TREE_TYPE (decl);
5820 gcc_assert (GFC_ARRAY_TYPE_P (type));
5821 onstack = TREE_CODE (type) != POINTER_TYPE;
5823 gfc_init_block (&init);
5825 /* Evaluate character string length. */
5826 if (sym->ts.type == BT_CHARACTER
5827 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5829 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5831 gfc_trans_vla_type_sizes (sym, &init);
5833 /* Emit a DECL_EXPR for this variable, which will cause the
5834 gimplifier to allocate storage, and all that good stuff. */
5835 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
5836 gfc_add_expr_to_block (&init, tmp);
5839 if (onstack)
5841 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5842 return;
5845 type = TREE_TYPE (type);
5847 gcc_assert (!sym->attr.use_assoc);
5848 gcc_assert (!TREE_STATIC (decl));
5849 gcc_assert (!sym->module);
5851 if (sym->ts.type == BT_CHARACTER
5852 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5853 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5855 size = gfc_trans_array_bounds (type, sym, &offset, &init);
5857 /* Don't actually allocate space for Cray Pointees. */
5858 if (sym->attr.cray_pointee)
5860 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5861 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5863 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5864 return;
5867 if (flag_stack_arrays)
5869 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
5870 space = build_decl (sym->declared_at.lb->location,
5871 VAR_DECL, create_tmp_var_name ("A"),
5872 TREE_TYPE (TREE_TYPE (decl)));
5873 gfc_trans_vla_type_sizes (sym, &init);
5875 else
5877 /* The size is the number of elements in the array, so multiply by the
5878 size of an element to get the total size. */
5879 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5880 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5881 size, fold_convert (gfc_array_index_type, tmp));
5883 /* Allocate memory to hold the data. */
5884 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5885 gfc_add_modify (&init, decl, tmp);
5887 /* Free the temporary. */
5888 tmp = gfc_call_free (decl);
5889 space = NULL_TREE;
5892 /* Set offset of the array. */
5893 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5894 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5896 /* Automatic arrays should not have initializers. */
5897 gcc_assert (!sym->value);
5899 inittree = gfc_finish_block (&init);
5901 if (space)
5903 tree addr;
5904 pushdecl (space);
5906 /* Don't create a new scope; emit the DECL_EXPR in exactly the scope
5907 where space itself is located. */
5908 gfc_init_block (&init);
5909 tmp = fold_build1_loc (input_location, DECL_EXPR,
5910 TREE_TYPE (space), space);
5911 gfc_add_expr_to_block (&init, tmp);
5912 addr = fold_build1_loc (sym->declared_at.lb->location,
5913 ADDR_EXPR, TREE_TYPE (decl), space);
5914 gfc_add_modify (&init, decl, addr);
5915 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5916 tmp = NULL_TREE;
5918 gfc_add_init_cleanup (block, inittree, tmp);
5922 /* Generate entry and exit code for g77 calling convention arrays. */
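/* Illustrative example (editor's addition): a descriptorless ("g77
   style") dummy such as

       subroutine sub (a, n)
         integer, intent(in) :: n
         real :: a(n)                ! explicit-shape dummy, no descriptor
       end subroutine sub

   receives only a bare data pointer; the entry code generated here
   evaluates the bounds from N and sets the offset used for indexing.  */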
5924 void
5925 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
5927 tree parm;
5928 tree type;
5929 locus loc;
5930 tree offset;
5931 tree tmp;
5932 tree stmt;
5933 stmtblock_t init;
5935 gfc_save_backend_locus (&loc);
5936 gfc_set_backend_locus (&sym->declared_at);
5938 /* Descriptor type. */
5939 parm = sym->backend_decl;
5940 type = TREE_TYPE (parm);
5941 gcc_assert (GFC_ARRAY_TYPE_P (type));
5943 gfc_start_block (&init);
5945 if (sym->ts.type == BT_CHARACTER
5946 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5947 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5949 /* Evaluate the bounds of the array. */
5950 gfc_trans_array_bounds (type, sym, &offset, &init);
5952 /* Set the offset. */
5953 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5954 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5956 /* Set the pointer itself if we aren't using the parameter directly. */
5957 if (TREE_CODE (parm) != PARM_DECL)
5959 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
5960 gfc_add_modify (&init, parm, tmp);
5962 stmt = gfc_finish_block (&init);
5964 gfc_restore_backend_locus (&loc);
5966 /* Add the initialization code to the start of the function. */
5968 if (sym->attr.optional || sym->attr.not_always_present)
5970 tmp = gfc_conv_expr_present (sym);
5971 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
5974 gfc_add_init_cleanup (block, stmt, NULL_TREE);
5978 /* Modify the descriptor of an array parameter so that it has the
5979 correct lower bound. Also move the upper bound accordingly.
5980 If the array is not packed, it will be copied into a temporary.
5981 For each dimension we set the new lower and upper bounds. Then we copy the
5982 stride and calculate the offset for this dimension. We also work out
5983 what the stride of a packed array would be, and see if the two match.
5984 If the array needs repacking, we set the stride to the values we just
5985 calculated, recalculate the offset and copy the array data.
5986 Code is also added to copy the data back at the end of the function.
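/* Illustrative example (assumed, for exposition only): for an
   assumed-shape dummy

       subroutine sub (a)
         real :: a(:)
       end subroutine sub

   the incoming descriptor may describe non-contiguous data, e.g. when
   the caller passes X(1:10:2).  When repacking is enabled (see
   -frepack-arrays), the entry code calls the internal_pack library
   routine to obtain a contiguous copy, and the cleanup code copies the
   data back with internal_unpack and frees the temporary.  */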
5989 void
5990 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
5991 gfc_wrapped_block * block)
5993 tree size;
5994 tree type;
5995 tree offset;
5996 locus loc;
5997 stmtblock_t init;
5998 tree stmtInit, stmtCleanup;
5999 tree lbound;
6000 tree ubound;
6001 tree dubound;
6002 tree dlbound;
6003 tree dumdesc;
6004 tree tmp;
6005 tree stride, stride2;
6006 tree stmt_packed;
6007 tree stmt_unpacked;
6008 tree partial;
6009 gfc_se se;
6010 int n;
6011 int checkparm;
6012 int no_repack;
6013 bool optional_arg;
6014 gfc_array_spec *as;
6015 bool is_classarray = IS_CLASS_ARRAY (sym);
6017 /* Do nothing for pointer and allocatable arrays. */
6018 if ((sym->ts.type != BT_CLASS && sym->attr.pointer)
6019 || (sym->ts.type == BT_CLASS && CLASS_DATA (sym)->attr.class_pointer)
6020 || sym->attr.allocatable
6021 || (is_classarray && CLASS_DATA (sym)->attr.allocatable))
6022 return;
6024 if (!is_classarray && sym->attr.dummy && gfc_is_nodesc_array (sym))
6026 gfc_trans_g77_array (sym, block);
6027 return;
6030 gfc_save_backend_locus (&loc);
6031 gfc_set_backend_locus (&sym->declared_at);
6033 /* Descriptor type. */
6034 type = TREE_TYPE (tmpdesc);
6035 gcc_assert (GFC_ARRAY_TYPE_P (type));
6036 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6037 if (is_classarray)
6038 /* For a class array the dummy array descriptor is in the _class
6039 component. */
6040 dumdesc = gfc_class_data_get (dumdesc);
6041 else
6042 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
6043 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
6044 gfc_start_block (&init);
6046 if (sym->ts.type == BT_CHARACTER
6047 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
6048 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
6050 checkparm = (as->type == AS_EXPLICIT
6051 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
6053 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
6054 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
6056 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
6058 /* For non-constant shape arrays we only check if the first dimension
6059 is contiguous. Repacking higher dimensions wouldn't gain us
6060 anything as we still don't know the array stride. */
6061 partial = gfc_create_var (boolean_type_node, "partial");
6062 TREE_USED (partial) = 1;
6063 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
6064 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
6065 gfc_index_one_node);
6066 gfc_add_modify (&init, partial, tmp);
6068 else
6069 partial = NULL_TREE;
6071 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
6072 here, but the code does the right thing. */
6073 if (no_repack)
6075 /* Set the first stride. */
6076 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
6077 stride = gfc_evaluate_now (stride, &init);
6079 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
6080 stride, gfc_index_zero_node);
6081 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
6082 tmp, gfc_index_one_node, stride);
6083 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
6084 gfc_add_modify (&init, stride, tmp);
6086 /* Allow the user to disable array repacking. */
6087 stmt_unpacked = NULL_TREE;
6089 else
6091 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
6092 /* A library call to repack the array if necessary. */
6093 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6094 stmt_unpacked = build_call_expr_loc (input_location,
6095 gfor_fndecl_in_pack, 1, tmp);
6097 stride = gfc_index_one_node;
6099 if (warn_array_temporaries)
6100 gfc_warning (OPT_Warray_temporaries,
6101 "Creating array temporary at %L", &loc);
6104 /* This is for the case where the array data is used directly without
6105 calling the repack function. */
6106 if (no_repack || partial != NULL_TREE)
6107 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
6108 else
6109 stmt_packed = NULL_TREE;
6111 /* Assign the data pointer. */
6112 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
6114 /* Don't repack unknown shape arrays when the first stride is 1. */
6115 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
6116 partial, stmt_packed, stmt_unpacked);
6118 else
6119 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
6120 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
6122 offset = gfc_index_zero_node;
6123 size = gfc_index_one_node;
6125 /* Evaluate the bounds of the array. */
6126 for (n = 0; n < as->rank; n++)
6128 if (checkparm || !as->upper[n])
6130 /* Get the bounds of the actual parameter. */
6131 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
6132 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
6134 else
6136 dubound = NULL_TREE;
6137 dlbound = NULL_TREE;
6140 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
6141 if (!INTEGER_CST_P (lbound))
6143 gfc_init_se (&se, NULL);
6144 gfc_conv_expr_type (&se, as->lower[n],
6145 gfc_array_index_type);
6146 gfc_add_block_to_block (&init, &se.pre);
6147 gfc_add_modify (&init, lbound, se.expr);
6150 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
6151 /* Set the desired upper bound. */
6152 if (as->upper[n])
6154 /* We know what we want the upper bound to be. */
6155 if (!INTEGER_CST_P (ubound))
6157 gfc_init_se (&se, NULL);
6158 gfc_conv_expr_type (&se, as->upper[n],
6159 gfc_array_index_type);
6160 gfc_add_block_to_block (&init, &se.pre);
6161 gfc_add_modify (&init, ubound, se.expr);
6164 /* Check the sizes match. */
6165 if (checkparm)
6167 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
6168 char * msg;
6169 tree temp;
6171 temp = fold_build2_loc (input_location, MINUS_EXPR,
6172 gfc_array_index_type, ubound, lbound);
6173 temp = fold_build2_loc (input_location, PLUS_EXPR,
6174 gfc_array_index_type,
6175 gfc_index_one_node, temp);
6176 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
6177 gfc_array_index_type, dubound,
6178 dlbound);
6179 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
6180 gfc_array_index_type,
6181 gfc_index_one_node, stride2);
6182 tmp = fold_build2_loc (input_location, NE_EXPR,
6183 gfc_array_index_type, temp, stride2);
6184 msg = xasprintf ("Dimension %d of array '%s' has extent "
6185 "%%ld instead of %%ld", n+1, sym->name);
6187 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
6188 fold_convert (long_integer_type_node, temp),
6189 fold_convert (long_integer_type_node, stride2));
6191 free (msg);
6194 else
6196 /* For assumed-shape arrays, move the upper bound by the same amount
6197 as the lower bound. */
6198 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6199 gfc_array_index_type, dubound, dlbound);
6200 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6201 gfc_array_index_type, tmp, lbound);
6202 gfc_add_modify (&init, ubound, tmp);
6204 /* The offset of this dimension. offset = offset - lbound * stride. */
6205 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6206 lbound, stride);
6207 offset = fold_build2_loc (input_location, MINUS_EXPR,
6208 gfc_array_index_type, offset, tmp);
6210 /* The size of this dimension, and the stride of the next. */
6211 if (n + 1 < as->rank)
6213 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
6215 if (no_repack || partial != NULL_TREE)
6216 stmt_unpacked =
6217 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
6219 /* Figure out the stride if not a known constant. */
6220 if (!INTEGER_CST_P (stride))
6222 if (no_repack)
6223 stmt_packed = NULL_TREE;
6224 else
6226 /* Calculate stride = size * (ubound + 1 - lbound). */
6227 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6228 gfc_array_index_type,
6229 gfc_index_one_node, lbound);
6230 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6231 gfc_array_index_type, ubound, tmp);
6232 size = fold_build2_loc (input_location, MULT_EXPR,
6233 gfc_array_index_type, size, tmp);
6234 stmt_packed = size;
6237 /* Assign the stride. */
6238 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
6239 tmp = fold_build3_loc (input_location, COND_EXPR,
6240 gfc_array_index_type, partial,
6241 stmt_unpacked, stmt_packed);
6242 else
6243 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
6244 gfc_add_modify (&init, stride, tmp);
6247 else
6249 stride = GFC_TYPE_ARRAY_SIZE (type);
6251 if (stride && !INTEGER_CST_P (stride))
6253 /* Calculate size = stride * (ubound + 1 - lbound). */
6254 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6255 gfc_array_index_type,
6256 gfc_index_one_node, lbound);
6257 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6258 gfc_array_index_type,
6259 ubound, tmp);
6260 tmp = fold_build2_loc (input_location, MULT_EXPR,
6261 gfc_array_index_type,
6262 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
6263 gfc_add_modify (&init, stride, tmp);
6268 gfc_trans_array_cobounds (type, &init, sym);
6270 /* Set the offset. */
6271 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
6272 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
6274 gfc_trans_vla_type_sizes (sym, &init);
6276 stmtInit = gfc_finish_block (&init);
6278 /* Only do the entry/initialization code if the arg is present. */
6279 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6280 optional_arg = (sym->attr.optional
6281 || (sym->ns->proc_name->attr.entry_master
6282 && sym->attr.dummy));
6283 if (optional_arg)
6285 tmp = gfc_conv_expr_present (sym);
6286 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
6287 build_empty_stmt (input_location));
6290 /* Cleanup code. */
6291 if (no_repack)
6292 stmtCleanup = NULL_TREE;
6293 else
6295 stmtblock_t cleanup;
6296 gfc_start_block (&cleanup);
6298 if (sym->attr.intent != INTENT_IN)
6300 /* Copy the data back. */
6301 tmp = build_call_expr_loc (input_location,
6302 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
6303 gfc_add_expr_to_block (&cleanup, tmp);
6306 /* Free the temporary. */
6307 tmp = gfc_call_free (tmpdesc);
6308 gfc_add_expr_to_block (&cleanup, tmp);
6310 stmtCleanup = gfc_finish_block (&cleanup);
6312 /* Only do the cleanup if the array was repacked. */
6313 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
6314 tmp = gfc_conv_descriptor_data_get (tmp);
6315 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6316 tmp, tmpdesc);
6317 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6318 build_empty_stmt (input_location));
6320 if (optional_arg)
6322 tmp = gfc_conv_expr_present (sym);
6323 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6324 build_empty_stmt (input_location));
6328 /* We don't need to free any memory allocated by internal_pack as it will
6329 be freed at the end of the function by pop_context. */
6330 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
6332 gfc_restore_backend_locus (&loc);
6336 /* Calculate the overall offset, including subreferences. */
6337 static void
6338 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
6339 bool subref, gfc_expr *expr)
6341 tree tmp;
6342 tree field;
6343 tree stride;
6344 tree index;
6345 gfc_ref *ref;
6346 gfc_se start;
6347 int n;
6349 /* If offset is NULL and this is not a subreferenced array, there is
6350 nothing to do. */
6351 if (offset == NULL_TREE)
6353 if (subref)
6354 offset = gfc_index_zero_node;
6355 else
6356 return;
6359 tmp = build_array_ref (desc, offset, NULL, NULL);
6361 /* Offset the data pointer for pointer assignments from arrays with
6362 subreferences; e.g. my_integer => my_type(:)%integer_component. */
6363 if (subref)
6365 /* Go past the array reference. */
6366 for (ref = expr->ref; ref; ref = ref->next)
6367 if (ref->type == REF_ARRAY &&
6368 ref->u.ar.type != AR_ELEMENT)
6370 ref = ref->next;
6371 break;
6374 /* Calculate the offset for each subsequent subreference. */
6375 for (; ref; ref = ref->next)
6377 switch (ref->type)
6379 case REF_COMPONENT:
6380 field = ref->u.c.component->backend_decl;
6381 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
6382 tmp = fold_build3_loc (input_location, COMPONENT_REF,
6383 TREE_TYPE (field),
6384 tmp, field, NULL_TREE);
6385 break;
6387 case REF_SUBSTRING:
6388 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
6389 gfc_init_se (&start, NULL);
6390 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
6391 gfc_add_block_to_block (block, &start.pre);
6392 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
6393 break;
6395 case REF_ARRAY:
6396 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
6397 && ref->u.ar.type == AR_ELEMENT);
6399 /* TODO - Add bounds checking. */
6400 stride = gfc_index_one_node;
6401 index = gfc_index_zero_node;
6402 for (n = 0; n < ref->u.ar.dimen; n++)
6404 tree itmp;
6405 tree jtmp;
6407 /* Update the index. */
6408 gfc_init_se (&start, NULL);
6409 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
6410 itmp = gfc_evaluate_now (start.expr, block);
6411 gfc_init_se (&start, NULL);
6412 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
6413 jtmp = gfc_evaluate_now (start.expr, block);
6414 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6415 gfc_array_index_type, itmp, jtmp);
6416 itmp = fold_build2_loc (input_location, MULT_EXPR,
6417 gfc_array_index_type, itmp, stride);
6418 index = fold_build2_loc (input_location, PLUS_EXPR,
6419 gfc_array_index_type, itmp, index);
6420 index = gfc_evaluate_now (index, block);
6422 /* Update the stride. */
6423 gfc_init_se (&start, NULL);
6424 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
6425 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6426 gfc_array_index_type, start.expr,
6427 jtmp);
6428 itmp = fold_build2_loc (input_location, PLUS_EXPR,
6429 gfc_array_index_type,
6430 gfc_index_one_node, itmp);
6431 stride = fold_build2_loc (input_location, MULT_EXPR,
6432 gfc_array_index_type, stride, itmp);
6433 stride = gfc_evaluate_now (stride, block);
6436 /* Apply the index to obtain the array element. */
6437 tmp = gfc_build_array_ref (tmp, index, NULL);
6438 break;
6440 default:
6441 gcc_unreachable ();
6442 break;
6447 /* Set the target data pointer. */
6448 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
6449 gfc_conv_descriptor_data_set (block, parm, offset);
6453 /* gfc_conv_expr_descriptor needs the string length of an expression
6454 so that the size of the temporary can be obtained. This is done
6455 by adding up the string lengths of all the elements in the
6456 expression. Functions whose string lengths are not constant have their
6457 string lengths mapped onto the actual arguments using the
6458 interface mapping machinery in trans-expr.c. */
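/* Illustrative example (editor's addition): for

       character(len=n) :: a(10), b(10)
       print *, a // b

   the temporary's element length is the sum of the operand lengths,
   which the INTRINSIC_CONCAT branch below computes.  For a function
   whose result length depends on its dummy arguments, e.g.
   CHARACTER(LEN=2*M), the length expression is mapped onto the actual
   arguments using the interface mapping code.  */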
6459 static void
6460 get_array_charlen (gfc_expr *expr, gfc_se *se)
6462 gfc_interface_mapping mapping;
6463 gfc_formal_arglist *formal;
6464 gfc_actual_arglist *arg;
6465 gfc_se tse;
6467 if (expr->ts.u.cl->length
6468 && gfc_is_constant_expr (expr->ts.u.cl->length))
6470 if (!expr->ts.u.cl->backend_decl)
6471 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6472 return;
6475 switch (expr->expr_type)
6477 case EXPR_OP:
6478 get_array_charlen (expr->value.op.op1, se);
6480 /* For parentheses the expression ts.u.cl is identical. */
6481 if (expr->value.op.op == INTRINSIC_PARENTHESES)
6482 return;
6484 expr->ts.u.cl->backend_decl =
6485 gfc_create_var (gfc_charlen_type_node, "sln");
6487 if (expr->value.op.op2)
6489 get_array_charlen (expr->value.op.op2, se);
6491 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
6493 /* Add the string lengths and assign them to the expression
6494 string length backend declaration. */
6495 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6496 fold_build2_loc (input_location, PLUS_EXPR,
6497 gfc_charlen_type_node,
6498 expr->value.op.op1->ts.u.cl->backend_decl,
6499 expr->value.op.op2->ts.u.cl->backend_decl));
6501 else
6502 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6503 expr->value.op.op1->ts.u.cl->backend_decl);
6504 break;
6506 case EXPR_FUNCTION:
6507 if (expr->value.function.esym == NULL
6508 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
6510 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6511 break;
6514 /* Map expressions involving the dummy arguments onto the actual
6515 argument expressions. */
6516 gfc_init_interface_mapping (&mapping);
6517 formal = gfc_sym_get_dummy_args (expr->symtree->n.sym);
6518 arg = expr->value.function.actual;
6520 /* Set se = NULL in the calls to the interface mapping, to suppress any
6521 backend stuff. */
6522 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
6524 if (!arg->expr)
6525 continue;
6526 if (formal->sym)
6527 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
6530 gfc_init_se (&tse, NULL);
6532 /* Build the expression for the character length and convert it. */
6533 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
6535 gfc_add_block_to_block (&se->pre, &tse.pre);
6536 gfc_add_block_to_block (&se->post, &tse.post);
6537 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
6538 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
6539 gfc_charlen_type_node, tse.expr,
6540 build_int_cst (gfc_charlen_type_node, 0));
6541 expr->ts.u.cl->backend_decl = tse.expr;
6542 gfc_free_interface_mapping (&mapping);
6543 break;
6545 default:
6546 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6547 break;
6552 /* Helper function to check whether any dimensions are transposed. */
6553 static bool
6554 transposed_dims (gfc_ss *ss)
6556 int n;
6558 for (n = 0; n < ss->dimen; n++)
6559 if (ss->dim[n] != n)
6560 return true;
6561 return false;
6565 /* Convert the last ref of a scalar coarray from an AR_ELEMENT to an
6566 AR_FULL, suitable for the scalarizer. */
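/* Illustrative example (editor's addition): for a scalar coarray

       integer :: c[*]

   walking the expression yields an empty SS chain because there is no
   array section; the fix-up below rewrites the coarray reference so
   that the scalarizer can still build a descriptor covering the
   codimensions.  */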
6568 static gfc_ss *
6569 walk_coarray (gfc_expr *e)
6571 gfc_ss *ss;
6573 gcc_assert (gfc_get_corank (e) > 0);
6575 ss = gfc_walk_expr (e);
6577 /* Fix scalar coarray. */
6578 if (ss == gfc_ss_terminator)
6580 gfc_ref *ref;
6582 ref = e->ref;
6583 while (ref)
6585 if (ref->type == REF_ARRAY
6586 && ref->u.ar.codimen > 0)
6587 break;
6589 ref = ref->next;
6592 gcc_assert (ref != NULL);
6593 if (ref->u.ar.type == AR_ELEMENT)
6594 ref->u.ar.type = AR_SECTION;
6595 ss = gfc_reverse_ss (gfc_walk_array_ref (ss, e, ref));
6598 return ss;
6602 /* Convert an array for passing as an actual argument. Expressions and
6603 vector subscripts are evaluated and stored in a temporary, which is then
6604 passed. For whole arrays the descriptor is passed. For array sections
6605 a modified copy of the descriptor is passed, but using the original data.
6607 This function is also used for array pointer assignments, and there
6608 are three cases:
6610 - se->want_pointer && !se->direct_byref
6611 EXPR is an actual argument. On exit, se->expr contains a
6612 pointer to the array descriptor.
6614 - !se->want_pointer && !se->direct_byref
6615 EXPR is an actual argument to an intrinsic function or the
6616 left-hand side of a pointer assignment. On exit, se->expr
6617 contains the descriptor for EXPR.
6619 - !se->want_pointer && se->direct_byref
6620 EXPR is the right-hand side of a pointer assignment and
6621 se->expr is the descriptor for the previously-evaluated
6622 left-hand side. The function creates an assignment from
6623 EXPR to se->expr.
6626 The se->force_tmp flag disables the non-copying descriptor optimization
6627 that is used for transpose. It may be used in cases where there is an
6628 alias between the transpose argument and another argument in the same
6629 function call. */
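/* Illustrative examples of the three cases (added for clarity, not part
   of the original sources):

       call sub (a(1:n))     ! actual argument: pointer to a descriptor
       s = sum (a(1:n))      ! intrinsic argument: the descriptor itself
       p => a(1:n)           ! pointer assignment: fill in se->expr

   In each case a section is normally described by a new descriptor that
   shares the original data, unless a temporary is required.  */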
6631 void
6632 gfc_conv_expr_descriptor (gfc_se *se, gfc_expr *expr)
6634 gfc_ss *ss;
6635 gfc_ss_type ss_type;
6636 gfc_ss_info *ss_info;
6637 gfc_loopinfo loop;
6638 gfc_array_info *info;
6639 int need_tmp;
6640 int n;
6641 tree tmp;
6642 tree desc;
6643 stmtblock_t block;
6644 tree start;
6645 tree offset;
6646 int full;
6647 bool subref_array_target = false;
6648 gfc_expr *arg, *ss_expr;
6650 if (se->want_coarray)
6651 ss = walk_coarray (expr);
6652 else
6653 ss = gfc_walk_expr (expr);
6655 gcc_assert (ss != NULL);
6656 gcc_assert (ss != gfc_ss_terminator);
6658 ss_info = ss->info;
6659 ss_type = ss_info->type;
6660 ss_expr = ss_info->expr;
6662 /* Special case: TRANSPOSE which needs no temporary. */
6663 while (expr->expr_type == EXPR_FUNCTION && expr->value.function.isym
6664 && NULL != (arg = gfc_get_noncopying_intrinsic_argument (expr)))
6666 /* This is a call to transpose which has already been handled by the
6667 scalarizer, so that we just need to get its argument's descriptor. */
6668 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
6669 expr = expr->value.function.actual->expr;
6672 /* Special case things we know we can pass easily. */
6673 switch (expr->expr_type)
6675 case EXPR_VARIABLE:
6676 /* If we have a linear array section, we can pass it directly.
6677 Otherwise we need to copy it into a temporary. */
6679 gcc_assert (ss_type == GFC_SS_SECTION);
6680 gcc_assert (ss_expr == expr);
6681 info = &ss_info->data.array;
6683 /* Get the descriptor for the array. */
6684 gfc_conv_ss_descriptor (&se->pre, ss, 0);
6685 desc = info->descriptor;
6687 subref_array_target = se->direct_byref && is_subref_array (expr);
6688 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
6689 && !subref_array_target;
6691 if (se->force_tmp)
6692 need_tmp = 1;
6694 if (need_tmp)
6695 full = 0;
6696 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6698 /* Create a new descriptor if the array doesn't have one. */
6699 full = 0;
6701 else if (info->ref->u.ar.type == AR_FULL || se->descriptor_only)
6702 full = 1;
6703 else if (se->direct_byref)
6704 full = 0;
6705 else
6706 full = gfc_full_array_ref_p (info->ref, NULL);
6708 if (full && !transposed_dims (ss))
6710 if (se->direct_byref && !se->byref_noassign)
6712 /* Copy the descriptor for pointer assignments. */
6713 gfc_add_modify (&se->pre, se->expr, desc);
6715 /* Add any offsets from subreferences. */
6716 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
6717 subref_array_target, expr);
6719 else if (se->want_pointer)
6721 /* We pass full arrays directly. This means that pointers and
6722 allocatable arrays should also work. */
6723 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6725 else
6727 se->expr = desc;
6730 if (expr->ts.type == BT_CHARACTER)
6731 se->string_length = gfc_get_expr_charlen (expr);
6733 gfc_free_ss_chain (ss);
6734 return;
6736 break;
6738 case EXPR_FUNCTION:
6739 /* A transformational function return value will be a temporary
6740 array descriptor. We still need to go through the scalarizer
6741 to create the descriptor. Elemental functions are handled as
6742 arbitrary expressions, i.e. copy to a temporary. */
6744 if (se->direct_byref)
6746 gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);
6748 /* For pointer assignments pass the descriptor directly. */
6749 if (se->ss == NULL)
6750 se->ss = ss;
6751 else
6752 gcc_assert (se->ss == ss);
6753 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6754 gfc_conv_expr (se, expr);
6755 gfc_free_ss_chain (ss);
6756 return;
6759 if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
6761 if (ss_expr != expr)
6762 /* Elemental function. */
6763 gcc_assert ((expr->value.function.esym != NULL
6764 && expr->value.function.esym->attr.elemental)
6765 || (expr->value.function.isym != NULL
6766 && expr->value.function.isym->elemental)
6767 || gfc_inline_intrinsic_function_p (expr));
6768 else
6769 gcc_assert (ss_type == GFC_SS_INTRINSIC);
6771 need_tmp = 1;
6772 if (expr->ts.type == BT_CHARACTER
6773 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
6774 get_array_charlen (expr, se);
6776 info = NULL;
6778 else
6780 /* Transformational function. */
6781 info = &ss_info->data.array;
6782 need_tmp = 0;
6784 break;
6786 case EXPR_ARRAY:
6787 /* Constant array constructors don't need a temporary. */
6788 if (ss_type == GFC_SS_CONSTRUCTOR
6789 && expr->ts.type != BT_CHARACTER
6790 && gfc_constant_array_constructor_p (expr->value.constructor))
6792 need_tmp = 0;
6793 info = &ss_info->data.array;
6795 else
6797 need_tmp = 1;
6798 info = NULL;
6800 break;
6802 default:
6803 /* Something complicated. Copy it into a temporary. */
6804 need_tmp = 1;
6805 info = NULL;
6806 break;
6809 /* If we are creating a temporary, we don't need to bother about aliases
6810 anymore. */
6811 if (need_tmp)
6812 se->force_tmp = 0;
6814 gfc_init_loopinfo (&loop);
6816 /* Associate the SS with the loop. */
6817 gfc_add_ss_to_loop (&loop, ss);
6819 /* Tell the scalarizer not to bother creating loop variables, etc. */
6820 if (!need_tmp)
6821 loop.array_parameter = 1;
6822 else
6823 /* The right-hand side of a pointer assignment mustn't use a temporary. */
6824 gcc_assert (!se->direct_byref);
6826 /* Setup the scalarizing loops and bounds. */
6827 gfc_conv_ss_startstride (&loop);
6829 if (need_tmp)
6831 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
6832 get_array_charlen (expr, se);
6834 /* Tell the scalarizer to make a temporary. */
6835 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
6836 ((expr->ts.type == BT_CHARACTER)
6837 ? expr->ts.u.cl->backend_decl
6838 : NULL),
6839 loop.dimen);
6841 se->string_length = loop.temp_ss->info->string_length;
6842 gcc_assert (loop.temp_ss->dimen == loop.dimen);
6843 gfc_add_ss_to_loop (&loop, loop.temp_ss);
6846 gfc_conv_loop_setup (&loop, & expr->where);
6848 if (need_tmp)
6850 /* Copy into a temporary and pass that. We don't need to copy the data
6851 back because expressions and vector subscripts must be INTENT_IN. */
6852 /* TODO: Optimize passing function return values. */
6853 gfc_se lse;
6854 gfc_se rse;
6856 /* Start the copying loops. */
6857 gfc_mark_ss_chain_used (loop.temp_ss, 1);
6858 gfc_mark_ss_chain_used (ss, 1);
6859 gfc_start_scalarized_body (&loop, &block);
6861 /* Copy each data element. */
6862 gfc_init_se (&lse, NULL);
6863 gfc_copy_loopinfo_to_se (&lse, &loop);
6864 gfc_init_se (&rse, NULL);
6865 gfc_copy_loopinfo_to_se (&rse, &loop);
6867 lse.ss = loop.temp_ss;
6868 rse.ss = ss;
6870 gfc_conv_scalarized_array_ref (&lse, NULL);
6871 if (expr->ts.type == BT_CHARACTER)
6873 gfc_conv_expr (&rse, expr);
6874 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
6875 rse.expr = build_fold_indirect_ref_loc (input_location,
6876 rse.expr);
6878 else
6879 gfc_conv_expr_val (&rse, expr);
6881 gfc_add_block_to_block (&block, &rse.pre);
6882 gfc_add_block_to_block (&block, &lse.pre);
6884 lse.string_length = rse.string_length;
6885 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts,
6886 expr->expr_type == EXPR_VARIABLE
6887 || expr->expr_type == EXPR_ARRAY, false);
6888 gfc_add_expr_to_block (&block, tmp);
6890 /* Finish the copying loops. */
6891 gfc_trans_scalarizing_loops (&loop, &block);
6893 desc = loop.temp_ss->info->data.array.descriptor;
6895 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
6897 desc = info->descriptor;
6898 se->string_length = ss_info->string_length;
6900 else
6902 /* We pass sections without copying to a temporary. Make a new
6903 descriptor and point it at the section we want. The loop variable
6904 limits will be the limits of the section.
6905 A function may decide to repack the array to speed up access, but
6906 we're not bothered about that here. */
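/* Illustrative example (assumed): for

       call sub (a(2:10:2))

   the descriptor "parm" built below gets bounds, strides and a data
   pointer that describe the section while still pointing into A's
   storage, so the data itself is not copied.  */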
6907 int dim, ndim, codim;
6908 tree parm;
6909 tree parmtype;
6910 tree stride;
6911 tree from;
6912 tree to;
6913 tree base;
6914 bool onebased = false, rank_remap;
6916 ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
6917 rank_remap = ss->dimen < ndim;
6919 if (se->want_coarray)
6921 gfc_array_ref *ar = &info->ref->u.ar;
6923 codim = gfc_get_corank (expr);
6924 for (n = 0; n < codim - 1; n++)
6926 /* Make sure we are not lost somehow. */
6927 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
6929 /* Make sure the call to gfc_conv_section_startstride won't
6930 generate unnecessary code to calculate stride. */
6931 gcc_assert (ar->stride[n + ndim] == NULL);
6933 gfc_conv_section_startstride (&loop.pre, ss, n + ndim);
6934 loop.from[n + loop.dimen] = info->start[n + ndim];
6935 loop.to[n + loop.dimen] = info->end[n + ndim];
6938 gcc_assert (n == codim - 1);
6939 evaluate_bound (&loop.pre, info->start, ar->start,
6940 info->descriptor, n + ndim, true);
6941 loop.from[n + loop.dimen] = info->start[n + ndim];
6943 else
6944 codim = 0;
6946 /* Set the string_length for a character array. */
6947 if (expr->ts.type == BT_CHARACTER)
6948 se->string_length = gfc_get_expr_charlen (expr);
6950 /* If we have an array section or are assigning, make sure that
6951 the lower bound is 1. References to the full
6952 array should otherwise keep the original bounds. */
6953 if ((!info->ref || info->ref->u.ar.type != AR_FULL) && !se->want_pointer)
6954 for (dim = 0; dim < loop.dimen; dim++)
6955 if (!integer_onep (loop.from[dim]))
6957 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6958 gfc_array_index_type, gfc_index_one_node,
6959 loop.from[dim]);
6960 loop.to[dim] = fold_build2_loc (input_location, PLUS_EXPR,
6961 gfc_array_index_type,
6962 loop.to[dim], tmp);
6963 loop.from[dim] = gfc_index_one_node;
6966 desc = info->descriptor;
6967 if (se->direct_byref && !se->byref_noassign)
6969 /* For pointer assignments we fill in the destination. */
6970 parm = se->expr;
6971 parmtype = TREE_TYPE (parm);
6973 else
6975 /* Otherwise make a new one. */
6976 parmtype = gfc_get_element_type (TREE_TYPE (desc));
6977 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
6978 loop.from, loop.to, 0,
6979 GFC_ARRAY_UNKNOWN, false);
6980 parm = gfc_create_var (parmtype, "parm");
6983 offset = gfc_index_zero_node;
6985 /* The following can be somewhat confusing. We have two
6986 descriptors, a new one and the original array.
6987 {parm, parmtype, dim} refer to the new one.
6988 {desc, type, n, loop} refer to the original, which may be
6989 a descriptorless array.
6990 The bounds of the scalarization are the bounds of the section.
6991 We don't have to worry about numeric overflows when calculating
6992 the offsets because all elements are within the array data. */
6994 /* Set the dtype. */
6995 tmp = gfc_conv_descriptor_dtype (parm);
6996 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
6998 /* For assignments to a pointer, set the offset to zero only if this
6999 is not the full array. */
7000 if ((se->direct_byref || se->use_offset)
7001 && ((info->ref && info->ref->u.ar.type != AR_FULL)
7002 || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
7003 base = gfc_index_zero_node;
7004 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7005 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
7006 else
7007 base = NULL_TREE;
7009 for (n = 0; n < ndim; n++)
7011 stride = gfc_conv_array_stride (desc, n);
7013 /* Work out the offset. */
7014 if (info->ref
7015 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
7017 gcc_assert (info->subscript[n]
7018 && info->subscript[n]->info->type == GFC_SS_SCALAR);
7019 start = info->subscript[n]->info->data.scalar.value;
7021 else
7023 /* Evaluate and remember the start of the section. */
7024 start = info->start[n];
7025 stride = gfc_evaluate_now (stride, &loop.pre);
7028 tmp = gfc_conv_array_lbound (desc, n);
7029 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
7030 start, tmp);
7031 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
7032 tmp, stride);
7033 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
7034 offset, tmp);
7036 if (info->ref
7037 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
7039 /* For elemental dimensions, we only need the offset. */
7040 continue;
7043 /* Vector subscripts need copying and are handled elsewhere. */
7044 if (info->ref)
7045 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
7047 /* Look for the corresponding scalarizer dimension: dim. */
7048 for (dim = 0; dim < ndim; dim++)
7049 if (ss->dim[dim] == n)
7050 break;
7052 /* The loop exited early: the DIM being looked for has been found. */
7053 gcc_assert (dim < ndim);
7055 /* Set the new lower bound. */
7056 from = loop.from[dim];
7057 to = loop.to[dim];
7059 onebased = integer_onep (from);
7060 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
7061 gfc_rank_cst[dim], from);
7063 /* Set the new upper bound. */
7064 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
7065 gfc_rank_cst[dim], to);
7067 /* Multiply the stride by the section stride to get the
7068 total stride. */
7069 stride = fold_build2_loc (input_location, MULT_EXPR,
7070 gfc_array_index_type,
7071 stride, info->stride[n]);
7073 if (se->direct_byref
7074 && ((info->ref && info->ref->u.ar.type != AR_FULL)
7075 || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
7077 base = fold_build2_loc (input_location, MINUS_EXPR,
7078 TREE_TYPE (base), base, stride);
7080 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)) || se->use_offset)
7082 tmp = gfc_conv_array_lbound (desc, n);
7083 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7084 TREE_TYPE (base), tmp, from);
7085 tmp = fold_build2_loc (input_location, MULT_EXPR,
7086 TREE_TYPE (base), tmp,
7087 gfc_conv_array_stride (desc, n));
7088 base = fold_build2_loc (input_location, PLUS_EXPR,
7089 TREE_TYPE (base), tmp, base);
7092 /* Store the new stride. */
7093 gfc_conv_descriptor_stride_set (&loop.pre, parm,
7094 gfc_rank_cst[dim], stride);
7097 for (n = loop.dimen; n < loop.dimen + codim; n++)
7099 from = loop.from[n];
7100 to = loop.to[n];
7101 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
7102 gfc_rank_cst[n], from);
7103 if (n < loop.dimen + codim - 1)
7104 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
7105 gfc_rank_cst[n], to);
7108 if (se->data_not_needed)
7109 gfc_conv_descriptor_data_set (&loop.pre, parm,
7110 gfc_index_zero_node);
7111 else
7112 /* Point the data pointer at the 1st element in the section. */
7113 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
7114 subref_array_target, expr);
7116 /* Force the offset to be -1 when the lower bound of the highest
7117 dimension is one and the symbol is present and is neither a
7118 pointer/allocatable nor associated. */
7119 if (((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7120 && !se->data_not_needed)
7121 || (se->use_offset && base != NULL_TREE))
7123 /* Set the offset depending on base. */
7124 tmp = rank_remap && !se->direct_byref ?
7125 fold_build2_loc (input_location, PLUS_EXPR,
7126 gfc_array_index_type, base,
7127 offset)
7128 : base;
7129 gfc_conv_descriptor_offset_set (&loop.pre, parm, tmp);
7131 else if (onebased && (!rank_remap || se->use_offset)
7132 && expr->symtree
7133 && !(expr->symtree->n.sym && expr->symtree->n.sym->ts.type == BT_CLASS
7134 && !CLASS_DATA (expr->symtree->n.sym)->attr.class_pointer)
7135 && !expr->symtree->n.sym->attr.allocatable
7136 && !expr->symtree->n.sym->attr.pointer
7137 && !expr->symtree->n.sym->attr.host_assoc
7138 && !expr->symtree->n.sym->attr.use_assoc)
7140 /* Set the offset to -1. */
7141 mpz_t minus_one;
7142 mpz_init_set_si (minus_one, -1);
7143 tmp = gfc_conv_mpz_to_tree (minus_one, gfc_index_integer_kind);
7144 gfc_conv_descriptor_offset_set (&loop.pre, parm, tmp);
7146 else
7148 /* Only the callee knows what the correct offset is, so just set
7149 it to zero here. */
7150 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
7152 desc = parm;
7155 /* For class arrays add the class tree into the saved descriptor so
7156 that _vptr and the like can be retrieved. */
7157 if (expr->expr_type == EXPR_VARIABLE && VAR_P (desc)
7158 && IS_CLASS_ARRAY (expr->symtree->n.sym)
7159 && DECL_LANG_SPECIFIC (expr->symtree->n.sym->backend_decl))
7161 gfc_allocate_lang_decl (desc);
7162 GFC_DECL_SAVED_DESCRIPTOR (desc) =
7163 GFC_DECL_SAVED_DESCRIPTOR (expr->symtree->n.sym->backend_decl);
7165 if (!se->direct_byref || se->byref_noassign)
7167 /* Get a pointer to the new descriptor. */
7168 if (se->want_pointer)
7169 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
7170 else
7171 se->expr = desc;
7174 gfc_add_block_to_block (&se->pre, &loop.pre);
7175 gfc_add_block_to_block (&se->post, &loop.post);
7177 /* Cleanup the scalarizer. */
7178 gfc_cleanup_loop (&loop);
7181 /* Helper function for gfc_conv_array_parameter if array size needs to be
7182 computed. */
7184 static void
7185 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
7187 tree elem;
7188 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7189 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
7190 else if (expr->rank > 1)
7191 *size = build_call_expr_loc (input_location,
7192 gfor_fndecl_size0, 1,
7193 gfc_build_addr_expr (NULL, desc));
7194 else
7196 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
7197 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
7199 *size = fold_build2_loc (input_location, MINUS_EXPR,
7200 gfc_array_index_type, ubound, lbound);
7201 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7202 *size, gfc_index_one_node);
7203 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
7204 *size, gfc_index_zero_node);
7206 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
7207 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7208 *size, fold_convert (gfc_array_index_type, elem));
7211 /* Convert an array for passing as an actual parameter. */
7212 /* TODO: Optimize passing g77 arrays. */
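/* Illustrative example (editor's addition): for a call such as

       call sub (a(1:10:2))

   where SUB expects a descriptorless (g77 style) dummy, the section is
   not contiguous, so the code below packs it into a temporary with
   internal_pack, passes the packed pointer, and unpacks and frees it
   after the call.  A contiguous full array would be passed directly.  */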
7214 void
7215 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, bool g77,
7216 const gfc_symbol *fsym, const char *proc_name,
7217 tree *size)
7219 tree ptr;
7220 tree desc;
7221 tree tmp = NULL_TREE;
7222 tree stmt;
7223 tree parent = DECL_CONTEXT (current_function_decl);
7224 bool full_array_var;
7225 bool this_array_result;
7226 bool contiguous;
7227 bool no_pack;
7228 bool array_constructor;
7229 bool good_allocatable;
7230 bool ultimate_ptr_comp;
7231 bool ultimate_alloc_comp;
7232 gfc_symbol *sym;
7233 stmtblock_t block;
7234 gfc_ref *ref;
7236 ultimate_ptr_comp = false;
7237 ultimate_alloc_comp = false;
7239 for (ref = expr->ref; ref; ref = ref->next)
7241 if (ref->next == NULL)
7242 break;
7244 if (ref->type == REF_COMPONENT)
7246 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
7247 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
7251 full_array_var = false;
7252 contiguous = false;
7254 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
7255 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
7257 sym = full_array_var ? expr->symtree->n.sym : NULL;
7259 /* The symbol should have an array specification. */
7260 gcc_assert (!sym || sym->as || ref->u.ar.as);
7262 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
7264 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
7265 expr->ts.u.cl->backend_decl = tmp;
7266 se->string_length = tmp;
7269 /* Is this the result of the enclosing procedure? */
7270 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
7271 if (this_array_result
7272 && (sym->backend_decl != current_function_decl)
7273 && (sym->backend_decl != parent))
7274 this_array_result = false;
7276 /* Pass the address of the array if it is not a pointer or assumed-shape array. */
7277 if (full_array_var && g77 && !this_array_result
7278 && sym->ts.type != BT_DERIVED && sym->ts.type != BT_CLASS)
7280 tmp = gfc_get_symbol_decl (sym);
7282 if (sym->ts.type == BT_CHARACTER)
7283 se->string_length = sym->ts.u.cl->backend_decl;
7285 if (!sym->attr.pointer
7286 && sym->as
7287 && sym->as->type != AS_ASSUMED_SHAPE
7288 && sym->as->type != AS_DEFERRED
7289 && sym->as->type != AS_ASSUMED_RANK
7290 && !sym->attr.allocatable)
7292 /* Some variables are declared directly, others are declared as
7293 pointers and allocated on the heap. */
7294 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
7295 se->expr = tmp;
7296 else
7297 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
7298 if (size)
7299 array_parameter_size (tmp, expr, size);
7300 return;
7303 if (sym->attr.allocatable)
7305 if (sym->attr.dummy || sym->attr.result)
7307 gfc_conv_expr_descriptor (se, expr);
7308 tmp = se->expr;
7310 if (size)
7311 array_parameter_size (tmp, expr, size);
7312 se->expr = gfc_conv_array_data (tmp);
7313 return;
7317 /* A convenient reduction in scope. */
7318 contiguous = g77 && !this_array_result && contiguous;
7320 /* There is no need to pack and unpack the array if it is contiguous
7321 and not a deferred- or assumed-shape array, or if it is simply
7322 contiguous. */
7323 no_pack = ((sym && sym->as
7324 && !sym->attr.pointer
7325 && sym->as->type != AS_DEFERRED
7326 && sym->as->type != AS_ASSUMED_RANK
7327 && sym->as->type != AS_ASSUMED_SHAPE)
7329 (ref && ref->u.ar.as
7330 && ref->u.ar.as->type != AS_DEFERRED
7331 && ref->u.ar.as->type != AS_ASSUMED_RANK
7332 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
7334 gfc_is_simply_contiguous (expr, false));
7336 no_pack = contiguous && no_pack;
7338 /* Array constructors are always contiguous and do not need packing. */
7339 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
7341 /* The same is true of contiguous sections from allocatable variables. */
7342 good_allocatable = contiguous
7343 && expr->symtree
7344 && expr->symtree->n.sym->attr.allocatable;
7346 /* Or ultimate allocatable components. */
7347 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
7349 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
7351 gfc_conv_expr_descriptor (se, expr);
7352 /* Deallocate the allocatable components of structures that are
7353 not variables. */
7354 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
7355 && expr->ts.u.derived->attr.alloc_comp
7356 && expr->expr_type != EXPR_VARIABLE)
7358 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se->expr, expr->rank);
7360 /* The components shall be deallocated before their containing entity. */
7361 gfc_prepend_expr_to_block (&se->post, tmp);
7363 if (expr->ts.type == BT_CHARACTER)
7364 se->string_length = expr->ts.u.cl->backend_decl;
7365 if (size)
7366 array_parameter_size (se->expr, expr, size);
7367 se->expr = gfc_conv_array_data (se->expr);
7368 return;
7371 if (this_array_result)
7373 /* Result of the enclosing function. */
7374 gfc_conv_expr_descriptor (se, expr);
7375 if (size)
7376 array_parameter_size (se->expr, expr, size);
7377 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
7379 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
7380 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
7381 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
7382 se->expr));
7384 return;
7386 else
7388 /* Every other type of array. */
7389 se->want_pointer = 1;
7390 gfc_conv_expr_descriptor (se, expr);
7391 if (size)
7392 array_parameter_size (build_fold_indirect_ref_loc (input_location,
7393 se->expr),
7394 expr, size);
7397 /* Deallocate the allocatable components of structures that are
7398 not variables, for descriptorless arguments.
7399 Arguments with a descriptor are handled in gfc_conv_procedure_call. */
7400 if (g77 && (expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
7401 && expr->ts.u.derived->attr.alloc_comp
7402 && expr->expr_type != EXPR_VARIABLE)
7404 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
7405 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
7407 /* The components shall be deallocated before their containing entity. */
7408 gfc_prepend_expr_to_block (&se->post, tmp);
7411 if (g77 || (fsym && fsym->attr.contiguous
7412 && !gfc_is_simply_contiguous (expr, false)))
7414 tree origptr = NULL_TREE;
7416 desc = se->expr;
7418 /* For contiguous arrays, save the original value of the descriptor. */
7419 if (!g77)
7421 origptr = gfc_create_var (pvoid_type_node, "origptr");
7422 tmp = build_fold_indirect_ref_loc (input_location, desc);
7423 tmp = gfc_conv_array_data (tmp);
7424 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7425 TREE_TYPE (origptr), origptr,
7426 fold_convert (TREE_TYPE (origptr), tmp));
7427 gfc_add_expr_to_block (&se->pre, tmp);
7430 /* Repack the array. */
7431 if (warn_array_temporaries)
7433 if (fsym)
7434 gfc_warning (OPT_Warray_temporaries,
7435 "Creating array temporary at %L for argument %qs",
7436 &expr->where, fsym->name);
7437 else
7438 gfc_warning (OPT_Warray_temporaries,
7439 "Creating array temporary at %L", &expr->where);
7442 ptr = build_call_expr_loc (input_location,
7443 gfor_fndecl_in_pack, 1, desc);
7445 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7447 tmp = gfc_conv_expr_present (sym);
7448 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
7449 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
7450 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
7453 ptr = gfc_evaluate_now (ptr, &se->pre);
7455 /* Use the packed data for the actual argument, except for contiguous arrays,
7456 where the descriptor's data component is set. */
7457 if (g77)
7458 se->expr = ptr;
7459 else
7461 tmp = build_fold_indirect_ref_loc (input_location, desc);
7463 gfc_ss * ss = gfc_walk_expr (expr);
7464 if (!transposed_dims (ss))
7465 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
7466 else
7468 tree old_field, new_field;
7470 /* The original descriptor has transposed dims so we can't reuse
7471 it directly; we have to create a new one. */
7472 tree old_desc = tmp;
7473 tree new_desc = gfc_create_var (TREE_TYPE (old_desc), "arg_desc");
7475 old_field = gfc_conv_descriptor_dtype (old_desc);
7476 new_field = gfc_conv_descriptor_dtype (new_desc);
7477 gfc_add_modify (&se->pre, new_field, old_field);
7479 old_field = gfc_conv_descriptor_offset (old_desc);
7480 new_field = gfc_conv_descriptor_offset (new_desc);
7481 gfc_add_modify (&se->pre, new_field, old_field);
7483 for (int i = 0; i < expr->rank; i++)
7485 old_field = gfc_conv_descriptor_dimension (old_desc,
7486 gfc_rank_cst[get_array_ref_dim_for_loop_dim (ss, i)]);
7487 new_field = gfc_conv_descriptor_dimension (new_desc,
7488 gfc_rank_cst[i]);
7489 gfc_add_modify (&se->pre, new_field, old_field);
7492 if (flag_coarray == GFC_FCOARRAY_LIB
7493 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (old_desc))
7494 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (old_desc))
7495 == GFC_ARRAY_ALLOCATABLE)
7497 old_field = gfc_conv_descriptor_token (old_desc);
7498 new_field = gfc_conv_descriptor_token (new_desc);
7499 gfc_add_modify (&se->pre, new_field, old_field);
7502 gfc_conv_descriptor_data_set (&se->pre, new_desc, ptr);
7503 se->expr = gfc_build_addr_expr (NULL_TREE, new_desc);
7505 gfc_free_ss (ss);
7508 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
7510 char * msg;
7512 if (fsym && proc_name)
7513 msg = xasprintf ("An array temporary was created for argument "
7514 "'%s' of procedure '%s'", fsym->name, proc_name);
7515 else
7516 msg = xasprintf ("An array temporary was created");
7518 tmp = build_fold_indirect_ref_loc (input_location,
7519 desc);
7520 tmp = gfc_conv_array_data (tmp);
7521 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7522 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7524 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7525 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7526 boolean_type_node,
7527 gfc_conv_expr_present (sym), tmp);
7529 gfc_trans_runtime_check (false, true, tmp, &se->pre,
7530 &expr->where, msg);
7531 free (msg);
7534 gfc_start_block (&block);
7536 /* Copy the data back. */
7537 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
7539 tmp = build_call_expr_loc (input_location,
7540 gfor_fndecl_in_unpack, 2, desc, ptr);
7541 gfc_add_expr_to_block (&block, tmp);
7544 /* Free the temporary. */
7545 tmp = gfc_call_free (ptr);
7546 gfc_add_expr_to_block (&block, tmp);
7548 stmt = gfc_finish_block (&block);
7550 gfc_init_block (&block);
7551 /* Only if it was repacked. This code needs to be executed before the
7552 loop cleanup code. */
7553 tmp = build_fold_indirect_ref_loc (input_location,
7554 desc);
7555 tmp = gfc_conv_array_data (tmp);
7556 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7557 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7559 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7560 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7561 boolean_type_node,
7562 gfc_conv_expr_present (sym), tmp);
7564 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
7566 gfc_add_expr_to_block (&block, tmp);
7567 gfc_add_block_to_block (&block, &se->post);
7569 gfc_init_block (&se->post);
7571 /* Reset the descriptor pointer. */
7572 if (!g77)
7574 tmp = build_fold_indirect_ref_loc (input_location, desc);
7575 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
7578 gfc_add_block_to_block (&se->post, &block);
7583 /* Generate code to deallocate an array, if it is allocated. */
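/* Roughly speaking (illustration only), the cleanup generated for an
   allocatable array A corresponds to

       if (a.data != NULL)
         free (a.data);
       a.data = NULL;

   with the deallocation going through the library helper so that
   already deallocated arrays are handled gracefully.  */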
7585 tree
7586 gfc_trans_dealloc_allocated (tree descriptor, bool coarray, gfc_expr *expr)
7588 tree tmp;
7589 tree var;
7590 stmtblock_t block;
7592 gfc_start_block (&block);
7594 var = gfc_conv_descriptor_data_get (descriptor);
7595 STRIP_NOPS (var);
7597 /* Call array_deallocate with an int * present in the second argument.
7598 Although it is ignored here, its presence ensures that arrays that
7599 are already deallocated are ignored. */
7600 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, NULL_TREE,
7601 NULL_TREE, NULL_TREE, NULL_TREE, true,
7602 expr, coarray);
7603 gfc_add_expr_to_block (&block, tmp);
7605 /* Zero the data pointer. */
7606 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7607 var, build_int_cst (TREE_TYPE (var), 0));
7608 gfc_add_expr_to_block (&block, tmp);
7610 return gfc_finish_block (&block);
7614 /* This helper function calculates the total number of elements in a full array. */
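/* The value computed below for a rank R descriptor is

       (ubound(R) - lbound(R) + 1) * stride(R)

   i.e. the extent of the last dimension times its stride, which for a
   full array equals the total number of elements.  */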
7616 tree
7617 gfc_full_array_size (stmtblock_t *block, tree decl, int rank)
7619 tree idx;
7620 tree nelems;
7621 tree tmp;
7622 idx = gfc_rank_cst[rank - 1];
7623 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
7624 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
7625 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
7626 nelems, tmp);
7627 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7628 tmp, gfc_index_one_node);
7629 tmp = gfc_evaluate_now (tmp, block);
7631 nelems = gfc_conv_descriptor_stride_get (decl, idx);
7632 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7633 nelems, tmp);
7634 return gfc_evaluate_now (tmp, block);
7638 /* Allocate dest to the same size as src, and copy src -> dest.
7639 If no_malloc is set, only the copy is done. */
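/* The generated code corresponds roughly to (illustration only):

       if (src == NULL)
         dest = NULL;
       else
         {
           dest = malloc (size);      -- skipped when no_malloc is set
           memcpy (dest, src, size);  -- skipped when no_memcpy is set
         }

   where size is either str_sz or the element size multiplied by the
   number of elements for a descriptor argument.  */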
7641 static tree
7642 duplicate_allocatable (tree dest, tree src, tree type, int rank,
7643 bool no_malloc, bool no_memcpy, tree str_sz,
7644 tree add_when_allocated)
7646 tree tmp;
7647 tree size;
7648 tree nelems;
7649 tree null_cond;
7650 tree null_data;
7651 stmtblock_t block;
7653 /* If the source is null, set the destination to null. Otherwise,
7654 allocate memory for the destination and copy the data. */
7655 gfc_init_block (&block);
7657 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
7659 tmp = null_pointer_node;
7660 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
7661 gfc_add_expr_to_block (&block, tmp);
7662 null_data = gfc_finish_block (&block);
7664 gfc_init_block (&block);
7665 if (str_sz != NULL_TREE)
7666 size = str_sz;
7667 else
7668 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
7670 if (!no_malloc)
7672 tmp = gfc_call_malloc (&block, type, size);
7673 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7674 dest, fold_convert (type, tmp));
7675 gfc_add_expr_to_block (&block, tmp);
7678 if (!no_memcpy)
7680 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7681 tmp = build_call_expr_loc (input_location, tmp, 3, dest, src,
7682 fold_convert (size_type_node, size));
7683 gfc_add_expr_to_block (&block, tmp);
7686 else
7688 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
7689 null_data = gfc_finish_block (&block);
7691 gfc_init_block (&block);
7692 if (rank)
7693 nelems = gfc_full_array_size (&block, src, rank);
7694 else
7695 nelems = gfc_index_one_node;
7697 if (str_sz != NULL_TREE)
7698 tmp = fold_convert (gfc_array_index_type, str_sz);
7699 else
7700 tmp = fold_convert (gfc_array_index_type,
7701 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
7702 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7703 nelems, tmp);
7704 if (!no_malloc)
7706 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
7707 tmp = gfc_call_malloc (&block, tmp, size);
7708 gfc_conv_descriptor_data_set (&block, dest, tmp);
7711 /* We know the temporary and the value will be the same length,
7712 so we can use memcpy. */
7713 if (!no_memcpy)
7715 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7716 tmp = build_call_expr_loc (input_location, tmp, 3,
7717 gfc_conv_descriptor_data_get (dest),
7718 gfc_conv_descriptor_data_get (src),
7719 fold_convert (size_type_node, size));
7720 gfc_add_expr_to_block (&block, tmp);
7724 gfc_add_expr_to_block (&block, add_when_allocated);
7725 tmp = gfc_finish_block (&block);
7727 /* Null the destination if the source is null; otherwise do
7728 the allocate and copy. */
7729 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (src)))
7730 null_cond = src;
7731 else
7732 null_cond = gfc_conv_descriptor_data_get (src);
7734 null_cond = convert (pvoid_type_node, null_cond);
7735 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7736 null_cond, null_pointer_node);
7737 return build3_v (COND_EXPR, null_cond, tmp, null_data);
7741 /* Allocate dest to the same size as src, and copy data src -> dest. */
7743 tree
7744 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank,
7745 tree add_when_allocated)
7747 return duplicate_allocatable (dest, src, type, rank, false, false,
7748 NULL_TREE, add_when_allocated);
7752 /* Copy data src -> dest. */
7754 tree
7755 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
7757 return duplicate_allocatable (dest, src, type, rank, true, false,
7758 NULL_TREE, NULL_TREE);
7761 /* Allocate dest to the same size as src, but don't copy anything. */
7763 tree
7764 gfc_duplicate_allocatable_nocopy (tree dest, tree src, tree type, int rank)
7766 return duplicate_allocatable (dest, src, type, rank, false, true,
7767 NULL_TREE, NULL_TREE);
7771 /* Recursively traverse an object of derived type, generating code to
7772 deallocate, nullify or copy allocatable components. This is the workhorse
7773 function for the functions named in this enum. */
7775 enum {DEALLOCATE_ALLOC_COMP = 1, DEALLOCATE_ALLOC_COMP_NO_CAF,
7776 NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP, COPY_ONLY_ALLOC_COMP,
7777 COPY_ALLOC_COMP_CAF};
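/* Illustrative example (editor's addition): for

       type :: t
         real, allocatable :: x(:)
       end type t
       type(t) :: a(5), b(5)
       b = a

   the assignment requires a deep copy; this function walks every array
   element and every allocatable component, generating the nullify,
   deallocate or allocate-and-copy code selected by PURPOSE.  */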
7779 static tree
7780 structure_alloc_comps (gfc_symbol * der_type, tree decl,
7781 tree dest, int rank, int purpose)
7783 gfc_component *c;
7784 gfc_loopinfo loop;
7785 stmtblock_t fnblock;
7786 stmtblock_t loopbody;
7787 stmtblock_t tmpblock;
7788 tree decl_type;
7789 tree tmp;
7790 tree comp;
7791 tree dcmp;
7792 tree nelems;
7793 tree index;
7794 tree var;
7795 tree cdecl;
7796 tree ctype;
7797 tree vref, dref;
7798 tree null_cond = NULL_TREE;
7799 tree add_when_allocated;
7800 bool called_dealloc_with_status;
7802 gfc_init_block (&fnblock);
7804 decl_type = TREE_TYPE (decl);
7806 if ((POINTER_TYPE_P (decl_type))
7807 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
7809 decl = build_fold_indirect_ref_loc (input_location, decl);
7810 /* Deref dest in sync with decl, but only when it is not NULL. */
7811 if (dest)
7812 dest = build_fold_indirect_ref_loc (input_location, dest);
7815 /* Just in case it gets dereferenced. */
7816 decl_type = TREE_TYPE (decl);
7818 /* If this is an array of derived types with allocatable components
7819 build a loop and recursively call this function. */
7820 if (TREE_CODE (decl_type) == ARRAY_TYPE
7821 || (GFC_DESCRIPTOR_TYPE_P (decl_type) && rank != 0))
7823 tmp = gfc_conv_array_data (decl);
7824 var = build_fold_indirect_ref_loc (input_location, tmp);
7826 /* Get the number of elements - 1 and set the counter. */
7827 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
7829 /* Use the descriptor for an allocatable array. Since this
7830 is a full array reference, we only need the descriptor
7831 information from dimension = rank. */
7832 tmp = gfc_full_array_size (&fnblock, decl, rank);
7833 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7834 gfc_array_index_type, tmp,
7835 gfc_index_one_node);
7837 null_cond = gfc_conv_descriptor_data_get (decl);
7838 null_cond = fold_build2_loc (input_location, NE_EXPR,
7839 boolean_type_node, null_cond,
7840 build_int_cst (TREE_TYPE (null_cond), 0));
7842 else
7844 /* Otherwise use the TYPE_DOMAIN information. */
7845 tmp = array_type_nelts (decl_type);
7846 tmp = fold_convert (gfc_array_index_type, tmp);
7849 /* Remember that this is, in fact, the no. of elements - 1. */
7850 nelems = gfc_evaluate_now (tmp, &fnblock);
7851 index = gfc_create_var (gfc_array_index_type, "S");
7853 /* Build the body of the loop. */
7854 gfc_init_block (&loopbody);
7856 vref = gfc_build_array_ref (var, index, NULL);
7858 if (purpose == COPY_ALLOC_COMP || purpose == COPY_ONLY_ALLOC_COMP)
7860 tmp = build_fold_indirect_ref_loc (input_location,
7861 gfc_conv_array_data (dest));
7862 dref = gfc_build_array_ref (tmp, index, NULL);
7863 tmp = structure_alloc_comps (der_type, vref, dref, rank,
7864 COPY_ALLOC_COMP);
7866 else
7867 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
7869 gfc_add_expr_to_block (&loopbody, tmp);
7871 /* Build the loop and return. */
7872 gfc_init_loopinfo (&loop);
7873 loop.dimen = 1;
7874 loop.from[0] = gfc_index_zero_node;
7875 loop.loopvar[0] = index;
7876 loop.to[0] = nelems;
7877 gfc_trans_scalarizing_loops (&loop, &loopbody);
7878 gfc_add_block_to_block (&fnblock, &loop.pre);
7880 tmp = gfc_finish_block (&fnblock);
7881 /* When copying allocatable components, the above implements the
7882 deep copy. Nevertheless, a deep copy is only allowed when the current
7883 component is allocated; the code for that check is generated in
7884 gfc_duplicate_allocatable (), where the deep copy code is added
7885 into the if's body by passing tmp (the deep copy code) as the last
7886 argument to gfc_duplicate_allocatable (). */
7887 if (purpose == COPY_ALLOC_COMP
7888 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
7889 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank,
7890 tmp);
7891 else if (null_cond != NULL_TREE)
7892 tmp = build3_v (COND_EXPR, null_cond, tmp,
7893 build_empty_stmt (input_location));
7895 return tmp;
7898 /* Otherwise, act on the components or recursively call self to
7899 act on a chain of components. */
7900 for (c = der_type->components; c; c = c->next)
7902 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
7903 || c->ts.type == BT_CLASS)
7904 && c->ts.u.derived->attr.alloc_comp;
7905 cdecl = c->backend_decl;
7906 ctype = TREE_TYPE (cdecl);
7908 switch (purpose)
7910 case DEALLOCATE_ALLOC_COMP:
7911 case DEALLOCATE_ALLOC_COMP_NO_CAF:
7913 /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
7914 (i.e. this function) so generate all the calls and suppress the
7915 recursion from here, if necessary. */
7916 called_dealloc_with_status = false;
7917 gfc_init_block (&tmpblock);
7919 if ((c->ts.type == BT_DERIVED && !c->attr.pointer)
7920 || (c->ts.type == BT_CLASS && !CLASS_DATA (c)->attr.class_pointer))
7922 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7923 decl, cdecl, NULL_TREE);
7925 /* The finalizer frees allocatable components. */
7926 called_dealloc_with_status
7927 = gfc_add_comp_finalizer_call (&tmpblock, comp, c,
7928 purpose == DEALLOCATE_ALLOC_COMP);
7930 else
7931 comp = NULL_TREE;
7933 if (c->attr.allocatable && !c->attr.proc_pointer
7934 && (c->attr.dimension
7935 || (c->attr.codimension
7936 && purpose != DEALLOCATE_ALLOC_COMP_NO_CAF)))
7938 if (comp == NULL_TREE)
7939 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7940 decl, cdecl, NULL_TREE);
7941 tmp = gfc_trans_dealloc_allocated (comp, c->attr.codimension, NULL);
7942 gfc_add_expr_to_block (&tmpblock, tmp);
7944 else if (c->attr.allocatable && !c->attr.codimension)
7946 /* Allocatable scalar components. */
7947 if (comp == NULL_TREE)
7948 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7949 decl, cdecl, NULL_TREE);
7951 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
7952 c->ts);
7953 gfc_add_expr_to_block (&tmpblock, tmp);
7954 called_dealloc_with_status = true;
7956 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7957 void_type_node, comp,
7958 build_int_cst (TREE_TYPE (comp), 0));
7959 gfc_add_expr_to_block (&tmpblock, tmp);
7961 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable
7962 && (!CLASS_DATA (c)->attr.codimension
7963 || purpose != DEALLOCATE_ALLOC_COMP_NO_CAF))
7965 /* Allocatable CLASS components. */
7967 /* Add reference to '_data' component. */
7968 tmp = CLASS_DATA (c)->backend_decl;
7969 comp = fold_build3_loc (input_location, COMPONENT_REF,
7970 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7972 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
7973 tmp = gfc_trans_dealloc_allocated (comp,
7974 CLASS_DATA (c)->attr.codimension, NULL);
7975 else
7977 tmp = gfc_deallocate_scalar_with_status (comp, NULL_TREE, true, NULL,
7978 CLASS_DATA (c)->ts);
7979 gfc_add_expr_to_block (&tmpblock, tmp);
7980 called_dealloc_with_status = true;
7982 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7983 void_type_node, comp,
7984 build_int_cst (TREE_TYPE (comp), 0));
7986 gfc_add_expr_to_block (&tmpblock, tmp);
7989 if (cmp_has_alloc_comps
7990 && !c->attr.pointer
7991 && !called_dealloc_with_status)
7993 /* Do not deallocate the components of ultimate pointer
7994 components, or recursively call self if a call has already been made
7995 to gfc_trans_dealloc_allocated.  */
7996 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7997 decl, cdecl, NULL_TREE);
7998 rank = c->as ? c->as->rank : 0;
7999 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
8000 rank, purpose);
8001 gfc_add_expr_to_block (&fnblock, tmp);
8004 /* Now add the deallocation of this component. */
8005 gfc_add_block_to_block (&fnblock, &tmpblock);
8006 break;
8008 case NULLIFY_ALLOC_COMP:
8009 if (c->attr.pointer || c->attr.proc_pointer)
8010 continue;
8011 else if (c->attr.allocatable
8012 && (c->attr.dimension|| c->attr.codimension))
8014 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8015 decl, cdecl, NULL_TREE);
8016 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
8018 else if (c->attr.allocatable)
8020 /* Allocatable scalar components. */
8021 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8022 decl, cdecl, NULL_TREE);
8023 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8024 void_type_node, comp,
8025 build_int_cst (TREE_TYPE (comp), 0));
8026 gfc_add_expr_to_block (&fnblock, tmp);
8027 if (gfc_deferred_strlen (c, &comp))
8029 comp = fold_build3_loc (input_location, COMPONENT_REF,
8030 TREE_TYPE (comp),
8031 decl, comp, NULL_TREE);
8032 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8033 TREE_TYPE (comp), comp,
8034 build_int_cst (TREE_TYPE (comp), 0));
8035 gfc_add_expr_to_block (&fnblock, tmp);
8038 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
8040 /* Allocatable CLASS components. */
8041 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8042 decl, cdecl, NULL_TREE);
8043 /* Add reference to '_data' component. */
8044 tmp = CLASS_DATA (c)->backend_decl;
8045 comp = fold_build3_loc (input_location, COMPONENT_REF,
8046 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
8047 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
8048 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
8049 else
8051 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8052 void_type_node, comp,
8053 build_int_cst (TREE_TYPE (comp), 0));
8054 gfc_add_expr_to_block (&fnblock, tmp);
8057 else if (cmp_has_alloc_comps)
8059 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8060 decl, cdecl, NULL_TREE);
8061 rank = c->as ? c->as->rank : 0;
8062 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
8063 rank, purpose);
8064 gfc_add_expr_to_block (&fnblock, tmp);
8066 break;
8068 case COPY_ALLOC_COMP_CAF:
8069 if (!c->attr.codimension
8070 && (c->ts.type != BT_CLASS || CLASS_DATA (c)->attr.coarray_comp)
8071 && (c->ts.type != BT_DERIVED
8072 || !c->ts.u.derived->attr.coarray_comp))
8073 continue;
8075 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
8076 cdecl, NULL_TREE);
8077 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
8078 cdecl, NULL_TREE);
8080 if (c->attr.codimension)
8082 if (c->ts.type == BT_CLASS)
8084 comp = gfc_class_data_get (comp);
8085 dcmp = gfc_class_data_get (dcmp);
8087 gfc_conv_descriptor_data_set (&fnblock, dcmp,
8088 gfc_conv_descriptor_data_get (comp));
8090 else
8092 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
8093 rank, purpose);
8094 gfc_add_expr_to_block (&fnblock, tmp);
8097 break;
8099 case COPY_ALLOC_COMP:
8100 if (c->attr.pointer)
8101 continue;
8103 /* We need source and destination components. */
8104 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
8105 cdecl, NULL_TREE);
8106 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
8107 cdecl, NULL_TREE);
8108 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
8110 if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
8112 tree ftn_tree;
8113 tree size;
8114 tree dst_data;
8115 tree src_data;
8116 tree null_data;
8118 dst_data = gfc_class_data_get (dcmp);
8119 src_data = gfc_class_data_get (comp);
8120 size = fold_convert (size_type_node,
8121 gfc_class_vtab_size_get (comp));
8123 if (CLASS_DATA (c)->attr.dimension)
8125 nelems = gfc_conv_descriptor_size (src_data,
8126 CLASS_DATA (c)->as->rank);
8127 size = fold_build2_loc (input_location, MULT_EXPR,
8128 size_type_node, size,
8129 fold_convert (size_type_node,
8130 nelems));
8132 else
8133 nelems = build_int_cst (size_type_node, 1);
8135 if (CLASS_DATA (c)->attr.dimension
8136 || CLASS_DATA (c)->attr.codimension)
8138 src_data = gfc_conv_descriptor_data_get (src_data);
8139 dst_data = gfc_conv_descriptor_data_get (dst_data);
8142 gfc_init_block (&tmpblock);
8144 /* Coarray components have to have the same allocation status and
8145 shape/type-parameter/effective-type on the LHS and RHS of an
8146 intrinsic assignment. Hence, we did not deallocate them - and
8147 do not allocate them here. */
8148 if (!CLASS_DATA (c)->attr.codimension)
8150 ftn_tree = builtin_decl_explicit (BUILT_IN_MALLOC);
8151 tmp = build_call_expr_loc (input_location, ftn_tree, 1, size);
8152 gfc_add_modify (&tmpblock, dst_data,
8153 fold_convert (TREE_TYPE (dst_data), tmp));
8156 tmp = gfc_copy_class_to_class (comp, dcmp, nelems,
8157 UNLIMITED_POLY (c));
8158 gfc_add_expr_to_block (&tmpblock, tmp);
8159 tmp = gfc_finish_block (&tmpblock);
8161 gfc_init_block (&tmpblock);
8162 gfc_add_modify (&tmpblock, dst_data,
8163 fold_convert (TREE_TYPE (dst_data),
8164 null_pointer_node));
8165 null_data = gfc_finish_block (&tmpblock);
8167 null_cond = fold_build2_loc (input_location, NE_EXPR,
8168 boolean_type_node, src_data,
8169 null_pointer_node);
8171 gfc_add_expr_to_block (&fnblock, build3_v (COND_EXPR, null_cond,
8172 tmp, null_data));
8173 continue;
8176 /* To implement guarded deep copy, i.e., deep copy only allocatable
8177 components that are really allocated, the deep copy code has to
8178 be generated first and then added to the if-block in
8179 gfc_duplicate_allocatable (). */
8180 if (cmp_has_alloc_comps)
8182 rank = c->as ? c->as->rank : 0;
8183 tmp = fold_convert (TREE_TYPE (dcmp), comp);
8184 gfc_add_modify (&fnblock, dcmp, tmp);
8185 add_when_allocated = structure_alloc_comps (c->ts.u.derived,
8186 comp, dcmp,
8187 rank, purpose);
8189 else
8190 add_when_allocated = NULL_TREE;
8192 if (gfc_deferred_strlen (c, &tmp))
8194 tree len, size;
8195 len = tmp;
8196 tmp = fold_build3_loc (input_location, COMPONENT_REF,
8197 TREE_TYPE (len),
8198 decl, len, NULL_TREE);
8199 len = fold_build3_loc (input_location, COMPONENT_REF,
8200 TREE_TYPE (len),
8201 dest, len, NULL_TREE);
8202 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8203 TREE_TYPE (len), len, tmp);
8204 gfc_add_expr_to_block (&fnblock, tmp);
8205 size = size_of_string_in_bytes (c->ts.kind, len);
8206 /* This component cannot have allocatable components,
8207 therefore the add_when_allocated argument of duplicate_allocatable ()
8208 is always NULL. */
8209 tmp = duplicate_allocatable (dcmp, comp, ctype, rank,
8210 false, false, size, NULL_TREE);
8211 gfc_add_expr_to_block (&fnblock, tmp);
8213 else if (c->attr.allocatable && !c->attr.proc_pointer
8214 && (!(cmp_has_alloc_comps && c->as)
8215 || c->attr.codimension))
8217 rank = c->as ? c->as->rank : 0;
8218 if (c->attr.codimension)
8219 tmp = gfc_copy_allocatable_data (dcmp, comp, ctype, rank);
8220 else
8221 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank,
8222 add_when_allocated);
8223 gfc_add_expr_to_block (&fnblock, tmp);
8225 else
8226 if (cmp_has_alloc_comps)
8227 gfc_add_expr_to_block (&fnblock, add_when_allocated);
8229 break;
8231 default:
8232 gcc_unreachable ();
8233 break;
8237 return gfc_finish_block (&fnblock);
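/* For the example type t above, NULLIFY_ALLOC_COMP roughly yields
     var.i.data = NULL; var.s = NULL;
   whereas DEALLOCATE_ALLOC_COMP yields the matching guarded deallocations,
   recursing into allocatable components of components first (sketch only).  */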
8240 /* Recursively traverse an object of derived type, generating code to
8241 nullify allocatable components. */
8243 tree
8244 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
8246 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8247 NULLIFY_ALLOC_COMP);
8251 /* Recursively traverse an object of derived type, generating code to
8252 deallocate allocatable components. */
8254 tree
8255 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
8257 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8258 DEALLOCATE_ALLOC_COMP);
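/* A typical call site (sketch; it mirrors the cleanup code emitted in
   gfc_trans_deferred_array below):

     rank = sym->as ? sym->as->rank : 0;
     tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
     gfc_add_expr_to_block (&cleanup, tmp);  */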
8262 /* Recursively traverse an object of derived type, generating code to
8263 deallocate allocatable components. But do not deallocate coarrays.
8264 To be used for intrinsic assignment, which may not change the allocation
8265 status of coarrays. */
8267 tree
8268 gfc_deallocate_alloc_comp_no_caf (gfc_symbol * der_type, tree decl, int rank)
8270 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8271 DEALLOCATE_ALLOC_COMP_NO_CAF);
8275 tree
8276 gfc_reassign_alloc_comp_caf (gfc_symbol *der_type, tree decl, tree dest)
8278 return structure_alloc_comps (der_type, decl, dest, 0, COPY_ALLOC_COMP_CAF);
8282 /* Recursively traverse an object of derived type, generating code to
8283 copy it and its allocatable components. */
8285 tree
8286 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
8288 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
8292 /* Recursively traverse an object of derived type, generating code to
8293 copy only its allocatable components. */
8295 tree
8296 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
8298 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
8302 /* Returns the value of LBOUND for an expression. This could be broken out
8303 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
8304 called by gfc_alloc_allocatable_for_assignment. */
8305 static tree
8306 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
8308 tree lbound;
8309 tree ubound;
8310 tree stride;
8311 tree cond, cond1, cond3, cond4;
8312 tree tmp;
8313 gfc_ref *ref;
8315 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
8317 tmp = gfc_rank_cst[dim];
8318 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
8319 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
8320 stride = gfc_conv_descriptor_stride_get (desc, tmp);
8321 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
8322 ubound, lbound);
8323 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
8324 stride, gfc_index_zero_node);
8325 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
8326 boolean_type_node, cond3, cond1);
8327 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
8328 stride, gfc_index_zero_node);
8329 if (assumed_size)
8330 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8331 tmp, build_int_cst (gfc_array_index_type,
8332 expr->rank - 1));
8333 else
8334 cond = boolean_false_node;
8336 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
8337 boolean_type_node, cond3, cond4);
8338 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
8339 boolean_type_node, cond, cond1);
8341 return fold_build3_loc (input_location, COND_EXPR,
8342 gfc_array_index_type, cond,
8343 lbound, gfc_index_one_node);
8346 if (expr->expr_type == EXPR_FUNCTION)
8348 /* A conversion function, so use the argument. */
8349 gcc_assert (expr->value.function.isym
8350 && expr->value.function.isym->conversion);
8351 expr = expr->value.function.actual->expr;
8354 if (expr->expr_type == EXPR_VARIABLE)
8356 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
8357 for (ref = expr->ref; ref; ref = ref->next)
8359 if (ref->type == REF_COMPONENT
8360 && ref->u.c.component->as
8361 && ref->next
8362 && ref->next->u.ar.type == AR_FULL)
8363 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
8365 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
8368 return gfc_index_one_node;
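/* In effect (sketch): for a descriptor the function returns

     lbound(desc, dim)   if the extent of that dimension is non-zero, the
                         stride is negative, or it is the last dimension of
                         an assumed-size array;
     1                   otherwise,

   which is consistent with the Fortran definition of LBOUND returning 1 for
   a zero-sized dimension.  */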
8372 /* Returns true if an expression represents an lhs that can be reallocated
8373 on assignment. */
8375 bool
8376 gfc_is_reallocatable_lhs (gfc_expr *expr)
8378 gfc_ref * ref;
8380 if (!expr->ref)
8381 return false;
8383 /* An allocatable variable. */
8384 if (expr->symtree->n.sym->attr.allocatable
8385 && expr->ref
8386 && expr->ref->type == REF_ARRAY
8387 && expr->ref->u.ar.type == AR_FULL)
8388 return true;
8390 /* All that can be left are allocatable components. */
8391 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
8392 && expr->symtree->n.sym->ts.type != BT_CLASS)
8393 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
8394 return false;
8396 /* Find a component ref followed by an array reference. */
8397 for (ref = expr->ref; ref; ref = ref->next)
8398 if (ref->next
8399 && ref->type == REF_COMPONENT
8400 && ref->next->type == REF_ARRAY
8401 && !ref->next->next)
8402 break;
8404 if (!ref)
8405 return false;
8407 /* Return true if valid reallocatable lhs. */
8408 if (ref->u.c.component->attr.allocatable
8409 && ref->next->u.ar.type == AR_FULL)
8410 return true;
8412 return false;
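/* Illustrative examples (names are arbitrary):

     real, allocatable :: a(:)
     type(t) :: d                 ! t has an allocatable array component c

     a = ...        ! reallocatable lhs: full reference to an allocatable
     a(1:n) = ...   ! not reallocatable: an array section, not AR_FULL
     d%c = ...      ! reallocatable lhs: allocatable component, full reference
 */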
8416 /* Allocate the lhs of an assignment to an allocatable array if it is
8417 unallocated; otherwise reallocate it. */
8419 tree
8420 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
8421 gfc_expr *expr1,
8422 gfc_expr *expr2)
8424 stmtblock_t realloc_block;
8425 stmtblock_t alloc_block;
8426 stmtblock_t fblock;
8427 gfc_ss *rss;
8428 gfc_ss *lss;
8429 gfc_array_info *linfo;
8430 tree realloc_expr;
8431 tree alloc_expr;
8432 tree size1;
8433 tree size2;
8434 tree array1;
8435 tree cond_null;
8436 tree cond;
8437 tree tmp;
8438 tree tmp2;
8439 tree lbound;
8440 tree ubound;
8441 tree desc;
8442 tree old_desc;
8443 tree desc2;
8444 tree offset;
8445 tree jump_label1;
8446 tree jump_label2;
8447 tree neq_size;
8448 tree lbd;
8449 int n;
8450 int dim;
8451 gfc_array_spec * as;
8453 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
8454 Find the lhs expression in the loop chain and set expr1 and
8455 expr2 accordingly. */
8456 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
8458 expr2 = expr1;
8459 /* Find the ss for the lhs. */
8460 lss = loop->ss;
8461 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
8462 if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE)
8463 break;
8464 if (lss == gfc_ss_terminator)
8465 return NULL_TREE;
8466 expr1 = lss->info->expr;
8469 /* Bail out if this is not a valid allocate on assignment. */
8470 if (!gfc_is_reallocatable_lhs (expr1)
8471 || (expr2 && !expr2->rank))
8472 return NULL_TREE;
8474 /* Find the ss for the lhs. */
8475 lss = loop->ss;
8476 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
8477 if (lss->info->expr == expr1)
8478 break;
8480 if (lss == gfc_ss_terminator)
8481 return NULL_TREE;
8483 linfo = &lss->info->data.array;
8485 /* Find an ss for the rhs. For operator expressions, we see the
8486 ss's for the operands. Any one of these will do. */
8487 rss = loop->ss;
8488 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
8489 if (rss->info->expr != expr1 && rss != loop->temp_ss)
8490 break;
8492 if (expr2 && rss == gfc_ss_terminator)
8493 return NULL_TREE;
8495 gfc_start_block (&fblock);
8497 /* Since the lhs is allocatable, this must be a descriptor type.
8498 Get the data and array size. */
8499 desc = linfo->descriptor;
8500 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
8501 array1 = gfc_conv_descriptor_data_get (desc);
8503 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
8504 deallocated if expr is an array of different shape or any of the
8505 corresponding length type parameter values of variable and expr
8506 differ." This assures F95 compatibility. */
8507 jump_label1 = gfc_build_label_decl (NULL_TREE);
8508 jump_label2 = gfc_build_label_decl (NULL_TREE);
8510 /* Allocate if data is NULL. */
8511 cond_null = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8512 array1, build_int_cst (TREE_TYPE (array1), 0));
8513 tmp = build3_v (COND_EXPR, cond_null,
8514 build1_v (GOTO_EXPR, jump_label1),
8515 build_empty_stmt (input_location));
8516 gfc_add_expr_to_block (&fblock, tmp);
8518 /* Get arrayspec if expr is a full array. */
8519 if (expr2 && expr2->expr_type == EXPR_FUNCTION
8520 && expr2->value.function.isym
8521 && expr2->value.function.isym->conversion)
8523 /* For conversion functions, take the arg. */
8524 gfc_expr *arg = expr2->value.function.actual->expr;
8525 as = gfc_get_full_arrayspec_from_expr (arg);
8527 else if (expr2)
8528 as = gfc_get_full_arrayspec_from_expr (expr2);
8529 else
8530 as = NULL;
8532 /* If the lhs shape is not the same as the rhs, jump to setting the
8533 bounds and doing the reallocation...  */
8534 for (n = 0; n < expr1->rank; n++)
8536 /* Check the shape. */
8537 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8538 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
8539 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8540 gfc_array_index_type,
8541 loop->to[n], loop->from[n]);
8542 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8543 gfc_array_index_type,
8544 tmp, lbound);
8545 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8546 gfc_array_index_type,
8547 tmp, ubound);
8548 cond = fold_build2_loc (input_location, NE_EXPR,
8549 boolean_type_node,
8550 tmp, gfc_index_zero_node);
8551 tmp = build3_v (COND_EXPR, cond,
8552 build1_v (GOTO_EXPR, jump_label1),
8553 build_empty_stmt (input_location));
8554 gfc_add_expr_to_block (&fblock, tmp);
8557 /* ...else jump past the (re)alloc code. */
8558 tmp = build1_v (GOTO_EXPR, jump_label2);
8559 gfc_add_expr_to_block (&fblock, tmp);
8561 /* Add the label to start automatic (re)allocation. */
8562 tmp = build1_v (LABEL_EXPR, jump_label1);
8563 gfc_add_expr_to_block (&fblock, tmp);
8565 /* If the lhs has not been allocated, its bounds will not have been
8566 initialized and so its size is set to zero. */
8567 size1 = gfc_create_var (gfc_array_index_type, NULL);
8568 gfc_init_block (&alloc_block);
8569 gfc_add_modify (&alloc_block, size1, gfc_index_zero_node);
8570 gfc_init_block (&realloc_block);
8571 gfc_add_modify (&realloc_block, size1,
8572 gfc_conv_descriptor_size (desc, expr1->rank));
8573 tmp = build3_v (COND_EXPR, cond_null,
8574 gfc_finish_block (&alloc_block),
8575 gfc_finish_block (&realloc_block));
8576 gfc_add_expr_to_block (&fblock, tmp);
8578 /* Get the rhs size and fix it. */
8579 if (expr2)
8580 desc2 = rss->info->data.array.descriptor;
8581 else
8582 desc2 = NULL_TREE;
8584 size2 = gfc_index_one_node;
8585 for (n = 0; n < expr2->rank; n++)
8587 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8588 gfc_array_index_type,
8589 loop->to[n], loop->from[n]);
8590 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8591 gfc_array_index_type,
8592 tmp, gfc_index_one_node);
8593 size2 = fold_build2_loc (input_location, MULT_EXPR,
8594 gfc_array_index_type,
8595 tmp, size2);
8597 size2 = gfc_evaluate_now (size2, &fblock);
8599 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
8600 size1, size2);
8601 neq_size = gfc_evaluate_now (cond, &fblock);
8603 /* Deallocation of allocatable components will have to occur on
8604 reallocation. Fix the old descriptor now. */
8605 if ((expr1->ts.type == BT_DERIVED)
8606 && expr1->ts.u.derived->attr.alloc_comp)
8607 old_desc = gfc_evaluate_now (desc, &fblock);
8608 else
8609 old_desc = NULL_TREE;
8611 /* Now modify the lhs descriptor and the associated scalarizer
8612 variables. F2003 7.4.1.3: "If variable is or becomes an
8613 unallocated allocatable variable, then it is allocated with each
8614 deferred type parameter equal to the corresponding type parameters
8615 of expr, with the shape of expr, and with each lower bound equal
8616 to the corresponding element of LBOUND(expr)."
8617 Reuse size1 to keep a dimension-by-dimension track of the
8618 stride of the new array. */
8619 size1 = gfc_index_one_node;
8620 offset = gfc_index_zero_node;
8622 for (n = 0; n < expr2->rank; n++)
8624 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8625 gfc_array_index_type,
8626 loop->to[n], loop->from[n]);
8627 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8628 gfc_array_index_type,
8629 tmp, gfc_index_one_node);
8631 lbound = gfc_index_one_node;
8632 ubound = tmp;
8634 if (as)
8636 lbd = get_std_lbound (expr2, desc2, n,
8637 as->type == AS_ASSUMED_SIZE);
8638 ubound = fold_build2_loc (input_location,
8639 MINUS_EXPR,
8640 gfc_array_index_type,
8641 ubound, lbound);
8642 ubound = fold_build2_loc (input_location,
8643 PLUS_EXPR,
8644 gfc_array_index_type,
8645 ubound, lbd);
8646 lbound = lbd;
8649 gfc_conv_descriptor_lbound_set (&fblock, desc,
8650 gfc_rank_cst[n],
8651 lbound);
8652 gfc_conv_descriptor_ubound_set (&fblock, desc,
8653 gfc_rank_cst[n],
8654 ubound);
8655 gfc_conv_descriptor_stride_set (&fblock, desc,
8656 gfc_rank_cst[n],
8657 size1);
8658 lbound = gfc_conv_descriptor_lbound_get (desc,
8659 gfc_rank_cst[n]);
8660 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
8661 gfc_array_index_type,
8662 lbound, size1);
8663 offset = fold_build2_loc (input_location, MINUS_EXPR,
8664 gfc_array_index_type,
8665 offset, tmp2);
8666 size1 = fold_build2_loc (input_location, MULT_EXPR,
8667 gfc_array_index_type,
8668 tmp, size1);
8671 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
8672 the array offset is saved and the info.offset is used for a
8673 running offset. Use the saved_offset instead. */
8674 tmp = gfc_conv_descriptor_offset (desc);
8675 gfc_add_modify (&fblock, tmp, offset);
8676 if (linfo->saved_offset
8677 && TREE_CODE (linfo->saved_offset) == VAR_DECL)
8678 gfc_add_modify (&fblock, linfo->saved_offset, tmp);
8680 /* Now set the deltas for the lhs. */
8681 for (n = 0; n < expr1->rank; n++)
8683 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8684 dim = lss->dim[n];
8685 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8686 gfc_array_index_type, tmp,
8687 loop->from[dim]);
8688 if (linfo->delta[dim]
8689 && TREE_CODE (linfo->delta[dim]) == VAR_DECL)
8690 gfc_add_modify (&fblock, linfo->delta[dim], tmp);
8693 /* Get the new lhs size in bytes. */
8694 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
8696 if (expr2->ts.deferred)
8698 if (TREE_CODE (expr2->ts.u.cl->backend_decl) == VAR_DECL)
8699 tmp = expr2->ts.u.cl->backend_decl;
8700 else
8701 tmp = rss->info->string_length;
8703 else
8705 tmp = expr2->ts.u.cl->backend_decl;
8706 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
8709 if (expr1->ts.u.cl->backend_decl
8710 && TREE_CODE (expr1->ts.u.cl->backend_decl) == VAR_DECL)
8711 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
8712 else
8713 gfc_add_modify (&fblock, lss->info->string_length, tmp);
8715 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
8717 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
8718 tmp = fold_build2_loc (input_location, MULT_EXPR,
8719 gfc_array_index_type, tmp,
8720 expr1->ts.u.cl->backend_decl);
8722 else
8723 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
8724 tmp = fold_convert (gfc_array_index_type, tmp);
8725 size2 = fold_build2_loc (input_location, MULT_EXPR,
8726 gfc_array_index_type,
8727 tmp, size2);
8728 size2 = fold_convert (size_type_node, size2);
8729 size2 = fold_build2_loc (input_location, MAX_EXPR, size_type_node,
8730 size2, size_one_node);
8731 size2 = gfc_evaluate_now (size2, &fblock);
8733 /* Realloc expression. Note that the scalarizer uses desc.data
8734 in the array reference - (*desc.data)[<element>]. */
8735 gfc_init_block (&realloc_block);
8737 if ((expr1->ts.type == BT_DERIVED)
8738 && expr1->ts.u.derived->attr.alloc_comp)
8740 tmp = gfc_deallocate_alloc_comp_no_caf (expr1->ts.u.derived, old_desc,
8741 expr1->rank);
8742 gfc_add_expr_to_block (&realloc_block, tmp);
8745 tmp = build_call_expr_loc (input_location,
8746 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
8747 fold_convert (pvoid_type_node, array1),
8748 size2);
8749 gfc_conv_descriptor_data_set (&realloc_block,
8750 desc, tmp);
8752 if ((expr1->ts.type == BT_DERIVED)
8753 && expr1->ts.u.derived->attr.alloc_comp)
8755 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
8756 expr1->rank);
8757 gfc_add_expr_to_block (&realloc_block, tmp);
8760 realloc_expr = gfc_finish_block (&realloc_block);
8762 /* Only reallocate if sizes are different. */
8763 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
8764 build_empty_stmt (input_location));
8765 realloc_expr = tmp;
8768 /* Malloc expression. */
8769 gfc_init_block (&alloc_block);
8770 tmp = build_call_expr_loc (input_location,
8771 builtin_decl_explicit (BUILT_IN_MALLOC),
8772 1, size2);
8773 gfc_conv_descriptor_data_set (&alloc_block,
8774 desc, tmp);
8775 tmp = gfc_conv_descriptor_dtype (desc);
8776 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
8777 if ((expr1->ts.type == BT_DERIVED)
8778 && expr1->ts.u.derived->attr.alloc_comp)
8780 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
8781 expr1->rank);
8782 gfc_add_expr_to_block (&alloc_block, tmp);
8784 alloc_expr = gfc_finish_block (&alloc_block);
8786 /* Malloc if not allocated; realloc otherwise. */
8787 tmp = build_int_cst (TREE_TYPE (array1), 0);
8788 cond = fold_build2_loc (input_location, EQ_EXPR,
8789 boolean_type_node,
8790 array1, tmp);
8791 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
8792 gfc_add_expr_to_block (&fblock, tmp);
8794 /* Make sure that the scalarizer data pointer is updated. */
8795 if (linfo->data
8796 && TREE_CODE (linfo->data) == VAR_DECL)
8798 tmp = gfc_conv_descriptor_data_get (desc);
8799 gfc_add_modify (&fblock, linfo->data, tmp);
8802 /* Add the exit label. */
8803 tmp = build1_v (LABEL_EXPR, jump_label2);
8804 gfc_add_expr_to_block (&fblock, tmp);
8806 return gfc_finish_block (&fblock);
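/* Rough shape of the code built above (sketch, not verbatim GENERIC):

     if (lhs.data == NULL) goto realloc_lhs;
     if (<any lhs extent differs from the loop bounds>) goto realloc_lhs;
     goto done;
   realloc_lhs:
     <set lhs bounds, strides and offset from the rhs>;
     size = <new size in bytes>;
     if (lhs.data == NULL)
       lhs.data = malloc (size);
     else if (<element counts differ>)
       lhs.data = realloc (lhs.data, size);
   done:;  */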
8810 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
8811 Do likewise, recursively if necessary, with the allocatable components of
8812 derived types. */
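/* For example (illustrative):

     subroutine sub
       real, allocatable :: a(:)
       ...
     end subroutine sub

   gets "a.data = NULL" added to the init block and, unless A is saved or a
   dummy, a guarded free of A (and of any allocatable components) added to
   the cleanup block.  */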
8814 void
8815 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
8817 tree type;
8818 tree tmp;
8819 tree descriptor;
8820 stmtblock_t init;
8821 stmtblock_t cleanup;
8822 locus loc;
8823 int rank;
8824 bool sym_has_alloc_comp, has_finalizer;
8826 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
8827 || sym->ts.type == BT_CLASS)
8828 && sym->ts.u.derived->attr.alloc_comp;
8829 has_finalizer = sym->ts.type == BT_CLASS || sym->ts.type == BT_DERIVED
8830 ? gfc_is_finalizable (sym->ts.u.derived, NULL) : false;
8832 /* Make sure the frontend gets these right. */
8833 gcc_assert (sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp
8834 || has_finalizer);
8836 gfc_save_backend_locus (&loc);
8837 gfc_set_backend_locus (&sym->declared_at);
8838 gfc_init_block (&init);
8840 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
8841 || TREE_CODE (sym->backend_decl) == PARM_DECL);
8843 if (sym->ts.type == BT_CHARACTER
8844 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
8846 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
8847 gfc_trans_vla_type_sizes (sym, &init);
8850 /* Dummy, use-associated and result variables don't need anything special. */
8851 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
8853 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
8854 gfc_restore_backend_locus (&loc);
8855 return;
8858 descriptor = sym->backend_decl;
8860 /* Although static, derived types with default initializers and
8861 allocatable components must not be nulled wholesale; instead they
8862 are treated component by component. */
8863 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp && !has_finalizer)
8865 /* SAVEd variables are not freed on exit. */
8866 gfc_trans_static_array_pointer (sym);
8868 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
8869 gfc_restore_backend_locus (&loc);
8870 return;
8873 /* Get the descriptor type. */
8874 type = TREE_TYPE (sym->backend_decl);
8876 if ((sym_has_alloc_comp || (has_finalizer && sym->ts.type != BT_CLASS))
8877 && !(sym->attr.pointer || sym->attr.allocatable))
8879 if (!sym->attr.save
8880 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
8882 if (sym->value == NULL
8883 || !gfc_has_default_initializer (sym->ts.u.derived))
8885 rank = sym->as ? sym->as->rank : 0;
8886 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
8887 descriptor, rank);
8888 gfc_add_expr_to_block (&init, tmp);
8890 else
8891 gfc_init_default_dt (sym, &init, false);
8894 else if (!GFC_DESCRIPTOR_TYPE_P (type))
8896 /* If the backend_decl is not a descriptor, we must have a pointer
8897 to one. */
8898 descriptor = build_fold_indirect_ref_loc (input_location,
8899 sym->backend_decl);
8900 type = TREE_TYPE (descriptor);
8903 /* NULLIFY the data pointer, for non-saved allocatables. */
8904 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save && sym->attr.allocatable)
8905 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
8907 gfc_restore_backend_locus (&loc);
8908 gfc_init_block (&cleanup);
8910 /* Allocatable arrays need to be freed when they go out of scope.
8911 The allocatable components of pointers must not be touched. */
8912 if (!sym->attr.allocatable && has_finalizer && sym->ts.type != BT_CLASS
8913 && !sym->attr.pointer && !sym->attr.artificial && !sym->attr.save
8914 && !sym->ns->proc_name->attr.is_main_program)
8916 gfc_expr *e;
8917 sym->attr.referenced = 1;
8918 e = gfc_lval_expr_from_sym (sym);
8919 gfc_add_finalizer_call (&cleanup, e);
8920 gfc_free_expr (e);
8922 else if ((!sym->attr.allocatable || !has_finalizer)
8923 && sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
8924 && !sym->attr.pointer && !sym->attr.save
8925 && !sym->ns->proc_name->attr.is_main_program)
8927 int rank;
8928 rank = sym->as ? sym->as->rank : 0;
8929 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
8930 gfc_add_expr_to_block (&cleanup, tmp);
8933 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
8934 && !sym->attr.save && !sym->attr.result
8935 && !sym->ns->proc_name->attr.is_main_program)
8937 gfc_expr *e;
8938 e = has_finalizer ? gfc_lval_expr_from_sym (sym) : NULL;
8939 tmp = gfc_trans_dealloc_allocated (sym->backend_decl,
8940 sym->attr.codimension, e);
8941 if (e)
8942 gfc_free_expr (e);
8943 gfc_add_expr_to_block (&cleanup, tmp);
8946 gfc_add_init_cleanup (block, gfc_finish_block (&init),
8947 gfc_finish_block (&cleanup));
8950 /************ Expression Walking Functions ******************/
8952 /* Walk a variable reference.
8954 Possible extension - multiple component subscripts.
8955 x(:,:) = foo%a(:)%b(:)
8956 Transforms to
8957 forall (i=..., j=...)
8958 x(i,j) = foo%a(j)%b(i)
8959 end forall
8960 This adds a fair amount of complexity because you need to deal with more
8961 than one ref. Maybe handle in a similar manner to vector subscripts.
8962 Maybe not worth the effort. */
8965 static gfc_ss *
8966 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
8968 gfc_ref *ref;
8970 for (ref = expr->ref; ref; ref = ref->next)
8971 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
8972 break;
8974 return gfc_walk_array_ref (ss, expr, ref);
8978 gfc_ss *
8979 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
8981 gfc_array_ref *ar;
8982 gfc_ss *newss;
8983 int n;
8985 for (; ref; ref = ref->next)
8987 if (ref->type == REF_SUBSTRING)
8989 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
8990 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
8993 /* We're only interested in array sections from now on. */
8994 if (ref->type != REF_ARRAY)
8995 continue;
8997 ar = &ref->u.ar;
8999 switch (ar->type)
9001 case AR_ELEMENT:
9002 for (n = ar->dimen - 1; n >= 0; n--)
9003 ss = gfc_get_scalar_ss (ss, ar->start[n]);
9004 break;
9006 case AR_FULL:
9007 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
9008 newss->info->data.array.ref = ref;
9010 /* Make sure array is the same as array(:,:), so that
9011 we don't need to special-case all the time. */
9012 ar->dimen = ar->as->rank;
9013 for (n = 0; n < ar->dimen; n++)
9015 ar->dimen_type[n] = DIMEN_RANGE;
9017 gcc_assert (ar->start[n] == NULL);
9018 gcc_assert (ar->end[n] == NULL);
9019 gcc_assert (ar->stride[n] == NULL);
9021 ss = newss;
9022 break;
9024 case AR_SECTION:
9025 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
9026 newss->info->data.array.ref = ref;
9028 /* We add SS chains for all the subscripts in the section. */
9029 for (n = 0; n < ar->dimen; n++)
9031 gfc_ss *indexss;
9033 switch (ar->dimen_type[n])
9035 case DIMEN_ELEMENT:
9036 /* Add SS for elemental (scalar) subscripts. */
9037 gcc_assert (ar->start[n]);
9038 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
9039 indexss->loop_chain = gfc_ss_terminator;
9040 newss->info->data.array.subscript[n] = indexss;
9041 break;
9043 case DIMEN_RANGE:
9044 /* We don't add anything for sections, just remember this
9045 dimension for later. */
9046 newss->dim[newss->dimen] = n;
9047 newss->dimen++;
9048 break;
9050 case DIMEN_VECTOR:
9051 /* Create a GFC_SS_VECTOR index in which we can store
9052 the vector's descriptor. */
9053 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
9054 1, GFC_SS_VECTOR);
9055 indexss->loop_chain = gfc_ss_terminator;
9056 newss->info->data.array.subscript[n] = indexss;
9057 newss->dim[newss->dimen] = n;
9058 newss->dimen++;
9059 break;
9061 default:
9062 /* We should know what sort of section it is by now. */
9063 gcc_unreachable ();
9066 /* We should have at least one non-elemental dimension,
9067 unless we are creating a descriptor for a (scalar) coarray. */
9068 gcc_assert (newss->dimen > 0
9069 || newss->info->data.array.ref->u.ar.as->corank > 0);
9070 ss = newss;
9071 break;
9073 default:
9074 /* We should know what sort of section it is by now. */
9075 gcc_unreachable ();
9079 return ss;
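/* Illustration (sketch): for a reference such as a(i, 2:n, v) the AR_SECTION
   case above records a scalar SS for the subscript I (DIMEN_ELEMENT), notes
   the second dimension as a loop dimension (DIMEN_RANGE) and stores the
   descriptor of the vector subscript V in a GFC_SS_VECTOR entry
   (DIMEN_VECTOR).  */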
9083 /* Walk an expression operator. If only one operand of a binary expression is
9084 scalar, we must also add the scalar term to the SS chain. */
9086 static gfc_ss *
9087 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
9089 gfc_ss *head;
9090 gfc_ss *head2;
9092 head = gfc_walk_subexpr (ss, expr->value.op.op1);
9093 if (expr->value.op.op2 == NULL)
9094 head2 = head;
9095 else
9096 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
9098 /* All operands are scalar. Pass back and let the caller deal with it. */
9099 if (head2 == ss)
9100 return head2;
9102 /* All operands require scalarization. */
9103 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
9104 return head2;
9106 /* One of the operands needs scalarization, the other is scalar.
9107 Create a gfc_ss for the scalar expression. */
9108 if (head == ss)
9110 /* First operand is scalar. We build the chain in reverse order, so
9111 add the scalar SS after the second operand. */
9112 head = head2;
9113 while (head && head->next != ss)
9114 head = head->next;
9115 /* Check we haven't somehow broken the chain. */
9116 gcc_assert (head);
9117 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
9119 else /* head2 == head */
9121 gcc_assert (head2 == head);
9122 /* Second operand is scalar. */
9123 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
9126 return head2;
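/* For instance (illustrative): walking b + 1.0 with array-valued B yields an
   array SS for B; because the second operand is scalar, a GFC_SS_SCALAR term
   for 1.0 is added to the chain so the scalarizer can substitute its
   precomputed value inside the loop body.  */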
9130 /* Reverse a SS chain. */
9132 gfc_ss *
9133 gfc_reverse_ss (gfc_ss * ss)
9135 gfc_ss *next;
9136 gfc_ss *head;
9138 gcc_assert (ss != NULL);
9140 head = gfc_ss_terminator;
9141 while (ss != gfc_ss_terminator)
9143 next = ss->next;
9144 /* Check we didn't somehow break the chain. */
9145 gcc_assert (next != NULL);
9146 ss->next = head;
9147 head = ss;
9148 ss = next;
9151 return (head);
9155 /* Given an expression referring to a procedure, return the symbol of its
9156 interface. We can't get the procedure symbol directly as we have to handle
9157 the case of (deferred) type-bound procedures. */
9159 gfc_symbol *
9160 gfc_get_proc_ifc_for_expr (gfc_expr *procedure_ref)
9162 gfc_symbol *sym;
9163 gfc_ref *ref;
9165 if (procedure_ref == NULL)
9166 return NULL;
9168 /* Normal procedure case. */
9169 if (procedure_ref->expr_type == EXPR_FUNCTION
9170 && procedure_ref->value.function.esym)
9171 sym = procedure_ref->value.function.esym;
9172 else
9173 sym = procedure_ref->symtree->n.sym;
9175 /* Typebound procedure case. */
9176 for (ref = procedure_ref->ref; ref; ref = ref->next)
9178 if (ref->type == REF_COMPONENT
9179 && ref->u.c.component->attr.proc_pointer)
9180 sym = ref->u.c.component->ts.interface;
9181 else
9182 sym = NULL;
9185 return sym;
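/* E.g. for "call x%p (args)", where P is a procedure-pointer component, the
   returned symbol is the interface recorded in the component's ts.interface
   rather than the symbol of X itself (illustrative).  */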
9189 /* Walk the arguments of an elemental function.
9190 PROC_IFC is used to check whether an argument is permitted to be absent. If
9191 it is NULL, we don't do the check and the argument is assumed to be present. */
9194 gfc_ss *
9195 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
9196 gfc_symbol *proc_ifc, gfc_ss_type type)
9198 gfc_formal_arglist *dummy_arg;
9199 int scalar;
9200 gfc_ss *head;
9201 gfc_ss *tail;
9202 gfc_ss *newss;
9204 head = gfc_ss_terminator;
9205 tail = NULL;
9207 if (proc_ifc)
9208 dummy_arg = gfc_sym_get_dummy_args (proc_ifc);
9209 else
9210 dummy_arg = NULL;
9212 scalar = 1;
9213 for (; arg; arg = arg->next)
9215 if (!arg->expr || arg->expr->expr_type == EXPR_NULL)
9216 goto loop_continue;
9218 newss = gfc_walk_subexpr (head, arg->expr);
9219 if (newss == head)
9221 /* Scalar argument. */
9222 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
9223 newss = gfc_get_scalar_ss (head, arg->expr);
9224 newss->info->type = type;
9225 if (dummy_arg)
9226 newss->info->data.scalar.dummy_arg = dummy_arg->sym;
9228 else
9229 scalar = 0;
9231 if (dummy_arg != NULL
9232 && dummy_arg->sym->attr.optional
9233 && arg->expr->expr_type == EXPR_VARIABLE
9234 && (gfc_expr_attr (arg->expr).optional
9235 || gfc_expr_attr (arg->expr).allocatable
9236 || gfc_expr_attr (arg->expr).pointer))
9237 newss->info->can_be_null_ref = true;
9239 head = newss;
9240 if (!tail)
9242 tail = head;
9243 while (tail->next != gfc_ss_terminator)
9244 tail = tail->next;
9247 loop_continue:
9248 if (dummy_arg != NULL)
9249 dummy_arg = dummy_arg->next;
9252 if (scalar)
9254 /* If all the arguments are scalar we don't need the argument SS. */
9255 gfc_free_ss_chain (head);
9256 /* Pass it back. */
9257 return ss;
9260 /* Add it onto the existing chain. */
9261 tail->next = ss;
9262 return head;
9266 /* Walk a function call. Scalar functions are passed back, and taken out of
9267 scalarization loops. For elemental functions we walk their arguments.
9268 The result of functions returning arrays is stored in a temporary outside
9269 the loop, so that the function is only called once. Hence we do not need
9270 to walk their arguments. */
9272 static gfc_ss *
9273 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
9275 gfc_intrinsic_sym *isym;
9276 gfc_symbol *sym;
9277 gfc_component *comp = NULL;
9279 isym = expr->value.function.isym;
9281 /* Handle intrinsic functions separately. */
9282 if (isym)
9283 return gfc_walk_intrinsic_function (ss, expr, isym);
9285 sym = expr->value.function.esym;
9286 if (!sym)
9287 sym = expr->symtree->n.sym;
9289 if (gfc_is_alloc_class_array_function (expr))
9290 return gfc_get_array_ss (ss, expr,
9291 CLASS_DATA (expr->value.function.esym->result)->as->rank,
9292 GFC_SS_FUNCTION);
9294 /* A function that returns arrays. */
9295 comp = gfc_get_proc_ptr_comp (expr);
9296 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
9297 || (comp && comp->attr.dimension))
9298 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
9300 /* Walk the parameters of an elemental function. For now we always pass
9301 by reference. */
9302 if (sym->attr.elemental || (comp && comp->attr.elemental))
9304 gfc_ss *old_ss = ss;
9306 ss = gfc_walk_elemental_function_args (old_ss,
9307 expr->value.function.actual,
9308 gfc_get_proc_ifc_for_expr (expr),
9309 GFC_SS_REFERENCE);
9310 if (ss != old_ss
9311 && (comp
9312 || sym->attr.proc_pointer
9313 || sym->attr.if_source != IFSRC_DECL
9314 || sym->attr.array_outer_dependency))
9315 ss->info->array_outer_dependency = 1;
9318 /* Scalar functions are OK as these are evaluated outside the scalarization
9319 loop. Pass back and let the caller deal with it. */
9320 return ss;
9324 /* An array temporary is constructed for array constructors. */
9326 static gfc_ss *
9327 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
9329 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
9333 /* Walk an expression. Add walked expressions to the head of the SS chain.
9334 A wholly scalar expression will not be added. */
9336 gfc_ss *
9337 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
9339 gfc_ss *head;
9341 switch (expr->expr_type)
9343 case EXPR_VARIABLE:
9344 head = gfc_walk_variable_expr (ss, expr);
9345 return head;
9347 case EXPR_OP:
9348 head = gfc_walk_op_expr (ss, expr);
9349 return head;
9351 case EXPR_FUNCTION:
9352 head = gfc_walk_function_expr (ss, expr);
9353 return head;
9355 case EXPR_CONSTANT:
9356 case EXPR_NULL:
9357 case EXPR_STRUCTURE:
9358 /* Pass back and let the caller deal with it. */
9359 break;
9361 case EXPR_ARRAY:
9362 head = gfc_walk_array_constructor (ss, expr);
9363 return head;
9365 case EXPR_SUBSTRING:
9366 /* Pass back and let the caller deal with it. */
9367 break;
9369 default:
9370 gfc_internal_error ("bad expression type during walk (%d)",
9371 expr->expr_type);
9373 return ss;
9377 /* Entry point for expression walking.
9378 A return value equal to the passed chain means this is
9379 a scalar expression. It is up to the caller to take whatever action is
9380 necessary to translate these. */
9382 gfc_ss *
9383 gfc_walk_expr (gfc_expr * expr)
9385 gfc_ss *res;
9387 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
9388 return gfc_reverse_ss (res);
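/* Typical use (sketch; the exact pattern varies between callers):

     ss = gfc_walk_expr (expr);
     if (ss == gfc_ss_terminator)
       ... translate EXPR as a scalar expression ...
     else
       ... add SS to a gfc_loopinfo and scalarize ...
 */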