/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "tree-gimple.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "dependency.h"
typedef struct iter_info
{
  tree var;
  tree start;
  tree end;
  tree step;
  struct iter_info *next;
}
iter_info;

typedef struct forall_info
{
  iter_info *this_loop;
  tree mask;
  tree maskindex;
  int nvar;
  tree size;
  struct forall_info *prev_nest;
}
forall_info;

static void gfc_trans_where_2 (gfc_code *, tree, bool,
			       forall_info *, stmtblock_t *);
/* Translate a F95 label number to a LABEL_EXPR.  */

tree
gfc_trans_label_here (gfc_code * code)
{
  return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
}
/* Given a variable expression which has been ASSIGNed to, find the decl
   containing the auxiliary variables.  For variables in common blocks this
   is the field declaration within the common block.  */

static void
gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
{
  gcc_assert (expr->symtree->n.sym->attr.assign == 1);
  gfc_conv_expr (se, expr);
  /* Deals with variable in common block.  Get the field declaration.  */
  if (TREE_CODE (se->expr) == COMPONENT_REF)
    se->expr = TREE_OPERAND (se->expr, 1);
  /* Deals with dummy argument.  Get the parameter declaration.  */
  else if (TREE_CODE (se->expr) == INDIRECT_REF)
    se->expr = TREE_OPERAND (se->expr, 0);
}
/* Translate a label assignment statement.  */
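/* For example, the (obsolescent) statement

       ASSIGN 100 TO ivar

   stores either the address of label 100 (when 100 is a branch target)
   or its FORMAT string into the auxiliary variables attached to ivar,
   roughly as sketched by the code below.  */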
tree
gfc_trans_label_assign (gfc_code * code)
{
  tree label_tree;
  gfc_se se;
  tree len;
  tree addr;
  tree len_tree;
  int label_len;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr);

  len = GFC_DECL_STRING_LEN (se.expr);
  addr = GFC_DECL_ASSIGN_ADDR (se.expr);

  label_tree = gfc_get_label_decl (code->label);

  if (code->label->defined == ST_LABEL_TARGET)
    {
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
      len_tree = integer_minus_one_node;
    }
  else
    {
      gfc_expr *format = code->label->format;

      label_len = format->value.character.length;
      len_tree = build_int_cst (NULL_TREE, label_len);
      label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
						format->value.character.string);
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
    }

  gfc_add_modify_expr (&se.pre, len, len_tree);
  gfc_add_modify_expr (&se.pre, addr, label_tree);

  return gfc_finish_block (&se.pre);
}
/* Translate a GOTO statement.  */
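/* Besides the plain GOTO, this also handles the assigned GOTO, e.g.

       GO TO ivar, (10, 20, 30)

   where ivar must have been set by a prior ASSIGN; the optional label
   list is checked at run time (a sketch of the intent; see below).  */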
tree
gfc_trans_goto (gfc_code * code)
{
  locus loc = code->loc;
  tree assigned_goto;
  tree target;
  tree tmp;
  gfc_se se;

  if (code->label != NULL)
    return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));

  /* ASSIGNED GOTO.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr);
  tmp = GFC_DECL_STRING_LEN (se.expr);
  tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
		     build_int_cst (TREE_TYPE (tmp), -1));
  gfc_trans_runtime_check (tmp, &se.pre, &loc,
			   "Assigned label is not a target label");

  assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);

  code = code->block;
  if (code == NULL)
    {
      target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
      gfc_add_expr_to_block (&se.pre, target);
      return gfc_finish_block (&se.pre);
    }

  /* Check the label list.  */
  do
    {
      target = gfc_get_label_decl (code->label);
      tmp = gfc_build_addr_expr (pvoid_type_node, target);
      tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
      tmp = build3_v (COND_EXPR, tmp,
		      fold_build1 (GOTO_EXPR, void_type_node, target),
		      build_empty_stmt ());
      gfc_add_expr_to_block (&se.pre, tmp);
      code = code->block;
    }
  while (code != NULL);
  gfc_trans_runtime_check (boolean_true_node, &se.pre, &loc,
			   "Assigned label is not in the list");

  return gfc_finish_block (&se.pre);
}
/* Translate an ENTRY statement.  Just adds a label for this entry point.  */

tree
gfc_trans_entry (gfc_code * code)
{
  return build1_v (LABEL_EXPR, code->ext.entry->label);
}
/* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
   elemental subroutines.  Make temporaries for output arguments if any such
   dependencies are found.  Output arguments are chosen because internal_unpack
   can be used, as is, to copy the result back to the variable.  */
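/* Illustrative case: for an elemental subroutine FOO whose second dummy
   argument is INTENT(OUT),

       CALL foo (a(2:5), a(1:4))

   the overlapping actual arguments force the INTENT(OUT) result into a
   temporary, which is unpacked back into 'a' afterwards.  */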
static void
gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
				 gfc_symbol * sym, gfc_actual_arglist * arg)
{
  gfc_actual_arglist *arg0;
  gfc_expr *e;
  gfc_formal_arglist *formal;
  gfc_loopinfo tmp_loop;
  gfc_se parmse;
  gfc_ss *ss;
  gfc_ss_info *info;
  gfc_symbol *fsym;
  int n;
  stmtblock_t block;
  tree data;
  tree offset;
  tree size;
  tree tmp;

  if (loopse->ss == NULL)
    return;

  ss = loopse->ss;
  arg0 = arg;
  formal = sym->formal;

  /* Loop over all the arguments testing for dependencies.  */
  for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
    {
      e = arg->expr;
      if (e == NULL)
	continue;

      /* Obtain the info structure for the current argument.  */
      info = NULL;
      for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
	{
	  if (ss->expr != e)
	    continue;
	  info = &ss->data.info;
	  break;
	}

      /* If there is a dependency, create a temporary and use it
	 instead of the variable.  */
      fsym = formal ? formal->sym : NULL;
      if (e->expr_type == EXPR_VARIABLE
	    && e->rank && fsym
	    && fsym->attr.intent != INTENT_IN
	    && gfc_check_fncall_dependency (e, fsym->attr.intent,
					    sym, arg0))
	{
	  /* Make a local loopinfo for the temporary creation, so that
	     none of the other ss->info's have to be renormalized.  */
	  gfc_init_loopinfo (&tmp_loop);
	  for (n = 0; n < info->dimen; n++)
	    {
	      tmp_loop.to[n] = loopse->loop->to[n];
	      tmp_loop.from[n] = loopse->loop->from[n];
	      tmp_loop.order[n] = loopse->loop->order[n];
	    }

	  /* Generate the temporary.  Merge the block so that the
	     declarations are put at the right binding level.  */
	  size = gfc_create_var (gfc_array_index_type, NULL);
	  data = gfc_create_var (pvoid_type_node, NULL);
	  gfc_start_block (&block);
	  tmp = gfc_typenode_for_spec (&e->ts);
	  tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
					     &tmp_loop, info, tmp,
					     false, true, false,
					     &arg->expr->where);
	  gfc_add_modify_expr (&se->pre, size, tmp);
	  tmp = fold_convert (pvoid_type_node, info->data);
	  gfc_add_modify_expr (&se->pre, data, tmp);
	  gfc_merge_block_scope (&block);

	  /* Obtain the argument descriptor for unpacking.  */
	  gfc_init_se (&parmse, NULL);
	  parmse.want_pointer = 1;
	  gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
	  gfc_add_block_to_block (&se->pre, &parmse.pre);

	  /* Calculate the offset for the temporary.  */
	  offset = gfc_index_zero_node;
	  for (n = 0; n < info->dimen; n++)
	    {
	      tmp = gfc_conv_descriptor_stride (info->descriptor,
						gfc_rank_cst[n]);
	      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
				 loopse->loop->from[n], tmp);
	      offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
				    offset, tmp);
	    }
	  info->offset = gfc_create_var (gfc_array_index_type, NULL);
	  gfc_add_modify_expr (&se->pre, info->offset, offset);

	  /* Copy the result back using unpack.  */
	  tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
	  gfc_add_expr_to_block (&se->post, tmp);

	  gfc_add_block_to_block (&se->post, &parmse.post);
	}
    }
}
/* Translate the CALL statement.  Builds a call to an F95 subroutine.  */

tree
gfc_trans_call (gfc_code * code, bool dependency_check)
{
  gfc_se se;
  gfc_ss *ss;
  int has_alternate_specifier;

  /* A CALL starts a new block because the actual arguments may have to
     be evaluated first.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  gcc_assert (code->resolved_sym);

  ss = gfc_ss_terminator;
  if (code->resolved_sym->attr.elemental)
    ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);

  /* Not an elemental subroutine call with array valued arguments.  */
  if (ss == gfc_ss_terminator)
    {
      /* Translate the call.  */
      has_alternate_specifier
	= gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
				  NULL_TREE);

      /* A subroutine without side-effect, by definition, does nothing!  */
      TREE_SIDE_EFFECTS (se.expr) = 1;

      /* Chain the pieces together and return the block.  */
      if (has_alternate_specifier)
	{
	  gfc_code *select_code;
	  gfc_symbol *sym;

	  select_code = code->next;
	  gcc_assert (select_code->op == EXEC_SELECT);
	  sym = select_code->expr->symtree->n.sym;
	  se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
	  if (sym->backend_decl == NULL)
	    sym->backend_decl = gfc_get_symbol_decl (sym);
	  gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
	}
      else
	gfc_add_expr_to_block (&se.pre, se.expr);

      gfc_add_block_to_block (&se.pre, &se.post);
    }
  else
    {
      /* An elemental subroutine call with array valued arguments has
	 to be scalarized.  */
      gfc_loopinfo loop;
      stmtblock_t body;
      stmtblock_t block;
      gfc_se loopse;
      gfc_symbol *sym;

      /* gfc_walk_elemental_function_args renders the ss chain in the
	 reverse order to the actual argument order.  */
      ss = gfc_reverse_ss (ss);

      /* Initialize the loop.  */
      gfc_init_se (&loopse, NULL);
      gfc_init_loopinfo (&loop);
      gfc_add_ss_to_loop (&loop, ss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop);
      gfc_mark_ss_chain_used (ss, 1);

      /* Convert the arguments, checking for dependencies.  */
      gfc_copy_loopinfo_to_se (&loopse, &loop);
      loopse.ss = ss;

      /* For operator assignment, do dependency checking.  */
      if (dependency_check)
	{
	  sym = code->resolved_sym;
	  gfc_conv_elemental_dependencies (&se, &loopse, sym,
					   code->ext.actual);
	}

      /* Generate the loop body.  */
      gfc_start_scalarized_body (&loop, &body);
      gfc_init_block (&block);

      /* Add the subroutine call to the block.  */
      gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
			      NULL_TREE);
      gfc_add_expr_to_block (&loopse.pre, loopse.expr);

      gfc_add_block_to_block (&block, &loopse.pre);
      gfc_add_block_to_block (&block, &loopse.post);

      /* Finish up the loop block and the loop.  */
      gfc_add_expr_to_block (&body, gfc_finish_block (&block));
      gfc_trans_scalarizing_loops (&loop, &body);
      gfc_add_block_to_block (&se.pre, &loop.pre);
      gfc_add_block_to_block (&se.pre, &loop.post);
      gfc_add_block_to_block (&se.pre, &se.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&se.pre);
}
/* Translate the RETURN statement.  */

tree
gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
{
  if (code->expr)
    {
      gfc_se se;
      tree tmp;
      tree result;

      /* If code->expr is not NULL, this return statement must appear
	 in a subroutine and current_fake_result_decl has already
	 been generated.  */

      result = gfc_get_fake_result_decl (NULL, 0);
      if (result == NULL_TREE)
	{
	  gfc_warning ("An alternate return at %L without a * dummy argument",
		       &code->expr->where);
	  return build1_v (GOTO_EXPR, gfc_get_return_label ());
	}

      /* Start a new block for this statement.  */
      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      gfc_conv_expr (&se, code->expr);

      tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
			 fold_convert (TREE_TYPE (result), se.expr));
      gfc_add_expr_to_block (&se.pre, tmp);

      tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
      gfc_add_expr_to_block (&se.pre, tmp);
      gfc_add_block_to_block (&se.pre, &se.post);
      return gfc_finish_block (&se.pre);
    }
  else
    return build1_v (GOTO_EXPR, gfc_get_return_label ());
}
/* Translate the PAUSE statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_pause (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
      tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr);
      tmp = build_call_expr (gfor_fndecl_pause_string, 2,
			     se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}
/* Translate the STOP statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_stop (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
      tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr);
      tmp = build_call_expr (gfor_fndecl_stop_string, 2,
			     se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}
/* Generate GENERIC for the IF construct.  This function also deals with
   the simple IF statement, because the front end translates the IF
   statement into an IF construct.

   An IF/ELSE IF chain becomes a chain of nested COND_EXPRs:

	pre_cond_s;
	if (cond_s)
	  then_clause;
	else
	  {
	    pre_cond_s;
	    if (cond_s)
	      elseif_clause;
	    else
	      else_clause;
	  }

   where COND_S is the simplified version of the predicate.  PRE_COND_S
   are the pre side-effects produced by the translation of the
   conditional expressions.

   We need to build the chain recursively otherwise we run into
   problems with folding incomplete statements.  */

static tree
gfc_trans_if_1 (gfc_code * code)
{
  gfc_se if_se;
  tree stmt, elsestmt;

  /* Check for an unconditional ELSE clause.  */
  if (!code->expr)
    return gfc_trans_code (code->next);

  /* Initialize a statement builder for each block.  Puts in NULL_TREEs.  */
  gfc_init_se (&if_se, NULL);
  gfc_start_block (&if_se.pre);

  /* Calculate the IF condition expression.  */
  gfc_conv_expr_val (&if_se, code->expr);

  /* Translate the THEN clause.  */
  stmt = gfc_trans_code (code->next);

  /* Translate the ELSE clause.  */
  if (code->block)
    elsestmt = gfc_trans_if_1 (code->block);
  else
    elsestmt = build_empty_stmt ();

  /* Build the condition expression and add it to the condition block.  */
  stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);

  gfc_add_expr_to_block (&if_se.pre, stmt);

  /* Finish off this statement.  */
  return gfc_finish_block (&if_se.pre);
}

tree
gfc_trans_if (gfc_code * code)
{
  /* Ignore the top EXEC_IF, it only announces an IF construct.  The
     actual code we must translate is in code->block.  */
  return gfc_trans_if_1 (code->block);
}
/* Translate an arithmetic IF expression.

   IF (cond) label1, label2, label3 translates to

	if (cond <= 0)
	  {
	    if (cond < 0)
	      goto label1;
	    else
	      goto label2;
	  }
	else
	  goto label3;

   An optimized version can be generated in case of equal labels.
   E.g., if label1 is equal to label2, we can translate it to

	if (cond <= 0)
	  goto label1;
	else
	  goto label3;
*/
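/* A concrete case (illustrative only): for

       IF (x - 5) 10, 20, 30

   control transfers to label 10 when x < 5, to 20 when x == 5 and
   to 30 when x > 5.  */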
tree
gfc_trans_arithmetic_if (gfc_code * code)
{
  gfc_se se;
  tree branch1;
  tree branch2;
  tree tmp;
  tree zero;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  /* Pre-evaluate COND.  */
  gfc_conv_expr_val (&se, code->expr);
  se.expr = gfc_evaluate_now (se.expr, &se.pre);

  /* Build something to compare with.  */
  zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);

  if (code->label->value != code->label2->value)
    {
      /* If (cond < 0) take branch1 else take branch2.
         First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases.  */
      branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));

      if (code->label->value != code->label3->value)
	tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
      else
	tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);

      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }
  else
    branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));

  if (code->label->value != code->label3->value
      && code->label2->value != code->label3->value)
    {
      /* if (cond <= 0) take branch1 else take branch2.  */
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
      tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }

  /* Append the COND_EXPR to the evaluation of COND, and return.  */
  gfc_add_expr_to_block (&se.pre, branch1);
  return gfc_finish_block (&se.pre);
}
/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect
   results.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [Evaluate loop bounds and step]
   dovar = from;
   if ((step > 0) ? (dovar <= to) : (dovar >= to))
     {
       for (;;)
	 {
	   body;
   cycle_label:
	   cond = (dovar == to);
	   dovar += step;
	   if (cond) goto end_label;
	 }
     }
   end_label:

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */
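/* For instance (illustrative), DO i = 1, 3 runs the body with i = 1, 2, 3;
   the test "cond = (i == 3)" is evaluated before i is incremented, so the
   body executes exactly three times and i leaves the loop with value 4.  */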
static tree
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
		     tree from, tree to, tree step)
{
  stmtblock_t body;
  tree type;
  tree cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;

  type = TREE_TYPE (dovar);

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify_expr (pblock, dovar, from);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Evaluate the loop condition.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
  cond = gfc_evaluate_now (cond, &body);

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify_expr (&body, dovar, tmp);

  /* The loop exit.  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&body, tmp);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = build1_v (LOOP_EXPR, tmp);

  /* Only execute the loop if the number of iterations is positive.  */
  if (tree_int_cst_sgn (step) > 0)
    cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
  else
    cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
}
/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially
   important for high performance computing.

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;
   dovar = from;
   if (empty) goto exit_label;
   for (;;)
     {
       body;
   cycle_label:
       dovar += step;
       if (countm1 == 0) goto exit_label;
       countm1--;
     }
   exit_label:

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */
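/* Example (illustrative): for DO i = 1, 10, 3 we get
   countm1 = (10 - 1) / 3 = 3, i.e. 3 + 1 = 4 iterations with
   i = 1, 4, 7, 10.  Keeping the count minus one matters because the true
   iteration count can be as large as 2**n for an n-bit unsigned type,
   which would not fit, while countm1 always does.  */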
tree
gfc_trans_do (gfc_code * code)
{
  gfc_se se;
  tree dovar;
  tree from;
  tree to;
  tree step;
  tree empty;
  tree countm1;
  tree type;
  tree utype;
  tree ustep;
  tree cond;
  tree cycle_label;
  tree exit_label;
  tree tmp;
  tree pos_step;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (&block, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (&block, &se.pre);
  from = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (&block, &se.pre);
  to = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (&block, &se.pre);
  step = gfc_evaluate_now (se.expr, &block);

  /* Special case simple loops.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (integer_onep (step)
	  || tree_int_cst_equal (step, integer_minus_one_node)))
    return gfc_trans_simple_do (code, &block, dovar, from, to, step);

  /* We need a special check for empty loops:
     empty = (step > 0 ? to < from : to > from);  */
  pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
			  fold_convert (type, integer_zero_node));
  empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
		       fold_build2 (LT_EXPR, boolean_type_node, to, from),
		       fold_build2 (GT_EXPR, boolean_type_node, to, from));

  /* Initialize loop count.  This code is executed before we enter the
     loop body.  We generate: countm1 = abs(to - from) / abs(step).  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      utype = unsigned_type_for (type);

      /* tmp = abs(to - from) / abs(step) */
      ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
      tmp = fold_build3 (COND_EXPR, type, pos_step,
			 fold_build2 (MINUS_EXPR, type, to, from),
			 fold_build2 (MINUS_EXPR, type, from, to));
      tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
			 ustep);
    }
  else
    {
      /* TODO: We could use the same width as the real type.
	 This would probably cause more problems that it solves
	 when we implement "long double" types.  */
      utype = unsigned_type_for (gfc_array_index_type);
      tmp = fold_build2 (MINUS_EXPR, type, to, from);
      tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
      tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
    }
  countm1 = gfc_create_var (utype, "countm1");
  gfc_add_modify_expr (&block, countm1, tmp);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);
  TREE_USED (exit_label) = 1;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify_expr (&block, dovar, from);

  /* If the loop is empty, go directly to the exit label.  */
  tmp = fold_build3 (COND_EXPR, void_type_node, empty,
		     build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
  gfc_add_expr_to_block (&block, tmp);

  /* Loop body.  */
  gfc_start_block (&body);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used).  cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify_expr (&body, dovar, tmp);

  /* End with the loop condition.  Loop until countm1 == 0.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
		      build_int_cst (utype, 0));
  tmp = build1_v (GOTO_EXPR, exit_label);
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&body, tmp);

  /* Decrement the loop count.  */
  tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
  gfc_add_modify_expr (&body, countm1, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&body);

  /* The for loop itself.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the DO WHILE construct.

   The body is wrapped in an endless loop whose first statement is the
   test

       if (! cond) goto exit_label;

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */
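/* Note (illustrative): CYCLE and EXIT statements inside the body simply
   branch to cycle_label and exit_label respectively, e.g.

       DO WHILE (x > 0)
	 IF (skip) CYCLE      ! goto cycle_label
	 IF (done) EXIT       ! goto exit_label
	 ...
       END DO
*/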
tree
gfc_trans_do_while (gfc_code * code)
{
  gfc_se cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;
  stmtblock_t block;

  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Create a GIMPLE version of the exit condition.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond.expr, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the loop.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for INTEGER case expressions,
   without killing all potential optimizations.  The problem is that
   Fortran allows unbounded cases, but the back-end does not, so we
   need to intercept those before we enter the equivalent SWITCH_EXPR
   we can build.

   For example, we translate this,

       SELECT CASE (expr)
	  CASE (:100,101,105:115)
	     block_1
	  CASE (190:199,200:)
	     block_2
	  CASE DEFAULT
	     block_3
       END SELECT

   to the GENERIC equivalent,

       switch (expr)
	 {
	   case (minimum value for typeof(expr)) ... 100:
	   case 101:
	   case 105 ... 115:
	     block_1;
	     goto end_label;

	   case 190 ... 199:
	   case 200 ... (maximum value for typeof(expr)):
	     block_2;
	     goto end_label;

	   default:
	     block_3;
	     goto end_label;
	 }
       end_label:  */
static tree
gfc_trans_integer_select (gfc_code * code)
{
  gfc_code *c;
  gfc_case *cp;
  tree end_label;
  tree tmp;
  gfc_se se;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Calculate the switch expression.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr);
  gfc_add_block_to_block (&block, &se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
	{
	  tree low, high;
	  tree label;

	  /* Assume it's the default case.  */
	  low = high = NULL_TREE;

	  if (cp->low)
	    {
	      low = gfc_conv_mpz_to_tree (cp->low->value.integer,
					  cp->low->ts.kind);

	      /* If there's only a lower bound, set the high bound to the
		 maximum value of the case expression.  */
	      if (!cp->high)
		high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
	    }

	  if (cp->high)
	    {
	      /* Three cases are possible here:

		 1) There is no lower bound, e.g. CASE (:N).
		 2) There is a lower bound .NE. high bound, that is
		    a case range, e.g. CASE (N:M) where M>N (we make
		    sure that M>N during type resolution).
		 3) There is a lower bound, and it has the same value
		    as the high bound, e.g. CASE (N:N).  This is our
		    internal representation of CASE(N).

		 In the first and second case, we need to set a value for
		 high.  In the third case, we don't because the GCC middle
		 end represents a single case value by just letting high be
		 a NULL_TREE.  We can't do that because we need to be able
		 to represent unbounded cases.  */

	      if (!cp->low
		  || (cp->low
		      && mpz_cmp (cp->low->value.integer,
				  cp->high->value.integer) != 0))
		high = gfc_conv_mpz_to_tree (cp->high->value.integer,
					     cp->high->ts.kind);

	      /* Unbounded case.  */
	      if (!cp->low)
		low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
	    }

	  /* Build a label.  */
	  label = gfc_build_label_decl (NULL_TREE);

	  /* Add this case label.
	     Add parameter 'label', make it match GCC backend.  */
	  tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
			     low, high, label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for LOGICAL case expressions.

   There are only two cases possible here, even though the standard
   does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
   .FALSE., and DEFAULT.

   We never generate more than two blocks here.  Instead, we always
   try to eliminate the DEFAULT case.  This way, we can translate this
   kind of SELECT construct to a simple

   if {} else {};

   expression in GENERIC.  */
static tree
gfc_trans_logical_select (gfc_code * code)
{
  gfc_code *c;
  gfc_code *t, *f, *d;
  gfc_case *cp;
  gfc_se se;
  stmtblock_t block;

  /* Assume we don't have any cases at all.  */
  t = f = d = NULL;

  /* Now see which ones we actually do have.  We can have at most two
     cases in a single case list: one for .TRUE. and one for .FALSE.
     The default case is always separate.  If the cases for .TRUE. and
     .FALSE. are in the same case list, the block for that case list is
     always executed, and we don't generate a COND_EXPR.  */
  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
	{
	  if (cp->low)
	    {
	      if (cp->low->value.logical == 0) /* .FALSE.  */
		f = c;
	      else /* if (cp->value.logical != 0), thus .TRUE.  */
		t = c;
	    }
	  else
	    d = c;
	}
    }

  /* Start a new block.  */
  gfc_start_block (&block);

  /* Calculate the switch expression.  We always need to do this
     because it may have side effects.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr);
  gfc_add_block_to_block (&block, &se.pre);

  if (t == f && t != NULL)
    {
      /* Cases for .TRUE. and .FALSE. are in the same block.  Just
         translate the code for these cases, append it to the current
         block.  */
      gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
    }
  else
    {
      tree true_tree, false_tree, stmt;

      true_tree = build_empty_stmt ();
      false_tree = build_empty_stmt ();

      /* If we have a case for .TRUE. and for .FALSE., discard the default case.
         Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
         make the missing case the default case.  */
      if (t != NULL && f != NULL)
	d = NULL;
      else if (d != NULL)
	{
	  if (t == NULL)
	    t = d;
	  else
	    f = d;
	}

      /* Translate the code for each of these blocks, and append it to
         the current block.  */
      if (t != NULL)
	true_tree = gfc_trans_code (t->next);

      if (f != NULL)
	false_tree = gfc_trans_code (f->next);

      stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
			  true_tree, false_tree);
      gfc_add_expr_to_block (&block, stmt);
    }

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for CHARACTER case expressions.
   Instead of generating compares and jumps, it is far simpler to
   generate a data structure describing the cases in order and call a
   library subroutine that locates the right case.
   This is particularly true because this is the only case where we
   might have to dispose of a temporary.
   The library subroutine returns a pointer to jump to or NULL if no
   branches are to be taken.  */
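/* Roughly (a sketch of the scheme): each CASE range becomes one entry
   { string1, string1_len, string2, string2_len, target } in a static jump
   table; at run time the library routine is handed the table, its length
   and the selector string, and the integer it returns picks the matching
   case label of the SWITCH_EXPR built below.  */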
static tree
gfc_trans_character_select (gfc_code *code)
{
  tree init, node, end_label, tmp, type, case_num, label, fndecl;
  stmtblock_t block, body;
  gfc_case *cp, *d;
  gfc_code *c;
  gfc_se se;
  int n, k;

  /* The jump table types are stored in static variables to avoid
     constructing them from scratch every single time.  */
  static tree select_struct[2];
  static tree ss_string1[2], ss_string1_len[2];
  static tree ss_string2[2], ss_string2_len[2];
  static tree ss_target[2];

  tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);

  if (code->expr->ts.kind == 1)
    k = 0;
  else if (code->expr->ts.kind == 4)
    k = 1;
  else
    gcc_unreachable ();

  if (select_struct[k] == NULL)
    {
      select_struct[k] = make_node (RECORD_TYPE);

      if (code->expr->ts.kind == 1)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
      else if (code->expr->ts.kind == 4)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
      else
	gcc_unreachable ();

#define ADD_FIELD(NAME, TYPE)						\
  ss_##NAME[k] = gfc_add_field_to_struct				\
     (&(TYPE_FIELDS (select_struct[k])), select_struct[k],		\
      get_identifier (stringize(NAME)), TYPE)

      ADD_FIELD (string1, pchartype);
      ADD_FIELD (string1_len, gfc_charlen_type_node);

      ADD_FIELD (string2, pchartype);
      ADD_FIELD (string2_len, gfc_charlen_type_node);

      ADD_FIELD (target, integer_type_node);
#undef ADD_FIELD

      gfc_finish_type (select_struct[k]);
    }

  cp = code->block->ext.case_list;
  while (cp->left != NULL)
    cp = cp->left;

  n = 0;
  for (d = cp; d; d = d->right)
    d->n = n++;

  end_label = gfc_build_label_decl (NULL_TREE);

  /* Generate the body */
  gfc_start_block (&block);
  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (d = c->ext.case_list; d; d = d->next)
	{
	  label = gfc_build_label_decl (NULL_TREE);
	  tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
			     build_int_cst (NULL_TREE, d->n),
			     build_int_cst (NULL_TREE, d->n), label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Generate the structure describing the branches */
  init = NULL_TREE;

  for (d = cp; d; d = d->right)
    {
      node = NULL_TREE;

      gfc_init_se (&se, NULL);

      if (d->low == NULL)
	{
	  node = tree_cons (ss_string1[k], null_pointer_node, node);
	  node = tree_cons (ss_string1_len[k], integer_zero_node, node);
	}
      else
	{
	  gfc_conv_expr_reference (&se, d->low);

	  node = tree_cons (ss_string1[k], se.expr, node);
	  node = tree_cons (ss_string1_len[k], se.string_length, node);
	}

      if (d->high == NULL)
	{
	  node = tree_cons (ss_string2[k], null_pointer_node, node);
	  node = tree_cons (ss_string2_len[k], integer_zero_node, node);
	}
      else
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_reference (&se, d->high);

	  node = tree_cons (ss_string2[k], se.expr, node);
	  node = tree_cons (ss_string2_len[k], se.string_length, node);
	}

      node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
			node);

      tmp = build_constructor_from_list (select_struct[k], nreverse (node));
      init = tree_cons (NULL_TREE, tmp, init);
    }

  type = build_array_type (select_struct[k],
			   build_index_type (build_int_cst (NULL_TREE, n-1)));

  init = build_constructor_from_list (type, nreverse(init));
  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;
  /* Create a static variable to hold the jump table.  */
  tmp = gfc_create_var (type, "jumptable");
  TREE_CONSTANT (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;
  init = tmp;

  /* Build the library call */
  init = gfc_build_addr_expr (pvoid_type_node, init);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_reference (&se, code->expr);

  gfc_add_block_to_block (&block, &se.pre);

  if (code->expr->ts.kind == 1)
    fndecl = gfor_fndecl_select_string;
  else if (code->expr->ts.kind == 4)
    fndecl = gfor_fndecl_select_string_char4;
  else
    gcc_unreachable ();

  tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
			 se.expr, se.string_length);
  case_num = gfc_create_var (integer_type_node, "case_num");
  gfc_add_modify_expr (&block, case_num, tmp);

  gfc_add_block_to_block (&block, &se.post);

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the three variants of the SELECT CASE construct.

   SELECT CASEs with INTEGER case expressions can be translated to an
   equivalent GENERIC switch statement, and for LOGICAL case
   expressions we build one or two if-else compares.

   SELECT CASEs with CHARACTER case expressions are a whole different
   story, because they don't exist in GENERIC.  So we sort them and
   do a binary search at runtime.

   Fortran has no BREAK statement, and it does not allow jumps from
   one case block to another.  That makes things a lot easier for
   the translation.  */

tree
gfc_trans_select (gfc_code * code)
{
  gcc_assert (code && code->expr);

  /* Empty SELECT constructs are legal.  */
  if (code->block == NULL)
    return build_empty_stmt ();

  /* Select the correct translation function.  */
  switch (code->expr->ts.type)
    {
    case BT_LOGICAL:	return gfc_trans_logical_select (code);
    case BT_INTEGER:	return gfc_trans_integer_select (code);
    case BT_CHARACTER:	return gfc_trans_character_select (code);
    default:
      gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
      /* Not reached.  */
    }
}
/* Traversal function to substitute a replacement symtree if the symbol
   in the expression is the same as that passed.  f == 2 signals that
   that variable itself is not to be checked - only the references.
   This group of functions is used when the variable expression in a
   FORALL assignment has internal references.  For example:
		FORALL (i = 1:4) p(p(i)) = i
   The only recourse here is to store a copy of 'p' for the index
   expression.  */

static gfc_symtree *new_symtree;
static gfc_symtree *old_symtree;

static bool
forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
{
  if (expr->expr_type != EXPR_VARIABLE)
    return false;

  if (*f == 2)
    *f = 1;
  else if (expr->symtree->n.sym == sym)
    expr->symtree = new_symtree;

  return false;
}

static void
forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
{
  gfc_traverse_expr (e, sym, forall_replace, f);
}

static bool
forall_restore (gfc_expr *expr,
		gfc_symbol *sym ATTRIBUTE_UNUSED,
		int *f ATTRIBUTE_UNUSED)
{
  if (expr->expr_type != EXPR_VARIABLE)
    return false;

  if (expr->symtree == new_symtree)
    expr->symtree = old_symtree;

  return false;
}

static void
forall_restore_symtree (gfc_expr *e)
{
  gfc_traverse_expr (e, NULL, forall_restore, 0);
}
static void
forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_se tse;
  gfc_se rse;
  gfc_expr *e;
  gfc_symbol *new_sym;
  gfc_symbol *old_sym;
  gfc_symtree *root;
  tree tmp;

  /* Build a copy of the lvalue.  */
  old_symtree = c->expr->symtree;
  old_sym = old_symtree->n.sym;
  e = gfc_lval_expr_from_sym (old_sym);
  if (old_sym->attr.dimension)
    {
      gfc_init_se (&tse, NULL);
      gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
      gfc_add_block_to_block (pre, &tse.pre);
      gfc_add_block_to_block (post, &tse.post);
      tse.expr = build_fold_indirect_ref (tse.expr);

      if (e->ts.type != BT_CHARACTER)
	{
	  /* Use the variable offset for the temporary.  */
	  tmp = gfc_conv_descriptor_offset (tse.expr);
	  gfc_add_modify_expr (pre, tmp,
			       gfc_conv_array_offset (old_sym->backend_decl));
	}
    }
  else
    {
      gfc_init_se (&tse, NULL);
      gfc_init_se (&rse, NULL);
      gfc_conv_expr (&rse, e);
      if (e->ts.type == BT_CHARACTER)
	{
	  tse.string_length = rse.string_length;
	  tmp = gfc_get_character_type_len (gfc_default_character_kind,
					    tse.string_length);
	  tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
					  rse.string_length);
	  gfc_add_block_to_block (pre, &tse.pre);
	  gfc_add_block_to_block (post, &tse.post);
	}
      else
	{
	  tmp = gfc_typenode_for_spec (&e->ts);
	  tse.expr = gfc_create_var (tmp, "temp");
	}

      tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
				     e->expr_type == EXPR_VARIABLE);
      gfc_add_expr_to_block (pre, tmp);
    }

  /* Create a new symbol to represent the lvalue.  */
  new_sym = gfc_new_symbol (old_sym->name, NULL);
  new_sym->ts = old_sym->ts;
  new_sym->attr.referenced = 1;
  new_sym->attr.dimension = old_sym->attr.dimension;
  new_sym->attr.flavor = old_sym->attr.flavor;

  /* Use the temporary as the backend_decl.  */
  new_sym->backend_decl = tse.expr;

  /* Create a fake symtree for it.  */
  root = NULL;
  new_symtree = gfc_new_symtree (&root, old_sym->name);
  new_symtree->n.sym = new_sym;
  gcc_assert (new_symtree == root);

  /* Go through the expression reference replacing the old_symtree
     with the new one.  */
  forall_replace_symtree (c->expr, old_sym, 2);

  /* Now we have made this temporary, we might as well use it for
     the right hand side.  */
  forall_replace_symtree (c->expr2, old_sym, 1);
}
1670 check_forall_dependencies (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
1677 lsym
= c
->expr
->symtree
->n
.sym
;
1678 need_temp
= gfc_check_dependency (c
->expr
, c
->expr2
, 0);
1680 /* Now check for dependencies within the 'variable'
1681 expression itself. These are treated by making a complete
1682 copy of variable and changing all the references to it
1683 point to the copy instead. Note that the shallow copy of
1684 the variable will not suffice for derived types with
1685 pointer components. We therefore leave these to their
1687 if (lsym
->ts
.type
== BT_DERIVED
1688 && lsym
->ts
.derived
->attr
.pointer_comp
)
1692 if (find_forall_index (c
->expr
, lsym
, 2) == SUCCESS
)
1694 forall_make_variable_temp (c
, pre
, post
);
1698 /* Substrings with dependencies are treated in the same
1700 if (c
->expr
->ts
.type
== BT_CHARACTER
1702 && c
->expr2
->expr_type
== EXPR_VARIABLE
1703 && lsym
== c
->expr2
->symtree
->n
.sym
)
1705 for (lref
= c
->expr
->ref
; lref
; lref
= lref
->next
)
1706 if (lref
->type
== REF_SUBSTRING
)
1708 for (rref
= c
->expr2
->ref
; rref
; rref
= rref
->next
)
1709 if (rref
->type
== REF_SUBSTRING
)
1713 && gfc_dep_compare_expr (rref
->u
.ss
.start
, lref
->u
.ss
.start
) < 0)
1715 forall_make_variable_temp (c
, pre
, post
);
1724 cleanup_forall_symtrees (gfc_code
*c
)
1726 forall_restore_symtree (c
->expr
);
1727 forall_restore_symtree (c
->expr2
);
1728 gfc_free (new_symtree
->n
.sym
);
1729 gfc_free (new_symtree
);
/* Generate the loops for a FORALL block, specified by FORALL_TMP.  BODY
   is the contents of the FORALL block/stmt to be iterated.  MASK_FLAG
   indicates whether we should generate code to test the FORALLs mask
   array.  OUTER is the loop header to be used for initializing mask
   indices.

   The generated loop format is:
       count = (end - start + step) / step  */
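/* Roughly, the remainder of each generated loop is (a sketch; see the
   code below):

       var = start;
       for (;;)
	 {
	   if (count <= 0)
	     goto exit_label;
	   <body>;
	   var += step;
	   count--;
	 }
       exit_label:
*/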
static tree
gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
		       int mask_flag, stmtblock_t *outer)
{
  int n, nvar;
  tree tmp;
  tree cond;
  stmtblock_t block;
  tree exit_label;
  tree count;
  tree var, start, end, step;
  iter_info *iter;

  /* Initialize the mask index outside the FORALL nest.  */
  if (mask_flag && forall_tmp->mask)
    gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);

  iter = forall_tmp->this_loop;
  nvar = forall_tmp->nvar;
  for (n = 0; n < nvar; n++)
    {
      var = iter->var;
      start = iter->start;
      end = iter->end;
      step = iter->step;

      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* The loop counter.  */
      count = gfc_create_var (TREE_TYPE (var), "count");

      /* The body of the loop.  */
      gfc_init_block (&block);

      /* The exit condition.  */
      cond = fold_build2 (LE_EXPR, boolean_type_node,
			  count, build_int_cst (TREE_TYPE (count), 0));
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3 (COND_EXPR, void_type_node,
			 cond, tmp, build_empty_stmt ());
      gfc_add_expr_to_block (&block, tmp);

      /* The main loop body.  */
      gfc_add_expr_to_block (&block, body);

      /* Increment the loop variable.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
      gfc_add_modify_expr (&block, var, tmp);

      /* Advance to the next mask element.  Only do this for the
	 innermost loop.  */
      if (n == 0 && mask_flag && forall_tmp->mask)
	{
	  tree maskindex = forall_tmp->maskindex;
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     maskindex, gfc_index_one_node);
	  gfc_add_modify_expr (&block, maskindex, tmp);
	}

      /* Decrement the loop counter.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
			 build_int_cst (TREE_TYPE (var), 1));
      gfc_add_modify_expr (&block, count, tmp);

      body = gfc_finish_block (&block);

      /* Loop var initialization.  */
      gfc_init_block (&block);
      gfc_add_modify_expr (&block, var, start);

      /* Initialize the loop counter.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
      gfc_add_modify_expr (&block, count, tmp);

      /* The loop expression.  */
      tmp = build1_v (LOOP_EXPR, body);
      gfc_add_expr_to_block (&block, tmp);

      /* The exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&block, tmp);

      body = gfc_finish_block (&block);
      iter = iter->next;
    }
  return body;
}
/* Generate the body and loops according to MASK_FLAG.  If MASK_FLAG
   is nonzero, the body is controlled by all masks in the forall nest.
   Otherwise, the innermost loop is not controlled by its mask.  This
   is used for initializing that mask.  */

static tree
gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
			      int mask_flag)
{
  stmtblock_t header;
  forall_info *forall_tmp;
  tree mask, maskindex;
  tree tmp;

  gfc_start_block (&header);

  forall_tmp = nested_forall_info;
  while (forall_tmp != NULL)
    {
      /* Generate body with masks' control.  */
      if (mask_flag)
	{
	  mask = forall_tmp->mask;
	  maskindex = forall_tmp->maskindex;

	  /* If a mask was specified make the assignment conditional.  */
	  if (mask)
	    {
	      tmp = gfc_build_array_ref (mask, maskindex, NULL);
	      body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
	    }
	}
      body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
      forall_tmp = forall_tmp->prev_nest;
      mask_flag = 1;
    }

  gfc_add_expr_to_block (&header, body);
  return gfc_finish_block (&header);
}
/* Allocate data for holding a temporary array.  Returns either a local
   temporary array or a pointer variable.  */

static tree
gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
		 tree elem_type)
{
  tree tmpvar;
  tree type;
  tree tmp;

  if (INTEGER_CST_P (size))
    tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
		       gfc_index_one_node);
  else
    tmp = NULL_TREE;

  type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
  type = build_array_type (elem_type, type);
  if (gfc_can_put_var_on_stack (bytesize))
    {
      gcc_assert (INTEGER_CST_P (size));
      tmpvar = gfc_create_var (type, "temp");
      *pdata = NULL_TREE;
    }
  else
    {
      tmpvar = gfc_create_var (build_pointer_type (type), "temp");
      *pdata = convert (pvoid_type_node, tmpvar);

      tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
      gfc_add_modify_expr (pblock, tmpvar, tmp);
    }
  return tmpvar;
}
/* Generate codes to copy the temporary to the actual lhs.  */

static tree
generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
			       tree count1, tree wheremask, bool invert)
{
  gfc_ss *lss;
  gfc_se lse, rse;
  stmtblock_t block, body;
  gfc_loopinfo loop1;
  tree tmp;
  tree wheremaskexpr;

  /* Walk the lhs.  */
  lss = gfc_walk_expr (expr);

  if (lss == gfc_ss_terminator)
    {
      gfc_start_block (&block);

      gfc_init_se (&lse, NULL);

      /* Translate the expression.  */
      gfc_conv_expr (&lse, expr);

      /* Form the expression for the temporary.  */
      tmp = gfc_build_array_ref (tmp1, count1, NULL);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &lse.pre);
      gfc_add_modify_expr (&block, lse.expr, tmp);
      gfc_add_block_to_block (&block, &lse.post);

      /* Increment the count1.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
			 gfc_index_one_node);
      gfc_add_modify_expr (&block, count1, tmp);

      tmp = gfc_finish_block (&block);
    }
  else
    {
      gfc_start_block (&block);

      gfc_init_loopinfo (&loop1);
      gfc_init_se (&rse, NULL);
      gfc_init_se (&lse, NULL);

      /* Associate the lss with the loop.  */
      gfc_add_ss_to_loop (&loop1, lss);

      /* Calculate the bounds of the scalarization.  */
      gfc_conv_ss_startstride (&loop1);
      /* Setup the scalarizing loops.  */
      gfc_conv_loop_setup (&loop1);

      gfc_mark_ss_chain_used (lss, 1);

      /* Start the scalarized loop body.  */
      gfc_start_scalarized_body (&loop1, &body);

      /* Setup the gfc_se structures.  */
      gfc_copy_loopinfo_to_se (&lse, &loop1);
      lse.ss = lss;

      /* Form the expression of the temporary.  */
      if (lss != gfc_ss_terminator)
	rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
      /* Translate expr.  */
      gfc_conv_expr (&lse, expr);

      /* Use the scalar assignment.  */
      rse.string_length = lse.string_length;
      tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);

      /* Form the mask expression according to the mask tree list.  */
      if (wheremask)
	{
	  wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
	  if (invert)
	    wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
					 TREE_TYPE (wheremaskexpr),
					 wheremaskexpr);
	  tmp = fold_build3 (COND_EXPR, void_type_node,
			     wheremaskexpr, tmp, build_empty_stmt ());
	}

      gfc_add_expr_to_block (&body, tmp);

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			 count1, gfc_index_one_node);
      gfc_add_modify_expr (&body, count1, tmp);

      /* Increment count3.  */
      if (count3)
	{
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     count3, gfc_index_one_node);
	  gfc_add_modify_expr (&body, count3, tmp);
	}

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop1, &body);
      gfc_add_block_to_block (&block, &loop1.pre);
      gfc_add_block_to_block (&block, &loop1.post);
      gfc_cleanup_loop (&loop1);

      tmp = gfc_finish_block (&block);
    }
  return tmp;
}
/* Generate codes to copy rhs to the temporary.  TMP1 is the address of
   temporary, LSS and RSS are formed in function compute_inner_temp_size(),
   and should not be freed.  WHEREMASK is the conditional execution mask
   whose sense may be inverted by INVERT.  */

static tree
generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
			       tree count1, gfc_ss *lss, gfc_ss *rss,
			       tree wheremask, bool invert)
{
  stmtblock_t block, body1;
  gfc_loopinfo loop;
  gfc_se lse;
  gfc_se rse;
  tree tmp;
  tree wheremaskexpr;

  gfc_start_block (&block);

  gfc_init_se (&rse, NULL);
  gfc_init_se (&lse, NULL);

  if (lss == gfc_ss_terminator)
    {
      gfc_init_block (&body1);
      gfc_conv_expr (&rse, expr2);
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
    }
  else
    {
      /* Initialize the loop.  */
      gfc_init_loopinfo (&loop);

      /* We may need LSS to determine the shape of the expression.  */
      gfc_add_ss_to_loop (&loop, lss);
      gfc_add_ss_to_loop (&loop, rss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop);

      gfc_mark_ss_chain_used (rss, 1);
      /* Start the loop body.  */
      gfc_start_scalarized_body (&loop, &body1);

      /* Translate the expression.  */
      gfc_copy_loopinfo_to_se (&rse, &loop);
      rse.ss = rss;
      gfc_conv_expr (&rse, expr2);

      /* Form the expression of the temporary.  */
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
    }

  /* Use the scalar assignment.  */
  lse.string_length = rse.string_length;
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
				 expr2->expr_type == EXPR_VARIABLE);

  /* Form the mask expression according to the mask tree list.  */
  if (wheremask)
    {
      wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
      if (invert)
	wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
				     TREE_TYPE (wheremaskexpr),
				     wheremaskexpr);
      tmp = fold_build3 (COND_EXPR, void_type_node,
			 wheremaskexpr, tmp, build_empty_stmt ());
    }

  gfc_add_expr_to_block (&body1, tmp);

  if (lss == gfc_ss_terminator)
    {
      gfc_add_block_to_block (&block, &body1);

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
			 gfc_index_one_node);
      gfc_add_modify_expr (&block, count1, tmp);
    }
  else
    {
      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			 count1, gfc_index_one_node);
      gfc_add_modify_expr (&body1, count1, tmp);

      /* Increment count3.  */
      if (count3)
	{
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     count3, gfc_index_one_node);
	  gfc_add_modify_expr (&body1, count3, tmp);
	}

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body1);

      gfc_add_block_to_block (&block, &loop.pre);
      gfc_add_block_to_block (&block, &loop.post);

      gfc_cleanup_loop (&loop);
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
	 as tree nodes in SS may not be valid in different scope.  */
    }

  tmp = gfc_finish_block (&block);
  return tmp;
}
/* Calculate the size of temporary needed in the assignment inside forall.
   LSS and RSS are filled in this function.  */

static tree
compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
			 stmtblock_t * pblock,
			 gfc_ss **lss, gfc_ss **rss)
{
  gfc_loopinfo loop;
  tree size;
  int i;
  int save_flag;
  tree tmp;

  *lss = gfc_walk_expr (expr1);
  *rss = NULL;

  size = gfc_index_one_node;
  if (*lss != gfc_ss_terminator)
    {
      gfc_init_loopinfo (&loop);

      /* Walk the RHS of the expression.  */
      *rss = gfc_walk_expr (expr2);
      if (*rss == gfc_ss_terminator)
	{
	  /* The rhs is scalar.  Add a ss for the expression.  */
	  *rss = gfc_get_ss ();
	  (*rss)->next = gfc_ss_terminator;
	  (*rss)->type = GFC_SS_SCALAR;
	  (*rss)->expr = expr2;
	}

      /* Associate the SS with the loop.  */
      gfc_add_ss_to_loop (&loop, *lss);
      /* We don't actually need to add the rhs at this point, but it might
	 make guessing the loop bounds a bit easier.  */
      gfc_add_ss_to_loop (&loop, *rss);

      /* We only want the shape of the expression, not rest of the junk
	 generated by the scalarizer.  */
      loop.array_parameter = 1;

      /* Calculate the bounds of the scalarization.  */
      save_flag = flag_bounds_check;
      flag_bounds_check = 0;
      gfc_conv_ss_startstride (&loop);
      flag_bounds_check = save_flag;
      gfc_conv_loop_setup (&loop);

      /* Figure out how many elements we need.  */
      for (i = 0; i < loop.dimen; i++)
	{
	  tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
			     gfc_index_one_node, loop.from[i]);
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     tmp, loop.to[i]);
	  size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
	}
      gfc_add_block_to_block (pblock, &loop.pre);
      size = gfc_evaluate_now (size, pblock);
      gfc_add_block_to_block (pblock, &loop.post);

      /* TODO: write a function that cleans up a loopinfo without freeing
	 the SS chains.  Currently a NOP.  */
    }

  return size;
}
/* Calculate the overall iterator number of the nested forall construct.
   This routine actually calculates the number of times the body of the
   nested forall specified by NESTED_FORALL_INFO is executed and multiplies
   that by the expression INNER_SIZE.  The BLOCK argument specifies the
   block in which to calculate the result, and the optional INNER_SIZE_BODY
   argument contains any statements that need to be executed (inside the loop)
   to initialize or calculate INNER_SIZE.  */
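/* For example (illustrative): for FORALL (i = 1:4, j = 1:3) the body runs
   4 * 3 = 12 times, so the overall number is 12 * INNER_SIZE.  When every
   count is a compile-time constant the multiplication below is folded;
   otherwise a temporary "num" accumulates it inside the generated loops.  */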
static tree
compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
			     stmtblock_t *inner_size_body, stmtblock_t *block)
{
  forall_info *forall_tmp = nested_forall_info;
  tree number;
  tree tmp;
  stmtblock_t body;

  /* We can eliminate the innermost unconditional loops with constant
     array bounds.  */
  if (INTEGER_CST_P (inner_size))
    {
      while (forall_tmp
	     && !forall_tmp->mask
	     && INTEGER_CST_P (forall_tmp->size))
	{
	  inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
				    inner_size, forall_tmp->size);
	  forall_tmp = forall_tmp->prev_nest;
	}

      /* If there are no loops left, we have our constant result.  */
      if (!forall_tmp)
	return inner_size;
    }

  /* Otherwise, create a temporary variable to compute the result.  */
  number = gfc_create_var (gfc_array_index_type, "num");
  gfc_add_modify_expr (block, number, gfc_index_zero_node);

  gfc_start_block (&body);
  if (inner_size_body)
    gfc_add_block_to_block (&body, inner_size_body);
  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
		     number, inner_size);
  gfc_add_modify_expr (&body, number, tmp);
  tmp = gfc_finish_block (&body);

  /* Generate loops.  */
  if (forall_tmp != NULL)
    tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);

  gfc_add_expr_to_block (block, tmp);

  return number;
}
/* Allocate temporary for forall construct.  SIZE is the size of the
   temporary needed.  PTEMP1 is returned so that the space can be freed
   later.  */

allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
                                 tree * ptemp1)

  unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
  if (!integer_onep (unit))
    bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);

  tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);

  tmp = build_fold_indirect_ref (tmp);
/* Allocate temporary for forall construct according to the information in
   nested_forall_info.  INNER_SIZE is the size of temporary needed in the
   assignment inside forall.  PTEMP1 is returned so that the space can be
   freed later.  */

allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
                               tree inner_size, stmtblock_t * inner_size_body,
                               stmtblock_t * block, tree * ptemp1)

  /* Calculate the total size of temporary needed in forall construct.  */
  size = compute_overall_iter_number (nested_forall_info, inner_size,
                                      inner_size_body, block);

  return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
/* Handle assignments inside forall which need a temporary.

    forall (i=start:end:stride; maskexpr)
   (where e,f<i> are arbitrary expressions possibly involving i
    and there is a dependency between e<i> and f<i>)

    masktmp(:) = maskexpr(:)

    for (i = start; i <= end; i += stride)

    for (i = start; i <= end; i += stride)
        if (masktmp[maskindex++])
          tmp[count1++] = f<i>

    for (i = start; i <= end; i += stride)
        if (masktmp[maskindex++])
          e<i> = tmp[count1++]
 */
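
/* For example, a statement like

     FORALL (i = 2:n)  a(i) = a(i-1) + b(i)

   falls into this category: every a(i-1) read on the right-hand side must
   be the value the element had before the FORALL started, not a value
   assigned by another iteration, so the right-hand sides are buffered in a
   temporary first.  */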
gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
                            tree wheremask, bool invert,
                            forall_info * nested_forall_info,
                            stmtblock_t * block)
  stmtblock_t inner_size_body;

  /* Create vars.  count1 is the current iterator number of the nested
     forall.  */
  count1 = gfc_create_var (gfc_array_index_type, "count1");

  /* Count is the wheremask index.  */
      count = gfc_create_var (gfc_array_index_type, "count");
      gfc_add_modify_expr (block, count, gfc_index_zero_node);

  /* Initialize count1.  */
  gfc_add_modify_expr (block, count1, gfc_index_zero_node);

  /* Calculate the size of temporary needed in the assignment.  Return loop,
     lss and rss which are used in function generate_loop_for_rhs_to_temp().  */
  gfc_init_block (&inner_size_body);
  inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
                                        &lss, &rss);
  /* The type of LHS.  Used in function allocate_temp_for_forall_nest.  */
  if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)

      if (!expr1->ts.cl->backend_decl)

          gfc_init_se (&tse, NULL);
          gfc_conv_expr (&tse, expr1->ts.cl->length);
          expr1->ts.cl->backend_decl = tse.expr;

      type = gfc_get_character_type_len (gfc_default_character_kind,
                                         expr1->ts.cl->backend_decl);

    type = gfc_typenode_for_spec (&expr1->ts);

  /* Allocate temporary for nested forall construct according to the
     information in nested_forall_info and inner_size.  */
  tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
                                        &inner_size_body, block, &ptemp1);
  /* Generate code to copy the rhs to the temporary.  */
  tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
                                       wheremask, invert);

  /* Generate body and loops according to the information in
     nested_forall_info.  */
  tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
  gfc_add_expr_to_block (block, tmp);

  gfc_add_modify_expr (block, count1, gfc_index_zero_node);

    gfc_add_modify_expr (block, count, gfc_index_zero_node);

  /* Generate code to copy the temporary to the lhs.  */
  tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
                                       wheremask, invert);

  /* Generate body and loops according to the information in
     nested_forall_info.  */
  tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
  gfc_add_expr_to_block (block, tmp);

  /* Free the temporary.  */
  tmp = gfc_call_free (ptemp1);
  gfc_add_expr_to_block (block, tmp);
/* Translate a pointer assignment inside FORALL which needs a temporary.  */
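
/* A case that needs this treatment, for illustration, is

     FORALL (i = 1:n)  p(i)%ptr => p(n+1-i)%ptr

   where the right-hand side refers to pointers that the same FORALL also
   redefines; the targets are therefore captured in a temporary first and
   only then assigned to the left-hand sides.  */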
gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
                                    forall_info * nested_forall_info,
                                    stmtblock_t * block)
  tree tmp, tmp1, ptemp1;

  count = gfc_create_var (gfc_array_index_type, "count");
  gfc_add_modify_expr (block, count, gfc_index_zero_node);

  inner_size = integer_one_node;
  lss = gfc_walk_expr (expr1);
  rss = gfc_walk_expr (expr2);
  if (lss == gfc_ss_terminator)

      type = gfc_typenode_for_spec (&expr1->ts);
      type = build_pointer_type (type);

      /* Allocate temporary for nested forall construct according to the
         information in nested_forall_info and inner_size.  */
      tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
                                            inner_size, NULL, block, &ptemp1);
      gfc_start_block (&body);
      gfc_init_se (&lse, NULL);
      lse.expr = gfc_build_array_ref (tmp1, count, NULL);
      gfc_init_se (&rse, NULL);
      rse.want_pointer = 1;
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&body, &rse.pre);
      gfc_add_modify_expr (&body, lse.expr,
                           fold_convert (TREE_TYPE (lse.expr), rse.expr));
      gfc_add_block_to_block (&body, &rse.post);

      /* Increment count.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count, gfc_index_one_node);
      gfc_add_modify_expr (&body, count, tmp);

      tmp = gfc_finish_block (&body);

      /* Generate body and loops according to the information in
         nested_forall_info.  */
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (block, tmp);

      gfc_add_modify_expr (block, count, gfc_index_zero_node);

      gfc_start_block (&body);
      gfc_init_se (&lse, NULL);
      gfc_init_se (&rse, NULL);
      rse.expr = gfc_build_array_ref (tmp1, count, NULL);
      lse.want_pointer = 1;
      gfc_conv_expr (&lse, expr1);
      gfc_add_block_to_block (&body, &lse.pre);
      gfc_add_modify_expr (&body, lse.expr, rse.expr);
      gfc_add_block_to_block (&body, &lse.post);
      /* Increment count.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count, gfc_index_one_node);
      gfc_add_modify_expr (&body, count, tmp);
      tmp = gfc_finish_block (&body);

      /* Generate body and loops according to the information in
         nested_forall_info.  */
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (block, tmp);
      gfc_init_loopinfo (&loop);

      /* Associate the SS with the loop.  */
      gfc_add_ss_to_loop (&loop, rss);

      /* Setup the scalarizing loops and bounds.  */
      gfc_conv_ss_startstride (&loop);

      gfc_conv_loop_setup (&loop);

      info = &rss->data.info;
      desc = info->descriptor;

      /* Make a new descriptor.  */
      parmtype = gfc_get_element_type (TREE_TYPE (desc));
      parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
                                            loop.from, loop.to, 1,

      /* Allocate temporary for nested forall construct.  */
      tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
                                            inner_size, NULL, block, &ptemp1);
      gfc_start_block (&body);
      gfc_init_se (&lse, NULL);
      lse.expr = gfc_build_array_ref (tmp1, count, NULL);
      lse.direct_byref = 1;
      rss = gfc_walk_expr (expr2);
      gfc_conv_expr_descriptor (&lse, expr2, rss);

      gfc_add_block_to_block (&body, &lse.pre);
      gfc_add_block_to_block (&body, &lse.post);

      /* Increment count.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count, gfc_index_one_node);
      gfc_add_modify_expr (&body, count, tmp);

      tmp = gfc_finish_block (&body);

      /* Generate body and loops according to the information in
         nested_forall_info.  */
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (block, tmp);

      gfc_add_modify_expr (block, count, gfc_index_zero_node);

      parm = gfc_build_array_ref (tmp1, count, NULL);
      lss = gfc_walk_expr (expr1);
      gfc_init_se (&lse, NULL);
      gfc_conv_expr_descriptor (&lse, expr1, lss);
      gfc_add_modify_expr (&lse.pre, lse.expr, parm);
      gfc_start_block (&body);
      gfc_add_block_to_block (&body, &lse.pre);
      gfc_add_block_to_block (&body, &lse.post);

      /* Increment count.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count, gfc_index_one_node);
      gfc_add_modify_expr (&body, count, tmp);

      tmp = gfc_finish_block (&body);

      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (block, tmp);

  /* Free the temporary.  */
      tmp = gfc_call_free (ptemp1);
      gfc_add_expr_to_block (block, tmp);
/* FORALL and WHERE statements are really nasty, especially when you nest
   them.  All the rhs of a forall assignment must be evaluated before the
   actual assignments are performed.  Presumably this also applies to all the
   assignments in an inner where statement.  */

/* Generate code for a FORALL statement.  Any temporaries are allocated as a
   linear array, relying on the fact that we process in the same order in all
   loops.

    forall (i=start:end:stride; maskexpr)
   (where e,f,g,h<i> are arbitrary expressions possibly involving i)

    count = ((end + 1 - start) / stride)
    masktmp(:) = maskexpr(:)

    for (i = start; i <= end; i += stride)
        if (masktmp[maskindex++])

    for (i = start; i <= end; i += stride)
        if (masktmp[maskindex++])

   Note that this code only works when there are no dependencies.
   Forall loops with array assignments and data dependencies are a real pain,
   because the size of the temporary cannot always be determined before the
   loop is executed.  This problem is compounded by the presence of nested
   FORALL constructs.  */
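
/* For example, a statement such as

     FORALL (i = 1:n, j = 1:m, a(i,j) /= 0.0)  b(i,j) = 1.0 / a(i,j)

   fits this dependence-free scheme: the mask a(i,j) /= 0.0 is evaluated
   into masktmp once, and each masked element is then assigned directly.  */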
gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)

  gfc_forall_iterator *fa;

  gfc_saved_var *saved_vars;
  iter_info *this_forall;
  /* Do nothing if the mask is false.  */
  if (code->expr
      && code->expr->expr_type == EXPR_CONSTANT
      && !code->expr->value.logical)
    return build_empty_stmt ();

  /* Count the FORALL index number.  */
  for (fa = code->ext.forall_iterator; fa; fa = fa->next)

  /* Allocate the space for var, start, end, step, varexpr.  */
  var = (tree *) gfc_getmem (nvar * sizeof (tree));
  start = (tree *) gfc_getmem (nvar * sizeof (tree));
  end = (tree *) gfc_getmem (nvar * sizeof (tree));
  step = (tree *) gfc_getmem (nvar * sizeof (tree));
  varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
  saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));

  /* Allocate the space for info.  */
  info = (forall_info *) gfc_getmem (sizeof (forall_info));

  gfc_start_block (&pre);
  gfc_init_block (&post);
  gfc_init_block (&block);
  for (fa = code->ext.forall_iterator; fa; fa = fa->next)

      gfc_symbol *sym = fa->var->symtree->n.sym;

      /* Allocate space for this_forall.  */
      this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));

      /* Create a temporary variable for the FORALL index.  */
      tmp = gfc_typenode_for_spec (&sym->ts);
      var[n] = gfc_create_var (tmp, sym->name);
      gfc_shadow_sym (sym, var[n], &saved_vars[n]);

      /* Record it in this_forall.  */
      this_forall->var = var[n];

      /* Replace the index symbol's backend_decl with the temporary decl.  */
      sym->backend_decl = var[n];

      /* Work out the start, end and stride for the loop.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, fa->start);
      /* Record it in this_forall.  */
      this_forall->start = se.expr;
      gfc_add_block_to_block (&block, &se.pre);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, fa->end);
      /* Record it in this_forall.  */
      this_forall->end = se.expr;
      gfc_make_safe_expr (&se);
      gfc_add_block_to_block (&block, &se.pre);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, fa->stride);
      /* Record it in this_forall.  */
      this_forall->step = se.expr;
      gfc_make_safe_expr (&se);
      gfc_add_block_to_block (&block, &se.pre);

      /* Set the NEXT field of this_forall to NULL.  */
      this_forall->next = NULL;
      /* Link this_forall to the info construct.  */
      if (info->this_loop)

          iter_info *iter_tmp = info->this_loop;
          while (iter_tmp->next != NULL)
            iter_tmp = iter_tmp->next;
          iter_tmp->next = this_forall;

        info->this_loop = this_forall;
  /* Calculate the size needed for the current forall level.  */
  size = gfc_index_one_node;
  for (n = 0; n < nvar; n++)

      /* size = (end + step - start) / step.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
                         step[n], start[n]);
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);

      tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
      tmp = convert (gfc_array_index_type, tmp);

      size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
  /* Record the nvar and size of current forall level.  */

  /* If the mask is .true., consider the FORALL unconditional.  */
  if (code->expr->expr_type == EXPR_CONSTANT
      && code->expr->value.logical)

      /* First we need to allocate the mask.  */

      /* As the mask array can be very big, prefer compact boolean types.  */
      tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
      mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
                                            size, NULL, &block, &pmask);
      maskindex = gfc_create_var_np (gfc_array_index_type, "mi");

      /* Record them in the info structure.  */
      info->maskindex = maskindex;

      /* No mask was specified.  */
      maskindex = NULL_TREE;
      mask = pmask = NULL_TREE;

  /* Link the current forall level to nested_forall_info.  */
  info->prev_nest = nested_forall_info;
  nested_forall_info = info;
  /* Copy the mask into a temporary variable if required.
     For now we assume a mask temporary is needed.  */

      /* As the mask array can be very big, prefer compact boolean types.  */
      tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);

      gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);

      /* Start of mask assignment loop body.  */
      gfc_start_block (&body);

      /* Evaluate the mask expression.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->expr);
      gfc_add_block_to_block (&body, &se.pre);

      /* Store the mask.  */
      se.expr = convert (mask_type, se.expr);

      tmp = gfc_build_array_ref (mask, maskindex, NULL);
      gfc_add_modify_expr (&body, tmp, se.expr);

      /* Advance to the next mask element.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         maskindex, gfc_index_one_node);
      gfc_add_modify_expr (&body, maskindex, tmp);

      /* Generate the loops.  */
      tmp = gfc_finish_block (&body);
      tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
      gfc_add_expr_to_block (&block, tmp);

  c = code->block->next;
  /* TODO: loop merging in FORALL statements.  */
  /* Now that we've got a copy of the mask, generate the assignment loops.  */

          /* A scalar or array assignment.  Do the simple check for
             lhs to rhs dependencies.  These make a temporary for the
             rhs and form a second forall block to copy to variable.  */
          need_temp = check_forall_dependencies (c, &pre, &post);

          /* Temporaries due to array assignment data dependencies introduce
             no end of problems.  */
            gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
                                        nested_forall_info, &block);

              /* Use the normal assignment copying routines.  */
              assign = gfc_trans_assignment (c->expr, c->expr2, false);

              /* Generate body and loops.  */
              tmp = gfc_trans_nested_forall_loop (nested_forall_info,
                                                  assign, 1);
              gfc_add_expr_to_block (&block, tmp);

          /* Cleanup any temporary symtrees that have been made to deal
             with dependencies.  */
            cleanup_forall_symtrees (c);

          /* Translate WHERE or WHERE construct nested in FORALL.  */
          gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);

        /* Pointer assignment inside FORALL.  */
        case EXEC_POINTER_ASSIGN:
          need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
            gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
                                                nested_forall_info, &block);

              /* Use the normal assignment copying routines.  */
              assign = gfc_trans_pointer_assignment (c->expr, c->expr2);

              /* Generate body and loops.  */
              tmp = gfc_trans_nested_forall_loop (nested_forall_info,
                                                  assign, 1);
              gfc_add_expr_to_block (&block, tmp);

          tmp = gfc_trans_forall_1 (c, nested_forall_info);
          gfc_add_expr_to_block (&block, tmp);

        /* Explicit subroutine calls are prevented by the frontend but
           interface assignments can legitimately produce them.  */
        case EXEC_ASSIGN_CALL:
          assign = gfc_trans_call (c, true);
          tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
          gfc_add_expr_to_block (&block, tmp);
  /* Restore the original index variables.  */
  for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
    gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);

  /* Free the space for var, start, end, step, varexpr.  */
  gfc_free (saved_vars);

  /* Free the space for this forall_info.  */

  /* Free the temporary for the mask.  */
      tmp = gfc_call_free (pmask);
      gfc_add_expr_to_block (&block, tmp);

    pushdecl (maskindex);

  gfc_add_block_to_block (&pre, &block);
  gfc_add_block_to_block (&pre, &post);

  return gfc_finish_block (&pre);
/* Translate the FORALL statement or construct.  */

tree
gfc_trans_forall (gfc_code * code)

  return gfc_trans_forall_1 (code, NULL);
/* Evaluate the WHERE mask expression, copy its value to a temporary.
   If the WHERE construct is nested in FORALL, compute the overall temporary
   needed by the WHERE mask expression multiplied by the iterator number of
   the nested FORALL.
   ME is the WHERE mask expression.
   MASK is the current execution mask upon input, whose sense may or may
   not be inverted as specified by the INVERT argument.
   CMASK is the updated execution mask on output, or NULL if not required.
   PMASK is the pending execution mask on output, or NULL if not required.
   BLOCK is the block in which to place the condition evaluation loops.  */
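
/* For instance, for a construct such as

     WHERE (m)      a = b
     ELSEWHERE      a = c
     END WHERE

   the mask expression m is evaluated exactly once here: CMASK records the
   elements for which m is true (combined with any enclosing control mask),
   and PMASK records the remaining elements, which the ELSEWHERE clause will
   use as its control mask.  */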
gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
                         tree mask, bool invert, tree cmask, tree pmask,
                         tree mask_type, stmtblock_t * block)

  stmtblock_t body, body1;
  tree count, cond, mtmp;
  gfc_init_loopinfo (&loop);

  lss = gfc_walk_expr (me);
  rss = gfc_walk_expr (me);

  /* Variable to index the temporary.  */
  count = gfc_create_var (gfc_array_index_type, "count");
  /* Initialize count.  */
  gfc_add_modify_expr (block, count, gfc_index_zero_node);

  gfc_start_block (&body);

  gfc_init_se (&rse, NULL);
  gfc_init_se (&lse, NULL);

  if (lss == gfc_ss_terminator)

      gfc_init_block (&body1);

      /* Initialize the loop.  */
      gfc_init_loopinfo (&loop);

      /* We may need LSS to determine the shape of the expression.  */
      gfc_add_ss_to_loop (&loop, lss);
      gfc_add_ss_to_loop (&loop, rss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop);

      gfc_mark_ss_chain_used (rss, 1);
      /* Start the loop body.  */
      gfc_start_scalarized_body (&loop, &body1);

      /* Translate the expression.  */
      gfc_copy_loopinfo_to_se (&rse, &loop);

      gfc_conv_expr (&rse, me);

  /* Variable to evaluate mask condition.  */
  cond = gfc_create_var (mask_type, "cond");
  if (mask && (cmask || pmask))
    mtmp = gfc_create_var (mask_type, "mask");
  else
    mtmp = NULL_TREE;

  gfc_add_block_to_block (&body1, &lse.pre);
  gfc_add_block_to_block (&body1, &rse.pre);

  gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));

  if (mask && (cmask || pmask))

      tmp = gfc_build_array_ref (mask, count, NULL);

        tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
      gfc_add_modify_expr (&body1, mtmp, tmp);

      tmp1 = gfc_build_array_ref (cmask, count, NULL);

        tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
      gfc_add_modify_expr (&body1, tmp1, tmp);

      tmp1 = gfc_build_array_ref (pmask, count, NULL);
      tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);

        tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
      gfc_add_modify_expr (&body1, tmp1, tmp);

  gfc_add_block_to_block (&body1, &lse.post);
  gfc_add_block_to_block (&body1, &rse.post);

  if (lss == gfc_ss_terminator)

      gfc_add_block_to_block (&body, &body1);

      /* Increment count.  */
      tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
                          gfc_index_one_node);
      gfc_add_modify_expr (&body1, count, tmp1);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body1);

      gfc_add_block_to_block (&body, &loop.pre);
      gfc_add_block_to_block (&body, &loop.post);

      gfc_cleanup_loop (&loop);
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
         as tree nodes in SS may not be valid in different scope.  */

  tmp1 = gfc_finish_block (&body);
  /* If the WHERE construct is inside FORALL, fill the full temporary.  */
  if (nested_forall_info != NULL)
    tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);

  gfc_add_expr_to_block (block, tmp1);
/* Translate an assignment statement in a WHERE statement or construct
   statement.  The MASK expression is used to control which elements
   of EXPR1 shall be assigned.  The sense of MASK is specified by
   INVERT.  */

gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
                        tree mask, bool invert,
                        tree count1, tree count2,
                        gfc_symbol *sym)

  gfc_ss *lss_section;

  tree index, maskexpr;
  /* TODO: handle this special case.
     Special case a single function returning an array.  */
  if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)

      tmp = gfc_trans_arrayfunc_assign (expr1, expr2);

  /* Assignment of the form lhs = rhs.  */
  gfc_start_block (&block);

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);

  lss = gfc_walk_expr (expr1);

  /* In each where-assign-stmt, the mask-expr and the variable being
     defined shall be arrays of the same shape.  */
  gcc_assert (lss != gfc_ss_terminator);

  /* The assignment needs scalarization.  */

  /* Find a non-scalar SS from the lhs.  */
  while (lss_section != gfc_ss_terminator
         && lss_section->type != GFC_SS_SECTION)
    lss_section = lss_section->next;

  gcc_assert (lss_section != gfc_ss_terminator);

  /* Initialize the scalarizer.  */
  gfc_init_loopinfo (&loop);

  rss = gfc_walk_expr (expr2);
  if (rss == gfc_ss_terminator)

      /* The rhs is scalar.  Add a ss for the expression.  */
      rss = gfc_get_ss ();

      rss->next = gfc_ss_terminator;
      rss->type = GFC_SS_SCALAR;

  /* Associate the SS with the loop.  */
  gfc_add_ss_to_loop (&loop, lss);
  gfc_add_ss_to_loop (&loop, rss);

  /* Calculate the bounds of the scalarization.  */
  gfc_conv_ss_startstride (&loop);

  /* Resolve any data dependencies in the statement.  */
  gfc_conv_resolve_dependencies (&loop, lss_section, rss);

  /* Setup the scalarizing loops.  */
  gfc_conv_loop_setup (&loop);

  /* Setup the gfc_se structures.  */
  gfc_copy_loopinfo_to_se (&lse, &loop);
  gfc_copy_loopinfo_to_se (&rse, &loop);

  gfc_mark_ss_chain_used (rss, 1);
  if (loop.temp_ss == NULL)

      gfc_mark_ss_chain_used (lss, 1);

      lse.ss = loop.temp_ss;
      gfc_mark_ss_chain_used (lss, 3);
      gfc_mark_ss_chain_used (loop.temp_ss, 3);
  /* Start the scalarized loop body.  */
  gfc_start_scalarized_body (&loop, &body);

  /* Translate the expression.  */
  gfc_conv_expr (&rse, expr2);
  if (lss != gfc_ss_terminator && loop.temp_ss != NULL)

      gfc_conv_tmp_array_ref (&lse);
      gfc_advance_se_ss_chain (&lse);

    gfc_conv_expr (&lse, expr1);

  /* Form the mask expression according to the mask.  */
  maskexpr = gfc_build_array_ref (mask, index, NULL);
    maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);

  /* Use the scalar assignment as is.  */
    tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
                                   loop.temp_ss != NULL, false);
    tmp = gfc_conv_operator_assign (&lse, &rse, sym);

  tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&body, tmp);

  if (lss == gfc_ss_terminator)

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count1, gfc_index_one_node);
      gfc_add_modify_expr (&body, count1, tmp);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &body);

      gcc_assert (lse.ss == gfc_ss_terminator
                  && rse.ss == gfc_ss_terminator);

      if (loop.temp_ss != NULL)

          /* Increment count1 before finishing the main body of a scalarized
             expression.  */
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count1, gfc_index_one_node);
          gfc_add_modify_expr (&body, count1, tmp);
          gfc_trans_scalarized_loop_boundary (&loop, &body);

          /* We need to copy the temporary to the actual lhs.  */
          gfc_init_se (&lse, NULL);
          gfc_init_se (&rse, NULL);
          gfc_copy_loopinfo_to_se (&lse, &loop);
          gfc_copy_loopinfo_to_se (&rse, &loop);

          rse.ss = loop.temp_ss;

          gfc_conv_tmp_array_ref (&rse);
          gfc_advance_se_ss_chain (&rse);
          gfc_conv_expr (&lse, expr1);

          gcc_assert (lse.ss == gfc_ss_terminator
                      && rse.ss == gfc_ss_terminator);

          /* Form the mask expression according to the mask tree list.  */
          maskexpr = gfc_build_array_ref (mask, index, NULL);
            maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
                                    maskexpr);

          /* Use the scalar assignment as is.  */
          tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
          tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
          gfc_add_expr_to_block (&body, tmp);

          /* Increment count2.  */
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count2, gfc_index_one_node);
          gfc_add_modify_expr (&body, count2, tmp);

          /* Increment count1.  */
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count1, gfc_index_one_node);
          gfc_add_modify_expr (&body, count1, tmp);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body);

      /* Wrap the whole thing up.  */
      gfc_add_block_to_block (&block, &loop.pre);
      gfc_add_block_to_block (&block, &loop.post);
      gfc_cleanup_loop (&loop);

  return gfc_finish_block (&block);
/* Translate the WHERE construct or statement.
   This function can be called iteratively to translate the nested WHERE
   construct or statement.
   MASK is the control mask.  */
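
/* As a rough illustration, a construct such as

     WHERE (m1)
       a = b
       WHERE (m2)  c = d
     ELSEWHERE
       a = e
     END WHERE

   is handled by one call per nesting level: the outer call evaluates m1
   into a control mask and a pending mask, translates the masked
   assignments, and recurses through this same function for the inner WHERE
   with the current control mask passed in as MASK.  */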
gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
                   forall_info * nested_forall_info, stmtblock_t * block)

  stmtblock_t inner_size_body;
  tree inner_size, size;

  tree count1, count2;

  tree pcmask = NULL_TREE;
  tree ppmask = NULL_TREE;
  tree cmask = NULL_TREE;
  tree pmask = NULL_TREE;
  gfc_actual_arglist *arg;
  /* The WHERE statement or the WHERE construct statement.  */
  cblock = code->block;

  /* As the mask array can be very big, prefer compact boolean types.  */
  mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);

  /* Determine which temporary masks are needed.  */

      /* One clause: No ELSEWHEREs.  */
      need_cmask = (cblock->next != 0);

  else if (cblock->block->block)

      /* Three or more clauses: Conditional ELSEWHEREs.  */

  else if (cblock->next)

      /* Two clauses, the first non-empty.  */
      need_pmask = (mask != NULL_TREE
                    && cblock->block->next != 0);

  else if (!cblock->block->next)

      /* Two clauses, both empty.  */

      /* Two clauses, the first empty, the second non-empty.  */
      need_cmask = (cblock->block->expr != 0);

  if (need_cmask || need_pmask)

      /* Calculate the size of temporary needed by the mask-expr.  */
      gfc_init_block (&inner_size_body);
      inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
                                            &inner_size_body, &lss, &rss);

      /* Calculate the total size of temporary needed.  */
      size = compute_overall_iter_number (nested_forall_info, inner_size,
                                          &inner_size_body, block);

      /* Check whether the size is negative.  */
      cond = fold_build2 (LE_EXPR, boolean_type_node, size,
                          gfc_index_zero_node);
      size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
                          gfc_index_zero_node, size);
      size = gfc_evaluate_now (size, block);

      /* Allocate temporary for WHERE mask if needed.  */
        cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
                                                 &pcmask);

      /* Allocate temporary for !mask if needed.  */
        pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
                                                 &ppmask);
  /* Each time around this loop, the where clause is conditional
     on the value of mask and invert, which are updated at the
     bottom of the loop.  */

      /* Has mask-expr.  */

          /* Ensure that the WHERE mask will be evaluated exactly once.
             If there are no statements in this WHERE/ELSEWHERE clause,
             then we don't need to update the control mask (cmask).
             If this is the last clause of the WHERE construct, then
             we don't need to update the pending control mask (pmask).  */
            gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
                                     mask, invert,
                                     cblock->next ? cmask : NULL_TREE,
                                     cblock->block ? pmask : NULL_TREE,
                                     mask_type, block);

            gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
                                     mask, invert,
                                     (cblock->next || cblock->block)
                                     ? cmask : NULL_TREE,
                                     NULL_TREE, mask_type, block);

          /* It's a final elsewhere-stmt.  No mask-expr is present.  */

      /* The body of this where clause is controlled by cmask with
         sense specified by invert.  */

      /* Get the assignment statement of a WHERE statement, or the first
         statement in where-body-construct of a WHERE construct.  */
      cnext = cblock->next;
            /* WHERE assignment statement.  */
            case EXEC_ASSIGN_CALL:

              arg = cnext->ext.actual;
              expr1 = expr2 = NULL;
              for (; arg; arg = arg->next)

              expr1 = cnext->expr;
              expr2 = cnext->expr2;

              if (nested_forall_info != NULL)

                  need_temp = gfc_check_dependency (expr1, expr2, 0);
                  if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
                    gfc_trans_assign_need_temp (expr1, expr2,
                                                cmask, invert,
                                                nested_forall_info, block);

                      /* Variables to control maskexpr.  */
                      count1 = gfc_create_var (gfc_array_index_type, "count1");
                      count2 = gfc_create_var (gfc_array_index_type, "count2");
                      gfc_add_modify_expr (block, count1, gfc_index_zero_node);
                      gfc_add_modify_expr (block, count2, gfc_index_zero_node);

                      tmp = gfc_trans_where_assign (expr1, expr2,
                                                    cmask, invert,
                                                    count1, count2,
                                                    cnext->resolved_sym);

                      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
                                                          tmp, 1);
                      gfc_add_expr_to_block (block, tmp);

                  /* Variables to control maskexpr.  */
                  count1 = gfc_create_var (gfc_array_index_type, "count1");
                  count2 = gfc_create_var (gfc_array_index_type, "count2");
                  gfc_add_modify_expr (block, count1, gfc_index_zero_node);
                  gfc_add_modify_expr (block, count2, gfc_index_zero_node);

                  tmp = gfc_trans_where_assign (expr1, expr2,
                                                cmask, invert,
                                                count1, count2,
                                                cnext->resolved_sym);
                  gfc_add_expr_to_block (block, tmp);
            /* WHERE or WHERE construct is part of a where-body-construct.  */
              gfc_trans_where_2 (cnext, cmask, invert,
                                 nested_forall_info, block);

          /* The next statement within the same where-body-construct.  */
          cnext = cnext->next;

      /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt.  */
      cblock = cblock->block;
      if (mask == NULL_TREE)

          /* If we're the initial WHERE, we can simply invert the sense
             of the current mask to obtain the "mask" for the remaining
             ELSEWHEREs.  */

        /* Otherwise, for nested WHERE's we need to use the pending mask.  */

  /* If we allocated a pending mask array, deallocate it now.  */

      tmp = gfc_call_free (ppmask);
      gfc_add_expr_to_block (block, tmp);

  /* If we allocated a current mask array, deallocate it now.  */

      tmp = gfc_call_free (pcmask);
      gfc_add_expr_to_block (block, tmp);
/* Translate a simple WHERE construct or statement without dependencies.
   CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
   is the mask condition, and EBLOCK if non-NULL is the "else" clause.
   Currently both CBLOCK and EBLOCK are restricted to single assignments.  */
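
/* A construct that qualifies for this fast path is, for example,

     WHERE (c > 0.0)
       x = y
     ELSEWHERE
       x = z
     END WHERE

   provided none of c, y and z depends on x; it then becomes a single
   scalarized loop containing one conditional assignment per element.  */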
gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)

  stmtblock_t block, body;
  gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
  tree tmp, cexpr, tstmt, estmt;
  gfc_ss *css, *tdss, *tsss;
  gfc_se cse, tdse, tsse, edse, esse;

  cond = cblock->expr;
  tdst = cblock->next->expr;
  tsrc = cblock->next->expr2;
  edst = eblock ? eblock->next->expr : NULL;
  esrc = eblock ? eblock->next->expr2 : NULL;
  gfc_start_block (&block);
  gfc_init_loopinfo (&loop);

  /* Handle the condition.  */
  gfc_init_se (&cse, NULL);
  css = gfc_walk_expr (cond);
  gfc_add_ss_to_loop (&loop, css);

  /* Handle the then-clause.  */
  gfc_init_se (&tdse, NULL);
  gfc_init_se (&tsse, NULL);
  tdss = gfc_walk_expr (tdst);
  tsss = gfc_walk_expr (tsrc);
  if (tsss == gfc_ss_terminator)

      tsss = gfc_get_ss ();

      tsss->next = gfc_ss_terminator;
      tsss->type = GFC_SS_SCALAR;

  gfc_add_ss_to_loop (&loop, tdss);
  gfc_add_ss_to_loop (&loop, tsss);

      /* Handle the else clause.  */
      gfc_init_se (&edse, NULL);
      gfc_init_se (&esse, NULL);
      edss = gfc_walk_expr (edst);
      esss = gfc_walk_expr (esrc);
      if (esss == gfc_ss_terminator)

          esss = gfc_get_ss ();

          esss->next = gfc_ss_terminator;
          esss->type = GFC_SS_SCALAR;

      gfc_add_ss_to_loop (&loop, edss);
      gfc_add_ss_to_loop (&loop, esss);

  gfc_conv_ss_startstride (&loop);
  gfc_conv_loop_setup (&loop);

  gfc_mark_ss_chain_used (css, 1);
  gfc_mark_ss_chain_used (tdss, 1);
  gfc_mark_ss_chain_used (tsss, 1);

      gfc_mark_ss_chain_used (edss, 1);
      gfc_mark_ss_chain_used (esss, 1);

  gfc_start_scalarized_body (&loop, &body);

  gfc_copy_loopinfo_to_se (&cse, &loop);
  gfc_copy_loopinfo_to_se (&tdse, &loop);
  gfc_copy_loopinfo_to_se (&tsse, &loop);

      gfc_copy_loopinfo_to_se (&edse, &loop);
      gfc_copy_loopinfo_to_se (&esse, &loop);

  gfc_conv_expr (&cse, cond);
  gfc_add_block_to_block (&body, &cse.pre);

  gfc_conv_expr (&tsse, tsrc);
  if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)

      gfc_conv_tmp_array_ref (&tdse);
      gfc_advance_se_ss_chain (&tdse);

    gfc_conv_expr (&tdse, tdst);

      gfc_conv_expr (&esse, esrc);
      if (edss != gfc_ss_terminator && loop.temp_ss != NULL)

          gfc_conv_tmp_array_ref (&edse);
          gfc_advance_se_ss_chain (&edse);

        gfc_conv_expr (&edse, edst);

  tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
  estmt = eblock
          ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
          : build_empty_stmt ();
  tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
  gfc_add_expr_to_block (&body, tmp);
  gfc_add_block_to_block (&body, &cse.post);

  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&block, &loop.pre);
  gfc_add_block_to_block (&block, &loop.post);
  gfc_cleanup_loop (&loop);

  return gfc_finish_block (&block);
/* As the WHERE or WHERE construct statement can be nested, we call
   gfc_trans_where_2 to do the translation, and pass the initial
   NULL values for both the control mask and the pending control mask.  */

gfc_trans_where (gfc_code * code)
  cblock = code->block;
  if (cblock->next
      && cblock->next->op == EXEC_ASSIGN
      && !cblock->next->next)

      eblock = cblock->block;

          /* A simple "WHERE (cond) x = y" statement or block is
             dependence free if cond is not dependent upon writing x,
             and the source y is unaffected by the destination x.  */
          if (!gfc_check_dependency (cblock->next->expr,
              && !gfc_check_dependency (cblock->next->expr,
                                        cblock->next->expr2, 0))
            return gfc_trans_where_3 (cblock, NULL);

      else if (!eblock->expr
               && eblock->next->op == EXEC_ASSIGN
               && !eblock->next->next)

          /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
             block is dependence free if cond is not dependent on writes
             to x1 and x2, y1 is not dependent on writes to x2, and y2
             is not dependent on writes to x1, and both y's are not
             dependent upon their own x's.  In addition to this, the
             final two dependency checks below exclude all but the same
             array reference if the where and elsewhere destinations
             are the same.  In short, this is VERY conservative and this
             is needed because the two loops, required by the standard,
             are coalesced in gfc_trans_where_3.  */
          if (!gfc_check_dependency (cblock->next->expr,
              && !gfc_check_dependency (eblock->next->expr,
              && !gfc_check_dependency (cblock->next->expr,
                                        eblock->next->expr2, 1)
              && !gfc_check_dependency (eblock->next->expr,
                                        cblock->next->expr2, 1)
              && !gfc_check_dependency (cblock->next->expr,
                                        cblock->next->expr2, 1)
              && !gfc_check_dependency (eblock->next->expr,
                                        eblock->next->expr2, 1)
              && !gfc_check_dependency (cblock->next->expr,
                                        eblock->next->expr, 0)
              && !gfc_check_dependency (eblock->next->expr,
                                        cblock->next->expr, 0))
            return gfc_trans_where_3 (cblock, eblock);

  gfc_start_block (&block);

  gfc_trans_where_2 (code, NULL, false, NULL, &block);

  return gfc_finish_block (&block);
/* CYCLE a DO loop.  The label decl has already been created by
   gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
   node at the head of the loop.  We must mark the label as used.  */

gfc_trans_cycle (gfc_code * code)

  cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
  TREE_USED (cycle_label) = 1;
  return build1_v (GOTO_EXPR, cycle_label);
/* EXIT a DO loop.  Similar to CYCLE, but now the label is in
   TREE_VALUE (backend_decl) of the gfc_code node at the head of the
   loop.  */

gfc_trans_exit (gfc_code * code)

  exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
  TREE_USED (exit_label) = 1;
  return build1_v (GOTO_EXPR, exit_label);
/* Translate the ALLOCATE statement.  */
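
/* For a statement such as

     ALLOCATE (a(n), b(n,m), STAT=ierr)

   each object in the list is allocated in turn; when STAT= is present a
   local status variable is checked after every allocation and control
   jumps to a shared error label on failure, from where the status value
   is finally copied into ierr.  */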
gfc_trans_allocate (gfc_code * code)

  if (!code->ext.alloc_list)

  gfc_start_block (&block);

      tree gfc_int4_type_node = gfc_get_int_type (4);

      stat = gfc_create_var (gfc_int4_type_node, "stat");
      pstat = build_fold_addr_expr (stat);

      error_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (error_label) = 1;

    pstat = stat = error_label = NULL_TREE;

  for (al = code->ext.alloc_list; al != NULL; al = al->next)

      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      se.want_pointer = 1;
      se.descriptor_only = 1;
      gfc_conv_expr (&se, expr);

      if (!gfc_array_allocate (&se, expr, pstat))

          /* A scalar or derived type.  */
          tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));

          if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
            tmp = se.string_length;

          tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
          tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
                             fold_convert (TREE_TYPE (se.expr), tmp));
          gfc_add_expr_to_block (&se.pre, tmp);

              tmp = build1_v (GOTO_EXPR, error_label);
              parm = fold_build2 (NE_EXPR, boolean_type_node,
                                  stat, build_int_cst (TREE_TYPE (stat), 0));
              tmp = fold_build3 (COND_EXPR, void_type_node,
                                 parm, tmp, build_empty_stmt ());
              gfc_add_expr_to_block (&se.pre, tmp);

          if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)

              tmp = build_fold_indirect_ref (se.expr);
              tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
              gfc_add_expr_to_block (&se.pre, tmp);

      tmp = gfc_finish_block (&se.pre);
      gfc_add_expr_to_block (&block, tmp);

  /* Assign the value to the status variable.  */

      tmp = build1_v (LABEL_EXPR, error_label);
      gfc_add_expr_to_block (&block, tmp);

      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->expr);
      tmp = convert (TREE_TYPE (se.expr), stat);
      gfc_add_modify_expr (&block, se.expr, tmp);

  return gfc_finish_block (&block);
/* Translate a DEALLOCATE statement.
   There are two cases within the for loop:
   (1) deallocate(a1, a2, a3) is translated into the following sequence
         _gfortran_deallocate(a1, 0B)
         _gfortran_deallocate(a2, 0B)
         _gfortran_deallocate(a3, 0B)
       where the STAT= variable is passed a NULL pointer.
   (2) deallocate(a1, a2, a3, stat=i) is translated into the following
       sequence
         _gfortran_deallocate(a1, &stat)
         astat = astat + stat
         _gfortran_deallocate(a2, &stat)
         astat = astat + stat
         _gfortran_deallocate(a3, &stat)
         astat = astat + stat
   In case (1), we simply return at the end of the for loop.  In case (2)
   we set STAT= astat.  */
gfc_trans_deallocate (gfc_code * code)

  tree apstat, astat, pstat, stat, tmp;

  gfc_start_block (&block);

  /* Set up the optional STAT= variable.  */

      tree gfc_int4_type_node = gfc_get_int_type (4);

      /* Variable used with the library call.  */
      stat = gfc_create_var (gfc_int4_type_node, "stat");
      pstat = build_fold_addr_expr (stat);

      /* Running total of possible deallocation failures.  */
      astat = gfc_create_var (gfc_int4_type_node, "astat");
      apstat = build_fold_addr_expr (astat);

      /* Initialize astat to 0.  */
      gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));

    pstat = apstat = stat = astat = NULL_TREE;

  for (al = code->ext.alloc_list; al != NULL; al = al->next)

      gcc_assert (expr->expr_type == EXPR_VARIABLE);

      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      se.want_pointer = 1;
      se.descriptor_only = 1;
      gfc_conv_expr (&se, expr);

      if (expr->ts.type == BT_DERIVED
          && expr->ts.derived->attr.alloc_comp)

          gfc_ref *last = NULL;
          for (ref = expr->ref; ref; ref = ref->next)
            if (ref->type == REF_COMPONENT)

          /* Do not deallocate the components of a derived type
             ultimate pointer component.  */
          if (!(last && last->u.c.component->pointer)
              && !(!last && expr->symtree->n.sym->attr.pointer))

              tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
              gfc_add_expr_to_block (&se.pre, tmp);

        tmp = gfc_array_deallocate (se.expr, pstat);

        tmp = gfc_deallocate_with_status (se.expr, pstat, false);
        gfc_add_expr_to_block (&se.pre, tmp);

        tmp = fold_build2 (MODIFY_EXPR, void_type_node,
                           se.expr, build_int_cst (TREE_TYPE (se.expr), 0));

      gfc_add_expr_to_block (&se.pre, tmp);

      /* Keep track of the number of failed deallocations by adding stat
         of the last deallocation to the running total.  */

          apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
          gfc_add_modify_expr (&se.pre, astat, apstat);

      tmp = gfc_finish_block (&se.pre);
      gfc_add_expr_to_block (&block, tmp);

  /* Assign the value to the status variable.  */

      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->expr);
      tmp = convert (TREE_TYPE (se.expr), astat);
      gfc_add_modify_expr (&block, se.expr, tmp);

  return gfc_finish_block (&block);