gcc/fortran/trans-stmt.c
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tree.h"
29 #include "tree-gimple.h"
30 #include "ggc.h"
31 #include "toplev.h"
32 #include "real.h"
33 #include "gfortran.h"
34 #include "flags.h"
35 #include "trans.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
40 #include "arith.h"
41 #include "dependency.h"
43 typedef struct iter_info
45 tree var;
46 tree start;
47 tree end;
48 tree step;
49 struct iter_info *next;
51 iter_info;
53 typedef struct forall_info
55 iter_info *this_loop;
56 tree mask;
57 tree maskindex;
58 int nvar;
59 tree size;
60 struct forall_info *prev_nest;
62 forall_info;
64 static void gfc_trans_where_2 (gfc_code *, tree, bool,
65 forall_info *, stmtblock_t *);
  67 /* Translate an F95 label number to a LABEL_EXPR. */
69 tree
70 gfc_trans_label_here (gfc_code * code)
72 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
76 /* Given a variable expression which has been ASSIGNed to, find the decl
77 containing the auxiliary variables. For variables in common blocks this
78 is a field_decl. */
80 void
81 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
83 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
84 gfc_conv_expr (se, expr);
  85   /* Deal with a variable in a common block. Get the field declaration. */
86 if (TREE_CODE (se->expr) == COMPONENT_REF)
87 se->expr = TREE_OPERAND (se->expr, 1);
  88   /* Deal with a dummy argument. Get the parameter declaration. */
89 else if (TREE_CODE (se->expr) == INDIRECT_REF)
90 se->expr = TREE_OPERAND (se->expr, 0);
93 /* Translate a label assignment statement. */
95 tree
96 gfc_trans_label_assign (gfc_code * code)
98 tree label_tree;
99 gfc_se se;
100 tree len;
101 tree addr;
102 tree len_tree;
103 char *label_str;
104 int label_len;
106 /* Start a new block. */
107 gfc_init_se (&se, NULL);
108 gfc_start_block (&se.pre);
109 gfc_conv_label_variable (&se, code->expr);
111 len = GFC_DECL_STRING_LEN (se.expr);
112 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
114 label_tree = gfc_get_label_decl (code->label);
116 if (code->label->defined == ST_LABEL_TARGET)
118 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
119 len_tree = integer_minus_one_node;
121 else
123 label_str = code->label->format->value.character.string;
124 label_len = code->label->format->value.character.length;
125 len_tree = build_int_cst (NULL_TREE, label_len);
126 label_tree = gfc_build_string_const (label_len + 1, label_str);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify_expr (&se.pre, len, len_tree);
131 gfc_add_modify_expr (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
138 tree
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
142 tree assigned_goto;
143 tree target;
144 tree tmp;
145 gfc_se se;
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
150 /* ASSIGNED GOTO. */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
158 &se.pre, &loc);
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 code = code->block;
163 if (code == NULL)
165 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
180 code = code->block;
182 while (code != NULL);
183 gfc_trans_runtime_check (boolean_true_node,
184 "Assigned label is not in the list", &se.pre, &loc);
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
191 tree
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
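/* A hypothetical illustration (the names below are invented for this
   comment): if an elemental subroutine whose first dummy is INTENT(OUT)
   and whose second dummy is INTENT(IN) is reached here (e.g. via a
   defined assignment) with overlapping actual arguments such as

       a(2:n+1)  and  a(1:n)

   the INTENT(OUT) argument is given a temporary below, and internal_unpack
   copies the result back into the variable afterwards.  */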
202 static void
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg)
206 gfc_actual_arglist *arg0;
207 gfc_expr *e;
208 gfc_formal_arglist *formal;
209 gfc_loopinfo tmp_loop;
210 gfc_se parmse;
211 gfc_ss *ss;
212 gfc_ss_info *info;
213 gfc_symbol *fsym;
214 int n;
215 stmtblock_t block;
216 tree data;
217 tree offset;
218 tree size;
219 tree tmp;
221 if (loopse->ss == NULL)
222 return;
224 ss = loopse->ss;
225 arg0 = arg;
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
231 e = arg->expr;
232 if (e == NULL)
233 continue;
235 /* Obtain the info structure for the current argument. */
236 info = NULL;
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
239 if (ss->expr != e)
240 continue;
241 info = &ss->data.info;
242 break;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
249 && e->rank && fsym
250 && fsym->attr.intent == INTENT_OUT
251 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
253 /* Make a local loopinfo for the temporary creation, so that
254 none of the other ss->info's have to be renormalized. */
255 gfc_init_loopinfo (&tmp_loop);
256 for (n = 0; n < info->dimen; n++)
258 tmp_loop.to[n] = loopse->loop->to[n];
259 tmp_loop.from[n] = loopse->loop->from[n];
260 tmp_loop.order[n] = loopse->loop->order[n];
263 /* Generate the temporary. Merge the block so that the
264 declarations are put at the right binding level. */
265 size = gfc_create_var (gfc_array_index_type, NULL);
266 data = gfc_create_var (pvoid_type_node, NULL);
267 gfc_start_block (&block);
268 tmp = gfc_typenode_for_spec (&e->ts);
269 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
270 &tmp_loop, info, tmp,
271 false, true, false);
272 gfc_add_modify_expr (&se->pre, size, tmp);
273 tmp = fold_convert (pvoid_type_node, info->data);
274 gfc_add_modify_expr (&se->pre, data, tmp);
275 gfc_merge_block_scope (&block);
277 /* Obtain the argument descriptor for unpacking. */
278 gfc_init_se (&parmse, NULL);
279 parmse.want_pointer = 1;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* Calculate the offset for the temporary. */
284 offset = gfc_index_zero_node;
285 for (n = 0; n < info->dimen; n++)
287 tmp = gfc_conv_descriptor_stride (info->descriptor,
288 gfc_rank_cst[n]);
289 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
290 loopse->loop->from[n], tmp);
291 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
292 offset, tmp);
294 info->offset = gfc_create_var (gfc_array_index_type, NULL);
295 gfc_add_modify_expr (&se->pre, info->offset, offset);
297 /* Copy the result back using unpack. */
298 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
299 gfc_add_expr_to_block (&se->post, tmp);
301 gfc_add_block_to_block (&se->post, &parmse.post);
307 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
309 tree
310 gfc_trans_call (gfc_code * code, bool dependency_check)
312 gfc_se se;
313 gfc_ss * ss;
314 int has_alternate_specifier;
316 /* A CALL starts a new block because the actual arguments may have to
317 be evaluated first. */
318 gfc_init_se (&se, NULL);
319 gfc_start_block (&se.pre);
321 gcc_assert (code->resolved_sym);
323 ss = gfc_ss_terminator;
324 if (code->resolved_sym->attr.elemental)
325 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
 327   /* This is not an elemental subroutine call with array valued arguments. */
328 if (ss == gfc_ss_terminator)
331 /* Translate the call. */
332 has_alternate_specifier
333 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
334 NULL_TREE);
336 /* A subroutine without side-effect, by definition, does nothing! */
337 TREE_SIDE_EFFECTS (se.expr) = 1;
339 /* Chain the pieces together and return the block. */
340 if (has_alternate_specifier)
342 gfc_code *select_code;
343 gfc_symbol *sym;
344 select_code = code->next;
345 gcc_assert(select_code->op == EXEC_SELECT);
346 sym = select_code->expr->symtree->n.sym;
347 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
348 if (sym->backend_decl == NULL)
349 sym->backend_decl = gfc_get_symbol_decl (sym);
350 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
352 else
353 gfc_add_expr_to_block (&se.pre, se.expr);
355 gfc_add_block_to_block (&se.pre, &se.post);
358 else
360 /* An elemental subroutine call with array valued arguments has
361 to be scalarized. */
362 gfc_loopinfo loop;
363 stmtblock_t body;
364 stmtblock_t block;
365 gfc_se loopse;
367 /* gfc_walk_elemental_function_args renders the ss chain in the
368 reverse order to the actual argument order. */
369 ss = gfc_reverse_ss (ss);
371 /* Initialize the loop. */
372 gfc_init_se (&loopse, NULL);
373 gfc_init_loopinfo (&loop);
374 gfc_add_ss_to_loop (&loop, ss);
376 gfc_conv_ss_startstride (&loop);
377 gfc_conv_loop_setup (&loop);
378 gfc_mark_ss_chain_used (ss, 1);
380 /* Convert the arguments, checking for dependencies. */
381 gfc_copy_loopinfo_to_se (&loopse, &loop);
382 loopse.ss = ss;
384 /* For operator assignment, we need to do dependency checking.
385 We also check the intent of the parameters. */
386 if (dependency_check)
388 gfc_symbol *sym;
389 sym = code->resolved_sym;
 390 	  gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
 391 	  gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
392 gfc_conv_elemental_dependencies (&se, &loopse, sym,
393 code->ext.actual);
396 /* Generate the loop body. */
397 gfc_start_scalarized_body (&loop, &body);
398 gfc_init_block (&block);
400 /* Add the subroutine call to the block. */
401 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
402 NULL_TREE);
403 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
405 gfc_add_block_to_block (&block, &loopse.pre);
406 gfc_add_block_to_block (&block, &loopse.post);
408 /* Finish up the loop block and the loop. */
409 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
410 gfc_trans_scalarizing_loops (&loop, &body);
411 gfc_add_block_to_block (&se.pre, &loop.pre);
412 gfc_add_block_to_block (&se.pre, &loop.post);
413 gfc_add_block_to_block (&se.pre, &se.post);
414 gfc_cleanup_loop (&loop);
417 return gfc_finish_block (&se.pre);
421 /* Translate the RETURN statement. */
423 tree
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
426 if (code->expr)
428 gfc_se se;
429 tree tmp;
430 tree result;
432 /* If code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
434 been generated. */
436 result = gfc_get_fake_result_decl (NULL, 0);
437 if (!result)
439 gfc_warning ("An alternate return at %L without a * dummy argument",
440 &code->expr->where);
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement. */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
448 gfc_conv_expr (&se, code->expr);
450 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
458 else
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
466 tree
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
470 gfc_se se;
471 tree tmp;
473 /* Start a new block for this statement. */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
478 if (code->expr == NULL)
480 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
481 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
483 else
485 gfc_conv_expr_reference (&se, code->expr);
486 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
487 se.expr, se.string_length);
490 gfc_add_expr_to_block (&se.pre, tmp);
492 gfc_add_block_to_block (&se.pre, &se.post);
494 return gfc_finish_block (&se.pre);
498 /* Translate the STOP statement. We have to translate this statement
499 to a runtime library call. */
501 tree
502 gfc_trans_stop (gfc_code * code)
504 tree gfc_int4_type_node = gfc_get_int_type (4);
505 gfc_se se;
506 tree tmp;
508 /* Start a new block for this statement. */
509 gfc_init_se (&se, NULL);
510 gfc_start_block (&se.pre);
513 if (code->expr == NULL)
515 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
516 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
518 else
520 gfc_conv_expr_reference (&se, code->expr);
521 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
522 se.expr, se.string_length);
525 gfc_add_expr_to_block (&se.pre, tmp);
527 gfc_add_block_to_block (&se.pre, &se.post);
529 return gfc_finish_block (&se.pre);
533 /* Generate GENERIC for the IF construct. This function also deals with
534 the simple IF statement, because the front end translates the IF
535 statement into an IF construct.
537 We translate:
539 IF (cond) THEN
540 then_clause
541 ELSEIF (cond2)
542 elseif_clause
543 ELSE
544 else_clause
545 ENDIF
547 into:
549 pre_cond_s;
550 if (cond_s)
552 then_clause;
554 else
556 pre_cond_s
557 if (cond_s)
559 elseif_clause
561 else
563 else_clause;
567 where COND_S is the simplified version of the predicate. PRE_COND_S
568 are the pre side-effects produced by the translation of the
569 conditional.
 570    We need to build the chain recursively; otherwise we run into
571 problems with folding incomplete statements. */
573 static tree
574 gfc_trans_if_1 (gfc_code * code)
576 gfc_se if_se;
577 tree stmt, elsestmt;
579 /* Check for an unconditional ELSE clause. */
580 if (!code->expr)
581 return gfc_trans_code (code->next);
583 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
584 gfc_init_se (&if_se, NULL);
585 gfc_start_block (&if_se.pre);
587 /* Calculate the IF condition expression. */
588 gfc_conv_expr_val (&if_se, code->expr);
590 /* Translate the THEN clause. */
591 stmt = gfc_trans_code (code->next);
593 /* Translate the ELSE clause. */
594 if (code->block)
595 elsestmt = gfc_trans_if_1 (code->block);
596 else
597 elsestmt = build_empty_stmt ();
599 /* Build the condition expression and add it to the condition block. */
600 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
602 gfc_add_expr_to_block (&if_se.pre, stmt);
604 /* Finish off this statement. */
605 return gfc_finish_block (&if_se.pre);
608 tree
609 gfc_trans_if (gfc_code * code)
611 /* Ignore the top EXEC_IF, it only announces an IF construct. The
612 actual code we must translate is in code->block. */
614 return gfc_trans_if_1 (code->block);
618 /* Translate an arithmetic IF expression.
620 IF (cond) label1, label2, label3 translates to
622 if (cond <= 0)
624 if (cond < 0)
625 goto label1;
626 else // cond == 0
627 goto label2;
629 else // cond > 0
630 goto label3;
632 An optimized version can be generated in case of equal labels.
633 E.g., if label1 is equal to label2, we can translate it to
635 if (cond <= 0)
636 goto label1;
637 else
638 goto label3;
641 tree
642 gfc_trans_arithmetic_if (gfc_code * code)
644 gfc_se se;
645 tree tmp;
646 tree branch1;
647 tree branch2;
648 tree zero;
650 /* Start a new block. */
651 gfc_init_se (&se, NULL);
652 gfc_start_block (&se.pre);
654 /* Pre-evaluate COND. */
655 gfc_conv_expr_val (&se, code->expr);
656 se.expr = gfc_evaluate_now (se.expr, &se.pre);
658 /* Build something to compare with. */
659 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
661 if (code->label->value != code->label2->value)
663 /* If (cond < 0) take branch1 else take branch2.
664 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
665 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
666 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
668 if (code->label->value != code->label3->value)
669 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
670 else
671 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
673 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
675 else
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
678 if (code->label->value != code->label3->value
679 && code->label2->value != code->label3->value)
681 /* if (cond <= 0) take branch1 else take branch2. */
682 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
683 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
687 /* Append the COND_EXPR to the evaluation of COND, and return. */
688 gfc_add_expr_to_block (&se.pre, branch1);
689 return gfc_finish_block (&se.pre);
693 /* Translate the simple DO construct. This is where the loop variable has
694 integer type and step +-1. We can't use this in the general case
695 because integer overflow and floating point errors could give incorrect
696 results.
697 We translate a do loop from:
699 DO dovar = from, to, step
700 body
701 END DO
705 [Evaluate loop bounds and step]
706 dovar = from;
 707    if ((step > 0) ? (dovar <= to) : (dovar >= to))
709 for (;;)
711 body;
712 cycle_label:
713 cond = (dovar == to);
714 dovar += step;
715 if (cond) goto end_label;
718 end_label:
720 This helps the optimizers by avoiding the extra induction variable
721 used in the general case. */
723 static tree
724 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
725 tree from, tree to, tree step)
727 stmtblock_t body;
728 tree type;
729 tree cond;
730 tree tmp;
731 tree cycle_label;
732 tree exit_label;
734 type = TREE_TYPE (dovar);
736 /* Initialize the DO variable: dovar = from. */
737 gfc_add_modify_expr (pblock, dovar, from);
739 /* Cycle and exit statements are implemented with gotos. */
740 cycle_label = gfc_build_label_decl (NULL_TREE);
741 exit_label = gfc_build_label_decl (NULL_TREE);
743 /* Put the labels where they can be found later. See gfc_trans_do(). */
744 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
746 /* Loop body. */
747 gfc_start_block (&body);
749 /* Main loop body. */
750 tmp = gfc_trans_code (code->block->next);
751 gfc_add_expr_to_block (&body, tmp);
753 /* Label for cycle statements (if needed). */
754 if (TREE_USED (cycle_label))
756 tmp = build1_v (LABEL_EXPR, cycle_label);
757 gfc_add_expr_to_block (&body, tmp);
760 /* Evaluate the loop condition. */
761 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
762 cond = gfc_evaluate_now (cond, &body);
764 /* Increment the loop variable. */
765 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
766 gfc_add_modify_expr (&body, dovar, tmp);
768 /* The loop exit. */
769 tmp = build1_v (GOTO_EXPR, exit_label);
770 TREE_USED (exit_label) = 1;
771 tmp = fold_build3 (COND_EXPR, void_type_node,
772 cond, tmp, build_empty_stmt ());
773 gfc_add_expr_to_block (&body, tmp);
775 /* Finish the loop body. */
776 tmp = gfc_finish_block (&body);
777 tmp = build1_v (LOOP_EXPR, tmp);
779 /* Only execute the loop if the number of iterations is positive. */
780 if (tree_int_cst_sgn (step) > 0)
781 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
782 else
783 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (pblock, tmp);
788 /* Add the exit label. */
789 tmp = build1_v (LABEL_EXPR, exit_label);
790 gfc_add_expr_to_block (pblock, tmp);
792 return gfc_finish_block (pblock);
795 /* Translate the DO construct. This obviously is one of the most
796 important ones to get right with any compiler, but especially
797 so for Fortran.
799 We special case some loop forms as described in gfc_trans_simple_do.
800 For other cases we implement them with a separate loop count,
801 as described in the standard.
803 We translate a do loop from:
805 DO dovar = from, to, step
806 body
807 END DO
811 [evaluate loop bounds and step]
812 count = (to + step - from) / step;
813 dovar = from;
814 for (;;)
816 body;
817 cycle_label:
818 dovar += step
819 count--;
820 if (count <=0) goto exit_label;
822 exit_label:
824 TODO: Large loop counts
825 The code above assumes the loop count fits into a signed integer kind,
 826    i.e. it does not work for loop counts > 2^31 for integer(kind=4) variables.
827 We must support the full range. */
829 tree
830 gfc_trans_do (gfc_code * code)
832 gfc_se se;
833 tree dovar;
834 tree from;
835 tree to;
836 tree step;
837 tree count;
838 tree count_one;
839 tree type;
840 tree cond;
841 tree cycle_label;
842 tree exit_label;
843 tree tmp;
844 stmtblock_t block;
845 stmtblock_t body;
847 gfc_start_block (&block);
849 /* Evaluate all the expressions in the iterator. */
850 gfc_init_se (&se, NULL);
851 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
852 gfc_add_block_to_block (&block, &se.pre);
853 dovar = se.expr;
854 type = TREE_TYPE (dovar);
856 gfc_init_se (&se, NULL);
857 gfc_conv_expr_val (&se, code->ext.iterator->start);
858 gfc_add_block_to_block (&block, &se.pre);
859 from = gfc_evaluate_now (se.expr, &block);
861 gfc_init_se (&se, NULL);
862 gfc_conv_expr_val (&se, code->ext.iterator->end);
863 gfc_add_block_to_block (&block, &se.pre);
864 to = gfc_evaluate_now (se.expr, &block);
866 gfc_init_se (&se, NULL);
867 gfc_conv_expr_val (&se, code->ext.iterator->step);
868 gfc_add_block_to_block (&block, &se.pre);
869 step = gfc_evaluate_now (se.expr, &block);
871 /* Special case simple loops. */
872 if (TREE_CODE (type) == INTEGER_TYPE
873 && (integer_onep (step)
874 || tree_int_cst_equal (step, integer_minus_one_node)))
875 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
877 /* Initialize loop count. This code is executed before we enter the
878 loop body. We generate: count = (to + step - from) / step. */
880 tmp = fold_build2 (MINUS_EXPR, type, step, from);
881 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
882 if (TREE_CODE (type) == INTEGER_TYPE)
884 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
885 count = gfc_create_var (type, "count");
887 else
889 /* TODO: We could use the same width as the real type.
 890 	 This would probably cause more problems than it solves
891 when we implement "long double" types. */
892 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
893 tmp = fold_build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp);
894 count = gfc_create_var (gfc_array_index_type, "count");
896 gfc_add_modify_expr (&block, count, tmp);
898 count_one = build_int_cst (TREE_TYPE (count), 1);
900 /* Initialize the DO variable: dovar = from. */
901 gfc_add_modify_expr (&block, dovar, from);
903 /* Loop body. */
904 gfc_start_block (&body);
906 /* Cycle and exit statements are implemented with gotos. */
907 cycle_label = gfc_build_label_decl (NULL_TREE);
908 exit_label = gfc_build_label_decl (NULL_TREE);
910 /* Start with the loop condition. Loop until count <= 0. */
911 cond = fold_build2 (LE_EXPR, boolean_type_node, count,
912 build_int_cst (TREE_TYPE (count), 0));
913 tmp = build1_v (GOTO_EXPR, exit_label);
914 TREE_USED (exit_label) = 1;
915 tmp = fold_build3 (COND_EXPR, void_type_node,
916 cond, tmp, build_empty_stmt ());
917 gfc_add_expr_to_block (&body, tmp);
919 /* Put these labels where they can be found later. We put the
920 labels in a TREE_LIST node (because TREE_CHAIN is already
921 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
922 label in TREE_VALUE (backend_decl). */
924 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
926 /* Main loop body. */
927 tmp = gfc_trans_code (code->block->next);
928 gfc_add_expr_to_block (&body, tmp);
930 /* Label for cycle statements (if needed). */
931 if (TREE_USED (cycle_label))
933 tmp = build1_v (LABEL_EXPR, cycle_label);
934 gfc_add_expr_to_block (&body, tmp);
937 /* Increment the loop variable. */
938 tmp = build2 (PLUS_EXPR, type, dovar, step);
939 gfc_add_modify_expr (&body, dovar, tmp);
941 /* Decrement the loop count. */
942 tmp = build2 (MINUS_EXPR, TREE_TYPE (count), count, count_one);
943 gfc_add_modify_expr (&body, count, tmp);
945 /* End of loop body. */
946 tmp = gfc_finish_block (&body);
948 /* The for loop itself. */
949 tmp = build1_v (LOOP_EXPR, tmp);
950 gfc_add_expr_to_block (&block, tmp);
952 /* Add the exit label. */
953 tmp = build1_v (LABEL_EXPR, exit_label);
954 gfc_add_expr_to_block (&block, tmp);
956 return gfc_finish_block (&block);
960 /* Translate the DO WHILE construct.
962 We translate
964 DO WHILE (cond)
965 body
966 END DO
970 for ( ; ; )
972 pre_cond;
973 if (! cond) goto exit_label;
974 body;
975 cycle_label:
977 exit_label:
979 Because the evaluation of the exit condition `cond' may have side
980 effects, we can't do much for empty loop bodies. The backend optimizers
981 should be smart enough to eliminate any dead loops. */
983 tree
984 gfc_trans_do_while (gfc_code * code)
986 gfc_se cond;
987 tree tmp;
988 tree cycle_label;
989 tree exit_label;
990 stmtblock_t block;
992 /* Everything we build here is part of the loop body. */
993 gfc_start_block (&block);
995 /* Cycle and exit statements are implemented with gotos. */
996 cycle_label = gfc_build_label_decl (NULL_TREE);
997 exit_label = gfc_build_label_decl (NULL_TREE);
999 /* Put the labels where they can be found later. See gfc_trans_do(). */
1000 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1002 /* Create a GIMPLE version of the exit condition. */
1003 gfc_init_se (&cond, NULL);
1004 gfc_conv_expr_val (&cond, code->expr);
1005 gfc_add_block_to_block (&block, &cond.pre);
1006 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1008 /* Build "IF (! cond) GOTO exit_label". */
1009 tmp = build1_v (GOTO_EXPR, exit_label);
1010 TREE_USED (exit_label) = 1;
1011 tmp = fold_build3 (COND_EXPR, void_type_node,
1012 cond.expr, tmp, build_empty_stmt ());
1013 gfc_add_expr_to_block (&block, tmp);
1015 /* The main body of the loop. */
1016 tmp = gfc_trans_code (code->block->next);
1017 gfc_add_expr_to_block (&block, tmp);
1019 /* Label for cycle statements (if needed). */
1020 if (TREE_USED (cycle_label))
1022 tmp = build1_v (LABEL_EXPR, cycle_label);
1023 gfc_add_expr_to_block (&block, tmp);
1026 /* End of loop body. */
1027 tmp = gfc_finish_block (&block);
1029 gfc_init_block (&block);
1030 /* Build the loop. */
1031 tmp = build1_v (LOOP_EXPR, tmp);
1032 gfc_add_expr_to_block (&block, tmp);
1034 /* Add the exit label. */
1035 tmp = build1_v (LABEL_EXPR, exit_label);
1036 gfc_add_expr_to_block (&block, tmp);
1038 return gfc_finish_block (&block);
1042 /* Translate the SELECT CASE construct for INTEGER case expressions,
1043 without killing all potential optimizations. The problem is that
1044 Fortran allows unbounded cases, but the back-end does not, so we
1045 need to intercept those before we enter the equivalent SWITCH_EXPR
1046 we can build.
1048 For example, we translate this,
1050 SELECT CASE (expr)
1051 CASE (:100,101,105:115)
1052 block_1
1053 CASE (190:199,200:)
1054 block_2
1055 CASE (300)
1056 block_3
1057 CASE DEFAULT
1058 block_4
1059 END SELECT
1061 to the GENERIC equivalent,
1063 switch (expr)
1065        case (minimum value for typeof(expr)) ... 100:
1066        case 101:
1067        case 105 ... 115:
1068          block_1;
1069          goto end_label;
1071        case 200 ... (maximum value for typeof(expr)):
1072 case 190 ... 199:
1073 block2;
1074 goto end_label;
1076 case 300:
1077 block_3;
1078 goto end_label;
1080 default:
1081 block_4;
1082 goto end_label;
1085 end_label: */
1087 static tree
1088 gfc_trans_integer_select (gfc_code * code)
1090 gfc_code *c;
1091 gfc_case *cp;
1092 tree end_label;
1093 tree tmp;
1094 gfc_se se;
1095 stmtblock_t block;
1096 stmtblock_t body;
1098 gfc_start_block (&block);
1100 /* Calculate the switch expression. */
1101 gfc_init_se (&se, NULL);
1102 gfc_conv_expr_val (&se, code->expr);
1103 gfc_add_block_to_block (&block, &se.pre);
1105 end_label = gfc_build_label_decl (NULL_TREE);
1107 gfc_init_block (&body);
1109 for (c = code->block; c; c = c->block)
1111 for (cp = c->ext.case_list; cp; cp = cp->next)
1113 tree low, high;
1114 tree label;
1116 /* Assume it's the default case. */
1117 low = high = NULL_TREE;
1119 if (cp->low)
1121 low = gfc_conv_constant_to_tree (cp->low);
1123 /* If there's only a lower bound, set the high bound to the
1124 maximum value of the case expression. */
1125 if (!cp->high)
1126 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1129 if (cp->high)
1131 /* Three cases are possible here:
1133 1) There is no lower bound, e.g. CASE (:N).
1134 2) There is a lower bound .NE. high bound, that is
1135 a case range, e.g. CASE (N:M) where M>N (we make
1136 sure that M>N during type resolution).
1137 3) There is a lower bound, and it has the same value
1138 as the high bound, e.g. CASE (N:N). This is our
1139 internal representation of CASE(N).
1141 In the first and second case, we need to set a value for
1142 high. In the third case, we don't because the GCC middle
1143 end represents a single case value by just letting high be
1144 a NULL_TREE. We can't do that because we need to be able
1145 to represent unbounded cases. */
1147 if (!cp->low
1148 || (cp->low
1149 && mpz_cmp (cp->low->value.integer,
1150 cp->high->value.integer) != 0))
1151 high = gfc_conv_constant_to_tree (cp->high);
1153 /* Unbounded case. */
1154 if (!cp->low)
1155 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1158 /* Build a label. */
1159 label = gfc_build_label_decl (NULL_TREE);
1161 /* Add this case label.
1162 	     Pass the label decl to match what the GCC back end expects. */
1163 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1164 gfc_add_expr_to_block (&body, tmp);
1167 /* Add the statements for this case. */
1168 tmp = gfc_trans_code (c->next);
1169 gfc_add_expr_to_block (&body, tmp);
1171 /* Break to the end of the construct. */
1172 tmp = build1_v (GOTO_EXPR, end_label);
1173 gfc_add_expr_to_block (&body, tmp);
1176 tmp = gfc_finish_block (&body);
1177 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1178 gfc_add_expr_to_block (&block, tmp);
1180 tmp = build1_v (LABEL_EXPR, end_label);
1181 gfc_add_expr_to_block (&block, tmp);
1183 return gfc_finish_block (&block);
1187 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1189 There are only two cases possible here, even though the standard
1190 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1191 .FALSE., and DEFAULT.
1193 We never generate more than two blocks here. Instead, we always
1194 try to eliminate the DEFAULT case. This way, we can translate this
1195 kind of SELECT construct to a simple
1197 if {} else {};
1199 expression in GENERIC. */
1201 static tree
1202 gfc_trans_logical_select (gfc_code * code)
1204 gfc_code *c;
1205 gfc_code *t, *f, *d;
1206 gfc_case *cp;
1207 gfc_se se;
1208 stmtblock_t block;
1210 /* Assume we don't have any cases at all. */
1211 t = f = d = NULL;
1213 /* Now see which ones we actually do have. We can have at most two
1214 cases in a single case list: one for .TRUE. and one for .FALSE.
1215 The default case is always separate. If the cases for .TRUE. and
1216      .FALSE. are in the same case list, the block for that case list is
1217      always executed, and we don't generate a COND_EXPR. */
1218 for (c = code->block; c; c = c->block)
1220 for (cp = c->ext.case_list; cp; cp = cp->next)
1222 if (cp->low)
1224 if (cp->low->value.logical == 0) /* .FALSE. */
1225 f = c;
1226 else /* if (cp->value.logical != 0), thus .TRUE. */
1227 t = c;
1229 else
1230 d = c;
1234 /* Start a new block. */
1235 gfc_start_block (&block);
1237 /* Calculate the switch expression. We always need to do this
1238 because it may have side effects. */
1239 gfc_init_se (&se, NULL);
1240 gfc_conv_expr_val (&se, code->expr);
1241 gfc_add_block_to_block (&block, &se.pre);
1243 if (t == f && t != NULL)
1245 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1246 translate the code for these cases, append it to the current
1247 block. */
1248 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1250 else
1252 tree true_tree, false_tree, stmt;
1254 true_tree = build_empty_stmt ();
1255 false_tree = build_empty_stmt ();
1257 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1258 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1259 make the missing case the default case. */
1260 if (t != NULL && f != NULL)
1261 d = NULL;
1262 else if (d != NULL)
1264 if (t == NULL)
1265 t = d;
1266 else
1267 f = d;
1270 /* Translate the code for each of these blocks, and append it to
1271 the current block. */
1272 if (t != NULL)
1273 true_tree = gfc_trans_code (t->next);
1275 if (f != NULL)
1276 false_tree = gfc_trans_code (f->next);
1278 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1279 true_tree, false_tree);
1280 gfc_add_expr_to_block (&block, stmt);
1283 return gfc_finish_block (&block);
1287 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1288 Instead of generating compares and jumps, it is far simpler to
1289 generate a data structure describing the cases in order and call a
1290 library subroutine that locates the right case.
1291 This is particularly true because this is the only case where we
1292 might have to dispose of a temporary.
1293 The library subroutine returns a pointer to jump to or NULL if no
1294 branches are to be taken. */
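/* A hypothetical illustration: for a construct such as

       SELECT CASE (str)
       CASE ('a' : 'm')
         block_1
       CASE DEFAULT
         block_2
       END SELECT

   each case becomes one entry of the static jump table built below (low
   string, high string, and the address of the label emitted in front of
   its block); the call through gfor_fndecl_select_string then yields the
   label to branch to.  */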
1296 static tree
1297 gfc_trans_character_select (gfc_code *code)
1299 tree init, node, end_label, tmp, type, *labels;
1300 tree case_label;
1301 stmtblock_t block, body;
1302 gfc_case *cp, *d;
1303 gfc_code *c;
1304 gfc_se se;
1305 int i, n;
1307 static tree select_struct;
1308 static tree ss_string1, ss_string1_len;
1309 static tree ss_string2, ss_string2_len;
1310 static tree ss_target;
1312 if (select_struct == NULL)
1314 tree gfc_int4_type_node = gfc_get_int_type (4);
1316 select_struct = make_node (RECORD_TYPE);
1317 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1319 #undef ADD_FIELD
1320 #define ADD_FIELD(NAME, TYPE) \
1321 ss_##NAME = gfc_add_field_to_struct \
1322 (&(TYPE_FIELDS (select_struct)), select_struct, \
1323 get_identifier (stringize(NAME)), TYPE)
1325 ADD_FIELD (string1, pchar_type_node);
1326 ADD_FIELD (string1_len, gfc_int4_type_node);
1328 ADD_FIELD (string2, pchar_type_node);
1329 ADD_FIELD (string2_len, gfc_int4_type_node);
1331 ADD_FIELD (target, pvoid_type_node);
1332 #undef ADD_FIELD
1334 gfc_finish_type (select_struct);
1337 cp = code->block->ext.case_list;
1338 while (cp->left != NULL)
1339 cp = cp->left;
1341 n = 0;
1342 for (d = cp; d; d = d->right)
1343 d->n = n++;
1345 if (n != 0)
1346 labels = gfc_getmem (n * sizeof (tree));
1347 else
1348 labels = NULL;
1350 for(i = 0; i < n; i++)
1352 labels[i] = gfc_build_label_decl (NULL_TREE);
1353 TREE_USED (labels[i]) = 1;
1354 /* TODO: The gimplifier should do this for us, but it has
1355 inadequacies when dealing with static initializers. */
1356 FORCED_LABEL (labels[i]) = 1;
1359 end_label = gfc_build_label_decl (NULL_TREE);
1361 /* Generate the body */
1362 gfc_start_block (&block);
1363 gfc_init_block (&body);
1365 for (c = code->block; c; c = c->block)
1367 for (d = c->ext.case_list; d; d = d->next)
1369 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1370 gfc_add_expr_to_block (&body, tmp);
1373 tmp = gfc_trans_code (c->next);
1374 gfc_add_expr_to_block (&body, tmp);
1376 tmp = build1_v (GOTO_EXPR, end_label);
1377 gfc_add_expr_to_block (&body, tmp);
1380 /* Generate the structure describing the branches */
1381 init = NULL_TREE;
1382 i = 0;
1384 for(d = cp; d; d = d->right, i++)
1386 node = NULL_TREE;
1388 gfc_init_se (&se, NULL);
1390 if (d->low == NULL)
1392 node = tree_cons (ss_string1, null_pointer_node, node);
1393 node = tree_cons (ss_string1_len, integer_zero_node, node);
1395 else
1397 gfc_conv_expr_reference (&se, d->low);
1399 node = tree_cons (ss_string1, se.expr, node);
1400 node = tree_cons (ss_string1_len, se.string_length, node);
1403 if (d->high == NULL)
1405 node = tree_cons (ss_string2, null_pointer_node, node);
1406 node = tree_cons (ss_string2_len, integer_zero_node, node);
1408 else
1410 gfc_init_se (&se, NULL);
1411 gfc_conv_expr_reference (&se, d->high);
1413 node = tree_cons (ss_string2, se.expr, node);
1414 node = tree_cons (ss_string2_len, se.string_length, node);
1417 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1418 node = tree_cons (ss_target, tmp, node);
1420 tmp = build_constructor_from_list (select_struct, nreverse (node));
1421 init = tree_cons (NULL_TREE, tmp, init);
1424 type = build_array_type (select_struct, build_index_type
1425 (build_int_cst (NULL_TREE, n - 1)));
1427 init = build_constructor_from_list (type, nreverse(init));
1428 TREE_CONSTANT (init) = 1;
1429 TREE_INVARIANT (init) = 1;
1430 TREE_STATIC (init) = 1;
1431 /* Create a static variable to hold the jump table. */
1432 tmp = gfc_create_var (type, "jumptable");
1433 TREE_CONSTANT (tmp) = 1;
1434 TREE_INVARIANT (tmp) = 1;
1435 TREE_STATIC (tmp) = 1;
1436 TREE_READONLY (tmp) = 1;
1437 DECL_INITIAL (tmp) = init;
1438 init = tmp;
1440 /* Build the library call */
1441 init = gfc_build_addr_expr (pvoid_type_node, init);
1442 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1444 gfc_init_se (&se, NULL);
1445 gfc_conv_expr_reference (&se, code->expr);
1447 gfc_add_block_to_block (&block, &se.pre);
1449 tmp = build_call_expr (gfor_fndecl_select_string, 5,
1450 init, build_int_cst (NULL_TREE, n),
1451 tmp, se.expr, se.string_length);
1453 case_label = gfc_create_var (TREE_TYPE (tmp), "case_label");
1454 gfc_add_modify_expr (&block, case_label, tmp);
1456 gfc_add_block_to_block (&block, &se.post);
1458 tmp = build1 (GOTO_EXPR, void_type_node, case_label);
1459 gfc_add_expr_to_block (&block, tmp);
1461 tmp = gfc_finish_block (&body);
1462 gfc_add_expr_to_block (&block, tmp);
1463 tmp = build1_v (LABEL_EXPR, end_label);
1464 gfc_add_expr_to_block (&block, tmp);
1466 if (n != 0)
1467 gfc_free (labels);
1469 return gfc_finish_block (&block);
1473 /* Translate the three variants of the SELECT CASE construct.
1475 SELECT CASEs with INTEGER case expressions can be translated to an
1476 equivalent GENERIC switch statement, and for LOGICAL case
1477 expressions we build one or two if-else compares.
1479 SELECT CASEs with CHARACTER case expressions are a whole different
1480 story, because they don't exist in GENERIC. So we sort them and
1481 do a binary search at runtime.
1483 Fortran has no BREAK statement, and it does not allow jumps from
1484 one case block to another. That makes things a lot easier for
1485 the optimizers. */
1487 tree
1488 gfc_trans_select (gfc_code * code)
1490 gcc_assert (code && code->expr);
1492 /* Empty SELECT constructs are legal. */
1493 if (code->block == NULL)
1494 return build_empty_stmt ();
1496 /* Select the correct translation function. */
1497 switch (code->expr->ts.type)
1499 case BT_LOGICAL: return gfc_trans_logical_select (code);
1500 case BT_INTEGER: return gfc_trans_integer_select (code);
1501 case BT_CHARACTER: return gfc_trans_character_select (code);
1502 default:
1503 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1504 /* Not reached */
1509 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1510 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1511    indicates whether we should generate code to test the FORALL's mask
1512 array. OUTER is the loop header to be used for initializing mask
1513 indices.
1515 The generated loop format is:
1516 count = (end - start + step) / step
1517 loopvar = start
1518 while (1)
1520 if (count <=0 )
1521 goto end_of_loop
1522 <body>
1523 loopvar += step
1524 count --
1526 end_of_loop: */
1528 static tree
1529 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1530 int mask_flag, stmtblock_t *outer)
1532 int n, nvar;
1533 tree tmp;
1534 tree cond;
1535 stmtblock_t block;
1536 tree exit_label;
1537 tree count;
1538 tree var, start, end, step;
1539 iter_info *iter;
1541 /* Initialize the mask index outside the FORALL nest. */
1542 if (mask_flag && forall_tmp->mask)
1543 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
1545 iter = forall_tmp->this_loop;
1546 nvar = forall_tmp->nvar;
1547 for (n = 0; n < nvar; n++)
1549 var = iter->var;
1550 start = iter->start;
1551 end = iter->end;
1552 step = iter->step;
1554 exit_label = gfc_build_label_decl (NULL_TREE);
1555 TREE_USED (exit_label) = 1;
1557 /* The loop counter. */
1558 count = gfc_create_var (TREE_TYPE (var), "count");
1560 /* The body of the loop. */
1561 gfc_init_block (&block);
1563 /* The exit condition. */
1564 cond = fold_build2 (LE_EXPR, boolean_type_node,
1565 count, build_int_cst (TREE_TYPE (count), 0));
1566 tmp = build1_v (GOTO_EXPR, exit_label);
1567 tmp = fold_build3 (COND_EXPR, void_type_node,
1568 cond, tmp, build_empty_stmt ());
1569 gfc_add_expr_to_block (&block, tmp);
1571 /* The main loop body. */
1572 gfc_add_expr_to_block (&block, body);
1574 /* Increment the loop variable. */
1575 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1576 gfc_add_modify_expr (&block, var, tmp);
1578 /* Advance to the next mask element. Only do this for the
1579 innermost loop. */
1580 if (n == 0 && mask_flag && forall_tmp->mask)
1582 tree maskindex = forall_tmp->maskindex;
1583 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1584 maskindex, gfc_index_one_node);
1585 gfc_add_modify_expr (&block, maskindex, tmp);
1588 /* Decrement the loop counter. */
1589 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1590 gfc_add_modify_expr (&block, count, tmp);
1592 body = gfc_finish_block (&block);
1594 /* Loop var initialization. */
1595 gfc_init_block (&block);
1596 gfc_add_modify_expr (&block, var, start);
1599 /* Initialize the loop counter. */
1600 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1601 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1602 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1603 gfc_add_modify_expr (&block, count, tmp);
1605 /* The loop expression. */
1606 tmp = build1_v (LOOP_EXPR, body);
1607 gfc_add_expr_to_block (&block, tmp);
1609 /* The exit label. */
1610 tmp = build1_v (LABEL_EXPR, exit_label);
1611 gfc_add_expr_to_block (&block, tmp);
1613 body = gfc_finish_block (&block);
1614 iter = iter->next;
1616 return body;
1620 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1621 is nonzero, the body is controlled by all masks in the forall nest.
1622    Otherwise, the innermost loop is not controlled by its mask. This
1623 is used for initializing that mask. */
1625 static tree
1626 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1627 int mask_flag)
1629 tree tmp;
1630 stmtblock_t header;
1631 forall_info *forall_tmp;
1632 tree mask, maskindex;
1634 gfc_start_block (&header);
1636 forall_tmp = nested_forall_info;
1637 while (forall_tmp != NULL)
1639 /* Generate body with masks' control. */
1640 if (mask_flag)
1642 mask = forall_tmp->mask;
1643 maskindex = forall_tmp->maskindex;
1645 /* If a mask was specified make the assignment conditional. */
1646 if (mask)
1648 tmp = gfc_build_array_ref (mask, maskindex);
1649 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1652 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1653 forall_tmp = forall_tmp->prev_nest;
1654 mask_flag = 1;
1657 gfc_add_expr_to_block (&header, body);
1658 return gfc_finish_block (&header);
1662 /* Allocate data for holding a temporary array. Returns either a local
1663 temporary array or a pointer variable. */
1665 static tree
1666 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1667 tree elem_type)
1669 tree tmpvar;
1670 tree type;
1671 tree tmp;
1673 if (INTEGER_CST_P (size))
1675 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1676 gfc_index_one_node);
1678 else
1679 tmp = NULL_TREE;
1681 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1682 type = build_array_type (elem_type, type);
1683 if (gfc_can_put_var_on_stack (bytesize))
1685 gcc_assert (INTEGER_CST_P (size));
1686 tmpvar = gfc_create_var (type, "temp");
1687 *pdata = NULL_TREE;
1689 else
1691 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1692 *pdata = convert (pvoid_type_node, tmpvar);
1694 if (gfc_index_integer_kind == 4)
1695 tmp = gfor_fndecl_internal_malloc;
1696 else if (gfc_index_integer_kind == 8)
1697 tmp = gfor_fndecl_internal_malloc64;
1698 else
1699 gcc_unreachable ();
1700 tmp = build_call_expr (tmp, 1, bytesize);
1701 tmp = convert (TREE_TYPE (tmpvar), tmp);
1702 gfc_add_modify_expr (pblock, tmpvar, tmp);
1704 return tmpvar;
1708 /* Generate code to copy the temporary to the actual lhs. */
1710 static tree
1711 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1712 tree count1, tree wheremask, bool invert)
1714 gfc_ss *lss;
1715 gfc_se lse, rse;
1716 stmtblock_t block, body;
1717 gfc_loopinfo loop1;
1718 tree tmp;
1719 tree wheremaskexpr;
1721 /* Walk the lhs. */
1722 lss = gfc_walk_expr (expr);
1724 if (lss == gfc_ss_terminator)
1726 gfc_start_block (&block);
1728 gfc_init_se (&lse, NULL);
1730 /* Translate the expression. */
1731 gfc_conv_expr (&lse, expr);
1733 /* Form the expression for the temporary. */
1734 tmp = gfc_build_array_ref (tmp1, count1);
1736 /* Use the scalar assignment as is. */
1737 gfc_add_block_to_block (&block, &lse.pre);
1738 gfc_add_modify_expr (&block, lse.expr, tmp);
1739 gfc_add_block_to_block (&block, &lse.post);
1741 /* Increment the count1. */
1742 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1743 gfc_index_one_node);
1744 gfc_add_modify_expr (&block, count1, tmp);
1746 tmp = gfc_finish_block (&block);
1748 else
1750 gfc_start_block (&block);
1752 gfc_init_loopinfo (&loop1);
1753 gfc_init_se (&rse, NULL);
1754 gfc_init_se (&lse, NULL);
1756 /* Associate the lss with the loop. */
1757 gfc_add_ss_to_loop (&loop1, lss);
1759 /* Calculate the bounds of the scalarization. */
1760 gfc_conv_ss_startstride (&loop1);
1761 /* Setup the scalarizing loops. */
1762 gfc_conv_loop_setup (&loop1);
1764 gfc_mark_ss_chain_used (lss, 1);
1766 /* Start the scalarized loop body. */
1767 gfc_start_scalarized_body (&loop1, &body);
1769 /* Setup the gfc_se structures. */
1770 gfc_copy_loopinfo_to_se (&lse, &loop1);
1771 lse.ss = lss;
1773 /* Form the expression of the temporary. */
1774 if (lss != gfc_ss_terminator)
1775 rse.expr = gfc_build_array_ref (tmp1, count1);
1776 /* Translate expr. */
1777 gfc_conv_expr (&lse, expr);
1779 /* Use the scalar assignment. */
1780 rse.string_length = lse.string_length;
1781 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1783 /* Form the mask expression according to the mask tree list. */
1784 if (wheremask)
1786 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1787 if (invert)
1788 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1789 TREE_TYPE (wheremaskexpr),
1790 wheremaskexpr);
1791 tmp = fold_build3 (COND_EXPR, void_type_node,
1792 wheremaskexpr, tmp, build_empty_stmt ());
1795 gfc_add_expr_to_block (&body, tmp);
1797 /* Increment count1. */
1798 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1799 count1, gfc_index_one_node);
1800 gfc_add_modify_expr (&body, count1, tmp);
1802 /* Increment count3. */
1803 if (count3)
1805 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1806 count3, gfc_index_one_node);
1807 gfc_add_modify_expr (&body, count3, tmp);
1810 /* Generate the copying loops. */
1811 gfc_trans_scalarizing_loops (&loop1, &body);
1812 gfc_add_block_to_block (&block, &loop1.pre);
1813 gfc_add_block_to_block (&block, &loop1.post);
1814 gfc_cleanup_loop (&loop1);
1816 tmp = gfc_finish_block (&block);
1818 return tmp;
1822 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
1823    the temporary, LSS and RSS are formed in function compute_inner_temp_size(),
1824 and should not be freed. WHEREMASK is the conditional execution mask
1825 whose sense may be inverted by INVERT. */
1827 static tree
1828 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1829 tree count1, gfc_ss *lss, gfc_ss *rss,
1830 tree wheremask, bool invert)
1832 stmtblock_t block, body1;
1833 gfc_loopinfo loop;
1834 gfc_se lse;
1835 gfc_se rse;
1836 tree tmp;
1837 tree wheremaskexpr;
1839 gfc_start_block (&block);
1841 gfc_init_se (&rse, NULL);
1842 gfc_init_se (&lse, NULL);
1844 if (lss == gfc_ss_terminator)
1846 gfc_init_block (&body1);
1847 gfc_conv_expr (&rse, expr2);
1848 lse.expr = gfc_build_array_ref (tmp1, count1);
1850 else
1852 /* Initialize the loop. */
1853 gfc_init_loopinfo (&loop);
1855 /* We may need LSS to determine the shape of the expression. */
1856 gfc_add_ss_to_loop (&loop, lss);
1857 gfc_add_ss_to_loop (&loop, rss);
1859 gfc_conv_ss_startstride (&loop);
1860 gfc_conv_loop_setup (&loop);
1862 gfc_mark_ss_chain_used (rss, 1);
1863 /* Start the loop body. */
1864 gfc_start_scalarized_body (&loop, &body1);
1866 /* Translate the expression. */
1867 gfc_copy_loopinfo_to_se (&rse, &loop);
1868 rse.ss = rss;
1869 gfc_conv_expr (&rse, expr2);
1871 /* Form the expression of the temporary. */
1872 lse.expr = gfc_build_array_ref (tmp1, count1);
1875 /* Use the scalar assignment. */
1876 lse.string_length = rse.string_length;
1877 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1878 expr2->expr_type == EXPR_VARIABLE);
1880 /* Form the mask expression according to the mask tree list. */
1881 if (wheremask)
1883 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1884 if (invert)
1885 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1886 TREE_TYPE (wheremaskexpr),
1887 wheremaskexpr);
1888 tmp = fold_build3 (COND_EXPR, void_type_node,
1889 wheremaskexpr, tmp, build_empty_stmt ());
1892 gfc_add_expr_to_block (&body1, tmp);
1894 if (lss == gfc_ss_terminator)
1896 gfc_add_block_to_block (&block, &body1);
1898 /* Increment count1. */
1899 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1900 gfc_index_one_node);
1901 gfc_add_modify_expr (&block, count1, tmp);
1903 else
1905 /* Increment count1. */
1906 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1907 count1, gfc_index_one_node);
1908 gfc_add_modify_expr (&body1, count1, tmp);
1910 /* Increment count3. */
1911 if (count3)
1913 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1914 count3, gfc_index_one_node);
1915 gfc_add_modify_expr (&body1, count3, tmp);
1918 /* Generate the copying loops. */
1919 gfc_trans_scalarizing_loops (&loop, &body1);
1921 gfc_add_block_to_block (&block, &loop.pre);
1922 gfc_add_block_to_block (&block, &loop.post);
1924 gfc_cleanup_loop (&loop);
1925 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1926      as tree nodes in SS may not be valid in a different scope. */
1929 tmp = gfc_finish_block (&block);
1930 return tmp;
1934 /* Calculate the size of temporary needed in the assignment inside forall.
1935 LSS and RSS are filled in this function. */
1937 static tree
1938 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1939 stmtblock_t * pblock,
1940 gfc_ss **lss, gfc_ss **rss)
1942 gfc_loopinfo loop;
1943 tree size;
1944 int i;
1945 int save_flag;
1946 tree tmp;
1948 *lss = gfc_walk_expr (expr1);
1949 *rss = NULL;
1951 size = gfc_index_one_node;
1952 if (*lss != gfc_ss_terminator)
1954 gfc_init_loopinfo (&loop);
1956 /* Walk the RHS of the expression. */
1957 *rss = gfc_walk_expr (expr2);
1958 if (*rss == gfc_ss_terminator)
1960 /* The rhs is scalar. Add a ss for the expression. */
1961 *rss = gfc_get_ss ();
1962 (*rss)->next = gfc_ss_terminator;
1963 (*rss)->type = GFC_SS_SCALAR;
1964 (*rss)->expr = expr2;
1967 /* Associate the SS with the loop. */
1968 gfc_add_ss_to_loop (&loop, *lss);
1969 /* We don't actually need to add the rhs at this point, but it might
1970 make guessing the loop bounds a bit easier. */
1971 gfc_add_ss_to_loop (&loop, *rss);
1973       /* We only want the shape of the expression, not the rest of the junk
1974 generated by the scalarizer. */
1975 loop.array_parameter = 1;
1977 /* Calculate the bounds of the scalarization. */
1978 save_flag = flag_bounds_check;
1979 flag_bounds_check = 0;
1980 gfc_conv_ss_startstride (&loop);
1981 flag_bounds_check = save_flag;
1982 gfc_conv_loop_setup (&loop);
1984 /* Figure out how many elements we need. */
1985 for (i = 0; i < loop.dimen; i++)
1987 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
1988 gfc_index_one_node, loop.from[i]);
1989 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1990 tmp, loop.to[i]);
1991 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
1993 gfc_add_block_to_block (pblock, &loop.pre);
1994 size = gfc_evaluate_now (size, pblock);
1995 gfc_add_block_to_block (pblock, &loop.post);
1997 /* TODO: write a function that cleans up a loopinfo without freeing
1998 the SS chains. Currently a NOP. */
2001 return size;
2005 /* Calculate the overall iterator number of the nested forall construct.
2006 This routine actually calculates the number of times the body of the
2007 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2008 that by the expression INNER_SIZE. The BLOCK argument specifies the
2009 block in which to calculate the result, and the optional INNER_SIZE_BODY
2010    argument contains any statements that need to be executed (inside the loop)
2011 to initialize or calculate INNER_SIZE. */
2013 static tree
2014 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2015 stmtblock_t *inner_size_body, stmtblock_t *block)
2017 forall_info *forall_tmp = nested_forall_info;
2018 tree tmp, number;
2019 stmtblock_t body;
2021 /* We can eliminate the innermost unconditional loops with constant
2022 array bounds. */
2023 if (INTEGER_CST_P (inner_size))
2025 while (forall_tmp
2026 && !forall_tmp->mask
2027 && INTEGER_CST_P (forall_tmp->size))
2029 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2030 inner_size, forall_tmp->size);
2031 forall_tmp = forall_tmp->prev_nest;
2034 /* If there are no loops left, we have our constant result. */
2035 if (!forall_tmp)
2036 return inner_size;
2039 /* Otherwise, create a temporary variable to compute the result. */
2040 number = gfc_create_var (gfc_array_index_type, "num");
2041 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2043 gfc_start_block (&body);
2044 if (inner_size_body)
2045 gfc_add_block_to_block (&body, inner_size_body);
2046 if (forall_tmp)
2047 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2048 inner_size);
2049 else
2050 tmp = inner_size;
2051 gfc_add_modify_expr (&body, number, tmp);
2052 tmp = gfc_finish_block (&body);
2054 /* Generate loops. */
2055 if (forall_tmp != NULL)
2056 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2058 gfc_add_expr_to_block (block, tmp);
2060 return number;
2064 /* Allocate a temporary for the forall construct. SIZE is the size of the
2065 temporary needed. PTEMP1 is returned so that the space can be freed later. */
2067 static tree
2068 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2069 tree * ptemp1)
2071 tree bytesize;
2072 tree unit;
2073 tree tmp;
2075 unit = TYPE_SIZE_UNIT (type);
2076 if (!integer_onep (unit))
2077 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2078 else
2079 bytesize = size;
2081 *ptemp1 = NULL;
2082 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2084 if (*ptemp1)
2085 tmp = build_fold_indirect_ref (tmp);
2086 return tmp;
2090 /* Allocate a temporary for the forall construct according to the information
2091 in nested_forall_info. INNER_SIZE is the size of the temporary needed by the
2092 assignment inside the forall. PTEMP1 is returned so the space can be freed. */
2094 static tree
2095 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2096 tree inner_size, stmtblock_t * inner_size_body,
2097 stmtblock_t * block, tree * ptemp1)
2099 tree size;
2101 /* Calculate the total size of temporary needed in forall construct. */
2102 size = compute_overall_iter_number (nested_forall_info, inner_size,
2103 inner_size_body, block);
2105 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2109 /* Handle assignments inside a forall which need a temporary.
2111 forall (i=start:end:stride; maskexpr)
2112 e<i> = f<i>
2113 end forall
2114 (where e,f<i> are arbitrary expressions possibly involving i
2115 and there is a dependency between e<i> and f<i>)
2116 Translates to:
2117 masktmp(:) = maskexpr(:)
2119 maskindex = 0;
2120 count1 = 0;
2121 num = 0;
2122 for (i = start; i <= end; i += stride)
2123 num += SIZE (f<i>)
2124 count1 = 0;
2125 ALLOCATE (tmp(num))
2126 for (i = start; i <= end; i += stride)
2128 if (masktmp[maskindex++])
2129 tmp[count1++] = f<i>
2131 maskindex = 0;
2132 count1 = 0;
2133 for (i = start; i <= end; i += stride)
2135 if (masktmp[maskindex++])
2136 e<i> = tmp[count1++]
2138 DEALLOCATE (tmp)
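/* A hand-written example of why the temporary is needed (illustrative, not
   taken from the testsuite):

       FORALL (i = 1:n-1)  a(i+1) = a(i)

   Every rhs value a(i) must be fetched before any lhs element a(i+1) is
   written, so the rhs is first gathered into tmp(:) and only afterwards
   copied back to the lhs, exactly as in the scheme above.  */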
2140 static void
2141 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2142 tree wheremask, bool invert,
2143 forall_info * nested_forall_info,
2144 stmtblock_t * block)
2146 tree type;
2147 tree inner_size;
2148 gfc_ss *lss, *rss;
2149 tree count, count1;
2150 tree tmp, tmp1;
2151 tree ptemp1;
2152 stmtblock_t inner_size_body;
2154 /* Create vars. count1 is the current iterator number of the nested
2155 forall. */
2156 count1 = gfc_create_var (gfc_array_index_type, "count1");
2158 /* Count is the wheremask index. */
2159 if (wheremask)
2161 count = gfc_create_var (gfc_array_index_type, "count");
2162 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2164 else
2165 count = NULL;
2167 /* Initialize count1. */
2168 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2170 /* Calculate the size of the temporary needed in the assignment. Also return
2171 lss and rss, which are used by generate_loop_for_rhs_to_temp(). */
2172 gfc_init_block (&inner_size_body);
2173 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2174 &lss, &rss);
2176 /* The type of the LHS. Used in allocate_temp_for_forall_nest. */
2177 type = gfc_typenode_for_spec (&expr1->ts);
2179 /* Allocate temporary for nested forall construct according to the
2180 information in nested_forall_info and inner_size. */
2181 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2182 &inner_size_body, block, &ptemp1);
2184 /* Generate code to copy the rhs to the temporary. */
2185 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2186 wheremask, invert);
2188 /* Generate body and loops according to the information in
2189 nested_forall_info. */
2190 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2191 gfc_add_expr_to_block (block, tmp);
2193 /* Reset count1. */
2194 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2196 /* Reset count. */
2197 if (wheremask)
2198 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2200 /* Generate code to copy the temporary back to the lhs. */
2201 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2202 wheremask, invert);
2204 /* Generate body and loops according to the information in
2205 nested_forall_info. */
2206 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2207 gfc_add_expr_to_block (block, tmp);
2209 if (ptemp1)
2211 /* Free the temporary. */
2212 tmp = build_call_expr (gfor_fndecl_internal_free, 1, ptemp1);
2213 gfc_add_expr_to_block (block, tmp);
2218 /* Translate pointer assignment inside FORALL which needs a temporary. */
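/* As a rough illustration (hypothetical example): in

       FORALL (i = 1:n)  p(i)%q => p(n+1-i)%q

   the rhs targets must all be captured before any pointer on the lhs is
   redefined, so the rhs pointers (or, for the array-section case handled
   below, whole descriptors) are staged in the temporary first.  */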
2220 static void
2221 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2222 forall_info * nested_forall_info,
2223 stmtblock_t * block)
2225 tree type;
2226 tree inner_size;
2227 gfc_ss *lss, *rss;
2228 gfc_se lse;
2229 gfc_se rse;
2230 gfc_ss_info *info;
2231 gfc_loopinfo loop;
2232 tree desc;
2233 tree parm;
2234 tree parmtype;
2235 stmtblock_t body;
2236 tree count;
2237 tree tmp, tmp1, ptemp1;
2239 count = gfc_create_var (gfc_array_index_type, "count");
2240 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2242 inner_size = integer_one_node;
2243 lss = gfc_walk_expr (expr1);
2244 rss = gfc_walk_expr (expr2);
2245 if (lss == gfc_ss_terminator)
2247 type = gfc_typenode_for_spec (&expr1->ts);
2248 type = build_pointer_type (type);
2250 /* Allocate temporary for nested forall construct according to the
2251 information in nested_forall_info and inner_size. */
2252 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2253 inner_size, NULL, block, &ptemp1);
2254 gfc_start_block (&body);
2255 gfc_init_se (&lse, NULL);
2256 lse.expr = gfc_build_array_ref (tmp1, count);
2257 gfc_init_se (&rse, NULL);
2258 rse.want_pointer = 1;
2259 gfc_conv_expr (&rse, expr2);
2260 gfc_add_block_to_block (&body, &rse.pre);
2261 gfc_add_modify_expr (&body, lse.expr,
2262 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2263 gfc_add_block_to_block (&body, &rse.post);
2265 /* Increment count. */
2266 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2267 count, gfc_index_one_node);
2268 gfc_add_modify_expr (&body, count, tmp);
2270 tmp = gfc_finish_block (&body);
2272 /* Generate body and loops according to the information in
2273 nested_forall_info. */
2274 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2275 gfc_add_expr_to_block (block, tmp);
2277 /* Reset count. */
2278 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2280 gfc_start_block (&body);
2281 gfc_init_se (&lse, NULL);
2282 gfc_init_se (&rse, NULL);
2283 rse.expr = gfc_build_array_ref (tmp1, count);
2284 lse.want_pointer = 1;
2285 gfc_conv_expr (&lse, expr1);
2286 gfc_add_block_to_block (&body, &lse.pre);
2287 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2288 gfc_add_block_to_block (&body, &lse.post);
2289 /* Increment count. */
2290 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2291 count, gfc_index_one_node);
2292 gfc_add_modify_expr (&body, count, tmp);
2293 tmp = gfc_finish_block (&body);
2295 /* Generate body and loops according to the information in
2296 nested_forall_info. */
2297 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2298 gfc_add_expr_to_block (block, tmp);
2300 else
2302 gfc_init_loopinfo (&loop);
2304 /* Associate the SS with the loop. */
2305 gfc_add_ss_to_loop (&loop, rss);
2307 /* Setup the scalarizing loops and bounds. */
2308 gfc_conv_ss_startstride (&loop);
2310 gfc_conv_loop_setup (&loop);
2312 info = &rss->data.info;
2313 desc = info->descriptor;
2315 /* Make a new descriptor. */
2316 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2317 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2318 loop.from, loop.to, 1);
2320 /* Allocate temporary for nested forall construct. */
2321 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2322 inner_size, NULL, block, &ptemp1);
2323 gfc_start_block (&body);
2324 gfc_init_se (&lse, NULL);
2325 lse.expr = gfc_build_array_ref (tmp1, count);
2326 lse.direct_byref = 1;
2327 rss = gfc_walk_expr (expr2);
2328 gfc_conv_expr_descriptor (&lse, expr2, rss);
2330 gfc_add_block_to_block (&body, &lse.pre);
2331 gfc_add_block_to_block (&body, &lse.post);
2333 /* Increment count. */
2334 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2335 count, gfc_index_one_node);
2336 gfc_add_modify_expr (&body, count, tmp);
2338 tmp = gfc_finish_block (&body);
2340 /* Generate body and loops according to the information in
2341 nested_forall_info. */
2342 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2343 gfc_add_expr_to_block (block, tmp);
2345 /* Reset count. */
2346 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2348 parm = gfc_build_array_ref (tmp1, count);
2349 lss = gfc_walk_expr (expr1);
2350 gfc_init_se (&lse, NULL);
2351 gfc_conv_expr_descriptor (&lse, expr1, lss);
2352 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2353 gfc_start_block (&body);
2354 gfc_add_block_to_block (&body, &lse.pre);
2355 gfc_add_block_to_block (&body, &lse.post);
2357 /* Increment count. */
2358 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2359 count, gfc_index_one_node);
2360 gfc_add_modify_expr (&body, count, tmp);
2362 tmp = gfc_finish_block (&body);
2364 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2365 gfc_add_expr_to_block (block, tmp);
2367 /* Free the temporary. */
2368 if (ptemp1)
2370 tmp = build_call_expr (gfor_fndecl_internal_free, 1, ptemp1);
2371 gfc_add_expr_to_block (block, tmp);
2376 /* FORALL and WHERE statements are really nasty, especially when you nest
2377 them. The rhs of a forall assignment must be evaluated for all iterations
2378 before any of the actual assignments are performed. Presumably this also
2379 applies to all the assignments in an inner where statement. */
2381 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2382 linear array, relying on the fact that we process in the same order in all
2383 loops.
2385 forall (i=start:end:stride; maskexpr)
2386 e<i> = f<i>
2387 g<i> = h<i>
2388 end forall
2389 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2390 Translates to:
2391 count = ((end - start + stride) / stride)
2392 masktmp(:) = maskexpr(:)
2394 maskindex = 0;
2395 for (i = start; i <= end; i += stride)
2397 if (masktmp[maskindex++])
2398 e<i> = f<i>
2400 maskindex = 0;
2401 for (i = start; i <= end; i += stride)
2403 if (masktmp[maskindex++])
2404 g<i> = h<i>
2407 Note that this code only works when there are no dependencies.
2408 Forall loops with array assignments and data dependencies are a real pain,
2409 because the size of the temporary cannot always be determined before the
2410 loop is executed. This problem is compounded by the presence of nested
2411 FORALL constructs.
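/* A small worked example of the iteration count computed below (illustrative
   only): for i = 1, 10, 3 the count is (10 - 1 + 3) / 3 = 4, matching the
   iterations i = 1, 4, 7, 10.  */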
2414 static tree
2415 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2417 stmtblock_t block;
2418 stmtblock_t body;
2419 tree *var;
2420 tree *start;
2421 tree *end;
2422 tree *step;
2423 gfc_expr **varexpr;
2424 tree tmp;
2425 tree assign;
2426 tree size;
2427 tree maskindex;
2428 tree mask;
2429 tree pmask;
2430 int n;
2431 int nvar;
2432 int need_temp;
2433 gfc_forall_iterator *fa;
2434 gfc_se se;
2435 gfc_code *c;
2436 gfc_saved_var *saved_vars;
2437 iter_info *this_forall;
2438 forall_info *info;
2439 bool need_mask;
2441 /* Do nothing if the mask is false. */
2442 if (code->expr
2443 && code->expr->expr_type == EXPR_CONSTANT
2444 && !code->expr->value.logical)
2445 return build_empty_stmt ();
2447 n = 0;
2448 /* Count the number of FORALL indexes. */
2449 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2450 n++;
2451 nvar = n;
2453 /* Allocate the space for var, start, end, step, varexpr. */
2454 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2455 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2456 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2457 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2458 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2459 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2461 /* Allocate the space for info. */
2462 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2464 gfc_start_block (&block);
2466 n = 0;
2467 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2469 gfc_symbol *sym = fa->var->symtree->n.sym;
2471 /* Allocate space for this_forall. */
2472 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2474 /* Create a temporary variable for the FORALL index. */
2475 tmp = gfc_typenode_for_spec (&sym->ts);
2476 var[n] = gfc_create_var (tmp, sym->name);
2477 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2479 /* Record it in this_forall. */
2480 this_forall->var = var[n];
2482 /* Replace the index symbol's backend_decl with the temporary decl. */
2483 sym->backend_decl = var[n];
2485 /* Work out the start, end and stride for the loop. */
2486 gfc_init_se (&se, NULL);
2487 gfc_conv_expr_val (&se, fa->start);
2488 /* Record it in this_forall. */
2489 this_forall->start = se.expr;
2490 gfc_add_block_to_block (&block, &se.pre);
2491 start[n] = se.expr;
2493 gfc_init_se (&se, NULL);
2494 gfc_conv_expr_val (&se, fa->end);
2495 /* Record it in this_forall. */
2496 this_forall->end = se.expr;
2497 gfc_make_safe_expr (&se);
2498 gfc_add_block_to_block (&block, &se.pre);
2499 end[n] = se.expr;
2501 gfc_init_se (&se, NULL);
2502 gfc_conv_expr_val (&se, fa->stride);
2503 /* Record it in this_forall. */
2504 this_forall->step = se.expr;
2505 gfc_make_safe_expr (&se);
2506 gfc_add_block_to_block (&block, &se.pre);
2507 step[n] = se.expr;
2509 /* Set the NEXT field of this_forall to NULL. */
2510 this_forall->next = NULL;
2511 /* Link this_forall to the info construct. */
2512 if (info->this_loop)
2514 iter_info *iter_tmp = info->this_loop;
2515 while (iter_tmp->next != NULL)
2516 iter_tmp = iter_tmp->next;
2517 iter_tmp->next = this_forall;
2519 else
2520 info->this_loop = this_forall;
2522 n++;
2524 nvar = n;
2526 /* Calculate the size needed for the current forall level. */
2527 size = gfc_index_one_node;
2528 for (n = 0; n < nvar; n++)
2530 /* size = (end + step - start) / step. */
2531 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2532 step[n], start[n]);
2533 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2535 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2536 tmp = convert (gfc_array_index_type, tmp);
2538 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2541 /* Record the nvar and size of current forall level. */
2542 info->nvar = nvar;
2543 info->size = size;
2545 if (code->expr)
2547 /* If the mask is .true., consider the FORALL unconditional. */
2548 if (code->expr->expr_type == EXPR_CONSTANT
2549 && code->expr->value.logical)
2550 need_mask = false;
2551 else
2552 need_mask = true;
2554 else
2555 need_mask = false;
2557 /* First we need to allocate the mask. */
2558 if (need_mask)
2560 /* As the mask array can be very big, prefer compact boolean types. */
2561 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2562 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2563 size, NULL, &block, &pmask);
2564 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2566 /* Record them in the info structure. */
2567 info->maskindex = maskindex;
2568 info->mask = mask;
2570 else
2572 /* No mask was specified. */
2573 maskindex = NULL_TREE;
2574 mask = pmask = NULL_TREE;
2577 /* Link the current forall level to nested_forall_info. */
2578 info->prev_nest = nested_forall_info;
2579 nested_forall_info = info;
2581 /* Copy the mask into a temporary variable if required.
2582 For now we assume a mask temporary is needed. */
2583 if (need_mask)
2585 /* As the mask array can be very big, prefer compact boolean types. */
2586 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2588 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2590 /* Start of mask assignment loop body. */
2591 gfc_start_block (&body);
2593 /* Evaluate the mask expression. */
2594 gfc_init_se (&se, NULL);
2595 gfc_conv_expr_val (&se, code->expr);
2596 gfc_add_block_to_block (&body, &se.pre);
2598 /* Store the mask. */
2599 se.expr = convert (mask_type, se.expr);
2601 tmp = gfc_build_array_ref (mask, maskindex);
2602 gfc_add_modify_expr (&body, tmp, se.expr);
2604 /* Advance to the next mask element. */
2605 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2606 maskindex, gfc_index_one_node);
2607 gfc_add_modify_expr (&body, maskindex, tmp);
2609 /* Generate the loops. */
2610 tmp = gfc_finish_block (&body);
2611 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2612 gfc_add_expr_to_block (&block, tmp);
2615 c = code->block->next;
2617 /* TODO: loop merging in FORALL statements. */
2618 /* Now that we've got a copy of the mask, generate the assignment loops. */
2619 while (c)
2621 switch (c->op)
2623 case EXEC_ASSIGN:
2624 /* A scalar or array assignment. */
2625 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2626 /* Temporaries due to array assignment data dependencies introduce
2627 no end of problems. */
2628 if (need_temp)
2629 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2630 nested_forall_info, &block);
2631 else
2633 /* Use the normal assignment copying routines. */
2634 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2636 /* Generate body and loops. */
2637 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2638 assign, 1);
2639 gfc_add_expr_to_block (&block, tmp);
2642 break;
2644 case EXEC_WHERE:
2645 /* Translate WHERE or WHERE construct nested in FORALL. */
2646 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2647 break;
2649 /* Pointer assignment inside FORALL. */
2650 case EXEC_POINTER_ASSIGN:
2651 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2652 if (need_temp)
2653 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2654 nested_forall_info, &block);
2655 else
2657 /* Use the normal assignment copying routines. */
2658 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2660 /* Generate body and loops. */
2661 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2662 assign, 1);
2663 gfc_add_expr_to_block (&block, tmp);
2665 break;
2667 case EXEC_FORALL:
2668 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2669 gfc_add_expr_to_block (&block, tmp);
2670 break;
2672 /* Explicit subroutine calls are prevented by the frontend but interface
2673 assignments can legitimately produce them. */
2674 case EXEC_ASSIGN_CALL:
2675 assign = gfc_trans_call (c, true);
2676 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2677 gfc_add_expr_to_block (&block, tmp);
2678 break;
2680 default:
2681 gcc_unreachable ();
2684 c = c->next;
2687 /* Restore the original index variables. */
2688 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2689 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2691 /* Free the space for var, start, end, step, varexpr. */
2692 gfc_free (var);
2693 gfc_free (start);
2694 gfc_free (end);
2695 gfc_free (step);
2696 gfc_free (varexpr);
2697 gfc_free (saved_vars);
2699 /* Free the space for this forall_info. */
2700 gfc_free (info);
2702 if (pmask)
2704 /* Free the temporary for the mask. */
2705 tmp = build_call_expr (gfor_fndecl_internal_free, 1, pmask);
2706 gfc_add_expr_to_block (&block, tmp);
2708 if (maskindex)
2709 pushdecl (maskindex);
2711 return gfc_finish_block (&block);
2715 /* Translate the FORALL statement or construct. */
2717 tree gfc_trans_forall (gfc_code * code)
2719 return gfc_trans_forall_1 (code, NULL);
2723 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2724 If the WHERE construct is nested in a FORALL, the overall temporary
2725 needed is the size required by the WHERE mask expression multiplied
2726 by the iteration count of the nested forall.
2727 ME is the WHERE mask expression.
2728 MASK is the current execution mask upon input, whose sense may or may
2729 not be inverted as specified by the INVERT argument.
2730 CMASK is the updated execution mask on output, or NULL if not required.
2731 PMASK is the pending execution mask on output, or NULL if not required.
2732 BLOCK is the block in which to place the condition evaluation loops. */
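/* Informally, the loops generated below store (a sketch, not literal code):

       cmask[i] = mask[i] .AND. cond(i)
       pmask[i] = mask[i] .AND. .NOT. cond(i)

   where mask[i] is taken as .TRUE. when no control mask is active, and its
   sense is flipped first when INVERT is set.  */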
2734 static void
2735 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2736 tree mask, bool invert, tree cmask, tree pmask,
2737 tree mask_type, stmtblock_t * block)
2739 tree tmp, tmp1;
2740 gfc_ss *lss, *rss;
2741 gfc_loopinfo loop;
2742 stmtblock_t body, body1;
2743 tree count, cond, mtmp;
2744 gfc_se lse, rse;
2746 gfc_init_loopinfo (&loop);
2748 lss = gfc_walk_expr (me);
2749 rss = gfc_walk_expr (me);
2751 /* Variable to index the temporary. */
2752 count = gfc_create_var (gfc_array_index_type, "count");
2753 /* Initialize count. */
2754 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2756 gfc_start_block (&body);
2758 gfc_init_se (&rse, NULL);
2759 gfc_init_se (&lse, NULL);
2761 if (lss == gfc_ss_terminator)
2763 gfc_init_block (&body1);
2765 else
2767 /* Initialize the loop. */
2768 gfc_init_loopinfo (&loop);
2770 /* We may need LSS to determine the shape of the expression. */
2771 gfc_add_ss_to_loop (&loop, lss);
2772 gfc_add_ss_to_loop (&loop, rss);
2774 gfc_conv_ss_startstride (&loop);
2775 gfc_conv_loop_setup (&loop);
2777 gfc_mark_ss_chain_used (rss, 1);
2778 /* Start the loop body. */
2779 gfc_start_scalarized_body (&loop, &body1);
2781 /* Translate the expression. */
2782 gfc_copy_loopinfo_to_se (&rse, &loop);
2783 rse.ss = rss;
2784 gfc_conv_expr (&rse, me);
2787 /* Variable to evaluate mask condition. */
2788 cond = gfc_create_var (mask_type, "cond");
2789 if (mask && (cmask || pmask))
2790 mtmp = gfc_create_var (mask_type, "mask");
2791 else mtmp = NULL_TREE;
2793 gfc_add_block_to_block (&body1, &lse.pre);
2794 gfc_add_block_to_block (&body1, &rse.pre);
2796 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2798 if (mask && (cmask || pmask))
2800 tmp = gfc_build_array_ref (mask, count);
2801 if (invert)
2802 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2803 gfc_add_modify_expr (&body1, mtmp, tmp);
2806 if (cmask)
2808 tmp1 = gfc_build_array_ref (cmask, count);
2809 tmp = cond;
2810 if (mask)
2811 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2812 gfc_add_modify_expr (&body1, tmp1, tmp);
2815 if (pmask)
2817 tmp1 = gfc_build_array_ref (pmask, count);
2818 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2819 if (mask)
2820 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2821 gfc_add_modify_expr (&body1, tmp1, tmp);
2824 gfc_add_block_to_block (&body1, &lse.post);
2825 gfc_add_block_to_block (&body1, &rse.post);
2827 if (lss == gfc_ss_terminator)
2829 gfc_add_block_to_block (&body, &body1);
2831 else
2833 /* Increment count. */
2834 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2835 gfc_index_one_node);
2836 gfc_add_modify_expr (&body1, count, tmp1);
2838 /* Generate the copying loops. */
2839 gfc_trans_scalarizing_loops (&loop, &body1);
2841 gfc_add_block_to_block (&body, &loop.pre);
2842 gfc_add_block_to_block (&body, &loop.post);
2844 gfc_cleanup_loop (&loop);
2845 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2846 as tree nodes in SS may not be valid in a different scope. */
2849 tmp1 = gfc_finish_block (&body);
2850 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2851 if (nested_forall_info != NULL)
2852 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
2854 gfc_add_expr_to_block (block, tmp1);
2858 /* Translate an assignment statement in a WHERE statement or WHERE
2859 construct. The MASK expression is used to control which elements
2860 of EXPR1 shall be assigned. The sense of MASK is specified by
2861 INVERT. */
2863 static tree
2864 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2865 tree mask, bool invert,
2866 tree count1, tree count2,
2867 gfc_symbol *sym)
2869 gfc_se lse;
2870 gfc_se rse;
2871 gfc_ss *lss;
2872 gfc_ss *lss_section;
2873 gfc_ss *rss;
2875 gfc_loopinfo loop;
2876 tree tmp;
2877 stmtblock_t block;
2878 stmtblock_t body;
2879 tree index, maskexpr;
2881 #if 0
2882 /* TODO: handle this special case.
2883 Special case a single function returning an array. */
2884 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2886 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2887 if (tmp)
2888 return tmp;
2890 #endif
2892 /* Assignment of the form lhs = rhs. */
2893 gfc_start_block (&block);
2895 gfc_init_se (&lse, NULL);
2896 gfc_init_se (&rse, NULL);
2898 /* Walk the lhs. */
2899 lss = gfc_walk_expr (expr1);
2900 rss = NULL;
2902 /* In each where-assign-stmt, the mask-expr and the variable being
2903 defined shall be arrays of the same shape. */
2904 gcc_assert (lss != gfc_ss_terminator);
2906 /* The assignment needs scalarization. */
2907 lss_section = lss;
2909 /* Find a non-scalar SS from the lhs. */
2910 while (lss_section != gfc_ss_terminator
2911 && lss_section->type != GFC_SS_SECTION)
2912 lss_section = lss_section->next;
2914 gcc_assert (lss_section != gfc_ss_terminator);
2916 /* Initialize the scalarizer. */
2917 gfc_init_loopinfo (&loop);
2919 /* Walk the rhs. */
2920 rss = gfc_walk_expr (expr2);
2921 if (rss == gfc_ss_terminator)
2923 /* The rhs is scalar. Add a ss for the expression. */
2924 rss = gfc_get_ss ();
2925 rss->next = gfc_ss_terminator;
2926 rss->type = GFC_SS_SCALAR;
2927 rss->expr = expr2;
2930 /* Associate the SS with the loop. */
2931 gfc_add_ss_to_loop (&loop, lss);
2932 gfc_add_ss_to_loop (&loop, rss);
2934 /* Calculate the bounds of the scalarization. */
2935 gfc_conv_ss_startstride (&loop);
2937 /* Resolve any data dependencies in the statement. */
2938 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2940 /* Setup the scalarizing loops. */
2941 gfc_conv_loop_setup (&loop);
2943 /* Setup the gfc_se structures. */
2944 gfc_copy_loopinfo_to_se (&lse, &loop);
2945 gfc_copy_loopinfo_to_se (&rse, &loop);
2947 rse.ss = rss;
2948 gfc_mark_ss_chain_used (rss, 1);
2949 if (loop.temp_ss == NULL)
2951 lse.ss = lss;
2952 gfc_mark_ss_chain_used (lss, 1);
2954 else
2956 lse.ss = loop.temp_ss;
2957 gfc_mark_ss_chain_used (lss, 3);
2958 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2961 /* Start the scalarized loop body. */
2962 gfc_start_scalarized_body (&loop, &body);
2964 /* Translate the expression. */
2965 gfc_conv_expr (&rse, expr2);
2966 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2968 gfc_conv_tmp_array_ref (&lse);
2969 gfc_advance_se_ss_chain (&lse);
2971 else
2972 gfc_conv_expr (&lse, expr1);
2974 /* Form the mask expression according to the mask. */
2975 index = count1;
2976 maskexpr = gfc_build_array_ref (mask, index);
2977 if (invert)
2978 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2980 /* Use the scalar assignment as is. */
2981 if (sym == NULL)
2982 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
2983 loop.temp_ss != NULL, false);
2984 else
2985 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
2987 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2989 gfc_add_expr_to_block (&body, tmp);
2991 if (lss == gfc_ss_terminator)
2993 /* Increment count1. */
2994 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2995 count1, gfc_index_one_node);
2996 gfc_add_modify_expr (&body, count1, tmp);
2998 /* Use the scalar assignment as is. */
2999 gfc_add_block_to_block (&block, &body);
3001 else
3003 gcc_assert (lse.ss == gfc_ss_terminator
3004 && rse.ss == gfc_ss_terminator);
3006 if (loop.temp_ss != NULL)
3008 /* Increment count1 before finishing the main body of a scalarized
3009 expression. */
3010 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3011 count1, gfc_index_one_node);
3012 gfc_add_modify_expr (&body, count1, tmp);
3013 gfc_trans_scalarized_loop_boundary (&loop, &body);
3015 /* We need to copy the temporary to the actual lhs. */
3016 gfc_init_se (&lse, NULL);
3017 gfc_init_se (&rse, NULL);
3018 gfc_copy_loopinfo_to_se (&lse, &loop);
3019 gfc_copy_loopinfo_to_se (&rse, &loop);
3021 rse.ss = loop.temp_ss;
3022 lse.ss = lss;
3024 gfc_conv_tmp_array_ref (&rse);
3025 gfc_advance_se_ss_chain (&rse);
3026 gfc_conv_expr (&lse, expr1);
3028 gcc_assert (lse.ss == gfc_ss_terminator
3029 && rse.ss == gfc_ss_terminator);
3031 /* Form the mask expression according to the mask tree list. */
3032 index = count2;
3033 maskexpr = gfc_build_array_ref (mask, index);
3034 if (invert)
3035 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3036 maskexpr);
3038 /* Use the scalar assignment as is. */
3039 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3040 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3041 gfc_add_expr_to_block (&body, tmp);
3043 /* Increment count2. */
3044 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3045 count2, gfc_index_one_node);
3046 gfc_add_modify_expr (&body, count2, tmp);
3048 else
3050 /* Increment count1. */
3051 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3052 count1, gfc_index_one_node);
3053 gfc_add_modify_expr (&body, count1, tmp);
3056 /* Generate the copying loops. */
3057 gfc_trans_scalarizing_loops (&loop, &body);
3059 /* Wrap the whole thing up. */
3060 gfc_add_block_to_block (&block, &loop.pre);
3061 gfc_add_block_to_block (&block, &loop.post);
3062 gfc_cleanup_loop (&loop);
3065 return gfc_finish_block (&block);
3069 /* Translate the WHERE construct or statement.
3070 This function can be called recursively to translate nested WHERE
3071 constructs or statements.
3072 MASK is the control mask. */
3074 static void
3075 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3076 forall_info * nested_forall_info, stmtblock_t * block)
3078 stmtblock_t inner_size_body;
3079 tree inner_size, size;
3080 gfc_ss *lss, *rss;
3081 tree mask_type;
3082 gfc_expr *expr1;
3083 gfc_expr *expr2;
3084 gfc_code *cblock;
3085 gfc_code *cnext;
3086 tree tmp;
3087 tree count1, count2;
3088 bool need_cmask;
3089 bool need_pmask;
3090 int need_temp;
3091 tree pcmask = NULL_TREE;
3092 tree ppmask = NULL_TREE;
3093 tree cmask = NULL_TREE;
3094 tree pmask = NULL_TREE;
3095 gfc_actual_arglist *arg;
3097 /* The WHERE statement or the WHERE construct statement. */
3098 cblock = code->block;
3100 /* As the mask array can be very big, prefer compact boolean types. */
3101 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3103 /* Determine which temporary masks are needed. */
3104 if (!cblock->block)
3106 /* One clause: No ELSEWHEREs. */
3107 need_cmask = (cblock->next != 0);
3108 need_pmask = false;
3110 else if (cblock->block->block)
3112 /* Three or more clauses: Conditional ELSEWHEREs. */
3113 need_cmask = true;
3114 need_pmask = true;
3116 else if (cblock->next)
3118 /* Two clauses, the first non-empty. */
3119 need_cmask = true;
3120 need_pmask = (mask != NULL_TREE
3121 && cblock->block->next != 0);
3123 else if (!cblock->block->next)
3125 /* Two clauses, both empty. */
3126 need_cmask = false;
3127 need_pmask = false;
3129 /* Two clauses, the first empty, the second non-empty. */
3130 else if (mask)
3132 need_cmask = (cblock->block->expr != 0);
3133 need_pmask = true;
3135 else
3137 need_cmask = true;
3138 need_pmask = false;
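/* For the common top-level two-clause case, WHERE ... ELSEWHERE ... END WHERE
   with no incoming control mask, only cmask is allocated here; the final
   ELSEWHERE clause later reuses it with the sense inverted (see the mask
   update at the bottom of the main loop below).  */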
3141 if (need_cmask || need_pmask)
3143 /* Calculate the size of temporary needed by the mask-expr. */
3144 gfc_init_block (&inner_size_body);
3145 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3146 &inner_size_body, &lss, &rss);
3148 /* Calculate the total size of temporary needed. */
3149 size = compute_overall_iter_number (nested_forall_info, inner_size,
3150 &inner_size_body, block);
3152 /* Allocate temporary for WHERE mask if needed. */
3153 if (need_cmask)
3154 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3155 &pcmask);
3157 /* Allocate temporary for !mask if needed. */
3158 if (need_pmask)
3159 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3160 &ppmask);
3163 while (cblock)
3165 /* Each time around this loop, the where clause is conditional
3166 on the value of mask and invert, which are updated at the
3167 bottom of the loop. */
3169 /* Has mask-expr. */
3170 if (cblock->expr)
3172 /* Ensure that the WHERE mask will be evaluated exactly once.
3173 If there are no statements in this WHERE/ELSEWHERE clause,
3174 then we don't need to update the control mask (cmask).
3175 If this is the last clause of the WHERE construct, then
3176 we don't need to update the pending control mask (pmask). */
3177 if (mask)
3178 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3179 mask, invert,
3180 cblock->next ? cmask : NULL_TREE,
3181 cblock->block ? pmask : NULL_TREE,
3182 mask_type, block);
3183 else
3184 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3185 NULL_TREE, false,
3186 (cblock->next || cblock->block)
3187 ? cmask : NULL_TREE,
3188 NULL_TREE, mask_type, block);
3190 invert = false;
3192 /* It's a final elsewhere-stmt. No mask-expr is present. */
3193 else
3194 cmask = mask;
3196 /* The body of this where clause is controlled by cmask, with its
3197 sense specified by invert. */
3199 /* Get the assignment statement of a WHERE statement, or the first
3200 statement in the where-body-construct of a WHERE construct. */
3201 cnext = cblock->next;
3202 while (cnext)
3204 switch (cnext->op)
3206 /* WHERE assignment statement. */
3207 case EXEC_ASSIGN_CALL:
3209 arg = cnext->ext.actual;
3210 expr1 = expr2 = NULL;
3211 for (; arg; arg = arg->next)
3213 if (!arg->expr)
3214 continue;
3215 if (expr1 == NULL)
3216 expr1 = arg->expr;
3217 else
3218 expr2 = arg->expr;
3220 goto evaluate;
3222 case EXEC_ASSIGN:
3223 expr1 = cnext->expr;
3224 expr2 = cnext->expr2;
3225 evaluate:
3226 if (nested_forall_info != NULL)
3228 need_temp = gfc_check_dependency (expr1, expr2, 0);
3229 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3230 gfc_trans_assign_need_temp (expr1, expr2,
3231 cmask, invert,
3232 nested_forall_info, block);
3233 else
3235 /* Variables to control maskexpr. */
3236 count1 = gfc_create_var (gfc_array_index_type, "count1");
3237 count2 = gfc_create_var (gfc_array_index_type, "count2");
3238 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3239 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3241 tmp = gfc_trans_where_assign (expr1, expr2,
3242 cmask, invert,
3243 count1, count2,
3244 cnext->resolved_sym);
3246 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3247 tmp, 1);
3248 gfc_add_expr_to_block (block, tmp);
3251 else
3253 /* Variables to control maskexpr. */
3254 count1 = gfc_create_var (gfc_array_index_type, "count1");
3255 count2 = gfc_create_var (gfc_array_index_type, "count2");
3256 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3257 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3259 tmp = gfc_trans_where_assign (expr1, expr2,
3260 cmask, invert,
3261 count1, count2,
3262 cnext->resolved_sym);
3263 gfc_add_expr_to_block (block, tmp);
3266 break;
3268 /* WHERE or WHERE construct is part of a where-body-construct. */
3269 case EXEC_WHERE:
3270 gfc_trans_where_2 (cnext, cmask, invert,
3271 nested_forall_info, block);
3272 break;
3274 default:
3275 gcc_unreachable ();
3278 /* The next statement within the same where-body-construct. */
3279 cnext = cnext->next;
3281 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3282 cblock = cblock->block;
3283 if (mask == NULL_TREE)
3285 /* If we're the initial WHERE, we can simply invert the sense
3286 of the current mask to obtain the "mask" for the remaining
3287 ELSEWHEREs. */
3288 invert = true;
3289 mask = cmask;
3291 else
3293 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3294 invert = false;
3295 mask = pmask;
3299 /* If we allocated a pending mask array, deallocate it now. */
3300 if (ppmask)
3302 tmp = build_call_expr (gfor_fndecl_internal_free, 1, ppmask);
3303 gfc_add_expr_to_block (block, tmp);
3306 /* If we allocated a current mask array, deallocate it now. */
3307 if (pcmask)
3309 tmp = build_call_expr (gfor_fndecl_internal_free, 1, pcmask);
3310 gfc_add_expr_to_block (block, tmp);
3314 /* Translate a simple WHERE construct or statement without dependencies.
3315 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3316 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3317 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
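/* For example (illustrative), a construct such as

       WHERE (a > 0.0)
         b = a
       ELSEWHERE
         b = 0.0
       END WHERE

   with no dependencies between the mask, b and either rhs is handled here
   as a single scalarized loop containing one conditional assignment, with
   no mask temporary at all.  */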
3319 static tree
3320 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3322 stmtblock_t block, body;
3323 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3324 tree tmp, cexpr, tstmt, estmt;
3325 gfc_ss *css, *tdss, *tsss;
3326 gfc_se cse, tdse, tsse, edse, esse;
3327 gfc_loopinfo loop;
3328 gfc_ss *edss = 0;
3329 gfc_ss *esss = 0;
3331 cond = cblock->expr;
3332 tdst = cblock->next->expr;
3333 tsrc = cblock->next->expr2;
3334 edst = eblock ? eblock->next->expr : NULL;
3335 esrc = eblock ? eblock->next->expr2 : NULL;
3337 gfc_start_block (&block);
3338 gfc_init_loopinfo (&loop);
3340 /* Handle the condition. */
3341 gfc_init_se (&cse, NULL);
3342 css = gfc_walk_expr (cond);
3343 gfc_add_ss_to_loop (&loop, css);
3345 /* Handle the then-clause. */
3346 gfc_init_se (&tdse, NULL);
3347 gfc_init_se (&tsse, NULL);
3348 tdss = gfc_walk_expr (tdst);
3349 tsss = gfc_walk_expr (tsrc);
3350 if (tsss == gfc_ss_terminator)
3352 tsss = gfc_get_ss ();
3353 tsss->next = gfc_ss_terminator;
3354 tsss->type = GFC_SS_SCALAR;
3355 tsss->expr = tsrc;
3357 gfc_add_ss_to_loop (&loop, tdss);
3358 gfc_add_ss_to_loop (&loop, tsss);
3360 if (eblock)
3362 /* Handle the else clause. */
3363 gfc_init_se (&edse, NULL);
3364 gfc_init_se (&esse, NULL);
3365 edss = gfc_walk_expr (edst);
3366 esss = gfc_walk_expr (esrc);
3367 if (esss == gfc_ss_terminator)
3369 esss = gfc_get_ss ();
3370 esss->next = gfc_ss_terminator;
3371 esss->type = GFC_SS_SCALAR;
3372 esss->expr = esrc;
3374 gfc_add_ss_to_loop (&loop, edss);
3375 gfc_add_ss_to_loop (&loop, esss);
3378 gfc_conv_ss_startstride (&loop);
3379 gfc_conv_loop_setup (&loop);
3381 gfc_mark_ss_chain_used (css, 1);
3382 gfc_mark_ss_chain_used (tdss, 1);
3383 gfc_mark_ss_chain_used (tsss, 1);
3384 if (eblock)
3386 gfc_mark_ss_chain_used (edss, 1);
3387 gfc_mark_ss_chain_used (esss, 1);
3390 gfc_start_scalarized_body (&loop, &body);
3392 gfc_copy_loopinfo_to_se (&cse, &loop);
3393 gfc_copy_loopinfo_to_se (&tdse, &loop);
3394 gfc_copy_loopinfo_to_se (&tsse, &loop);
3395 cse.ss = css;
3396 tdse.ss = tdss;
3397 tsse.ss = tsss;
3398 if (eblock)
3400 gfc_copy_loopinfo_to_se (&edse, &loop);
3401 gfc_copy_loopinfo_to_se (&esse, &loop);
3402 edse.ss = edss;
3403 esse.ss = esss;
3406 gfc_conv_expr (&cse, cond);
3407 gfc_add_block_to_block (&body, &cse.pre);
3408 cexpr = cse.expr;
3410 gfc_conv_expr (&tsse, tsrc);
3411 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3413 gfc_conv_tmp_array_ref (&tdse);
3414 gfc_advance_se_ss_chain (&tdse);
3416 else
3417 gfc_conv_expr (&tdse, tdst);
3419 if (eblock)
3421 gfc_conv_expr (&esse, esrc);
3422 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3424 gfc_conv_tmp_array_ref (&edse);
3425 gfc_advance_se_ss_chain (&edse);
3427 else
3428 gfc_conv_expr (&edse, edst);
3431 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3432 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3433 : build_empty_stmt ();
3434 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3435 gfc_add_expr_to_block (&body, tmp);
3436 gfc_add_block_to_block (&body, &cse.post);
3438 gfc_trans_scalarizing_loops (&loop, &body);
3439 gfc_add_block_to_block (&block, &loop.pre);
3440 gfc_add_block_to_block (&block, &loop.post);
3441 gfc_cleanup_loop (&loop);
3443 return gfc_finish_block (&block);
3446 /* As the WHERE or WHERE construct statement can be nested, we call
3447 gfc_trans_where_2 to do the translation, and pass the initial
3448 NULL values for both the control mask and the pending control mask. */
3450 tree
3451 gfc_trans_where (gfc_code * code)
3453 stmtblock_t block;
3454 gfc_code *cblock;
3455 gfc_code *eblock;
3457 cblock = code->block;
3458 if (cblock->next
3459 && cblock->next->op == EXEC_ASSIGN
3460 && !cblock->next->next)
3462 eblock = cblock->block;
3463 if (!eblock)
3465 /* A simple "WHERE (cond) x = y" statement or block is
3466 dependence free if cond is not dependent upon writing x,
3467 and the source y is unaffected by the destination x. */
3468 if (!gfc_check_dependency (cblock->next->expr,
3469 cblock->expr, 0)
3470 && !gfc_check_dependency (cblock->next->expr,
3471 cblock->next->expr2, 0))
3472 return gfc_trans_where_3 (cblock, NULL);
3474 else if (!eblock->expr
3475 && !eblock->block
3476 && eblock->next
3477 && eblock->next->op == EXEC_ASSIGN
3478 && !eblock->next->next)
3480 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3481 block is dependence free if cond is not dependent on writes
3482 to x1 and x2, y1 is not dependent on writes to x2, and y2
3483 is not dependent on writes to x1, and both y's are not
3484 dependent upon their own x's. */
3485 if (!gfc_check_dependency(cblock->next->expr,
3486 cblock->expr, 0)
3487 && !gfc_check_dependency(eblock->next->expr,
3488 cblock->expr, 0)
3489 && !gfc_check_dependency(cblock->next->expr,
3490 eblock->next->expr2, 0)
3491 && !gfc_check_dependency(eblock->next->expr,
3492 cblock->next->expr2, 0)
3493 && !gfc_check_dependency(cblock->next->expr,
3494 cblock->next->expr2, 0)
3495 && !gfc_check_dependency(eblock->next->expr,
3496 eblock->next->expr2, 0))
3497 return gfc_trans_where_3 (cblock, eblock);
3501 gfc_start_block (&block);
3503 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3505 return gfc_finish_block (&block);
3509 /* CYCLE a DO loop. The label decl has already been created by
3510 gfc_trans_do(); it's in TREE_PURPOSE (backend_decl) of the gfc_code
3511 node at the head of the loop. We must mark the label as used. */
3513 tree
3514 gfc_trans_cycle (gfc_code * code)
3516 tree cycle_label;
3518 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3519 TREE_USED (cycle_label) = 1;
3520 return build1_v (GOTO_EXPR, cycle_label);
3524 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3525 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3526 loop. */
3528 tree
3529 gfc_trans_exit (gfc_code * code)
3531 tree exit_label;
3533 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3534 TREE_USED (exit_label) = 1;
3535 return build1_v (GOTO_EXPR, exit_label);
3539 /* Translate the ALLOCATE statement. */
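/* Roughly speaking (a sketch, not a literal dump of the generated code),

       ALLOCATE (a(n), b(m), STAT=i)

   becomes one allocation per object, each followed by a conditional jump
   to a shared error label when the returned status is nonzero; at that
   label the status value is finally copied into the STAT= variable.  */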
3541 tree
3542 gfc_trans_allocate (gfc_code * code)
3544 gfc_alloc *al;
3545 gfc_expr *expr;
3546 gfc_se se;
3547 tree tmp;
3548 tree parm;
3549 tree stat;
3550 tree pstat;
3551 tree error_label;
3552 stmtblock_t block;
3554 if (!code->ext.alloc_list)
3555 return NULL_TREE;
3557 gfc_start_block (&block);
3559 if (code->expr)
3561 tree gfc_int4_type_node = gfc_get_int_type (4);
3563 stat = gfc_create_var (gfc_int4_type_node, "stat");
3564 pstat = build_fold_addr_expr (stat);
3566 error_label = gfc_build_label_decl (NULL_TREE);
3567 TREE_USED (error_label) = 1;
3569 else
3571 pstat = integer_zero_node;
3572 stat = error_label = NULL_TREE;
3576 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3578 expr = al->expr;
3580 gfc_init_se (&se, NULL);
3581 gfc_start_block (&se.pre);
3583 se.want_pointer = 1;
3584 se.descriptor_only = 1;
3585 gfc_conv_expr (&se, expr);
3587 if (!gfc_array_allocate (&se, expr, pstat))
3589 /* A scalar or derived type. */
3590 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3592 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3593 tmp = se.string_length;
3595 tmp = build_call_expr (gfor_fndecl_allocate, 2, tmp, pstat);
3596 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr, tmp);
3597 gfc_add_expr_to_block (&se.pre, tmp);
3599 if (code->expr)
3601 tmp = build1_v (GOTO_EXPR, error_label);
3602 parm = fold_build2 (NE_EXPR, boolean_type_node,
3603 stat, build_int_cst (TREE_TYPE (stat), 0));
3604 tmp = fold_build3 (COND_EXPR, void_type_node,
3605 parm, tmp, build_empty_stmt ());
3606 gfc_add_expr_to_block (&se.pre, tmp);
3609 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3611 tmp = build_fold_indirect_ref (se.expr);
3612 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3613 gfc_add_expr_to_block (&se.pre, tmp);
3618 tmp = gfc_finish_block (&se.pre);
3619 gfc_add_expr_to_block (&block, tmp);
3622 /* Assign the value to the status variable. */
3623 if (code->expr)
3625 tmp = build1_v (LABEL_EXPR, error_label);
3626 gfc_add_expr_to_block (&block, tmp);
3628 gfc_init_se (&se, NULL);
3629 gfc_conv_expr_lhs (&se, code->expr);
3630 tmp = convert (TREE_TYPE (se.expr), stat);
3631 gfc_add_modify_expr (&block, se.expr, tmp);
3634 return gfc_finish_block (&block);
3638 /* Translate a DEALLOCATE statement.
3639 There are two cases within the for loop:
3640 (1) deallocate(a1, a2, a3) is translated into the following sequence
3641 _gfortran_deallocate(a1, 0B)
3642 _gfortran_deallocate(a2, 0B)
3643 _gfortran_deallocate(a3, 0B)
3644 where the STAT= variable is passed a NULL pointer.
3645 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3646 astat = 0
3647 _gfortran_deallocate(a1, &stat)
3648 astat = astat + stat
3649 _gfortran_deallocate(a2, &stat)
3650 astat = astat + stat
3651 _gfortran_deallocate(a3, &stat)
3652 astat = astat + stat
3653 In case (1), we simply return at the end of the for loop. In case (2)
3654 we set STAT= astat. */
3655 tree
3656 gfc_trans_deallocate (gfc_code * code)
3658 gfc_se se;
3659 gfc_alloc *al;
3660 gfc_expr *expr;
3661 tree apstat, astat, pstat, stat, tmp;
3662 stmtblock_t block;
3664 gfc_start_block (&block);
3666 /* Set up the optional STAT= variable. */
3667 if (code->expr)
3669 tree gfc_int4_type_node = gfc_get_int_type (4);
3671 /* Variable used with the library call. */
3672 stat = gfc_create_var (gfc_int4_type_node, "stat");
3673 pstat = build_fold_addr_expr (stat);
3675 /* Running total of possible deallocation failures. */
3676 astat = gfc_create_var (gfc_int4_type_node, "astat");
3677 apstat = build_fold_addr_expr (astat);
3679 /* Initialize astat to 0. */
3680 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3682 else
3684 pstat = apstat = null_pointer_node;
3685 stat = astat = NULL_TREE;
3688 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3690 expr = al->expr;
3691 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3693 gfc_init_se (&se, NULL);
3694 gfc_start_block (&se.pre);
3696 se.want_pointer = 1;
3697 se.descriptor_only = 1;
3698 gfc_conv_expr (&se, expr);
3700 if (expr->ts.type == BT_DERIVED
3701 && expr->ts.derived->attr.alloc_comp)
3703 gfc_ref *ref;
3704 gfc_ref *last = NULL;
3705 for (ref = expr->ref; ref; ref = ref->next)
3706 if (ref->type == REF_COMPONENT)
3707 last = ref;
3709 /* Do not deallocate the components of a derived type
3710 ultimate pointer component. */
3711 if (!(last && last->u.c.component->pointer)
3712 && !(!last && expr->symtree->n.sym->attr.pointer))
3714 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3715 expr->rank);
3716 gfc_add_expr_to_block (&se.pre, tmp);
3720 if (expr->rank)
3721 tmp = gfc_array_deallocate (se.expr, pstat);
3722 else
3724 tmp = build_call_expr (gfor_fndecl_deallocate, 2, se.expr, pstat);
3725 gfc_add_expr_to_block (&se.pre, tmp);
3727 tmp = build2 (MODIFY_EXPR, void_type_node,
3728 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3731 gfc_add_expr_to_block (&se.pre, tmp);
3733 /* Keep track of the number of failed deallocations by adding stat
3734 of the last deallocation to the running total. */
3735 if (code->expr)
3737 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3738 gfc_add_modify_expr (&se.pre, astat, apstat);
3741 tmp = gfc_finish_block (&se.pre);
3742 gfc_add_expr_to_block (&block, tmp);
3746 /* Assign the value to the status variable. */
3747 if (code->expr)
3749 gfc_init_se (&se, NULL);
3750 gfc_conv_expr_lhs (&se, code->expr);
3751 tmp = convert (TREE_TYPE (se.expr), astat);
3752 gfc_add_modify_expr (&block, se.expr, tmp);
3755 return gfc_finish_block (&block);