2008-05-16 Paul Thomas <pault@gcc.gnu.org>
[official-gcc.git] / gcc / fortran / trans-stmt.c
blob64829e370c1fe8d20437a5fa458f4939df971f48
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "tree-gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
/* One loop-control triplet of a FORALL construct; the iterators of a
   single FORALL header are chained through NEXT.  */
typedef struct iter_info
{
  tree var;	/* The iteration variable's backend decl.  */
  tree start;	/* Lower bound of the triplet.  */
  tree end;	/* Upper bound of the triplet.  */
  tree step;	/* Stride of the triplet.  */
  struct iter_info *next;
}
iter_info;
/* Per-nesting-level state for translating a (possibly nested) FORALL;
   outer levels are reached through PREV_NEST.  */
typedef struct forall_info
{
  iter_info *this_loop;		/* Iterators of this FORALL level.  */
  tree mask;			/* Mask expression temporary, if any.  */
  tree maskindex;		/* Running index into the mask.  */
  int nvar;			/* Number of iteration variables.  */
  tree size;			/* Total iteration count of this level.  */
  struct forall_info *prev_nest;
}
forall_info;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
/* Translate a F95 label number to a LABEL_EXPR. */

tree
gfc_trans_label_here (gfc_code * code)
{
  /* code->here is the statement label attached to this statement.  */
  return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
}
/* Given a variable expression which has been ASSIGNed to, find the decl
   containing the auxiliary variables.  For variables in common blocks this
   is a field_decl.  */

void
gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
{
  /* Only variables that appeared in an ASSIGN statement carry the
     auxiliary string-length/address decls we need here.  */
  gcc_assert (expr->symtree->n.sym->attr.assign == 1);
  gfc_conv_expr (se, expr);
  /* Deals with variable in common block. Get the field declaration.  */
  if (TREE_CODE (se->expr) == COMPONENT_REF)
    se->expr = TREE_OPERAND (se->expr, 1);
  /* Deals with dummy argument. Get the parameter declaration.  */
  else if (TREE_CODE (se->expr) == INDIRECT_REF)
    se->expr = TREE_OPERAND (se->expr, 0);
}
/* Translate a label assignment statement (ASSIGN label TO var).  Stores
   the label's address and, for FORMAT labels, the format string length
   into the variable's auxiliary decls.  */

tree
gfc_trans_label_assign (gfc_code * code)
{
  tree label_tree;
  gfc_se se;
  tree len;
  tree addr;
  tree len_tree;
  char *label_str;
  int label_len;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr);

  len = GFC_DECL_STRING_LEN (se.expr);
  addr = GFC_DECL_ASSIGN_ADDR (se.expr);

  label_tree = gfc_get_label_decl (code->label);

  if (code->label->defined == ST_LABEL_TARGET)
    {
      /* A branch target: store its address; length -1 marks it as a
	 target label (checked by gfc_trans_goto).  */
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
      len_tree = integer_minus_one_node;
    }
  else
    {
      /* A FORMAT label: store the format string and its length.  */
      label_len = code->label->format->value.character.length;
      label_str
	= gfc_widechar_to_char (code->label->format->value.character.string,
				label_len);
      len_tree = build_int_cst (NULL_TREE, label_len);
      label_tree = gfc_build_string_const (label_len + 1, label_str);
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
      gfc_free (label_str);
    }

  gfc_add_modify_expr (&se.pre, len, len_tree);
  gfc_add_modify_expr (&se.pre, addr, label_tree);

  return gfc_finish_block (&se.pre);
}
/* Translate a GOTO statement.  Handles both plain GOTO (code->label set)
   and assigned GOTO, optionally with a label list to validate against.  */

tree
gfc_trans_goto (gfc_code * code)
{
  locus loc = code->loc;
  tree assigned_goto;
  tree target;
  tree tmp;
  gfc_se se;

  if (code->label != NULL)
    return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));

  /* ASSIGNED GOTO.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr);
  /* A stored string length of -1 identifies a target label (see
     gfc_trans_label_assign); anything else is a FORMAT label.  */
  tmp = GFC_DECL_STRING_LEN (se.expr);
  tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
		     build_int_cst (TREE_TYPE (tmp), -1));
  gfc_trans_runtime_check (tmp, &se.pre, &loc,
			   "Assigned label is not a target label");

  assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);

  code = code->block;
  if (code == NULL)
    {
      /* No label list given: jump indirectly through the stored address.  */
      target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
      gfc_add_expr_to_block (&se.pre, target);
      return gfc_finish_block (&se.pre);
    }

  /* Check the label list.  Compare the stored address against each listed
     label and jump directly when one matches.  */
  do
    {
      target = gfc_get_label_decl (code->label);
      tmp = gfc_build_addr_expr (pvoid_type_node, target);
      tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
      tmp = build3_v (COND_EXPR, tmp,
		      fold_build1 (GOTO_EXPR, void_type_node, target),
		      build_empty_stmt ());
      gfc_add_expr_to_block (&se.pre, tmp);
      code = code->block;
    }
  while (code != NULL);
  /* Falling through every comparison means the label was not listed.  */
  gfc_trans_runtime_check (boolean_true_node, &se.pre, &loc,
			   "Assigned label is not in the list");

  return gfc_finish_block (&se.pre);
}
/* Translate an ENTRY statement.  Just adds a label for this entry point.  */

tree
gfc_trans_entry (gfc_code * code)
{
  return build1_v (LABEL_EXPR, code->ext.entry->label);
}
/* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
   elemental subroutines.  Make temporaries for output arguments if any such
   dependencies are found.  Output arguments are chosen because internal_unpack
   can be used, as is, to copy the result back to the variable.  */
static void
gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
				 gfc_symbol * sym, gfc_actual_arglist * arg)
{
  gfc_actual_arglist *arg0;
  gfc_expr *e;
  gfc_formal_arglist *formal;
  gfc_loopinfo tmp_loop;
  gfc_se parmse;
  gfc_ss *ss;
  gfc_ss_info *info;
  gfc_symbol *fsym;
  int n;
  stmtblock_t block;
  tree data;
  tree offset;
  tree size;
  tree tmp;

  /* Nothing to do unless the call was scalarized.  */
  if (loopse->ss == NULL)
    return;

  ss = loopse->ss;
  arg0 = arg;
  formal = sym->formal;

  /* Loop over all the arguments testing for dependencies.  */
  for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
    {
      e = arg->expr;
      if (e == NULL)
	continue;

      /* Obtain the info structure for the current argument.  */
      info = NULL;
      for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
	{
	  if (ss->expr != e)
	    continue;
	  info = &ss->data.info;
	  break;
	}

      /* If there is a dependency, create a temporary and use it
	 instead of the variable.  */
      fsym = formal ? formal->sym : NULL;
      if (e->expr_type == EXPR_VARIABLE
	    && e->rank && fsym
	    && fsym->attr.intent != INTENT_IN
	    && gfc_check_fncall_dependency (e, fsym->attr.intent,
					    sym, arg0))
	{
	  /* Make a local loopinfo for the temporary creation, so that
	     none of the other ss->info's have to be renormalized.  */
	  gfc_init_loopinfo (&tmp_loop);
	  for (n = 0; n < info->dimen; n++)
	    {
	      tmp_loop.to[n] = loopse->loop->to[n];
	      tmp_loop.from[n] = loopse->loop->from[n];
	      tmp_loop.order[n] = loopse->loop->order[n];
	    }

	  /* Generate the temporary.  Merge the block so that the
	     declarations are put at the right binding level.  */
	  size = gfc_create_var (gfc_array_index_type, NULL);
	  data = gfc_create_var (pvoid_type_node, NULL);
	  gfc_start_block (&block);
	  tmp = gfc_typenode_for_spec (&e->ts);
	  tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
					     &tmp_loop, info, tmp,
					     false, true, false);
	  gfc_add_modify_expr (&se->pre, size, tmp);
	  tmp = fold_convert (pvoid_type_node, info->data);
	  gfc_add_modify_expr (&se->pre, data, tmp);
	  gfc_merge_block_scope (&block);

	  /* Obtain the argument descriptor for unpacking.  */
	  gfc_init_se (&parmse, NULL);
	  parmse.want_pointer = 1;
	  gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
	  gfc_add_block_to_block (&se->pre, &parmse.pre);

	  /* Calculate the offset for the temporary.  */
	  offset = gfc_index_zero_node;
	  for (n = 0; n < info->dimen; n++)
	    {
	      tmp = gfc_conv_descriptor_stride (info->descriptor,
						gfc_rank_cst[n]);
	      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
				 loopse->loop->from[n], tmp);
	      offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
				    offset, tmp);
	    }
	  info->offset = gfc_create_var (gfc_array_index_type, NULL);
	  gfc_add_modify_expr (&se->pre, info->offset, offset);

	  /* Copy the result back using unpack.  */
	  tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
	  gfc_add_expr_to_block (&se->post, tmp);

	  gfc_add_block_to_block (&se->post, &parmse.post);
	}
    }
}
/* Translate the CALL statement.  Builds a call to an F95 subroutine.
   DEPENDENCY_CHECK requests argument dependency checking (used for
   operator assignment).  */

tree
gfc_trans_call (gfc_code * code, bool dependency_check)
{
  gfc_se se;
  gfc_ss * ss;
  int has_alternate_specifier;

  /* A CALL starts a new block because the actual arguments may have to
     be evaluated first.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  gcc_assert (code->resolved_sym);

  ss = gfc_ss_terminator;
  if (code->resolved_sym->attr.elemental)
    ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);

  /* Is not an elemental subroutine call with array valued arguments.  */
  if (ss == gfc_ss_terminator)
    {
      /* Translate the call.  */
      has_alternate_specifier
	= gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
				  NULL_TREE);

      /* A subroutine without side-effect, by definition, does nothing!  */
      TREE_SIDE_EFFECTS (se.expr) = 1;

      /* Chain the pieces together and return the block.  */
      if (has_alternate_specifier)
	{
	  /* The call returned an alternate-return index; store it in the
	     variable the following SELECT switches on.  */
	  gfc_code *select_code;
	  gfc_symbol *sym;
	  select_code = code->next;
	  gcc_assert(select_code->op == EXEC_SELECT);
	  sym = select_code->expr->symtree->n.sym;
	  se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
	  if (sym->backend_decl == NULL)
	    sym->backend_decl = gfc_get_symbol_decl (sym);
	  gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
	}
      else
	gfc_add_expr_to_block (&se.pre, se.expr);

      gfc_add_block_to_block (&se.pre, &se.post);
    }
  else
    {
      /* An elemental subroutine call with array valued arguments has
	 to be scalarized.  */
      gfc_loopinfo loop;
      stmtblock_t body;
      stmtblock_t block;
      gfc_se loopse;

      /* gfc_walk_elemental_function_args renders the ss chain in the
	 reverse order to the actual argument order.  */
      ss = gfc_reverse_ss (ss);

      /* Initialize the loop.  */
      gfc_init_se (&loopse, NULL);
      gfc_init_loopinfo (&loop);
      gfc_add_ss_to_loop (&loop, ss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop);
      gfc_mark_ss_chain_used (ss, 1);

      /* Convert the arguments, checking for dependencies.  */
      gfc_copy_loopinfo_to_se (&loopse, &loop);
      loopse.ss = ss;

      /* For operator assignment, do dependency checking.  */
      if (dependency_check)
	{
	  gfc_symbol *sym;
	  sym = code->resolved_sym;
	  gfc_conv_elemental_dependencies (&se, &loopse, sym,
					   code->ext.actual);
	}

      /* Generate the loop body.  */
      gfc_start_scalarized_body (&loop, &body);
      gfc_init_block (&block);

      /* Add the subroutine call to the block.  */
      gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
			      NULL_TREE);
      gfc_add_expr_to_block (&loopse.pre, loopse.expr);

      gfc_add_block_to_block (&block, &loopse.pre);
      gfc_add_block_to_block (&block, &loopse.post);

      /* Finish up the loop block and the loop.  */
      gfc_add_expr_to_block (&body, gfc_finish_block (&block));
      gfc_trans_scalarizing_loops (&loop, &body);
      gfc_add_block_to_block (&se.pre, &loop.pre);
      gfc_add_block_to_block (&se.pre, &loop.post);
      gfc_add_block_to_block (&se.pre, &se.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&se.pre);
}
421 /* Translate the RETURN statement. */
423 tree
424 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
426 if (code->expr)
428 gfc_se se;
429 tree tmp;
430 tree result;
432 /* If code->expr is not NULL, this return statement must appear
433 in a subroutine and current_fake_result_decl has already
434 been generated. */
436 result = gfc_get_fake_result_decl (NULL, 0);
437 if (!result)
439 gfc_warning ("An alternate return at %L without a * dummy argument",
440 &code->expr->where);
441 return build1_v (GOTO_EXPR, gfc_get_return_label ());
444 /* Start a new block for this statement. */
445 gfc_init_se (&se, NULL);
446 gfc_start_block (&se.pre);
448 gfc_conv_expr (&se, code->expr);
450 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
451 fold_convert (TREE_TYPE (result), se.expr));
452 gfc_add_expr_to_block (&se.pre, tmp);
454 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
455 gfc_add_expr_to_block (&se.pre, tmp);
456 gfc_add_block_to_block (&se.pre, &se.post);
457 return gfc_finish_block (&se.pre);
459 else
460 return build1_v (GOTO_EXPR, gfc_get_return_label ());
464 /* Translate the PAUSE statement. We have to translate this statement
465 to a runtime library call. */
467 tree
468 gfc_trans_pause (gfc_code * code)
470 tree gfc_int4_type_node = gfc_get_int_type (4);
471 gfc_se se;
472 tree tmp;
474 /* Start a new block for this statement. */
475 gfc_init_se (&se, NULL);
476 gfc_start_block (&se.pre);
479 if (code->expr == NULL)
481 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
482 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
484 else
486 gfc_conv_expr_reference (&se, code->expr);
487 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
488 se.expr, se.string_length);
491 gfc_add_expr_to_block (&se.pre, tmp);
493 gfc_add_block_to_block (&se.pre, &se.post);
495 return gfc_finish_block (&se.pre);
499 /* Translate the STOP statement. We have to translate this statement
500 to a runtime library call. */
502 tree
503 gfc_trans_stop (gfc_code * code)
505 tree gfc_int4_type_node = gfc_get_int_type (4);
506 gfc_se se;
507 tree tmp;
509 /* Start a new block for this statement. */
510 gfc_init_se (&se, NULL);
511 gfc_start_block (&se.pre);
514 if (code->expr == NULL)
516 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
517 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
519 else
521 gfc_conv_expr_reference (&se, code->expr);
522 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
523 se.expr, se.string_length);
526 gfc_add_expr_to_block (&se.pre, tmp);
528 gfc_add_block_to_block (&se.pre, &se.post);
530 return gfc_finish_block (&se.pre);
534 /* Generate GENERIC for the IF construct. This function also deals with
535 the simple IF statement, because the front end translates the IF
536 statement into an IF construct.
538 We translate:
540 IF (cond) THEN
541 then_clause
542 ELSEIF (cond2)
543 elseif_clause
544 ELSE
545 else_clause
546 ENDIF
548 into:
550 pre_cond_s;
551 if (cond_s)
553 then_clause;
555 else
557 pre_cond_s
558 if (cond_s)
560 elseif_clause
562 else
564 else_clause;
568 where COND_S is the simplified version of the predicate. PRE_COND_S
569 are the pre side-effects produced by the translation of the
570 conditional.
571 We need to build the chain recursively otherwise we run into
572 problems with folding incomplete statements. */
574 static tree
575 gfc_trans_if_1 (gfc_code * code)
577 gfc_se if_se;
578 tree stmt, elsestmt;
580 /* Check for an unconditional ELSE clause. */
581 if (!code->expr)
582 return gfc_trans_code (code->next);
584 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
585 gfc_init_se (&if_se, NULL);
586 gfc_start_block (&if_se.pre);
588 /* Calculate the IF condition expression. */
589 gfc_conv_expr_val (&if_se, code->expr);
591 /* Translate the THEN clause. */
592 stmt = gfc_trans_code (code->next);
594 /* Translate the ELSE clause. */
595 if (code->block)
596 elsestmt = gfc_trans_if_1 (code->block);
597 else
598 elsestmt = build_empty_stmt ();
600 /* Build the condition expression and add it to the condition block. */
601 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
603 gfc_add_expr_to_block (&if_se.pre, stmt);
605 /* Finish off this statement. */
606 return gfc_finish_block (&if_se.pre);
/* Translate an IF construct; entry point called from the statement
   dispatcher.  */
tree
gfc_trans_if (gfc_code * code)
{
  /* Ignore the top EXEC_IF, it only announces an IF construct. The
     actual code we must translate is in code->block.  */
  return gfc_trans_if_1 (code->block);
}
/* Translate an arithmetic IF expression.

   IF (cond) label1, label2, label3 translates to

     if (cond <= 0)
       {
	 if (cond < 0)
	   goto label1;
	 else // cond == 0
	   goto label2;
       }
     else // cond > 0
       goto label3;

   An optimized version can be generated in case of equal labels.
   E.g., if label1 is equal to label2, we can translate it to

     if (cond <= 0)
       goto label1;
     else
       goto label3;  */

tree
gfc_trans_arithmetic_if (gfc_code * code)
{
  gfc_se se;
  tree tmp;
  tree branch1;
  tree branch2;
  tree zero;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  /* Pre-evaluate COND.  */
  gfc_conv_expr_val (&se, code->expr);
  se.expr = gfc_evaluate_now (se.expr, &se.pre);

  /* Build something to compare with.  */
  zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);

  if (code->label->value != code->label2->value)
    {
      /* If (cond < 0) take branch1 else take branch2.
	 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases.  */
      branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));

      /* When label1 == label3, only the cond == 0 case needs its own
	 branch, so test with NE instead of LT.  */
      if (code->label->value != code->label3->value)
	tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
      else
	tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);

      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }
  else
    /* label1 == label2: the < 0 and == 0 cases collapse to one jump.  */
    branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));

  if (code->label->value != code->label3->value
      && code->label2->value != code->label3->value)
    {
      /* if (cond <= 0) take branch1 else take branch2.  */
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
      tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }

  /* Append the COND_EXPR to the evaluation of COND, and return.  */
  gfc_add_expr_to_block (&se.pre, branch1);
  return gfc_finish_block (&se.pre);
}
/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect
   results.
   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [Evaluate loop bounds and step]
   dovar = from;
   if ((step > 0) ? (dovar <= to) : (dovar => to))
    {
      for (;;)
        {
	  body;
   cycle_label:
	  cond = (dovar == to);
	  dovar += step;
	  if (cond) goto end_label;
	}
     }
   end_label:

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */

static tree
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
		     tree from, tree to, tree step)
{
  stmtblock_t body;
  tree type;
  tree cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;

  type = TREE_TYPE (dovar);

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify_expr (pblock, dovar, from);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Evaluate the loop condition BEFORE the increment, so the compare
     cannot be confused by the increment wrapping past TO.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
  cond = gfc_evaluate_now (cond, &body);

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify_expr (&body, dovar, tmp);

  /* The loop exit.  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&body, tmp);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = build1_v (LOOP_EXPR, tmp);

  /* Only execute the loop if the number of iterations is positive.  */
  if (tree_int_cst_sgn (step) > 0)
    cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
  else
    cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
}
/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially
   so for Fortran.

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;
   dovar = from;
   if (empty) goto exit_label;
   for (;;)
     {
       body;
cycle_label:
       dovar += step
       if (countm1 ==0) goto exit_label;
       countm1--;
     }
exit_label:

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */

tree
gfc_trans_do (gfc_code * code)
{
  gfc_se se;
  tree dovar;
  tree from;
  tree to;
  tree step;
  tree empty;
  tree countm1;
  tree type;
  tree utype;
  tree cond;
  tree cycle_label;
  tree exit_label;
  tree tmp;
  tree pos_step;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (&block, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (&block, &se.pre);
  from = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (&block, &se.pre);
  to = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (&block, &se.pre);
  step = gfc_evaluate_now (se.expr, &block);

  /* Special case simple loops: integer DO variable with step +-1.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (integer_onep (step)
	|| tree_int_cst_equal (step, integer_minus_one_node)))
    return gfc_trans_simple_do (code, &block, dovar, from, to, step);

  /* We need a special check for empty loops:
     empty = (step > 0 ? to < from : to > from);  */
  pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
			  fold_convert (type, integer_zero_node));
  empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
		       fold_build2 (LT_EXPR, boolean_type_node, to, from),
		       fold_build2 (GT_EXPR, boolean_type_node, to, from));

  /* Initialize loop count.  This code is executed before we enter the
     loop body.  We generate: countm1 = abs(to - from) / abs(step).  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      tree ustep;

      utype = unsigned_type_for (type);

      /* tmp = abs(to - from) / abs(step); done in the unsigned type so
	 the subtraction cannot overflow.  */
      ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
      tmp = fold_build3 (COND_EXPR, type, pos_step,
			 fold_build2 (MINUS_EXPR, type, to, from),
			 fold_build2 (MINUS_EXPR, type, from, to));
      tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
			 ustep);
    }
  else
    {
      /* TODO: We could use the same width as the real type.
	 This would probably cause more problems that it solves
	 when we implement "long double" types.  */
      utype = unsigned_type_for (gfc_array_index_type);
      tmp = fold_build2 (MINUS_EXPR, type, to, from);
      tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
      tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
    }
  countm1 = gfc_create_var (utype, "countm1");
  gfc_add_modify_expr (&block, countm1, tmp);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);
  TREE_USED (exit_label) = 1;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify_expr (&block, dovar, from);

  /* If the loop is empty, go directly to the exit label.  */
  tmp = fold_build3 (COND_EXPR, void_type_node, empty,
		     build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
  gfc_add_expr_to_block (&block, tmp);

  /* Loop body.  */
  gfc_start_block (&body);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used).  cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify_expr (&body, dovar, tmp);

  /* End with the loop condition.  Loop until countm1 == 0.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
		      build_int_cst (utype, 0));
  tmp = build1_v (GOTO_EXPR, exit_label);
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&body, tmp);

  /* Decrement the loop count.  */
  tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
  gfc_add_modify_expr (&body, countm1, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&body);

  /* The for loop itself.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the DO WHILE construct.

   We translate

   DO WHILE (cond)
      body
   END DO

   to:

   for ( ; ; )
     {
       pre_cond;
       if (! cond) goto exit_label;
       body;
cycle_label:
     }
exit_label:

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */

tree
gfc_trans_do_while (gfc_code * code)
{
  gfc_se cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;
  stmtblock_t block;

  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Create a GIMPLE version of the exit condition.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond.expr, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the loop.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for INTEGER case expressions,
   without killing all potential optimizations.  The problem is that
   Fortran allows unbounded cases, but the back-end does not, so we
   need to intercept those before we enter the equivalent SWITCH_EXPR
   we can build.

   For example, we translate this,

   SELECT CASE (expr)
      CASE (:100,101,105:115)
	 block_1
      CASE (190:199,200:)
	 block_2
      CASE (300)
	 block_3
      CASE DEFAULT
	 block_4
   END SELECT

   to the GENERIC equivalent,

     switch (expr)
       {
	 case (minimum value for typeof(expr) ... 100:
	 case 101:
	 case 105 ... 114:
	   block1:
	   goto end_label;

	 case 200 ... (maximum value for typeof(expr):
	 case 190 ... 199:
	   block2;
	   goto end_label;

	 case 300:
	   block_3;
	   goto end_label;

	 default:
	   block_4;
	   goto end_label;
       }
     end_label:  */

static tree
gfc_trans_integer_select (gfc_code * code)
{
  gfc_code *c;
  gfc_case *cp;
  tree end_label;
  tree tmp;
  gfc_se se;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Calculate the switch expression.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr);
  gfc_add_block_to_block (&block, &se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  /* Outer loop: one iteration per CASE block; inner loop: one iteration
     per case-value range in that block's case list.  */
  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
	{
	  tree low, high;
	  tree label;

	  /* Assume it's the default case.  */
	  low = high = NULL_TREE;

	  if (cp->low)
	    {
	      low = gfc_conv_mpz_to_tree (cp->low->value.integer,
					  cp->low->ts.kind);

	      /* If there's only a lower bound, set the high bound to the
		 maximum value of the case expression.  */
	      if (!cp->high)
		high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
	    }

	  if (cp->high)
	    {
	      /* Three cases are possible here:

		 1) There is no lower bound, e.g. CASE (:N).
		 2) There is a lower bound .NE. high bound, that is
		    a case range, e.g. CASE (N:M) where M>N (we make
		    sure that M>N during type resolution).
		 3) There is a lower bound, and it has the same value
		    as the high bound, e.g. CASE (N:N).  This is our
		    internal representation of CASE(N).

		 In the first and second case, we need to set a value for
		 high.  In the third case, we don't because the GCC middle
		 end represents a single case value by just letting high be
		 a NULL_TREE.  We can't do that because we need to be able
		 to represent unbounded cases.  */

	      if (!cp->low
		  || (cp->low
		      && mpz_cmp (cp->low->value.integer,
				  cp->high->value.integer) != 0))
		high = gfc_conv_mpz_to_tree (cp->high->value.integer,
					     cp->high->ts.kind);

	      /* Unbounded case.  */
	      if (!cp->low)
		low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
	    }

	  /* Build a label.  */
	  label = gfc_build_label_decl (NULL_TREE);

	  /* Add this case label.
	     Add parameter 'label', make it match GCC backend.  */
	  tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
			     low, high, label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
1212 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1214 There are only two cases possible here, even though the standard
1215 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1216 .FALSE., and DEFAULT.
1218 We never generate more than two blocks here. Instead, we always
1219 try to eliminate the DEFAULT case. This way, we can translate this
1220 kind of SELECT construct to a simple
1222 if {} else {};
1224 expression in GENERIC. */
1226 static tree
1227 gfc_trans_logical_select (gfc_code * code)
1229 gfc_code *c;
1230 gfc_code *t, *f, *d;
1231 gfc_case *cp;
1232 gfc_se se;
1233 stmtblock_t block;
1235 /* Assume we don't have any cases at all. */
1236 t = f = d = NULL;
1238 /* Now see which ones we actually do have. We can have at most two
1239 cases in a single case list: one for .TRUE. and one for .FALSE.
1240 The default case is always separate. If the cases for .TRUE. and
1241 .FALSE. are in the same case list, the block for that case list
1242 always executed, and we don't generate code a COND_EXPR. */
1243 for (c = code->block; c; c = c->block)
1245 for (cp = c->ext.case_list; cp; cp = cp->next)
1247 if (cp->low)
1249 if (cp->low->value.logical == 0) /* .FALSE. */
1250 f = c;
1251 else /* if (cp->value.logical != 0), thus .TRUE. */
1252 t = c;
1254 else
1255 d = c;
1259 /* Start a new block. */
1260 gfc_start_block (&block);
1262 /* Calculate the switch expression. We always need to do this
1263 because it may have side effects. */
1264 gfc_init_se (&se, NULL);
1265 gfc_conv_expr_val (&se, code->expr);
1266 gfc_add_block_to_block (&block, &se.pre);
1268 if (t == f && t != NULL)
1270 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1271 translate the code for these cases, append it to the current
1272 block. */
1273 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1275 else
1277 tree true_tree, false_tree, stmt;
1279 true_tree = build_empty_stmt ();
1280 false_tree = build_empty_stmt ();
1282 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1283 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1284 make the missing case the default case. */
1285 if (t != NULL && f != NULL)
1286 d = NULL;
1287 else if (d != NULL)
1289 if (t == NULL)
1290 t = d;
1291 else
1292 f = d;
1295 /* Translate the code for each of these blocks, and append it to
1296 the current block. */
1297 if (t != NULL)
1298 true_tree = gfc_trans_code (t->next);
1300 if (f != NULL)
1301 false_tree = gfc_trans_code (f->next);
1303 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1304 true_tree, false_tree);
1305 gfc_add_expr_to_block (&block, stmt);
1308 return gfc_finish_block (&block);
1312 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1313 Instead of generating compares and jumps, it is far simpler to
1314 generate a data structure describing the cases in order and call a
1315 library subroutine that locates the right case.
1316 This is particularly true because this is the only case where we
1317 might have to dispose of a temporary.
1318 The library subroutine returns a pointer to jump to or NULL if no
1319 branches are to be taken. */
1321 static tree
1322 gfc_trans_character_select (gfc_code *code)
1324 tree init, node, end_label, tmp, type, case_num, label;
1325 stmtblock_t block, body;
1326 gfc_case *cp, *d;
1327 gfc_code *c;
1328 gfc_se se;
1329 int n;
1331 static tree select_struct;
1332 static tree ss_string1, ss_string1_len;
1333 static tree ss_string2, ss_string2_len;
1334 static tree ss_target;
1336 if (select_struct == NULL)
1338 tree gfc_int4_type_node = gfc_get_int_type (4);
1340 select_struct = make_node (RECORD_TYPE);
1341 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1343 #undef ADD_FIELD
1344 #define ADD_FIELD(NAME, TYPE) \
1345 ss_##NAME = gfc_add_field_to_struct \
1346 (&(TYPE_FIELDS (select_struct)), select_struct, \
1347 get_identifier (stringize(NAME)), TYPE)
1349 ADD_FIELD (string1, pchar_type_node);
1350 ADD_FIELD (string1_len, gfc_int4_type_node);
1352 ADD_FIELD (string2, pchar_type_node);
1353 ADD_FIELD (string2_len, gfc_int4_type_node);
1355 ADD_FIELD (target, integer_type_node);
1356 #undef ADD_FIELD
1358 gfc_finish_type (select_struct);
1361 cp = code->block->ext.case_list;
1362 while (cp->left != NULL)
1363 cp = cp->left;
1365 n = 0;
1366 for (d = cp; d; d = d->right)
1367 d->n = n++;
1369 end_label = gfc_build_label_decl (NULL_TREE);
1371 /* Generate the body */
1372 gfc_start_block (&block);
1373 gfc_init_block (&body);
1375 for (c = code->block; c; c = c->block)
1377 for (d = c->ext.case_list; d; d = d->next)
1379 label = gfc_build_label_decl (NULL_TREE);
1380 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1381 build_int_cst (NULL_TREE, d->n),
1382 build_int_cst (NULL_TREE, d->n), label);
1383 gfc_add_expr_to_block (&body, tmp);
1386 tmp = gfc_trans_code (c->next);
1387 gfc_add_expr_to_block (&body, tmp);
1389 tmp = build1_v (GOTO_EXPR, end_label);
1390 gfc_add_expr_to_block (&body, tmp);
1393 /* Generate the structure describing the branches */
1394 init = NULL_TREE;
1396 for(d = cp; d; d = d->right)
1398 node = NULL_TREE;
1400 gfc_init_se (&se, NULL);
1402 if (d->low == NULL)
1404 node = tree_cons (ss_string1, null_pointer_node, node);
1405 node = tree_cons (ss_string1_len, integer_zero_node, node);
1407 else
1409 gfc_conv_expr_reference (&se, d->low);
1411 node = tree_cons (ss_string1, se.expr, node);
1412 node = tree_cons (ss_string1_len, se.string_length, node);
1415 if (d->high == NULL)
1417 node = tree_cons (ss_string2, null_pointer_node, node);
1418 node = tree_cons (ss_string2_len, integer_zero_node, node);
1420 else
1422 gfc_init_se (&se, NULL);
1423 gfc_conv_expr_reference (&se, d->high);
1425 node = tree_cons (ss_string2, se.expr, node);
1426 node = tree_cons (ss_string2_len, se.string_length, node);
1429 node = tree_cons (ss_target, build_int_cst (integer_type_node, d->n),
1430 node);
1432 tmp = build_constructor_from_list (select_struct, nreverse (node));
1433 init = tree_cons (NULL_TREE, tmp, init);
1436 type = build_array_type (select_struct, build_index_type
1437 (build_int_cst (NULL_TREE, n - 1)));
1439 init = build_constructor_from_list (type, nreverse(init));
1440 TREE_CONSTANT (init) = 1;
1441 TREE_STATIC (init) = 1;
1442 /* Create a static variable to hold the jump table. */
1443 tmp = gfc_create_var (type, "jumptable");
1444 TREE_CONSTANT (tmp) = 1;
1445 TREE_STATIC (tmp) = 1;
1446 TREE_READONLY (tmp) = 1;
1447 DECL_INITIAL (tmp) = init;
1448 init = tmp;
1450 /* Build the library call */
1451 init = gfc_build_addr_expr (pvoid_type_node, init);
1453 gfc_init_se (&se, NULL);
1454 gfc_conv_expr_reference (&se, code->expr);
1456 gfc_add_block_to_block (&block, &se.pre);
1458 tmp = build_call_expr (gfor_fndecl_select_string, 4, init,
1459 build_int_cst (NULL_TREE, n), se.expr,
1460 se.string_length);
1461 case_num = gfc_create_var (integer_type_node, "case_num");
1462 gfc_add_modify_expr (&block, case_num, tmp);
1464 gfc_add_block_to_block (&block, &se.post);
1466 tmp = gfc_finish_block (&body);
1467 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1468 gfc_add_expr_to_block (&block, tmp);
1470 tmp = build1_v (LABEL_EXPR, end_label);
1471 gfc_add_expr_to_block (&block, tmp);
1473 return gfc_finish_block (&block);
1477 /* Translate the three variants of the SELECT CASE construct.
1479 SELECT CASEs with INTEGER case expressions can be translated to an
1480 equivalent GENERIC switch statement, and for LOGICAL case
1481 expressions we build one or two if-else compares.
1483 SELECT CASEs with CHARACTER case expressions are a whole different
1484 story, because they don't exist in GENERIC. So we sort them and
1485 do a binary search at runtime.
1487 Fortran has no BREAK statement, and it does not allow jumps from
1488 one case block to another. That makes things a lot easier for
1489 the optimizers. */
1491 tree
1492 gfc_trans_select (gfc_code * code)
1494 gcc_assert (code && code->expr);
1496 /* Empty SELECT constructs are legal. */
1497 if (code->block == NULL)
1498 return build_empty_stmt ();
1500 /* Select the correct translation function. */
1501 switch (code->expr->ts.type)
1503 case BT_LOGICAL: return gfc_trans_logical_select (code);
1504 case BT_INTEGER: return gfc_trans_integer_select (code);
1505 case BT_CHARACTER: return gfc_trans_character_select (code);
1506 default:
1507 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1508 /* Not reached */
1513 /* Traversal function to substitute a replacement symtree if the symbol
1514 in the expression is the same as that passed. f == 2 signals that
1515 that variable itself is not to be checked - only the references.
1516 This group of functions is used when the variable expression in a
1517 FORALL assignment has internal references. For example:
1518 FORALL (i = 1:4) p(p(i)) = i
1519 The only recourse here is to store a copy of 'p' for the index
1520 expression. */
1522 static gfc_symtree *new_symtree;
1523 static gfc_symtree *old_symtree;
1525 static bool
1526 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1528 if (expr->expr_type != EXPR_VARIABLE)
1529 return false;
1531 if (*f == 2)
1532 *f = 1;
1533 else if (expr->symtree->n.sym == sym)
1534 expr->symtree = new_symtree;
1536 return false;
1539 static void
1540 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1542 gfc_traverse_expr (e, sym, forall_replace, f);
1545 static bool
1546 forall_restore (gfc_expr *expr,
1547 gfc_symbol *sym ATTRIBUTE_UNUSED,
1548 int *f ATTRIBUTE_UNUSED)
1550 if (expr->expr_type != EXPR_VARIABLE)
1551 return false;
1553 if (expr->symtree == new_symtree)
1554 expr->symtree = old_symtree;
1556 return false;
1559 static void
1560 forall_restore_symtree (gfc_expr *e)
1562 gfc_traverse_expr (e, NULL, forall_restore, 0);
1565 static void
1566 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1568 gfc_se tse;
1569 gfc_se rse;
1570 gfc_expr *e;
1571 gfc_symbol *new_sym;
1572 gfc_symbol *old_sym;
1573 gfc_symtree *root;
1574 tree tmp;
1576 /* Build a copy of the lvalue. */
1577 old_symtree = c->expr->symtree;
1578 old_sym = old_symtree->n.sym;
1579 e = gfc_lval_expr_from_sym (old_sym);
1580 if (old_sym->attr.dimension)
1582 gfc_init_se (&tse, NULL);
1583 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1584 gfc_add_block_to_block (pre, &tse.pre);
1585 gfc_add_block_to_block (post, &tse.post);
1586 tse.expr = build_fold_indirect_ref (tse.expr);
1588 if (e->ts.type != BT_CHARACTER)
1590 /* Use the variable offset for the temporary. */
1591 tmp = gfc_conv_descriptor_offset (tse.expr);
1592 gfc_add_modify_expr (pre, tmp,
1593 gfc_conv_array_offset (old_sym->backend_decl));
1596 else
1598 gfc_init_se (&tse, NULL);
1599 gfc_init_se (&rse, NULL);
1600 gfc_conv_expr (&rse, e);
1601 if (e->ts.type == BT_CHARACTER)
1603 tse.string_length = rse.string_length;
1604 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1605 tse.string_length);
1606 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1607 rse.string_length);
1608 gfc_add_block_to_block (pre, &tse.pre);
1609 gfc_add_block_to_block (post, &tse.post);
1611 else
1613 tmp = gfc_typenode_for_spec (&e->ts);
1614 tse.expr = gfc_create_var (tmp, "temp");
1617 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1618 e->expr_type == EXPR_VARIABLE);
1619 gfc_add_expr_to_block (pre, tmp);
1621 gfc_free_expr (e);
1623 /* Create a new symbol to represent the lvalue. */
1624 new_sym = gfc_new_symbol (old_sym->name, NULL);
1625 new_sym->ts = old_sym->ts;
1626 new_sym->attr.referenced = 1;
1627 new_sym->attr.dimension = old_sym->attr.dimension;
1628 new_sym->attr.flavor = old_sym->attr.flavor;
1630 /* Use the temporary as the backend_decl. */
1631 new_sym->backend_decl = tse.expr;
1633 /* Create a fake symtree for it. */
1634 root = NULL;
1635 new_symtree = gfc_new_symtree (&root, old_sym->name);
1636 new_symtree->n.sym = new_sym;
1637 gcc_assert (new_symtree == root);
1639 /* Go through the expression reference replacing the old_symtree
1640 with the new. */
1641 forall_replace_symtree (c->expr, old_sym, 2);
1643 /* Now we have made this temporary, we might as well use it for
1644 the right hand side. */
1645 forall_replace_symtree (c->expr2, old_sym, 1);
1649 /* Handles dependencies in forall assignments. */
1650 static int
1651 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1653 gfc_ref *lref;
1654 gfc_ref *rref;
1655 int need_temp;
1656 gfc_symbol *lsym;
1658 lsym = c->expr->symtree->n.sym;
1659 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1661 /* Now check for dependencies within the 'variable'
1662 expression itself. These are treated by making a complete
1663 copy of variable and changing all the references to it
1664 point to the copy instead. Note that the shallow copy of
1665 the variable will not suffice for derived types with
1666 pointer components. We therefore leave these to their
1667 own devices. */
1668 if (lsym->ts.type == BT_DERIVED
1669 && lsym->ts.derived->attr.pointer_comp)
1670 return need_temp;
1672 new_symtree = NULL;
1673 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1675 forall_make_variable_temp (c, pre, post);
1676 need_temp = 0;
1679 /* Substrings with dependencies are treated in the same
1680 way. */
1681 if (c->expr->ts.type == BT_CHARACTER
1682 && c->expr->ref
1683 && c->expr2->expr_type == EXPR_VARIABLE
1684 && lsym == c->expr2->symtree->n.sym)
1686 for (lref = c->expr->ref; lref; lref = lref->next)
1687 if (lref->type == REF_SUBSTRING)
1688 break;
1689 for (rref = c->expr2->ref; rref; rref = rref->next)
1690 if (rref->type == REF_SUBSTRING)
1691 break;
1693 if (rref && lref
1694 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1696 forall_make_variable_temp (c, pre, post);
1697 need_temp = 0;
1700 return need_temp;
1704 static void
1705 cleanup_forall_symtrees (gfc_code *c)
1707 forall_restore_symtree (c->expr);
1708 forall_restore_symtree (c->expr2);
1709 gfc_free (new_symtree->n.sym);
1710 gfc_free (new_symtree);
1714 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1715 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1716 indicates whether we should generate code to test the FORALLs mask
1717 array. OUTER is the loop header to be used for initializing mask
1718 indices.
1720 The generated loop format is:
1721 count = (end - start + step) / step
1722 loopvar = start
1723 while (1)
1725 if (count <=0 )
1726 goto end_of_loop
1727 <body>
1728 loopvar += step
1729 count --
1731 end_of_loop: */
1733 static tree
1734 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1735 int mask_flag, stmtblock_t *outer)
1737 int n, nvar;
1738 tree tmp;
1739 tree cond;
1740 stmtblock_t block;
1741 tree exit_label;
1742 tree count;
1743 tree var, start, end, step;
1744 iter_info *iter;
1746 /* Initialize the mask index outside the FORALL nest. */
1747 if (mask_flag && forall_tmp->mask)
1748 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
1750 iter = forall_tmp->this_loop;
1751 nvar = forall_tmp->nvar;
1752 for (n = 0; n < nvar; n++)
1754 var = iter->var;
1755 start = iter->start;
1756 end = iter->end;
1757 step = iter->step;
1759 exit_label = gfc_build_label_decl (NULL_TREE);
1760 TREE_USED (exit_label) = 1;
1762 /* The loop counter. */
1763 count = gfc_create_var (TREE_TYPE (var), "count");
1765 /* The body of the loop. */
1766 gfc_init_block (&block);
1768 /* The exit condition. */
1769 cond = fold_build2 (LE_EXPR, boolean_type_node,
1770 count, build_int_cst (TREE_TYPE (count), 0));
1771 tmp = build1_v (GOTO_EXPR, exit_label);
1772 tmp = fold_build3 (COND_EXPR, void_type_node,
1773 cond, tmp, build_empty_stmt ());
1774 gfc_add_expr_to_block (&block, tmp);
1776 /* The main loop body. */
1777 gfc_add_expr_to_block (&block, body);
1779 /* Increment the loop variable. */
1780 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1781 gfc_add_modify_expr (&block, var, tmp);
1783 /* Advance to the next mask element. Only do this for the
1784 innermost loop. */
1785 if (n == 0 && mask_flag && forall_tmp->mask)
1787 tree maskindex = forall_tmp->maskindex;
1788 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1789 maskindex, gfc_index_one_node);
1790 gfc_add_modify_expr (&block, maskindex, tmp);
1793 /* Decrement the loop counter. */
1794 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1795 build_int_cst (TREE_TYPE (var), 1));
1796 gfc_add_modify_expr (&block, count, tmp);
1798 body = gfc_finish_block (&block);
1800 /* Loop var initialization. */
1801 gfc_init_block (&block);
1802 gfc_add_modify_expr (&block, var, start);
1805 /* Initialize the loop counter. */
1806 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1807 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1808 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1809 gfc_add_modify_expr (&block, count, tmp);
1811 /* The loop expression. */
1812 tmp = build1_v (LOOP_EXPR, body);
1813 gfc_add_expr_to_block (&block, tmp);
1815 /* The exit label. */
1816 tmp = build1_v (LABEL_EXPR, exit_label);
1817 gfc_add_expr_to_block (&block, tmp);
1819 body = gfc_finish_block (&block);
1820 iter = iter->next;
1822 return body;
1826 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1827 is nonzero, the body is controlled by all masks in the forall nest.
1828 Otherwise, the innermost loop is not controlled by it's mask. This
1829 is used for initializing that mask. */
1831 static tree
1832 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1833 int mask_flag)
1835 tree tmp;
1836 stmtblock_t header;
1837 forall_info *forall_tmp;
1838 tree mask, maskindex;
1840 gfc_start_block (&header);
1842 forall_tmp = nested_forall_info;
1843 while (forall_tmp != NULL)
1845 /* Generate body with masks' control. */
1846 if (mask_flag)
1848 mask = forall_tmp->mask;
1849 maskindex = forall_tmp->maskindex;
1851 /* If a mask was specified make the assignment conditional. */
1852 if (mask)
1854 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1855 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1858 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1859 forall_tmp = forall_tmp->prev_nest;
1860 mask_flag = 1;
1863 gfc_add_expr_to_block (&header, body);
1864 return gfc_finish_block (&header);
1868 /* Allocate data for holding a temporary array. Returns either a local
1869 temporary array or a pointer variable. */
1871 static tree
1872 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1873 tree elem_type)
1875 tree tmpvar;
1876 tree type;
1877 tree tmp;
1879 if (INTEGER_CST_P (size))
1881 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1882 gfc_index_one_node);
1884 else
1885 tmp = NULL_TREE;
1887 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1888 type = build_array_type (elem_type, type);
1889 if (gfc_can_put_var_on_stack (bytesize))
1891 gcc_assert (INTEGER_CST_P (size));
1892 tmpvar = gfc_create_var (type, "temp");
1893 *pdata = NULL_TREE;
1895 else
1897 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1898 *pdata = convert (pvoid_type_node, tmpvar);
1900 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1901 gfc_add_modify_expr (pblock, tmpvar, tmp);
1903 return tmpvar;
1907 /* Generate codes to copy the temporary to the actual lhs. */
1909 static tree
1910 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1911 tree count1, tree wheremask, bool invert)
1913 gfc_ss *lss;
1914 gfc_se lse, rse;
1915 stmtblock_t block, body;
1916 gfc_loopinfo loop1;
1917 tree tmp;
1918 tree wheremaskexpr;
1920 /* Walk the lhs. */
1921 lss = gfc_walk_expr (expr);
1923 if (lss == gfc_ss_terminator)
1925 gfc_start_block (&block);
1927 gfc_init_se (&lse, NULL);
1929 /* Translate the expression. */
1930 gfc_conv_expr (&lse, expr);
1932 /* Form the expression for the temporary. */
1933 tmp = gfc_build_array_ref (tmp1, count1, NULL);
1935 /* Use the scalar assignment as is. */
1936 gfc_add_block_to_block (&block, &lse.pre);
1937 gfc_add_modify_expr (&block, lse.expr, tmp);
1938 gfc_add_block_to_block (&block, &lse.post);
1940 /* Increment the count1. */
1941 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1942 gfc_index_one_node);
1943 gfc_add_modify_expr (&block, count1, tmp);
1945 tmp = gfc_finish_block (&block);
1947 else
1949 gfc_start_block (&block);
1951 gfc_init_loopinfo (&loop1);
1952 gfc_init_se (&rse, NULL);
1953 gfc_init_se (&lse, NULL);
1955 /* Associate the lss with the loop. */
1956 gfc_add_ss_to_loop (&loop1, lss);
1958 /* Calculate the bounds of the scalarization. */
1959 gfc_conv_ss_startstride (&loop1);
1960 /* Setup the scalarizing loops. */
1961 gfc_conv_loop_setup (&loop1);
1963 gfc_mark_ss_chain_used (lss, 1);
1965 /* Start the scalarized loop body. */
1966 gfc_start_scalarized_body (&loop1, &body);
1968 /* Setup the gfc_se structures. */
1969 gfc_copy_loopinfo_to_se (&lse, &loop1);
1970 lse.ss = lss;
1972 /* Form the expression of the temporary. */
1973 if (lss != gfc_ss_terminator)
1974 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
1975 /* Translate expr. */
1976 gfc_conv_expr (&lse, expr);
1978 /* Use the scalar assignment. */
1979 rse.string_length = lse.string_length;
1980 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1982 /* Form the mask expression according to the mask tree list. */
1983 if (wheremask)
1985 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
1986 if (invert)
1987 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1988 TREE_TYPE (wheremaskexpr),
1989 wheremaskexpr);
1990 tmp = fold_build3 (COND_EXPR, void_type_node,
1991 wheremaskexpr, tmp, build_empty_stmt ());
1994 gfc_add_expr_to_block (&body, tmp);
1996 /* Increment count1. */
1997 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1998 count1, gfc_index_one_node);
1999 gfc_add_modify_expr (&body, count1, tmp);
2001 /* Increment count3. */
2002 if (count3)
2004 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2005 count3, gfc_index_one_node);
2006 gfc_add_modify_expr (&body, count3, tmp);
2009 /* Generate the copying loops. */
2010 gfc_trans_scalarizing_loops (&loop1, &body);
2011 gfc_add_block_to_block (&block, &loop1.pre);
2012 gfc_add_block_to_block (&block, &loop1.post);
2013 gfc_cleanup_loop (&loop1);
2015 tmp = gfc_finish_block (&block);
2017 return tmp;
2021 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2022 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2023 and should not be freed. WHEREMASK is the conditional execution mask
2024 whose sense may be inverted by INVERT. */
2026 static tree
2027 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2028 tree count1, gfc_ss *lss, gfc_ss *rss,
2029 tree wheremask, bool invert)
2031 stmtblock_t block, body1;
2032 gfc_loopinfo loop;
2033 gfc_se lse;
2034 gfc_se rse;
2035 tree tmp;
2036 tree wheremaskexpr;
2038 gfc_start_block (&block);
2040 gfc_init_se (&rse, NULL);
2041 gfc_init_se (&lse, NULL);
2043 if (lss == gfc_ss_terminator)
2045 gfc_init_block (&body1);
2046 gfc_conv_expr (&rse, expr2);
2047 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2049 else
2051 /* Initialize the loop. */
2052 gfc_init_loopinfo (&loop);
2054 /* We may need LSS to determine the shape of the expression. */
2055 gfc_add_ss_to_loop (&loop, lss);
2056 gfc_add_ss_to_loop (&loop, rss);
2058 gfc_conv_ss_startstride (&loop);
2059 gfc_conv_loop_setup (&loop);
2061 gfc_mark_ss_chain_used (rss, 1);
2062 /* Start the loop body. */
2063 gfc_start_scalarized_body (&loop, &body1);
2065 /* Translate the expression. */
2066 gfc_copy_loopinfo_to_se (&rse, &loop);
2067 rse.ss = rss;
2068 gfc_conv_expr (&rse, expr2);
2070 /* Form the expression of the temporary. */
2071 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2074 /* Use the scalar assignment. */
2075 lse.string_length = rse.string_length;
2076 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2077 expr2->expr_type == EXPR_VARIABLE);
2079 /* Form the mask expression according to the mask tree list. */
2080 if (wheremask)
2082 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2083 if (invert)
2084 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2085 TREE_TYPE (wheremaskexpr),
2086 wheremaskexpr);
2087 tmp = fold_build3 (COND_EXPR, void_type_node,
2088 wheremaskexpr, tmp, build_empty_stmt ());
2091 gfc_add_expr_to_block (&body1, tmp);
2093 if (lss == gfc_ss_terminator)
2095 gfc_add_block_to_block (&block, &body1);
2097 /* Increment count1. */
2098 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2099 gfc_index_one_node);
2100 gfc_add_modify_expr (&block, count1, tmp);
2102 else
2104 /* Increment count1. */
2105 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2106 count1, gfc_index_one_node);
2107 gfc_add_modify_expr (&body1, count1, tmp);
2109 /* Increment count3. */
2110 if (count3)
2112 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2113 count3, gfc_index_one_node);
2114 gfc_add_modify_expr (&body1, count3, tmp);
2117 /* Generate the copying loops. */
2118 gfc_trans_scalarizing_loops (&loop, &body1);
2120 gfc_add_block_to_block (&block, &loop.pre);
2121 gfc_add_block_to_block (&block, &loop.post);
2123 gfc_cleanup_loop (&loop);
2124 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2125 as tree nodes in SS may not be valid in different scope. */
2128 tmp = gfc_finish_block (&block);
2129 return tmp;
2133 /* Calculate the size of temporary needed in the assignment inside forall.
2134 LSS and RSS are filled in this function. */
2136 static tree
2137 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2138 stmtblock_t * pblock,
2139 gfc_ss **lss, gfc_ss **rss)
2141 gfc_loopinfo loop;
2142 tree size;
2143 int i;
2144 int save_flag;
2145 tree tmp;
2147 *lss = gfc_walk_expr (expr1);
2148 *rss = NULL;
2150 size = gfc_index_one_node;
2151 if (*lss != gfc_ss_terminator)
2153 gfc_init_loopinfo (&loop);
2155 /* Walk the RHS of the expression. */
2156 *rss = gfc_walk_expr (expr2);
2157 if (*rss == gfc_ss_terminator)
2159 /* The rhs is scalar. Add a ss for the expression. */
2160 *rss = gfc_get_ss ();
2161 (*rss)->next = gfc_ss_terminator;
2162 (*rss)->type = GFC_SS_SCALAR;
2163 (*rss)->expr = expr2;
2166 /* Associate the SS with the loop. */
2167 gfc_add_ss_to_loop (&loop, *lss);
2168 /* We don't actually need to add the rhs at this point, but it might
2169 make guessing the loop bounds a bit easier. */
2170 gfc_add_ss_to_loop (&loop, *rss);
2172 /* We only want the shape of the expression, not rest of the junk
2173 generated by the scalarizer. */
2174 loop.array_parameter = 1;
2176 /* Calculate the bounds of the scalarization. */
2177 save_flag = flag_bounds_check;
2178 flag_bounds_check = 0;
2179 gfc_conv_ss_startstride (&loop);
2180 flag_bounds_check = save_flag;
2181 gfc_conv_loop_setup (&loop);
2183 /* Figure out how many elements we need. */
2184 for (i = 0; i < loop.dimen; i++)
2186 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2187 gfc_index_one_node, loop.from[i]);
2188 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2189 tmp, loop.to[i]);
2190 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2192 gfc_add_block_to_block (pblock, &loop.pre);
2193 size = gfc_evaluate_now (size, pblock);
2194 gfc_add_block_to_block (pblock, &loop.post);
2196 /* TODO: write a function that cleans up a loopinfo without freeing
2197 the SS chains. Currently a NOP. */
2200 return size;
2204 /* Calculate the overall iterator number of the nested forall construct.
2205 This routine actually calculates the number of times the body of the
2206 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2207 that by the expression INNER_SIZE. The BLOCK argument specifies the
2208 block in which to calculate the result, and the optional INNER_SIZE_BODY
2209 argument contains any statements that need to executed (inside the loop)
2210 to initialize or calculate INNER_SIZE. */
2212 static tree
2213 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2214 stmtblock_t *inner_size_body, stmtblock_t *block)
2216 forall_info *forall_tmp = nested_forall_info;
2217 tree tmp, number;
2218 stmtblock_t body;
2220 /* We can eliminate the innermost unconditional loops with constant
2221 array bounds. */
2222 if (INTEGER_CST_P (inner_size))
2224 while (forall_tmp
2225 && !forall_tmp->mask
2226 && INTEGER_CST_P (forall_tmp->size))
2228 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2229 inner_size, forall_tmp->size);
2230 forall_tmp = forall_tmp->prev_nest;
2233 /* If there are no loops left, we have our constant result. */
2234 if (!forall_tmp)
2235 return inner_size;
2238 /* Otherwise, create a temporary variable to compute the result. */
2239 number = gfc_create_var (gfc_array_index_type, "num");
2240 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2242 gfc_start_block (&body);
2243 if (inner_size_body)
2244 gfc_add_block_to_block (&body, inner_size_body);
2245 if (forall_tmp)
2246 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2247 number, inner_size);
2248 else
2249 tmp = inner_size;
2250 gfc_add_modify_expr (&body, number, tmp);
2251 tmp = gfc_finish_block (&body);
2253 /* Generate loops. */
2254 if (forall_tmp != NULL)
2255 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2257 gfc_add_expr_to_block (block, tmp);
2259 return number;
2263 /* Allocate temporary for forall construct. SIZE is the size of temporary
2264 needed. PTEMP1 is returned for space free. */
2266 static tree
2267 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2268 tree * ptemp1)
2270 tree bytesize;
2271 tree unit;
2272 tree tmp;
2274 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2275 if (!integer_onep (unit))
2276 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2277 else
2278 bytesize = size;
2280 *ptemp1 = NULL;
2281 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2283 if (*ptemp1)
2284 tmp = build_fold_indirect_ref (tmp);
2285 return tmp;
2289 /* Allocate temporary for forall construct according to the information in
2290 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2291 assignment inside forall. PTEMP1 is returned for space free. */
2293 static tree
2294 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2295 tree inner_size, stmtblock_t * inner_size_body,
2296 stmtblock_t * block, tree * ptemp1)
2298 tree size;
2300 /* Calculate the total size of temporary needed in forall construct. */
2301 size = compute_overall_iter_number (nested_forall_info, inner_size,
2302 inner_size_body, block);
2304 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2308 /* Handle assignments inside forall which need temporary.
2310 forall (i=start:end:stride; maskexpr)
2311 e<i> = f<i>
2312 end forall
2313 (where e,f<i> are arbitrary expressions possibly involving i
2314 and there is a dependency between e<i> and f<i>)
2315 Translates to:
2316 masktmp(:) = maskexpr(:)
2318 maskindex = 0;
2319 count1 = 0;
2320 num = 0;
2321 for (i = start; i <= end; i += stride)
2322 num += SIZE (f<i>)
2323 count1 = 0;
2324 ALLOCATE (tmp(num))
2325 for (i = start; i <= end; i += stride)
2327 if (masktmp[maskindex++])
2328 tmp[count1++] = f<i>
2330 maskindex = 0;
2331 count1 = 0;
2332 for (i = start; i <= end; i += stride)
2334 if (masktmp[maskindex++])
2335 e<i> = tmp[count1++]
2337 DEALLOCATE (tmp)
2339 static void
2340 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2341 tree wheremask, bool invert,
2342 forall_info * nested_forall_info,
2343 stmtblock_t * block)
2345 tree type;
2346 tree inner_size;
2347 gfc_ss *lss, *rss;
2348 tree count, count1;
2349 tree tmp, tmp1;
2350 tree ptemp1;
2351 stmtblock_t inner_size_body;
2353 /* Create vars. count1 is the current iterator number of the nested
2354 forall. */
2355 count1 = gfc_create_var (gfc_array_index_type, "count1");
2357 /* Count is the wheremask index. */
2358 if (wheremask)
2360 count = gfc_create_var (gfc_array_index_type, "count");
2361 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2363 else
2364 count = NULL;
2366 /* Initialize count1. */
2367 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2369 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2370 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2371 gfc_init_block (&inner_size_body);
2372 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2373 &lss, &rss);
2375 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2376 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2378 if (!expr1->ts.cl->backend_decl)
2380 gfc_se tse;
2381 gfc_init_se (&tse, NULL);
2382 gfc_conv_expr (&tse, expr1->ts.cl->length);
2383 expr1->ts.cl->backend_decl = tse.expr;
2385 type = gfc_get_character_type_len (gfc_default_character_kind,
2386 expr1->ts.cl->backend_decl);
2388 else
2389 type = gfc_typenode_for_spec (&expr1->ts);
2391 /* Allocate temporary for nested forall construct according to the
2392 information in nested_forall_info and inner_size. */
2393 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2394 &inner_size_body, block, &ptemp1);
2396 /* Generate codes to copy rhs to the temporary . */
2397 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2398 wheremask, invert);
2400 /* Generate body and loops according to the information in
2401 nested_forall_info. */
2402 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2403 gfc_add_expr_to_block (block, tmp);
2405 /* Reset count1. */
2406 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2408 /* Reset count. */
2409 if (wheremask)
2410 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2412 /* Generate codes to copy the temporary to lhs. */
2413 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2414 wheremask, invert);
2416 /* Generate body and loops according to the information in
2417 nested_forall_info. */
2418 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2419 gfc_add_expr_to_block (block, tmp);
2421 if (ptemp1)
2423 /* Free the temporary. */
2424 tmp = gfc_call_free (ptemp1);
2425 gfc_add_expr_to_block (block, tmp);
2430 /* Translate pointer assignment inside FORALL which need temporary. */
2432 static void
2433 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2434 forall_info * nested_forall_info,
2435 stmtblock_t * block)
2437 tree type;
2438 tree inner_size;
2439 gfc_ss *lss, *rss;
2440 gfc_se lse;
2441 gfc_se rse;
2442 gfc_ss_info *info;
2443 gfc_loopinfo loop;
2444 tree desc;
2445 tree parm;
2446 tree parmtype;
2447 stmtblock_t body;
2448 tree count;
2449 tree tmp, tmp1, ptemp1;
2451 count = gfc_create_var (gfc_array_index_type, "count");
2452 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2454 inner_size = integer_one_node;
2455 lss = gfc_walk_expr (expr1);
2456 rss = gfc_walk_expr (expr2);
2457 if (lss == gfc_ss_terminator)
2459 type = gfc_typenode_for_spec (&expr1->ts);
2460 type = build_pointer_type (type);
2462 /* Allocate temporary for nested forall construct according to the
2463 information in nested_forall_info and inner_size. */
2464 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2465 inner_size, NULL, block, &ptemp1);
2466 gfc_start_block (&body);
2467 gfc_init_se (&lse, NULL);
2468 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2469 gfc_init_se (&rse, NULL);
2470 rse.want_pointer = 1;
2471 gfc_conv_expr (&rse, expr2);
2472 gfc_add_block_to_block (&body, &rse.pre);
2473 gfc_add_modify_expr (&body, lse.expr,
2474 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2475 gfc_add_block_to_block (&body, &rse.post);
2477 /* Increment count. */
2478 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2479 count, gfc_index_one_node);
2480 gfc_add_modify_expr (&body, count, tmp);
2482 tmp = gfc_finish_block (&body);
2484 /* Generate body and loops according to the information in
2485 nested_forall_info. */
2486 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2487 gfc_add_expr_to_block (block, tmp);
2489 /* Reset count. */
2490 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2492 gfc_start_block (&body);
2493 gfc_init_se (&lse, NULL);
2494 gfc_init_se (&rse, NULL);
2495 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2496 lse.want_pointer = 1;
2497 gfc_conv_expr (&lse, expr1);
2498 gfc_add_block_to_block (&body, &lse.pre);
2499 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2500 gfc_add_block_to_block (&body, &lse.post);
2501 /* Increment count. */
2502 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2503 count, gfc_index_one_node);
2504 gfc_add_modify_expr (&body, count, tmp);
2505 tmp = gfc_finish_block (&body);
2507 /* Generate body and loops according to the information in
2508 nested_forall_info. */
2509 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2510 gfc_add_expr_to_block (block, tmp);
2512 else
2514 gfc_init_loopinfo (&loop);
2516 /* Associate the SS with the loop. */
2517 gfc_add_ss_to_loop (&loop, rss);
2519 /* Setup the scalarizing loops and bounds. */
2520 gfc_conv_ss_startstride (&loop);
2522 gfc_conv_loop_setup (&loop);
2524 info = &rss->data.info;
2525 desc = info->descriptor;
2527 /* Make a new descriptor. */
2528 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2529 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2530 loop.from, loop.to, 1,
2531 GFC_ARRAY_UNKNOWN);
2533 /* Allocate temporary for nested forall construct. */
2534 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2535 inner_size, NULL, block, &ptemp1);
2536 gfc_start_block (&body);
2537 gfc_init_se (&lse, NULL);
2538 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2539 lse.direct_byref = 1;
2540 rss = gfc_walk_expr (expr2);
2541 gfc_conv_expr_descriptor (&lse, expr2, rss);
2543 gfc_add_block_to_block (&body, &lse.pre);
2544 gfc_add_block_to_block (&body, &lse.post);
2546 /* Increment count. */
2547 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2548 count, gfc_index_one_node);
2549 gfc_add_modify_expr (&body, count, tmp);
2551 tmp = gfc_finish_block (&body);
2553 /* Generate body and loops according to the information in
2554 nested_forall_info. */
2555 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2556 gfc_add_expr_to_block (block, tmp);
2558 /* Reset count. */
2559 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2561 parm = gfc_build_array_ref (tmp1, count, NULL);
2562 lss = gfc_walk_expr (expr1);
2563 gfc_init_se (&lse, NULL);
2564 gfc_conv_expr_descriptor (&lse, expr1, lss);
2565 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2566 gfc_start_block (&body);
2567 gfc_add_block_to_block (&body, &lse.pre);
2568 gfc_add_block_to_block (&body, &lse.post);
2570 /* Increment count. */
2571 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2572 count, gfc_index_one_node);
2573 gfc_add_modify_expr (&body, count, tmp);
2575 tmp = gfc_finish_block (&body);
2577 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2578 gfc_add_expr_to_block (block, tmp);
2580 /* Free the temporary. */
2581 if (ptemp1)
2583 tmp = gfc_call_free (ptemp1);
2584 gfc_add_expr_to_block (block, tmp);
/* FORALL and WHERE statements are really nasty, especially when you nest
   them.  All the rhs of a forall assignment must be evaluated before the
   actual assignments are performed.  Presumably this also applies to all the
   assignments in an inner where statement.  */
2594 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2595 linear array, relying on the fact that we process in the same order in all
2596 loops.
2598 forall (i=start:end:stride; maskexpr)
2599 e<i> = f<i>
2600 g<i> = h<i>
2601 end forall
2602 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2603 Translates to:
2604 count = ((end + 1 - start) / stride)
2605 masktmp(:) = maskexpr(:)
2607 maskindex = 0;
2608 for (i = start; i <= end; i += stride)
2610 if (masktmp[maskindex++])
2611 e<i> = f<i>
2613 maskindex = 0;
2614 for (i = start; i <= end; i += stride)
2616 if (masktmp[maskindex++])
2617 g<i> = h<i>
2620 Note that this code only works when there are no dependencies.
2621 Forall loop with array assignments and data dependencies are a real pain,
2622 because the size of the temporary cannot always be determined before the
2623 loop is executed. This problem is compounded by the presence of nested
2624 FORALL constructs.
2627 static tree
2628 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2630 stmtblock_t pre;
2631 stmtblock_t post;
2632 stmtblock_t block;
2633 stmtblock_t body;
2634 tree *var;
2635 tree *start;
2636 tree *end;
2637 tree *step;
2638 gfc_expr **varexpr;
2639 tree tmp;
2640 tree assign;
2641 tree size;
2642 tree maskindex;
2643 tree mask;
2644 tree pmask;
2645 int n;
2646 int nvar;
2647 int need_temp;
2648 gfc_forall_iterator *fa;
2649 gfc_se se;
2650 gfc_code *c;
2651 gfc_saved_var *saved_vars;
2652 iter_info *this_forall;
2653 forall_info *info;
2654 bool need_mask;
2656 /* Do nothing if the mask is false. */
2657 if (code->expr
2658 && code->expr->expr_type == EXPR_CONSTANT
2659 && !code->expr->value.logical)
2660 return build_empty_stmt ();
2662 n = 0;
2663 /* Count the FORALL index number. */
2664 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2665 n++;
2666 nvar = n;
2668 /* Allocate the space for var, start, end, step, varexpr. */
2669 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2670 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2671 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2672 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2673 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2674 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2676 /* Allocate the space for info. */
2677 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2679 gfc_start_block (&pre);
2680 gfc_init_block (&post);
2681 gfc_init_block (&block);
2683 n = 0;
2684 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2686 gfc_symbol *sym = fa->var->symtree->n.sym;
2688 /* Allocate space for this_forall. */
2689 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2691 /* Create a temporary variable for the FORALL index. */
2692 tmp = gfc_typenode_for_spec (&sym->ts);
2693 var[n] = gfc_create_var (tmp, sym->name);
2694 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2696 /* Record it in this_forall. */
2697 this_forall->var = var[n];
2699 /* Replace the index symbol's backend_decl with the temporary decl. */
2700 sym->backend_decl = var[n];
2702 /* Work out the start, end and stride for the loop. */
2703 gfc_init_se (&se, NULL);
2704 gfc_conv_expr_val (&se, fa->start);
2705 /* Record it in this_forall. */
2706 this_forall->start = se.expr;
2707 gfc_add_block_to_block (&block, &se.pre);
2708 start[n] = se.expr;
2710 gfc_init_se (&se, NULL);
2711 gfc_conv_expr_val (&se, fa->end);
2712 /* Record it in this_forall. */
2713 this_forall->end = se.expr;
2714 gfc_make_safe_expr (&se);
2715 gfc_add_block_to_block (&block, &se.pre);
2716 end[n] = se.expr;
2718 gfc_init_se (&se, NULL);
2719 gfc_conv_expr_val (&se, fa->stride);
2720 /* Record it in this_forall. */
2721 this_forall->step = se.expr;
2722 gfc_make_safe_expr (&se);
2723 gfc_add_block_to_block (&block, &se.pre);
2724 step[n] = se.expr;
2726 /* Set the NEXT field of this_forall to NULL. */
2727 this_forall->next = NULL;
2728 /* Link this_forall to the info construct. */
2729 if (info->this_loop)
2731 iter_info *iter_tmp = info->this_loop;
2732 while (iter_tmp->next != NULL)
2733 iter_tmp = iter_tmp->next;
2734 iter_tmp->next = this_forall;
2736 else
2737 info->this_loop = this_forall;
2739 n++;
2741 nvar = n;
2743 /* Calculate the size needed for the current forall level. */
2744 size = gfc_index_one_node;
2745 for (n = 0; n < nvar; n++)
2747 /* size = (end + step - start) / step. */
2748 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2749 step[n], start[n]);
2750 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2752 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2753 tmp = convert (gfc_array_index_type, tmp);
2755 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2758 /* Record the nvar and size of current forall level. */
2759 info->nvar = nvar;
2760 info->size = size;
2762 if (code->expr)
2764 /* If the mask is .true., consider the FORALL unconditional. */
2765 if (code->expr->expr_type == EXPR_CONSTANT
2766 && code->expr->value.logical)
2767 need_mask = false;
2768 else
2769 need_mask = true;
2771 else
2772 need_mask = false;
2774 /* First we need to allocate the mask. */
2775 if (need_mask)
2777 /* As the mask array can be very big, prefer compact boolean types. */
2778 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2779 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2780 size, NULL, &block, &pmask);
2781 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2783 /* Record them in the info structure. */
2784 info->maskindex = maskindex;
2785 info->mask = mask;
2787 else
2789 /* No mask was specified. */
2790 maskindex = NULL_TREE;
2791 mask = pmask = NULL_TREE;
2794 /* Link the current forall level to nested_forall_info. */
2795 info->prev_nest = nested_forall_info;
2796 nested_forall_info = info;
2798 /* Copy the mask into a temporary variable if required.
2799 For now we assume a mask temporary is needed. */
2800 if (need_mask)
2802 /* As the mask array can be very big, prefer compact boolean types. */
2803 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2805 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2807 /* Start of mask assignment loop body. */
2808 gfc_start_block (&body);
2810 /* Evaluate the mask expression. */
2811 gfc_init_se (&se, NULL);
2812 gfc_conv_expr_val (&se, code->expr);
2813 gfc_add_block_to_block (&body, &se.pre);
2815 /* Store the mask. */
2816 se.expr = convert (mask_type, se.expr);
2818 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2819 gfc_add_modify_expr (&body, tmp, se.expr);
2821 /* Advance to the next mask element. */
2822 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2823 maskindex, gfc_index_one_node);
2824 gfc_add_modify_expr (&body, maskindex, tmp);
2826 /* Generate the loops. */
2827 tmp = gfc_finish_block (&body);
2828 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2829 gfc_add_expr_to_block (&block, tmp);
2832 c = code->block->next;
2834 /* TODO: loop merging in FORALL statements. */
2835 /* Now that we've got a copy of the mask, generate the assignment loops. */
2836 while (c)
2838 switch (c->op)
2840 case EXEC_ASSIGN:
2841 /* A scalar or array assignment. DO the simple check for
2842 lhs to rhs dependencies. These make a temporary for the
2843 rhs and form a second forall block to copy to variable. */
2844 need_temp = check_forall_dependencies(c, &pre, &post);
2846 /* Temporaries due to array assignment data dependencies introduce
2847 no end of problems. */
2848 if (need_temp)
2849 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2850 nested_forall_info, &block);
2851 else
2853 /* Use the normal assignment copying routines. */
2854 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2856 /* Generate body and loops. */
2857 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2858 assign, 1);
2859 gfc_add_expr_to_block (&block, tmp);
2862 /* Cleanup any temporary symtrees that have been made to deal
2863 with dependencies. */
2864 if (new_symtree)
2865 cleanup_forall_symtrees (c);
2867 break;
2869 case EXEC_WHERE:
2870 /* Translate WHERE or WHERE construct nested in FORALL. */
2871 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2872 break;
2874 /* Pointer assignment inside FORALL. */
2875 case EXEC_POINTER_ASSIGN:
2876 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2877 if (need_temp)
2878 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2879 nested_forall_info, &block);
2880 else
2882 /* Use the normal assignment copying routines. */
2883 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2885 /* Generate body and loops. */
2886 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2887 assign, 1);
2888 gfc_add_expr_to_block (&block, tmp);
2890 break;
2892 case EXEC_FORALL:
2893 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2894 gfc_add_expr_to_block (&block, tmp);
2895 break;
2897 /* Explicit subroutine calls are prevented by the frontend but interface
2898 assignments can legitimately produce them. */
2899 case EXEC_ASSIGN_CALL:
2900 assign = gfc_trans_call (c, true);
2901 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2902 gfc_add_expr_to_block (&block, tmp);
2903 break;
2905 default:
2906 gcc_unreachable ();
2909 c = c->next;
2912 /* Restore the original index variables. */
2913 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2914 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2916 /* Free the space for var, start, end, step, varexpr. */
2917 gfc_free (var);
2918 gfc_free (start);
2919 gfc_free (end);
2920 gfc_free (step);
2921 gfc_free (varexpr);
2922 gfc_free (saved_vars);
2924 /* Free the space for this forall_info. */
2925 gfc_free (info);
2927 if (pmask)
2929 /* Free the temporary for the mask. */
2930 tmp = gfc_call_free (pmask);
2931 gfc_add_expr_to_block (&block, tmp);
2933 if (maskindex)
2934 pushdecl (maskindex);
2936 gfc_add_block_to_block (&pre, &block);
2937 gfc_add_block_to_block (&pre, &post);
2939 return gfc_finish_block (&pre);
2943 /* Translate the FORALL statement or construct. */
2945 tree gfc_trans_forall (gfc_code * code)
2947 return gfc_trans_forall_1 (code, NULL);
2951 /* Evaluate the WHERE mask expression, copy its value to a temporary.
2952 If the WHERE construct is nested in FORALL, compute the overall temporary
2953 needed by the WHERE mask expression multiplied by the iterator number of
2954 the nested forall.
2955 ME is the WHERE mask expression.
2956 MASK is the current execution mask upon input, whose sense may or may
2957 not be inverted as specified by the INVERT argument.
2958 CMASK is the updated execution mask on output, or NULL if not required.
2959 PMASK is the pending execution mask on output, or NULL if not required.
2960 BLOCK is the block in which to place the condition evaluation loops. */
2962 static void
2963 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2964 tree mask, bool invert, tree cmask, tree pmask,
2965 tree mask_type, stmtblock_t * block)
2967 tree tmp, tmp1;
2968 gfc_ss *lss, *rss;
2969 gfc_loopinfo loop;
2970 stmtblock_t body, body1;
2971 tree count, cond, mtmp;
2972 gfc_se lse, rse;
2974 gfc_init_loopinfo (&loop);
2976 lss = gfc_walk_expr (me);
2977 rss = gfc_walk_expr (me);
2979 /* Variable to index the temporary. */
2980 count = gfc_create_var (gfc_array_index_type, "count");
2981 /* Initialize count. */
2982 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2984 gfc_start_block (&body);
2986 gfc_init_se (&rse, NULL);
2987 gfc_init_se (&lse, NULL);
2989 if (lss == gfc_ss_terminator)
2991 gfc_init_block (&body1);
2993 else
2995 /* Initialize the loop. */
2996 gfc_init_loopinfo (&loop);
2998 /* We may need LSS to determine the shape of the expression. */
2999 gfc_add_ss_to_loop (&loop, lss);
3000 gfc_add_ss_to_loop (&loop, rss);
3002 gfc_conv_ss_startstride (&loop);
3003 gfc_conv_loop_setup (&loop);
3005 gfc_mark_ss_chain_used (rss, 1);
3006 /* Start the loop body. */
3007 gfc_start_scalarized_body (&loop, &body1);
3009 /* Translate the expression. */
3010 gfc_copy_loopinfo_to_se (&rse, &loop);
3011 rse.ss = rss;
3012 gfc_conv_expr (&rse, me);
3015 /* Variable to evaluate mask condition. */
3016 cond = gfc_create_var (mask_type, "cond");
3017 if (mask && (cmask || pmask))
3018 mtmp = gfc_create_var (mask_type, "mask");
3019 else mtmp = NULL_TREE;
3021 gfc_add_block_to_block (&body1, &lse.pre);
3022 gfc_add_block_to_block (&body1, &rse.pre);
3024 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
3026 if (mask && (cmask || pmask))
3028 tmp = gfc_build_array_ref (mask, count, NULL);
3029 if (invert)
3030 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3031 gfc_add_modify_expr (&body1, mtmp, tmp);
3034 if (cmask)
3036 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3037 tmp = cond;
3038 if (mask)
3039 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3040 gfc_add_modify_expr (&body1, tmp1, tmp);
3043 if (pmask)
3045 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3046 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3047 if (mask)
3048 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3049 gfc_add_modify_expr (&body1, tmp1, tmp);
3052 gfc_add_block_to_block (&body1, &lse.post);
3053 gfc_add_block_to_block (&body1, &rse.post);
3055 if (lss == gfc_ss_terminator)
3057 gfc_add_block_to_block (&body, &body1);
3059 else
3061 /* Increment count. */
3062 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3063 gfc_index_one_node);
3064 gfc_add_modify_expr (&body1, count, tmp1);
3066 /* Generate the copying loops. */
3067 gfc_trans_scalarizing_loops (&loop, &body1);
3069 gfc_add_block_to_block (&body, &loop.pre);
3070 gfc_add_block_to_block (&body, &loop.post);
3072 gfc_cleanup_loop (&loop);
3073 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3074 as tree nodes in SS may not be valid in different scope. */
3077 tmp1 = gfc_finish_block (&body);
3078 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3079 if (nested_forall_info != NULL)
3080 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3082 gfc_add_expr_to_block (block, tmp1);
3086 /* Translate an assignment statement in a WHERE statement or construct
3087 statement. The MASK expression is used to control which elements
3088 of EXPR1 shall be assigned. The sense of MASK is specified by
3089 INVERT. */
3091 static tree
3092 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3093 tree mask, bool invert,
3094 tree count1, tree count2,
3095 gfc_symbol *sym)
3097 gfc_se lse;
3098 gfc_se rse;
3099 gfc_ss *lss;
3100 gfc_ss *lss_section;
3101 gfc_ss *rss;
3103 gfc_loopinfo loop;
3104 tree tmp;
3105 stmtblock_t block;
3106 stmtblock_t body;
3107 tree index, maskexpr;
3109 #if 0
3110 /* TODO: handle this special case.
3111 Special case a single function returning an array. */
3112 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3114 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3115 if (tmp)
3116 return tmp;
3118 #endif
3120 /* Assignment of the form lhs = rhs. */
3121 gfc_start_block (&block);
3123 gfc_init_se (&lse, NULL);
3124 gfc_init_se (&rse, NULL);
3126 /* Walk the lhs. */
3127 lss = gfc_walk_expr (expr1);
3128 rss = NULL;
3130 /* In each where-assign-stmt, the mask-expr and the variable being
3131 defined shall be arrays of the same shape. */
3132 gcc_assert (lss != gfc_ss_terminator);
3134 /* The assignment needs scalarization. */
3135 lss_section = lss;
3137 /* Find a non-scalar SS from the lhs. */
3138 while (lss_section != gfc_ss_terminator
3139 && lss_section->type != GFC_SS_SECTION)
3140 lss_section = lss_section->next;
3142 gcc_assert (lss_section != gfc_ss_terminator);
3144 /* Initialize the scalarizer. */
3145 gfc_init_loopinfo (&loop);
3147 /* Walk the rhs. */
3148 rss = gfc_walk_expr (expr2);
3149 if (rss == gfc_ss_terminator)
3151 /* The rhs is scalar. Add a ss for the expression. */
3152 rss = gfc_get_ss ();
3153 rss->where = 1;
3154 rss->next = gfc_ss_terminator;
3155 rss->type = GFC_SS_SCALAR;
3156 rss->expr = expr2;
3159 /* Associate the SS with the loop. */
3160 gfc_add_ss_to_loop (&loop, lss);
3161 gfc_add_ss_to_loop (&loop, rss);
3163 /* Calculate the bounds of the scalarization. */
3164 gfc_conv_ss_startstride (&loop);
3166 /* Resolve any data dependencies in the statement. */
3167 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3169 /* Setup the scalarizing loops. */
3170 gfc_conv_loop_setup (&loop);
3172 /* Setup the gfc_se structures. */
3173 gfc_copy_loopinfo_to_se (&lse, &loop);
3174 gfc_copy_loopinfo_to_se (&rse, &loop);
3176 rse.ss = rss;
3177 gfc_mark_ss_chain_used (rss, 1);
3178 if (loop.temp_ss == NULL)
3180 lse.ss = lss;
3181 gfc_mark_ss_chain_used (lss, 1);
3183 else
3185 lse.ss = loop.temp_ss;
3186 gfc_mark_ss_chain_used (lss, 3);
3187 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3190 /* Start the scalarized loop body. */
3191 gfc_start_scalarized_body (&loop, &body);
3193 /* Translate the expression. */
3194 gfc_conv_expr (&rse, expr2);
3195 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3197 gfc_conv_tmp_array_ref (&lse);
3198 gfc_advance_se_ss_chain (&lse);
3200 else
3201 gfc_conv_expr (&lse, expr1);
3203 /* Form the mask expression according to the mask. */
3204 index = count1;
3205 maskexpr = gfc_build_array_ref (mask, index, NULL);
3206 if (invert)
3207 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3209 /* Use the scalar assignment as is. */
3210 if (sym == NULL)
3211 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3212 loop.temp_ss != NULL, false);
3213 else
3214 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3216 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3218 gfc_add_expr_to_block (&body, tmp);
3220 if (lss == gfc_ss_terminator)
3222 /* Increment count1. */
3223 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3224 count1, gfc_index_one_node);
3225 gfc_add_modify_expr (&body, count1, tmp);
3227 /* Use the scalar assignment as is. */
3228 gfc_add_block_to_block (&block, &body);
3230 else
3232 gcc_assert (lse.ss == gfc_ss_terminator
3233 && rse.ss == gfc_ss_terminator);
3235 if (loop.temp_ss != NULL)
3237 /* Increment count1 before finish the main body of a scalarized
3238 expression. */
3239 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3240 count1, gfc_index_one_node);
3241 gfc_add_modify_expr (&body, count1, tmp);
3242 gfc_trans_scalarized_loop_boundary (&loop, &body);
3244 /* We need to copy the temporary to the actual lhs. */
3245 gfc_init_se (&lse, NULL);
3246 gfc_init_se (&rse, NULL);
3247 gfc_copy_loopinfo_to_se (&lse, &loop);
3248 gfc_copy_loopinfo_to_se (&rse, &loop);
3250 rse.ss = loop.temp_ss;
3251 lse.ss = lss;
3253 gfc_conv_tmp_array_ref (&rse);
3254 gfc_advance_se_ss_chain (&rse);
3255 gfc_conv_expr (&lse, expr1);
3257 gcc_assert (lse.ss == gfc_ss_terminator
3258 && rse.ss == gfc_ss_terminator);
3260 /* Form the mask expression according to the mask tree list. */
3261 index = count2;
3262 maskexpr = gfc_build_array_ref (mask, index, NULL);
3263 if (invert)
3264 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3265 maskexpr);
3267 /* Use the scalar assignment as is. */
3268 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3269 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3270 gfc_add_expr_to_block (&body, tmp);
3272 /* Increment count2. */
3273 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3274 count2, gfc_index_one_node);
3275 gfc_add_modify_expr (&body, count2, tmp);
3277 else
3279 /* Increment count1. */
3280 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3281 count1, gfc_index_one_node);
3282 gfc_add_modify_expr (&body, count1, tmp);
3285 /* Generate the copying loops. */
3286 gfc_trans_scalarizing_loops (&loop, &body);
3288 /* Wrap the whole thing up. */
3289 gfc_add_block_to_block (&block, &loop.pre);
3290 gfc_add_block_to_block (&block, &loop.post);
3291 gfc_cleanup_loop (&loop);
3294 return gfc_finish_block (&block);
/* Translate the WHERE construct or statement.
   This function can be called iteratively to translate the nested WHERE
   construct or statement.
   MASK is the control mask.  */
3303 static void
3304 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3305 forall_info * nested_forall_info, stmtblock_t * block)
3307 stmtblock_t inner_size_body;
3308 tree inner_size, size;
3309 gfc_ss *lss, *rss;
3310 tree mask_type;
3311 gfc_expr *expr1;
3312 gfc_expr *expr2;
3313 gfc_code *cblock;
3314 gfc_code *cnext;
3315 tree tmp;
3316 tree cond;
3317 tree count1, count2;
3318 bool need_cmask;
3319 bool need_pmask;
3320 int need_temp;
3321 tree pcmask = NULL_TREE;
3322 tree ppmask = NULL_TREE;
3323 tree cmask = NULL_TREE;
3324 tree pmask = NULL_TREE;
3325 gfc_actual_arglist *arg;
3327 /* the WHERE statement or the WHERE construct statement. */
3328 cblock = code->block;
3330 /* As the mask array can be very big, prefer compact boolean types. */
3331 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3333 /* Determine which temporary masks are needed. */
3334 if (!cblock->block)
3336 /* One clause: No ELSEWHEREs. */
3337 need_cmask = (cblock->next != 0);
3338 need_pmask = false;
3340 else if (cblock->block->block)
3342 /* Three or more clauses: Conditional ELSEWHEREs. */
3343 need_cmask = true;
3344 need_pmask = true;
3346 else if (cblock->next)
3348 /* Two clauses, the first non-empty. */
3349 need_cmask = true;
3350 need_pmask = (mask != NULL_TREE
3351 && cblock->block->next != 0);
3353 else if (!cblock->block->next)
3355 /* Two clauses, both empty. */
3356 need_cmask = false;
3357 need_pmask = false;
3359 /* Two clauses, the first empty, the second non-empty. */
3360 else if (mask)
3362 need_cmask = (cblock->block->expr != 0);
3363 need_pmask = true;
3365 else
3367 need_cmask = true;
3368 need_pmask = false;
3371 if (need_cmask || need_pmask)
3373 /* Calculate the size of temporary needed by the mask-expr. */
3374 gfc_init_block (&inner_size_body);
3375 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3376 &inner_size_body, &lss, &rss);
3378 /* Calculate the total size of temporary needed. */
3379 size = compute_overall_iter_number (nested_forall_info, inner_size,
3380 &inner_size_body, block);
3382 /* Check whether the size is negative. */
3383 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3384 gfc_index_zero_node);
3385 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3386 gfc_index_zero_node, size);
3387 size = gfc_evaluate_now (size, block);
3389 /* Allocate temporary for WHERE mask if needed. */
3390 if (need_cmask)
3391 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3392 &pcmask);
3394 /* Allocate temporary for !mask if needed. */
3395 if (need_pmask)
3396 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3397 &ppmask);
3400 while (cblock)
3402 /* Each time around this loop, the where clause is conditional
3403 on the value of mask and invert, which are updated at the
3404 bottom of the loop. */
3406 /* Has mask-expr. */
3407 if (cblock->expr)
3409 /* Ensure that the WHERE mask will be evaluated exactly once.
3410 If there are no statements in this WHERE/ELSEWHERE clause,
3411 then we don't need to update the control mask (cmask).
3412 If this is the last clause of the WHERE construct, then
3413 we don't need to update the pending control mask (pmask). */
3414 if (mask)
3415 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3416 mask, invert,
3417 cblock->next ? cmask : NULL_TREE,
3418 cblock->block ? pmask : NULL_TREE,
3419 mask_type, block);
3420 else
3421 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3422 NULL_TREE, false,
3423 (cblock->next || cblock->block)
3424 ? cmask : NULL_TREE,
3425 NULL_TREE, mask_type, block);
3427 invert = false;
3429 /* It's a final elsewhere-stmt. No mask-expr is present. */
3430 else
3431 cmask = mask;
3433 /* The body of this where clause are controlled by cmask with
3434 sense specified by invert. */
3436 /* Get the assignment statement of a WHERE statement, or the first
3437 statement in where-body-construct of a WHERE construct. */
3438 cnext = cblock->next;
3439 while (cnext)
3441 switch (cnext->op)
3443 /* WHERE assignment statement. */
3444 case EXEC_ASSIGN_CALL:
3446 arg = cnext->ext.actual;
3447 expr1 = expr2 = NULL;
3448 for (; arg; arg = arg->next)
3450 if (!arg->expr)
3451 continue;
3452 if (expr1 == NULL)
3453 expr1 = arg->expr;
3454 else
3455 expr2 = arg->expr;
3457 goto evaluate;
3459 case EXEC_ASSIGN:
3460 expr1 = cnext->expr;
3461 expr2 = cnext->expr2;
3462 evaluate:
3463 if (nested_forall_info != NULL)
3465 need_temp = gfc_check_dependency (expr1, expr2, 0);
3466 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3467 gfc_trans_assign_need_temp (expr1, expr2,
3468 cmask, invert,
3469 nested_forall_info, block);
3470 else
3472 /* Variables to control maskexpr. */
3473 count1 = gfc_create_var (gfc_array_index_type, "count1");
3474 count2 = gfc_create_var (gfc_array_index_type, "count2");
3475 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3476 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3478 tmp = gfc_trans_where_assign (expr1, expr2,
3479 cmask, invert,
3480 count1, count2,
3481 cnext->resolved_sym);
3483 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3484 tmp, 1);
3485 gfc_add_expr_to_block (block, tmp);
3488 else
3490 /* Variables to control maskexpr. */
3491 count1 = gfc_create_var (gfc_array_index_type, "count1");
3492 count2 = gfc_create_var (gfc_array_index_type, "count2");
3493 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3494 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3496 tmp = gfc_trans_where_assign (expr1, expr2,
3497 cmask, invert,
3498 count1, count2,
3499 cnext->resolved_sym);
3500 gfc_add_expr_to_block (block, tmp);
3503 break;
3505 /* WHERE or WHERE construct is part of a where-body-construct. */
3506 case EXEC_WHERE:
3507 gfc_trans_where_2 (cnext, cmask, invert,
3508 nested_forall_info, block);
3509 break;
3511 default:
3512 gcc_unreachable ();
3515 /* The next statement within the same where-body-construct. */
3516 cnext = cnext->next;
3518 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3519 cblock = cblock->block;
3520 if (mask == NULL_TREE)
3522 /* If we're the initial WHERE, we can simply invert the sense
3523 of the current mask to obtain the "mask" for the remaining
3524 ELSEWHEREs. */
3525 invert = true;
3526 mask = cmask;
3528 else
3530 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3531 invert = false;
3532 mask = pmask;
3536 /* If we allocated a pending mask array, deallocate it now. */
3537 if (ppmask)
3539 tmp = gfc_call_free (ppmask);
3540 gfc_add_expr_to_block (block, tmp);
3543 /* If we allocated a current mask array, deallocate it now. */
3544 if (pcmask)
3546 tmp = gfc_call_free (pcmask);
3547 gfc_add_expr_to_block (block, tmp);
3551 /* Translate a simple WHERE construct or statement without dependencies.
3552 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3553 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3554 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3556 static tree
3557 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3559 stmtblock_t block, body;
3560 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3561 tree tmp, cexpr, tstmt, estmt;
3562 gfc_ss *css, *tdss, *tsss;
3563 gfc_se cse, tdse, tsse, edse, esse;
3564 gfc_loopinfo loop;
3565 gfc_ss *edss = 0;
3566 gfc_ss *esss = 0;
3568 cond = cblock->expr;
3569 tdst = cblock->next->expr;
3570 tsrc = cblock->next->expr2;
3571 edst = eblock ? eblock->next->expr : NULL;
3572 esrc = eblock ? eblock->next->expr2 : NULL;
3574 gfc_start_block (&block);
3575 gfc_init_loopinfo (&loop);
3577 /* Handle the condition. */
3578 gfc_init_se (&cse, NULL);
3579 css = gfc_walk_expr (cond);
3580 gfc_add_ss_to_loop (&loop, css);
3582 /* Handle the then-clause. */
3583 gfc_init_se (&tdse, NULL);
3584 gfc_init_se (&tsse, NULL);
3585 tdss = gfc_walk_expr (tdst);
3586 tsss = gfc_walk_expr (tsrc);
3587 if (tsss == gfc_ss_terminator)
3589 tsss = gfc_get_ss ();
3590 tsss->where = 1;
3591 tsss->next = gfc_ss_terminator;
3592 tsss->type = GFC_SS_SCALAR;
3593 tsss->expr = tsrc;
3595 gfc_add_ss_to_loop (&loop, tdss);
3596 gfc_add_ss_to_loop (&loop, tsss);
3598 if (eblock)
3600 /* Handle the else clause. */
3601 gfc_init_se (&edse, NULL);
3602 gfc_init_se (&esse, NULL);
3603 edss = gfc_walk_expr (edst);
3604 esss = gfc_walk_expr (esrc);
3605 if (esss == gfc_ss_terminator)
3607 esss = gfc_get_ss ();
3608 esss->where = 1;
3609 esss->next = gfc_ss_terminator;
3610 esss->type = GFC_SS_SCALAR;
3611 esss->expr = esrc;
3613 gfc_add_ss_to_loop (&loop, edss);
3614 gfc_add_ss_to_loop (&loop, esss);
3617 gfc_conv_ss_startstride (&loop);
3618 gfc_conv_loop_setup (&loop);
3620 gfc_mark_ss_chain_used (css, 1);
3621 gfc_mark_ss_chain_used (tdss, 1);
3622 gfc_mark_ss_chain_used (tsss, 1);
3623 if (eblock)
3625 gfc_mark_ss_chain_used (edss, 1);
3626 gfc_mark_ss_chain_used (esss, 1);
3629 gfc_start_scalarized_body (&loop, &body);
3631 gfc_copy_loopinfo_to_se (&cse, &loop);
3632 gfc_copy_loopinfo_to_se (&tdse, &loop);
3633 gfc_copy_loopinfo_to_se (&tsse, &loop);
3634 cse.ss = css;
3635 tdse.ss = tdss;
3636 tsse.ss = tsss;
3637 if (eblock)
3639 gfc_copy_loopinfo_to_se (&edse, &loop);
3640 gfc_copy_loopinfo_to_se (&esse, &loop);
3641 edse.ss = edss;
3642 esse.ss = esss;
3645 gfc_conv_expr (&cse, cond);
3646 gfc_add_block_to_block (&body, &cse.pre);
3647 cexpr = cse.expr;
3649 gfc_conv_expr (&tsse, tsrc);
3650 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3652 gfc_conv_tmp_array_ref (&tdse);
3653 gfc_advance_se_ss_chain (&tdse);
3655 else
3656 gfc_conv_expr (&tdse, tdst);
3658 if (eblock)
3660 gfc_conv_expr (&esse, esrc);
3661 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3663 gfc_conv_tmp_array_ref (&edse);
3664 gfc_advance_se_ss_chain (&edse);
3666 else
3667 gfc_conv_expr (&edse, edst);
3670 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3671 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3672 : build_empty_stmt ();
3673 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3674 gfc_add_expr_to_block (&body, tmp);
3675 gfc_add_block_to_block (&body, &cse.post);
3677 gfc_trans_scalarizing_loops (&loop, &body);
3678 gfc_add_block_to_block (&block, &loop.pre);
3679 gfc_add_block_to_block (&block, &loop.post);
3680 gfc_cleanup_loop (&loop);
3682 return gfc_finish_block (&block);
3685 /* As the WHERE or WHERE construct statement can be nested, we call
3686 gfc_trans_where_2 to do the translation, and pass the initial
3687 NULL values for both the control mask and the pending control mask. */
3689 tree
3690 gfc_trans_where (gfc_code * code)
3692 stmtblock_t block;
3693 gfc_code *cblock;
3694 gfc_code *eblock;
3696 cblock = code->block;
3697 if (cblock->next
3698 && cblock->next->op == EXEC_ASSIGN
3699 && !cblock->next->next)
3701 eblock = cblock->block;
3702 if (!eblock)
3704 /* A simple "WHERE (cond) x = y" statement or block is
3705 dependence free if cond is not dependent upon writing x,
3706 and the source y is unaffected by the destination x. */
3707 if (!gfc_check_dependency (cblock->next->expr,
3708 cblock->expr, 0)
3709 && !gfc_check_dependency (cblock->next->expr,
3710 cblock->next->expr2, 0))
3711 return gfc_trans_where_3 (cblock, NULL);
3713 else if (!eblock->expr
3714 && !eblock->block
3715 && eblock->next
3716 && eblock->next->op == EXEC_ASSIGN
3717 && !eblock->next->next)
3719 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3720 block is dependence free if cond is not dependent on writes
3721 to x1 and x2, y1 is not dependent on writes to x2, and y2
3722 is not dependent on writes to x1, and both y's are not
3723 dependent upon their own x's. In addition to this, the
3724 final two dependency checks below exclude all but the same
3725 array reference if the where and elswhere destinations
3726 are the same. In short, this is VERY conservative and this
3727 is needed because the two loops, required by the standard
3728 are coalesced in gfc_trans_where_3. */
3729 if (!gfc_check_dependency(cblock->next->expr,
3730 cblock->expr, 0)
3731 && !gfc_check_dependency(eblock->next->expr,
3732 cblock->expr, 0)
3733 && !gfc_check_dependency(cblock->next->expr,
3734 eblock->next->expr2, 1)
3735 && !gfc_check_dependency(eblock->next->expr,
3736 cblock->next->expr2, 1)
3737 && !gfc_check_dependency(cblock->next->expr,
3738 cblock->next->expr2, 1)
3739 && !gfc_check_dependency(eblock->next->expr,
3740 eblock->next->expr2, 1)
3741 && !gfc_check_dependency(cblock->next->expr,
3742 eblock->next->expr, 0)
3743 && !gfc_check_dependency(eblock->next->expr,
3744 cblock->next->expr, 0))
3745 return gfc_trans_where_3 (cblock, eblock);
3749 gfc_start_block (&block);
3751 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3753 return gfc_finish_block (&block);
3757 /* CYCLE a DO loop. The label decl has already been created by
3758 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3759 node at the head of the loop. We must mark the label as used. */
3761 tree
3762 gfc_trans_cycle (gfc_code * code)
3764 tree cycle_label;
3766 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3767 TREE_USED (cycle_label) = 1;
3768 return build1_v (GOTO_EXPR, cycle_label);
3772 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3773 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3774 loop. */
3776 tree
3777 gfc_trans_exit (gfc_code * code)
3779 tree exit_label;
3781 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3782 TREE_USED (exit_label) = 1;
3783 return build1_v (GOTO_EXPR, exit_label);
3787 /* Translate the ALLOCATE statement. */
3789 tree
3790 gfc_trans_allocate (gfc_code * code)
3792 gfc_alloc *al;
3793 gfc_expr *expr;
3794 gfc_se se;
3795 tree tmp;
3796 tree parm;
3797 tree stat;
3798 tree pstat;
3799 tree error_label;
3800 stmtblock_t block;
3802 if (!code->ext.alloc_list)
3803 return NULL_TREE;
3805 gfc_start_block (&block);
3807 if (code->expr)
3809 tree gfc_int4_type_node = gfc_get_int_type (4);
3811 stat = gfc_create_var (gfc_int4_type_node, "stat");
3812 pstat = build_fold_addr_expr (stat);
3814 error_label = gfc_build_label_decl (NULL_TREE);
3815 TREE_USED (error_label) = 1;
3817 else
3818 pstat = stat = error_label = NULL_TREE;
3820 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3822 expr = al->expr;
3824 gfc_init_se (&se, NULL);
3825 gfc_start_block (&se.pre);
3827 se.want_pointer = 1;
3828 se.descriptor_only = 1;
3829 gfc_conv_expr (&se, expr);
3831 if (!gfc_array_allocate (&se, expr, pstat))
3833 /* A scalar or derived type. */
3834 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3836 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3837 tmp = se.string_length;
3839 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3840 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3841 fold_convert (TREE_TYPE (se.expr), tmp));
3842 gfc_add_expr_to_block (&se.pre, tmp);
3844 if (code->expr)
3846 tmp = build1_v (GOTO_EXPR, error_label);
3847 parm = fold_build2 (NE_EXPR, boolean_type_node,
3848 stat, build_int_cst (TREE_TYPE (stat), 0));
3849 tmp = fold_build3 (COND_EXPR, void_type_node,
3850 parm, tmp, build_empty_stmt ());
3851 gfc_add_expr_to_block (&se.pre, tmp);
3854 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3856 tmp = build_fold_indirect_ref (se.expr);
3857 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3858 gfc_add_expr_to_block (&se.pre, tmp);
3863 tmp = gfc_finish_block (&se.pre);
3864 gfc_add_expr_to_block (&block, tmp);
3867 /* Assign the value to the status variable. */
3868 if (code->expr)
3870 tmp = build1_v (LABEL_EXPR, error_label);
3871 gfc_add_expr_to_block (&block, tmp);
3873 gfc_init_se (&se, NULL);
3874 gfc_conv_expr_lhs (&se, code->expr);
3875 tmp = convert (TREE_TYPE (se.expr), stat);
3876 gfc_add_modify_expr (&block, se.expr, tmp);
3879 return gfc_finish_block (&block);
3883 /* Translate a DEALLOCATE statement.
3884 There are two cases within the for loop:
3885 (1) deallocate(a1, a2, a3) is translated into the following sequence
3886 _gfortran_deallocate(a1, 0B)
3887 _gfortran_deallocate(a2, 0B)
3888 _gfortran_deallocate(a3, 0B)
3889 where the STAT= variable is passed a NULL pointer.
3890 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3891 astat = 0
3892 _gfortran_deallocate(a1, &stat)
3893 astat = astat + stat
3894 _gfortran_deallocate(a2, &stat)
3895 astat = astat + stat
3896 _gfortran_deallocate(a3, &stat)
3897 astat = astat + stat
3898 In case (1), we simply return at the end of the for loop. In case (2)
3899 we set STAT= astat. */
3900 tree
3901 gfc_trans_deallocate (gfc_code * code)
3903 gfc_se se;
3904 gfc_alloc *al;
3905 gfc_expr *expr;
3906 tree apstat, astat, pstat, stat, tmp;
3907 stmtblock_t block;
3909 gfc_start_block (&block);
3911 /* Set up the optional STAT= */
3912 if (code->expr)
3914 tree gfc_int4_type_node = gfc_get_int_type (4);
3916 /* Variable used with the library call. */
3917 stat = gfc_create_var (gfc_int4_type_node, "stat");
3918 pstat = build_fold_addr_expr (stat);
3920 /* Running total of possible deallocation failures. */
3921 astat = gfc_create_var (gfc_int4_type_node, "astat");
3922 apstat = build_fold_addr_expr (astat);
3924 /* Initialize astat to 0. */
3925 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3927 else
3928 pstat = apstat = stat = astat = NULL_TREE;
3930 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3932 expr = al->expr;
3933 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3935 gfc_init_se (&se, NULL);
3936 gfc_start_block (&se.pre);
3938 se.want_pointer = 1;
3939 se.descriptor_only = 1;
3940 gfc_conv_expr (&se, expr);
3942 if (expr->ts.type == BT_DERIVED
3943 && expr->ts.derived->attr.alloc_comp)
3945 gfc_ref *ref;
3946 gfc_ref *last = NULL;
3947 for (ref = expr->ref; ref; ref = ref->next)
3948 if (ref->type == REF_COMPONENT)
3949 last = ref;
3951 /* Do not deallocate the components of a derived type
3952 ultimate pointer component. */
3953 if (!(last && last->u.c.component->pointer)
3954 && !(!last && expr->symtree->n.sym->attr.pointer))
3956 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3957 expr->rank);
3958 gfc_add_expr_to_block (&se.pre, tmp);
3962 if (expr->rank)
3963 tmp = gfc_array_deallocate (se.expr, pstat);
3964 else
3966 tmp = gfc_deallocate_with_status (se.expr, pstat, false);
3967 gfc_add_expr_to_block (&se.pre, tmp);
3969 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
3970 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3973 gfc_add_expr_to_block (&se.pre, tmp);
3975 /* Keep track of the number of failed deallocations by adding stat
3976 of the last deallocation to the running total. */
3977 if (code->expr)
3979 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3980 gfc_add_modify_expr (&se.pre, astat, apstat);
3983 tmp = gfc_finish_block (&se.pre);
3984 gfc_add_expr_to_block (&block, tmp);
3988 /* Assign the value to the status variable. */
3989 if (code->expr)
3991 gfc_init_se (&se, NULL);
3992 gfc_conv_expr_lhs (&se, code->expr);
3993 tmp = convert (TREE_TYPE (se.expr), astat);
3994 gfc_add_modify_expr (&block, se.expr, tmp);
3997 return gfc_finish_block (&block);