tree-cfg.c (verify_expr): Check with is_gimple_address.
[official-gcc.git] / gcc / fortran / trans-stmt.c
blob5660ae6181183e55623691552658a2657656f932
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "tree-gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
42 typedef struct iter_info
44 tree var;
45 tree start;
46 tree end;
47 tree step;
48 struct iter_info *next;
50 iter_info;
52 typedef struct forall_info
54 iter_info *this_loop;
55 tree mask;
56 tree maskindex;
57 int nvar;
58 tree size;
59 struct forall_info *prev_nest;
61 forall_info;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
68 tree
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
77 is a field_decl. */
79 void
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
94 tree
95 gfc_trans_label_assign (gfc_code * code)
97 tree label_tree;
98 gfc_se se;
99 tree len;
100 tree addr;
101 tree len_tree;
102 char *label_str;
103 int label_len;
105 /* Start a new block. */
106 gfc_init_se (&se, NULL);
107 gfc_start_block (&se.pre);
108 gfc_conv_label_variable (&se, code->expr);
110 len = GFC_DECL_STRING_LEN (se.expr);
111 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
113 label_tree = gfc_get_label_decl (code->label);
115 if (code->label->defined == ST_LABEL_TARGET)
117 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
118 len_tree = integer_minus_one_node;
120 else
122 label_str = code->label->format->value.character.string;
123 label_len = code->label->format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_string_const (label_len + 1, label_str);
126 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
129 gfc_add_modify_expr (&se.pre, len, len_tree);
130 gfc_add_modify_expr (&se.pre, addr, label_tree);
132 return gfc_finish_block (&se.pre);
135 /* Translate a GOTO statement. */
137 tree
138 gfc_trans_goto (gfc_code * code)
140 locus loc = code->loc;
141 tree assigned_goto;
142 tree target;
143 tree tmp;
144 gfc_se se;
146 if (code->label != NULL)
147 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
149 /* ASSIGNED GOTO. */
150 gfc_init_se (&se, NULL);
151 gfc_start_block (&se.pre);
152 gfc_conv_label_variable (&se, code->expr);
153 tmp = GFC_DECL_STRING_LEN (se.expr);
154 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
155 build_int_cst (TREE_TYPE (tmp), -1));
156 gfc_trans_runtime_check (tmp, &se.pre, &loc,
157 "Assigned label is not a target label");
159 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
161 code = code->block;
162 if (code == NULL)
164 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
165 gfc_add_expr_to_block (&se.pre, target);
166 return gfc_finish_block (&se.pre);
169 /* Check the label list. */
172 target = gfc_get_label_decl (code->label);
173 tmp = gfc_build_addr_expr (pvoid_type_node, target);
174 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
175 tmp = build3_v (COND_EXPR, tmp,
176 fold_build1 (GOTO_EXPR, void_type_node, target),
177 build_empty_stmt ());
178 gfc_add_expr_to_block (&se.pre, tmp);
179 code = code->block;
181 while (code != NULL);
182 gfc_trans_runtime_check (boolean_true_node, &se.pre, &loc,
183 "Assigned label is not in the list");
185 return gfc_finish_block (&se.pre);
189 /* Translate an ENTRY statement. Just adds a label for this entry point. */
190 tree
191 gfc_trans_entry (gfc_code * code)
193 return build1_v (LABEL_EXPR, code->ext.entry->label);
197 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
198 elemental subroutines. Make temporaries for output arguments if any such
199 dependencies are found. Output arguments are chosen because internal_unpack
200 can be used, as is, to copy the result back to the variable. */
201 static void
202 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
203 gfc_symbol * sym, gfc_actual_arglist * arg)
205 gfc_actual_arglist *arg0;
206 gfc_expr *e;
207 gfc_formal_arglist *formal;
208 gfc_loopinfo tmp_loop;
209 gfc_se parmse;
210 gfc_ss *ss;
211 gfc_ss_info *info;
212 gfc_symbol *fsym;
213 int n;
214 stmtblock_t block;
215 tree data;
216 tree offset;
217 tree size;
218 tree tmp;
220 if (loopse->ss == NULL)
221 return;
223 ss = loopse->ss;
224 arg0 = arg;
225 formal = sym->formal;
227 /* Loop over all the arguments testing for dependencies. */
228 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
230 e = arg->expr;
231 if (e == NULL)
232 continue;
234 /* Obtain the info structure for the current argument. */
235 info = NULL;
236 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
238 if (ss->expr != e)
239 continue;
240 info = &ss->data.info;
241 break;
244 /* If there is a dependency, create a temporary and use it
245 instead of the variable. */
246 fsym = formal ? formal->sym : NULL;
247 if (e->expr_type == EXPR_VARIABLE
248 && e->rank && fsym
249 && fsym->attr.intent != INTENT_IN
250 && gfc_check_fncall_dependency (e, fsym->attr.intent,
251 sym, arg0))
253 /* Make a local loopinfo for the temporary creation, so that
254 none of the other ss->info's have to be renormalized. */
255 gfc_init_loopinfo (&tmp_loop);
256 for (n = 0; n < info->dimen; n++)
258 tmp_loop.to[n] = loopse->loop->to[n];
259 tmp_loop.from[n] = loopse->loop->from[n];
260 tmp_loop.order[n] = loopse->loop->order[n];
263 /* Generate the temporary. Merge the block so that the
264 declarations are put at the right binding level. */
265 size = gfc_create_var (gfc_array_index_type, NULL);
266 data = gfc_create_var (pvoid_type_node, NULL);
267 gfc_start_block (&block);
268 tmp = gfc_typenode_for_spec (&e->ts);
269 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
270 &tmp_loop, info, tmp,
271 false, true, false);
272 gfc_add_modify_expr (&se->pre, size, tmp);
273 tmp = fold_convert (pvoid_type_node, info->data);
274 gfc_add_modify_expr (&se->pre, data, tmp);
275 gfc_merge_block_scope (&block);
277 /* Obtain the argument descriptor for unpacking. */
278 gfc_init_se (&parmse, NULL);
279 parmse.want_pointer = 1;
280 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
281 gfc_add_block_to_block (&se->pre, &parmse.pre);
283 /* Calculate the offset for the temporary. */
284 offset = gfc_index_zero_node;
285 for (n = 0; n < info->dimen; n++)
287 tmp = gfc_conv_descriptor_stride (info->descriptor,
288 gfc_rank_cst[n]);
289 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
290 loopse->loop->from[n], tmp);
291 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
292 offset, tmp);
294 info->offset = gfc_create_var (gfc_array_index_type, NULL);
295 gfc_add_modify_expr (&se->pre, info->offset, offset);
297 /* Copy the result back using unpack. */
298 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
299 gfc_add_expr_to_block (&se->post, tmp);
301 gfc_add_block_to_block (&se->post, &parmse.post);
307 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
309 tree
310 gfc_trans_call (gfc_code * code, bool dependency_check)
312 gfc_se se;
313 gfc_ss * ss;
314 int has_alternate_specifier;
316 /* A CALL starts a new block because the actual arguments may have to
317 be evaluated first. */
318 gfc_init_se (&se, NULL);
319 gfc_start_block (&se.pre);
321 gcc_assert (code->resolved_sym);
323 ss = gfc_ss_terminator;
324 if (code->resolved_sym->attr.elemental)
325 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
327 /* Is not an elemental subroutine call with array valued arguments. */
328 if (ss == gfc_ss_terminator)
331 /* Translate the call. */
332 has_alternate_specifier
333 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
334 NULL_TREE);
336 /* A subroutine without side-effect, by definition, does nothing! */
337 TREE_SIDE_EFFECTS (se.expr) = 1;
339 /* Chain the pieces together and return the block. */
340 if (has_alternate_specifier)
342 gfc_code *select_code;
343 gfc_symbol *sym;
344 select_code = code->next;
345 gcc_assert(select_code->op == EXEC_SELECT);
346 sym = select_code->expr->symtree->n.sym;
347 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
348 if (sym->backend_decl == NULL)
349 sym->backend_decl = gfc_get_symbol_decl (sym);
350 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
352 else
353 gfc_add_expr_to_block (&se.pre, se.expr);
355 gfc_add_block_to_block (&se.pre, &se.post);
358 else
360 /* An elemental subroutine call with array valued arguments has
361 to be scalarized. */
362 gfc_loopinfo loop;
363 stmtblock_t body;
364 stmtblock_t block;
365 gfc_se loopse;
367 /* gfc_walk_elemental_function_args renders the ss chain in the
368 reverse order to the actual argument order. */
369 ss = gfc_reverse_ss (ss);
371 /* Initialize the loop. */
372 gfc_init_se (&loopse, NULL);
373 gfc_init_loopinfo (&loop);
374 gfc_add_ss_to_loop (&loop, ss);
376 gfc_conv_ss_startstride (&loop);
377 gfc_conv_loop_setup (&loop);
378 gfc_mark_ss_chain_used (ss, 1);
380 /* Convert the arguments, checking for dependencies. */
381 gfc_copy_loopinfo_to_se (&loopse, &loop);
382 loopse.ss = ss;
384 /* For operator assignment, do dependency checking. */
385 if (dependency_check)
387 gfc_symbol *sym;
388 sym = code->resolved_sym;
389 gfc_conv_elemental_dependencies (&se, &loopse, sym,
390 code->ext.actual);
393 /* Generate the loop body. */
394 gfc_start_scalarized_body (&loop, &body);
395 gfc_init_block (&block);
397 /* Add the subroutine call to the block. */
398 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
399 NULL_TREE);
400 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
402 gfc_add_block_to_block (&block, &loopse.pre);
403 gfc_add_block_to_block (&block, &loopse.post);
405 /* Finish up the loop block and the loop. */
406 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
407 gfc_trans_scalarizing_loops (&loop, &body);
408 gfc_add_block_to_block (&se.pre, &loop.pre);
409 gfc_add_block_to_block (&se.pre, &loop.post);
410 gfc_add_block_to_block (&se.pre, &se.post);
411 gfc_cleanup_loop (&loop);
414 return gfc_finish_block (&se.pre);
418 /* Translate the RETURN statement. */
420 tree
421 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
423 if (code->expr)
425 gfc_se se;
426 tree tmp;
427 tree result;
429 /* If code->expr is not NULL, this return statement must appear
430 in a subroutine and current_fake_result_decl has already
431 been generated. */
433 result = gfc_get_fake_result_decl (NULL, 0);
434 if (!result)
436 gfc_warning ("An alternate return at %L without a * dummy argument",
437 &code->expr->where);
438 return build1_v (GOTO_EXPR, gfc_get_return_label ());
441 /* Start a new block for this statement. */
442 gfc_init_se (&se, NULL);
443 gfc_start_block (&se.pre);
445 gfc_conv_expr (&se, code->expr);
447 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
448 fold_convert (TREE_TYPE (result), se.expr));
449 gfc_add_expr_to_block (&se.pre, tmp);
451 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
452 gfc_add_expr_to_block (&se.pre, tmp);
453 gfc_add_block_to_block (&se.pre, &se.post);
454 return gfc_finish_block (&se.pre);
456 else
457 return build1_v (GOTO_EXPR, gfc_get_return_label ());
461 /* Translate the PAUSE statement. We have to translate this statement
462 to a runtime library call. */
464 tree
465 gfc_trans_pause (gfc_code * code)
467 tree gfc_int4_type_node = gfc_get_int_type (4);
468 gfc_se se;
469 tree tmp;
471 /* Start a new block for this statement. */
472 gfc_init_se (&se, NULL);
473 gfc_start_block (&se.pre);
476 if (code->expr == NULL)
478 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
479 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
481 else
483 gfc_conv_expr_reference (&se, code->expr);
484 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
485 se.expr, se.string_length);
488 gfc_add_expr_to_block (&se.pre, tmp);
490 gfc_add_block_to_block (&se.pre, &se.post);
492 return gfc_finish_block (&se.pre);
496 /* Translate the STOP statement. We have to translate this statement
497 to a runtime library call. */
499 tree
500 gfc_trans_stop (gfc_code * code)
502 tree gfc_int4_type_node = gfc_get_int_type (4);
503 gfc_se se;
504 tree tmp;
506 /* Start a new block for this statement. */
507 gfc_init_se (&se, NULL);
508 gfc_start_block (&se.pre);
511 if (code->expr == NULL)
513 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
514 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
516 else
518 gfc_conv_expr_reference (&se, code->expr);
519 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
520 se.expr, se.string_length);
523 gfc_add_expr_to_block (&se.pre, tmp);
525 gfc_add_block_to_block (&se.pre, &se.post);
527 return gfc_finish_block (&se.pre);
531 /* Generate GENERIC for the IF construct. This function also deals with
532 the simple IF statement, because the front end translates the IF
533 statement into an IF construct.
535 We translate:
537 IF (cond) THEN
538 then_clause
539 ELSEIF (cond2)
540 elseif_clause
541 ELSE
542 else_clause
543 ENDIF
545 into:
547 pre_cond_s;
548 if (cond_s)
550 then_clause;
552 else
554 pre_cond_s
555 if (cond_s)
557 elseif_clause
559 else
561 else_clause;
565 where COND_S is the simplified version of the predicate. PRE_COND_S
566 are the pre side-effects produced by the translation of the
567 conditional.
568 We need to build the chain recursively otherwise we run into
569 problems with folding incomplete statements. */
571 static tree
572 gfc_trans_if_1 (gfc_code * code)
574 gfc_se if_se;
575 tree stmt, elsestmt;
577 /* Check for an unconditional ELSE clause. */
578 if (!code->expr)
579 return gfc_trans_code (code->next);
581 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
582 gfc_init_se (&if_se, NULL);
583 gfc_start_block (&if_se.pre);
585 /* Calculate the IF condition expression. */
586 gfc_conv_expr_val (&if_se, code->expr);
588 /* Translate the THEN clause. */
589 stmt = gfc_trans_code (code->next);
591 /* Translate the ELSE clause. */
592 if (code->block)
593 elsestmt = gfc_trans_if_1 (code->block);
594 else
595 elsestmt = build_empty_stmt ();
597 /* Build the condition expression and add it to the condition block. */
598 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
600 gfc_add_expr_to_block (&if_se.pre, stmt);
602 /* Finish off this statement. */
603 return gfc_finish_block (&if_se.pre);
606 tree
607 gfc_trans_if (gfc_code * code)
609 /* Ignore the top EXEC_IF, it only announces an IF construct. The
610 actual code we must translate is in code->block. */
612 return gfc_trans_if_1 (code->block);
616 /* Translate an arithmetic IF expression.
618 IF (cond) label1, label2, label3 translates to
620 if (cond <= 0)
622 if (cond < 0)
623 goto label1;
624 else // cond == 0
625 goto label2;
627 else // cond > 0
628 goto label3;
630 An optimized version can be generated in case of equal labels.
631 E.g., if label1 is equal to label2, we can translate it to
633 if (cond <= 0)
634 goto label1;
635 else
636 goto label3;
639 tree
640 gfc_trans_arithmetic_if (gfc_code * code)
642 gfc_se se;
643 tree tmp;
644 tree branch1;
645 tree branch2;
646 tree zero;
648 /* Start a new block. */
649 gfc_init_se (&se, NULL);
650 gfc_start_block (&se.pre);
652 /* Pre-evaluate COND. */
653 gfc_conv_expr_val (&se, code->expr);
654 se.expr = gfc_evaluate_now (se.expr, &se.pre);
656 /* Build something to compare with. */
657 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
659 if (code->label->value != code->label2->value)
661 /* If (cond < 0) take branch1 else take branch2.
662 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
663 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
664 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
666 if (code->label->value != code->label3->value)
667 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
668 else
669 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
671 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
673 else
674 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
676 if (code->label->value != code->label3->value
677 && code->label2->value != code->label3->value)
679 /* if (cond <= 0) take branch1 else take branch2. */
680 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
681 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
682 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
685 /* Append the COND_EXPR to the evaluation of COND, and return. */
686 gfc_add_expr_to_block (&se.pre, branch1);
687 return gfc_finish_block (&se.pre);
691 /* Translate the simple DO construct. This is where the loop variable has
692 integer type and step +-1. We can't use this in the general case
693 because integer overflow and floating point errors could give incorrect
694 results.
695 We translate a do loop from:
697 DO dovar = from, to, step
698 body
699 END DO
703 [Evaluate loop bounds and step]
704 dovar = from;
705 if ((step > 0) ? (dovar <= to) : (dovar => to))
707 for (;;)
709 body;
710 cycle_label:
711 cond = (dovar == to);
712 dovar += step;
713 if (cond) goto end_label;
716 end_label:
718 This helps the optimizers by avoiding the extra induction variable
719 used in the general case. */
721 static tree
722 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
723 tree from, tree to, tree step)
725 stmtblock_t body;
726 tree type;
727 tree cond;
728 tree tmp;
729 tree cycle_label;
730 tree exit_label;
732 type = TREE_TYPE (dovar);
734 /* Initialize the DO variable: dovar = from. */
735 gfc_add_modify_expr (pblock, dovar, from);
737 /* Cycle and exit statements are implemented with gotos. */
738 cycle_label = gfc_build_label_decl (NULL_TREE);
739 exit_label = gfc_build_label_decl (NULL_TREE);
741 /* Put the labels where they can be found later. See gfc_trans_do(). */
742 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
744 /* Loop body. */
745 gfc_start_block (&body);
747 /* Main loop body. */
748 tmp = gfc_trans_code (code->block->next);
749 gfc_add_expr_to_block (&body, tmp);
751 /* Label for cycle statements (if needed). */
752 if (TREE_USED (cycle_label))
754 tmp = build1_v (LABEL_EXPR, cycle_label);
755 gfc_add_expr_to_block (&body, tmp);
758 /* Evaluate the loop condition. */
759 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
760 cond = gfc_evaluate_now (cond, &body);
762 /* Increment the loop variable. */
763 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
764 gfc_add_modify_expr (&body, dovar, tmp);
766 /* The loop exit. */
767 tmp = build1_v (GOTO_EXPR, exit_label);
768 TREE_USED (exit_label) = 1;
769 tmp = fold_build3 (COND_EXPR, void_type_node,
770 cond, tmp, build_empty_stmt ());
771 gfc_add_expr_to_block (&body, tmp);
773 /* Finish the loop body. */
774 tmp = gfc_finish_block (&body);
775 tmp = build1_v (LOOP_EXPR, tmp);
777 /* Only execute the loop if the number of iterations is positive. */
778 if (tree_int_cst_sgn (step) > 0)
779 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
780 else
781 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
782 tmp = fold_build3 (COND_EXPR, void_type_node,
783 cond, tmp, build_empty_stmt ());
784 gfc_add_expr_to_block (pblock, tmp);
786 /* Add the exit label. */
787 tmp = build1_v (LABEL_EXPR, exit_label);
788 gfc_add_expr_to_block (pblock, tmp);
790 return gfc_finish_block (pblock);
793 /* Translate the DO construct. This obviously is one of the most
794 important ones to get right with any compiler, but especially
795 so for Fortran.
797 We special case some loop forms as described in gfc_trans_simple_do.
798 For other cases we implement them with a separate loop count,
799 as described in the standard.
801 We translate a do loop from:
803 DO dovar = from, to, step
804 body
805 END DO
809 [evaluate loop bounds and step]
810 empty = (step > 0 ? to < from : to > from);
811 countm1 = (to - from) / step;
812 dovar = from;
813 if (empty) goto exit_label;
814 for (;;)
816 body;
817 cycle_label:
818 dovar += step
819 if (countm1 ==0) goto exit_label;
820 countm1--;
822 exit_label:
824 countm1 is an unsigned integer. It is equal to the loop count minus one,
825 because the loop count itself can overflow. */
827 tree
828 gfc_trans_do (gfc_code * code)
830 gfc_se se;
831 tree dovar;
832 tree from;
833 tree to;
834 tree step;
835 tree empty;
836 tree countm1;
837 tree type;
838 tree utype;
839 tree cond;
840 tree cycle_label;
841 tree exit_label;
842 tree tmp;
843 tree pos_step;
844 stmtblock_t block;
845 stmtblock_t body;
847 gfc_start_block (&block);
849 /* Evaluate all the expressions in the iterator. */
850 gfc_init_se (&se, NULL);
851 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
852 gfc_add_block_to_block (&block, &se.pre);
853 dovar = se.expr;
854 type = TREE_TYPE (dovar);
856 gfc_init_se (&se, NULL);
857 gfc_conv_expr_val (&se, code->ext.iterator->start);
858 gfc_add_block_to_block (&block, &se.pre);
859 from = gfc_evaluate_now (se.expr, &block);
861 gfc_init_se (&se, NULL);
862 gfc_conv_expr_val (&se, code->ext.iterator->end);
863 gfc_add_block_to_block (&block, &se.pre);
864 to = gfc_evaluate_now (se.expr, &block);
866 gfc_init_se (&se, NULL);
867 gfc_conv_expr_val (&se, code->ext.iterator->step);
868 gfc_add_block_to_block (&block, &se.pre);
869 step = gfc_evaluate_now (se.expr, &block);
871 /* Special case simple loops. */
872 if (TREE_CODE (type) == INTEGER_TYPE
873 && (integer_onep (step)
874 || tree_int_cst_equal (step, integer_minus_one_node)))
875 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
877 /* We need a special check for empty loops:
878 empty = (step > 0 ? to < from : to > from); */
879 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
880 fold_convert (type, integer_zero_node));
881 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
882 fold_build2 (LT_EXPR, boolean_type_node, to, from),
883 fold_build2 (GT_EXPR, boolean_type_node, to, from));
885 /* Initialize loop count. This code is executed before we enter the
886 loop body. We generate: countm1 = abs(to - from) / abs(step). */
887 if (TREE_CODE (type) == INTEGER_TYPE)
889 tree ustep;
891 utype = unsigned_type_for (type);
893 /* tmp = abs(to - from) / abs(step) */
894 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
895 tmp = fold_build3 (COND_EXPR, type, pos_step,
896 fold_build2 (MINUS_EXPR, type, to, from),
897 fold_build2 (MINUS_EXPR, type, from, to));
898 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
899 ustep);
901 else
903 /* TODO: We could use the same width as the real type.
904 This would probably cause more problems that it solves
905 when we implement "long double" types. */
906 utype = unsigned_type_for (gfc_array_index_type);
907 tmp = fold_build2 (MINUS_EXPR, type, to, from);
908 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
909 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
911 countm1 = gfc_create_var (utype, "countm1");
912 gfc_add_modify_expr (&block, countm1, tmp);
914 /* Cycle and exit statements are implemented with gotos. */
915 cycle_label = gfc_build_label_decl (NULL_TREE);
916 exit_label = gfc_build_label_decl (NULL_TREE);
917 TREE_USED (exit_label) = 1;
919 /* Initialize the DO variable: dovar = from. */
920 gfc_add_modify_expr (&block, dovar, from);
922 /* If the loop is empty, go directly to the exit label. */
923 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
924 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
925 gfc_add_expr_to_block (&block, tmp);
927 /* Loop body. */
928 gfc_start_block (&body);
930 /* Put these labels where they can be found later. We put the
931 labels in a TREE_LIST node (because TREE_CHAIN is already
932 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
933 label in TREE_VALUE (backend_decl). */
935 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
937 /* Main loop body. */
938 tmp = gfc_trans_code (code->block->next);
939 gfc_add_expr_to_block (&body, tmp);
941 /* Label for cycle statements (if needed). */
942 if (TREE_USED (cycle_label))
944 tmp = build1_v (LABEL_EXPR, cycle_label);
945 gfc_add_expr_to_block (&body, tmp);
948 /* Increment the loop variable. */
949 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
950 gfc_add_modify_expr (&body, dovar, tmp);
952 /* End with the loop condition. Loop until countm1 == 0. */
953 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
954 build_int_cst (utype, 0));
955 tmp = build1_v (GOTO_EXPR, exit_label);
956 tmp = fold_build3 (COND_EXPR, void_type_node,
957 cond, tmp, build_empty_stmt ());
958 gfc_add_expr_to_block (&body, tmp);
960 /* Decrement the loop count. */
961 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
962 gfc_add_modify_expr (&body, countm1, tmp);
964 /* End of loop body. */
965 tmp = gfc_finish_block (&body);
967 /* The for loop itself. */
968 tmp = build1_v (LOOP_EXPR, tmp);
969 gfc_add_expr_to_block (&block, tmp);
971 /* Add the exit label. */
972 tmp = build1_v (LABEL_EXPR, exit_label);
973 gfc_add_expr_to_block (&block, tmp);
975 return gfc_finish_block (&block);
979 /* Translate the DO WHILE construct.
981 We translate
983 DO WHILE (cond)
984 body
985 END DO
989 for ( ; ; )
991 pre_cond;
992 if (! cond) goto exit_label;
993 body;
994 cycle_label:
996 exit_label:
998 Because the evaluation of the exit condition `cond' may have side
999 effects, we can't do much for empty loop bodies. The backend optimizers
1000 should be smart enough to eliminate any dead loops. */
1002 tree
1003 gfc_trans_do_while (gfc_code * code)
1005 gfc_se cond;
1006 tree tmp;
1007 tree cycle_label;
1008 tree exit_label;
1009 stmtblock_t block;
1011 /* Everything we build here is part of the loop body. */
1012 gfc_start_block (&block);
1014 /* Cycle and exit statements are implemented with gotos. */
1015 cycle_label = gfc_build_label_decl (NULL_TREE);
1016 exit_label = gfc_build_label_decl (NULL_TREE);
1018 /* Put the labels where they can be found later. See gfc_trans_do(). */
1019 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1021 /* Create a GIMPLE version of the exit condition. */
1022 gfc_init_se (&cond, NULL);
1023 gfc_conv_expr_val (&cond, code->expr);
1024 gfc_add_block_to_block (&block, &cond.pre);
1025 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1027 /* Build "IF (! cond) GOTO exit_label". */
1028 tmp = build1_v (GOTO_EXPR, exit_label);
1029 TREE_USED (exit_label) = 1;
1030 tmp = fold_build3 (COND_EXPR, void_type_node,
1031 cond.expr, tmp, build_empty_stmt ());
1032 gfc_add_expr_to_block (&block, tmp);
1034 /* The main body of the loop. */
1035 tmp = gfc_trans_code (code->block->next);
1036 gfc_add_expr_to_block (&block, tmp);
1038 /* Label for cycle statements (if needed). */
1039 if (TREE_USED (cycle_label))
1041 tmp = build1_v (LABEL_EXPR, cycle_label);
1042 gfc_add_expr_to_block (&block, tmp);
1045 /* End of loop body. */
1046 tmp = gfc_finish_block (&block);
1048 gfc_init_block (&block);
1049 /* Build the loop. */
1050 tmp = build1_v (LOOP_EXPR, tmp);
1051 gfc_add_expr_to_block (&block, tmp);
1053 /* Add the exit label. */
1054 tmp = build1_v (LABEL_EXPR, exit_label);
1055 gfc_add_expr_to_block (&block, tmp);
1057 return gfc_finish_block (&block);
1061 /* Translate the SELECT CASE construct for INTEGER case expressions,
1062 without killing all potential optimizations. The problem is that
1063 Fortran allows unbounded cases, but the back-end does not, so we
1064 need to intercept those before we enter the equivalent SWITCH_EXPR
1065 we can build.
1067 For example, we translate this,
1069 SELECT CASE (expr)
1070 CASE (:100,101,105:115)
1071 block_1
1072 CASE (190:199,200:)
1073 block_2
1074 CASE (300)
1075 block_3
1076 CASE DEFAULT
1077 block_4
1078 END SELECT
1080 to the GENERIC equivalent,
1082 switch (expr)
1084 case (minimum value for typeof(expr) ... 100:
1085 case 101:
1086 case 105 ... 114:
1087 block1:
1088 goto end_label;
1090 case 200 ... (maximum value for typeof(expr):
1091 case 190 ... 199:
1092 block2;
1093 goto end_label;
1095 case 300:
1096 block_3;
1097 goto end_label;
1099 default:
1100 block_4;
1101 goto end_label;
1104 end_label: */
1106 static tree
1107 gfc_trans_integer_select (gfc_code * code)
1109 gfc_code *c;
1110 gfc_case *cp;
1111 tree end_label;
1112 tree tmp;
1113 gfc_se se;
1114 stmtblock_t block;
1115 stmtblock_t body;
1117 gfc_start_block (&block);
1119 /* Calculate the switch expression. */
1120 gfc_init_se (&se, NULL);
1121 gfc_conv_expr_val (&se, code->expr);
1122 gfc_add_block_to_block (&block, &se.pre);
1124 end_label = gfc_build_label_decl (NULL_TREE);
1126 gfc_init_block (&body);
1128 for (c = code->block; c; c = c->block)
1130 for (cp = c->ext.case_list; cp; cp = cp->next)
1132 tree low, high;
1133 tree label;
1135 /* Assume it's the default case. */
1136 low = high = NULL_TREE;
1138 if (cp->low)
1140 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1141 cp->low->ts.kind);
1143 /* If there's only a lower bound, set the high bound to the
1144 maximum value of the case expression. */
1145 if (!cp->high)
1146 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1149 if (cp->high)
1151 /* Three cases are possible here:
1153 1) There is no lower bound, e.g. CASE (:N).
1154 2) There is a lower bound .NE. high bound, that is
1155 a case range, e.g. CASE (N:M) where M>N (we make
1156 sure that M>N during type resolution).
1157 3) There is a lower bound, and it has the same value
1158 as the high bound, e.g. CASE (N:N). This is our
1159 internal representation of CASE(N).
1161 In the first and second case, we need to set a value for
1162 high. In the third case, we don't because the GCC middle
1163 end represents a single case value by just letting high be
1164 a NULL_TREE. We can't do that because we need to be able
1165 to represent unbounded cases. */
1167 if (!cp->low
1168 || (cp->low
1169 && mpz_cmp (cp->low->value.integer,
1170 cp->high->value.integer) != 0))
1171 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1172 cp->high->ts.kind);
1174 /* Unbounded case. */
1175 if (!cp->low)
1176 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1179 /* Build a label. */
1180 label = gfc_build_label_decl (NULL_TREE);
1182 /* Add this case label.
1183 Add parameter 'label', make it match GCC backend. */
1184 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1185 low, high, label);
1186 gfc_add_expr_to_block (&body, tmp);
1189 /* Add the statements for this case. */
1190 tmp = gfc_trans_code (c->next);
1191 gfc_add_expr_to_block (&body, tmp);
1193 /* Break to the end of the construct. */
1194 tmp = build1_v (GOTO_EXPR, end_label);
1195 gfc_add_expr_to_block (&body, tmp);
1198 tmp = gfc_finish_block (&body);
1199 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1200 gfc_add_expr_to_block (&block, tmp);
1202 tmp = build1_v (LABEL_EXPR, end_label);
1203 gfc_add_expr_to_block (&block, tmp);
1205 return gfc_finish_block (&block);
1209 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1211 There are only two cases possible here, even though the standard
1212 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1213 .FALSE., and DEFAULT.
1215 We never generate more than two blocks here. Instead, we always
1216 try to eliminate the DEFAULT case. This way, we can translate this
1217 kind of SELECT construct to a simple
1219 if {} else {};
1221 expression in GENERIC. */
1223 static tree
1224 gfc_trans_logical_select (gfc_code * code)
1226 gfc_code *c;
1227 gfc_code *t, *f, *d;
1228 gfc_case *cp;
1229 gfc_se se;
1230 stmtblock_t block;
1232 /* Assume we don't have any cases at all. */
1233 t = f = d = NULL;
1235 /* Now see which ones we actually do have. We can have at most two
1236 cases in a single case list: one for .TRUE. and one for .FALSE.
1237 The default case is always separate. If the cases for .TRUE. and
1238 .FALSE. are in the same case list, the block for that case list
1239 always executed, and we don't generate code a COND_EXPR. */
1240 for (c = code->block; c; c = c->block)
1242 for (cp = c->ext.case_list; cp; cp = cp->next)
1244 if (cp->low)
1246 if (cp->low->value.logical == 0) /* .FALSE. */
1247 f = c;
1248 else /* if (cp->value.logical != 0), thus .TRUE. */
1249 t = c;
1251 else
1252 d = c;
1256 /* Start a new block. */
1257 gfc_start_block (&block);
1259 /* Calculate the switch expression. We always need to do this
1260 because it may have side effects. */
1261 gfc_init_se (&se, NULL);
1262 gfc_conv_expr_val (&se, code->expr);
1263 gfc_add_block_to_block (&block, &se.pre);
1265 if (t == f && t != NULL)
1267 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1268 translate the code for these cases, append it to the current
1269 block. */
1270 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1272 else
1274 tree true_tree, false_tree, stmt;
1276 true_tree = build_empty_stmt ();
1277 false_tree = build_empty_stmt ();
1279 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1280 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1281 make the missing case the default case. */
1282 if (t != NULL && f != NULL)
1283 d = NULL;
1284 else if (d != NULL)
1286 if (t == NULL)
1287 t = d;
1288 else
1289 f = d;
1292 /* Translate the code for each of these blocks, and append it to
1293 the current block. */
1294 if (t != NULL)
1295 true_tree = gfc_trans_code (t->next);
1297 if (f != NULL)
1298 false_tree = gfc_trans_code (f->next);
1300 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1301 true_tree, false_tree);
1302 gfc_add_expr_to_block (&block, stmt);
1305 return gfc_finish_block (&block);
1309 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1310 Instead of generating compares and jumps, it is far simpler to
1311 generate a data structure describing the cases in order and call a
1312 library subroutine that locates the right case.
1313 This is particularly true because this is the only case where we
1314 might have to dispose of a temporary.
1315 The library subroutine returns a pointer to jump to or NULL if no
1316 branches are to be taken. */
1318 static tree
1319 gfc_trans_character_select (gfc_code *code)
1321 tree init, node, end_label, tmp, type, case_num, label;
1322 stmtblock_t block, body;
1323 gfc_case *cp, *d;
1324 gfc_code *c;
1325 gfc_se se;
1326 int n;
1328 static tree select_struct;
1329 static tree ss_string1, ss_string1_len;
1330 static tree ss_string2, ss_string2_len;
1331 static tree ss_target;
1333 if (select_struct == NULL)
1335 tree gfc_int4_type_node = gfc_get_int_type (4);
1337 select_struct = make_node (RECORD_TYPE);
1338 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1340 #undef ADD_FIELD
1341 #define ADD_FIELD(NAME, TYPE) \
1342 ss_##NAME = gfc_add_field_to_struct \
1343 (&(TYPE_FIELDS (select_struct)), select_struct, \
1344 get_identifier (stringize(NAME)), TYPE)
1346 ADD_FIELD (string1, pchar_type_node);
1347 ADD_FIELD (string1_len, gfc_int4_type_node);
1349 ADD_FIELD (string2, pchar_type_node);
1350 ADD_FIELD (string2_len, gfc_int4_type_node);
1352 ADD_FIELD (target, integer_type_node);
1353 #undef ADD_FIELD
1355 gfc_finish_type (select_struct);
1358 cp = code->block->ext.case_list;
1359 while (cp->left != NULL)
1360 cp = cp->left;
1362 n = 0;
1363 for (d = cp; d; d = d->right)
1364 d->n = n++;
1366 end_label = gfc_build_label_decl (NULL_TREE);
1368 /* Generate the body */
1369 gfc_start_block (&block);
1370 gfc_init_block (&body);
1372 for (c = code->block; c; c = c->block)
1374 for (d = c->ext.case_list; d; d = d->next)
1376 label = gfc_build_label_decl (NULL_TREE);
1377 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1378 build_int_cst (NULL_TREE, d->n),
1379 build_int_cst (NULL_TREE, d->n), label);
1380 gfc_add_expr_to_block (&body, tmp);
1383 tmp = gfc_trans_code (c->next);
1384 gfc_add_expr_to_block (&body, tmp);
1386 tmp = build1_v (GOTO_EXPR, end_label);
1387 gfc_add_expr_to_block (&body, tmp);
1390 /* Generate the structure describing the branches */
1391 init = NULL_TREE;
1393 for(d = cp; d; d = d->right)
1395 node = NULL_TREE;
1397 gfc_init_se (&se, NULL);
1399 if (d->low == NULL)
1401 node = tree_cons (ss_string1, null_pointer_node, node);
1402 node = tree_cons (ss_string1_len, integer_zero_node, node);
1404 else
1406 gfc_conv_expr_reference (&se, d->low);
1408 node = tree_cons (ss_string1, se.expr, node);
1409 node = tree_cons (ss_string1_len, se.string_length, node);
1412 if (d->high == NULL)
1414 node = tree_cons (ss_string2, null_pointer_node, node);
1415 node = tree_cons (ss_string2_len, integer_zero_node, node);
1417 else
1419 gfc_init_se (&se, NULL);
1420 gfc_conv_expr_reference (&se, d->high);
1422 node = tree_cons (ss_string2, se.expr, node);
1423 node = tree_cons (ss_string2_len, se.string_length, node);
1426 node = tree_cons (ss_target, build_int_cst (integer_type_node, d->n),
1427 node);
1429 tmp = build_constructor_from_list (select_struct, nreverse (node));
1430 init = tree_cons (NULL_TREE, tmp, init);
1433 type = build_array_type (select_struct, build_index_type
1434 (build_int_cst (NULL_TREE, n - 1)));
1436 init = build_constructor_from_list (type, nreverse(init));
1437 TREE_CONSTANT (init) = 1;
1438 TREE_STATIC (init) = 1;
1439 /* Create a static variable to hold the jump table. */
1440 tmp = gfc_create_var (type, "jumptable");
1441 TREE_CONSTANT (tmp) = 1;
1442 TREE_STATIC (tmp) = 1;
1443 TREE_READONLY (tmp) = 1;
1444 DECL_INITIAL (tmp) = init;
1445 init = tmp;
1447 /* Build the library call */
1448 init = gfc_build_addr_expr (pvoid_type_node, init);
1450 gfc_init_se (&se, NULL);
1451 gfc_conv_expr_reference (&se, code->expr);
1453 gfc_add_block_to_block (&block, &se.pre);
1455 tmp = build_call_expr (gfor_fndecl_select_string, 4, init,
1456 build_int_cst (NULL_TREE, n), se.expr,
1457 se.string_length);
1458 case_num = gfc_create_var (integer_type_node, "case_num");
1459 gfc_add_modify_expr (&block, case_num, tmp);
1461 gfc_add_block_to_block (&block, &se.post);
1463 tmp = gfc_finish_block (&body);
1464 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1465 gfc_add_expr_to_block (&block, tmp);
1467 tmp = build1_v (LABEL_EXPR, end_label);
1468 gfc_add_expr_to_block (&block, tmp);
1470 return gfc_finish_block (&block);
1474 /* Translate the three variants of the SELECT CASE construct.
1476 SELECT CASEs with INTEGER case expressions can be translated to an
1477 equivalent GENERIC switch statement, and for LOGICAL case
1478 expressions we build one or two if-else compares.
1480 SELECT CASEs with CHARACTER case expressions are a whole different
1481 story, because they don't exist in GENERIC. So we sort them and
1482 do a binary search at runtime.
1484 Fortran has no BREAK statement, and it does not allow jumps from
1485 one case block to another. That makes things a lot easier for
1486 the optimizers. */
1488 tree
1489 gfc_trans_select (gfc_code * code)
1491 gcc_assert (code && code->expr);
1493 /* Empty SELECT constructs are legal. */
1494 if (code->block == NULL)
1495 return build_empty_stmt ();
1497 /* Select the correct translation function. */
1498 switch (code->expr->ts.type)
1500 case BT_LOGICAL: return gfc_trans_logical_select (code);
1501 case BT_INTEGER: return gfc_trans_integer_select (code);
1502 case BT_CHARACTER: return gfc_trans_character_select (code);
1503 default:
1504 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1505 /* Not reached */
1510 /* Traversal function to substitute a replacement symtree if the symbol
1511 in the expression is the same as that passed. f == 2 signals that
1512 that variable itself is not to be checked - only the references.
1513 This group of functions is used when the variable expression in a
1514 FORALL assignment has internal references. For example:
1515 FORALL (i = 1:4) p(p(i)) = i
1516 The only recourse here is to store a copy of 'p' for the index
1517 expression. */
1519 static gfc_symtree *new_symtree;
1520 static gfc_symtree *old_symtree;
1522 static bool
1523 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1525 if (expr->expr_type != EXPR_VARIABLE)
1526 return false;
1528 if (*f == 2)
1529 *f = 1;
1530 else if (expr->symtree->n.sym == sym)
1531 expr->symtree = new_symtree;
1533 return false;
1536 static void
1537 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1539 gfc_traverse_expr (e, sym, forall_replace, f);
1542 static bool
1543 forall_restore (gfc_expr *expr,
1544 gfc_symbol *sym ATTRIBUTE_UNUSED,
1545 int *f ATTRIBUTE_UNUSED)
1547 if (expr->expr_type != EXPR_VARIABLE)
1548 return false;
1550 if (expr->symtree == new_symtree)
1551 expr->symtree = old_symtree;
1553 return false;
1556 static void
1557 forall_restore_symtree (gfc_expr *e)
1559 gfc_traverse_expr (e, NULL, forall_restore, 0);
1562 static void
1563 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1565 gfc_se tse;
1566 gfc_se rse;
1567 gfc_expr *e;
1568 gfc_symbol *new_sym;
1569 gfc_symbol *old_sym;
1570 gfc_symtree *root;
1571 tree tmp;
1573 /* Build a copy of the lvalue. */
1574 old_symtree = c->expr->symtree;
1575 old_sym = old_symtree->n.sym;
1576 e = gfc_lval_expr_from_sym (old_sym);
1577 if (old_sym->attr.dimension)
1579 gfc_init_se (&tse, NULL);
1580 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1581 gfc_add_block_to_block (pre, &tse.pre);
1582 gfc_add_block_to_block (post, &tse.post);
1583 tse.expr = build_fold_indirect_ref (tse.expr);
1585 if (e->ts.type != BT_CHARACTER)
1587 /* Use the variable offset for the temporary. */
1588 tmp = gfc_conv_descriptor_offset (tse.expr);
1589 gfc_add_modify_expr (pre, tmp,
1590 gfc_conv_array_offset (old_sym->backend_decl));
1593 else
1595 gfc_init_se (&tse, NULL);
1596 gfc_init_se (&rse, NULL);
1597 gfc_conv_expr (&rse, e);
1598 if (e->ts.type == BT_CHARACTER)
1600 tse.string_length = rse.string_length;
1601 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1602 tse.string_length);
1603 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1604 rse.string_length);
1605 gfc_add_block_to_block (pre, &tse.pre);
1606 gfc_add_block_to_block (post, &tse.post);
1608 else
1610 tmp = gfc_typenode_for_spec (&e->ts);
1611 tse.expr = gfc_create_var (tmp, "temp");
1614 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1615 e->expr_type == EXPR_VARIABLE);
1616 gfc_add_expr_to_block (pre, tmp);
1618 gfc_free_expr (e);
1620 /* Create a new symbol to represent the lvalue. */
1621 new_sym = gfc_new_symbol (old_sym->name, NULL);
1622 new_sym->ts = old_sym->ts;
1623 new_sym->attr.referenced = 1;
1624 new_sym->attr.dimension = old_sym->attr.dimension;
1625 new_sym->attr.flavor = old_sym->attr.flavor;
1627 /* Use the temporary as the backend_decl. */
1628 new_sym->backend_decl = tse.expr;
1630 /* Create a fake symtree for it. */
1631 root = NULL;
1632 new_symtree = gfc_new_symtree (&root, old_sym->name);
1633 new_symtree->n.sym = new_sym;
1634 gcc_assert (new_symtree == root);
1636 /* Go through the expression reference replacing the old_symtree
1637 with the new. */
1638 forall_replace_symtree (c->expr, old_sym, 2);
1640 /* Now we have made this temporary, we might as well use it for
1641 the right hand side. */
1642 forall_replace_symtree (c->expr2, old_sym, 1);
1646 /* Handles dependencies in forall assignments. */
1647 static int
1648 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1650 gfc_ref *lref;
1651 gfc_ref *rref;
1652 int need_temp;
1653 gfc_symbol *lsym;
1655 lsym = c->expr->symtree->n.sym;
1656 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1658 /* Now check for dependencies within the 'variable'
1659 expression itself. These are treated by making a complete
1660 copy of variable and changing all the references to it
1661 point to the copy instead. Note that the shallow copy of
1662 the variable will not suffice for derived types with
1663 pointer components. We therefore leave these to their
1664 own devices. */
1665 if (lsym->ts.type == BT_DERIVED
1666 && lsym->ts.derived->attr.pointer_comp)
1667 return need_temp;
1669 new_symtree = NULL;
1670 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1672 forall_make_variable_temp (c, pre, post);
1673 need_temp = 0;
1676 /* Substrings with dependencies are treated in the same
1677 way. */
1678 if (c->expr->ts.type == BT_CHARACTER
1679 && c->expr->ref
1680 && c->expr2->expr_type == EXPR_VARIABLE
1681 && lsym == c->expr2->symtree->n.sym)
1683 for (lref = c->expr->ref; lref; lref = lref->next)
1684 if (lref->type == REF_SUBSTRING)
1685 break;
1686 for (rref = c->expr2->ref; rref; rref = rref->next)
1687 if (rref->type == REF_SUBSTRING)
1688 break;
1690 if (rref && lref
1691 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1693 forall_make_variable_temp (c, pre, post);
1694 need_temp = 0;
1697 return need_temp;
1701 static void
1702 cleanup_forall_symtrees (gfc_code *c)
1704 forall_restore_symtree (c->expr);
1705 forall_restore_symtree (c->expr2);
1706 gfc_free (new_symtree->n.sym);
1707 gfc_free (new_symtree);
1711 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1712 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1713 indicates whether we should generate code to test the FORALLs mask
1714 array. OUTER is the loop header to be used for initializing mask
1715 indices.
1717 The generated loop format is:
1718 count = (end - start + step) / step
1719 loopvar = start
1720 while (1)
1722 if (count <=0 )
1723 goto end_of_loop
1724 <body>
1725 loopvar += step
1726 count --
1728 end_of_loop: */
1730 static tree
1731 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1732 int mask_flag, stmtblock_t *outer)
1734 int n, nvar;
1735 tree tmp;
1736 tree cond;
1737 stmtblock_t block;
1738 tree exit_label;
1739 tree count;
1740 tree var, start, end, step;
1741 iter_info *iter;
1743 /* Initialize the mask index outside the FORALL nest. */
1744 if (mask_flag && forall_tmp->mask)
1745 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
1747 iter = forall_tmp->this_loop;
1748 nvar = forall_tmp->nvar;
1749 for (n = 0; n < nvar; n++)
1751 var = iter->var;
1752 start = iter->start;
1753 end = iter->end;
1754 step = iter->step;
1756 exit_label = gfc_build_label_decl (NULL_TREE);
1757 TREE_USED (exit_label) = 1;
1759 /* The loop counter. */
1760 count = gfc_create_var (TREE_TYPE (var), "count");
1762 /* The body of the loop. */
1763 gfc_init_block (&block);
1765 /* The exit condition. */
1766 cond = fold_build2 (LE_EXPR, boolean_type_node,
1767 count, build_int_cst (TREE_TYPE (count), 0));
1768 tmp = build1_v (GOTO_EXPR, exit_label);
1769 tmp = fold_build3 (COND_EXPR, void_type_node,
1770 cond, tmp, build_empty_stmt ());
1771 gfc_add_expr_to_block (&block, tmp);
1773 /* The main loop body. */
1774 gfc_add_expr_to_block (&block, body);
1776 /* Increment the loop variable. */
1777 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1778 gfc_add_modify_expr (&block, var, tmp);
1780 /* Advance to the next mask element. Only do this for the
1781 innermost loop. */
1782 if (n == 0 && mask_flag && forall_tmp->mask)
1784 tree maskindex = forall_tmp->maskindex;
1785 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1786 maskindex, gfc_index_one_node);
1787 gfc_add_modify_expr (&block, maskindex, tmp);
1790 /* Decrement the loop counter. */
1791 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1792 build_int_cst (TREE_TYPE (var), 1));
1793 gfc_add_modify_expr (&block, count, tmp);
1795 body = gfc_finish_block (&block);
1797 /* Loop var initialization. */
1798 gfc_init_block (&block);
1799 gfc_add_modify_expr (&block, var, start);
1802 /* Initialize the loop counter. */
1803 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1804 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1805 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1806 gfc_add_modify_expr (&block, count, tmp);
1808 /* The loop expression. */
1809 tmp = build1_v (LOOP_EXPR, body);
1810 gfc_add_expr_to_block (&block, tmp);
1812 /* The exit label. */
1813 tmp = build1_v (LABEL_EXPR, exit_label);
1814 gfc_add_expr_to_block (&block, tmp);
1816 body = gfc_finish_block (&block);
1817 iter = iter->next;
1819 return body;
1823 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1824 is nonzero, the body is controlled by all masks in the forall nest.
1825 Otherwise, the innermost loop is not controlled by it's mask. This
1826 is used for initializing that mask. */
1828 static tree
1829 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1830 int mask_flag)
1832 tree tmp;
1833 stmtblock_t header;
1834 forall_info *forall_tmp;
1835 tree mask, maskindex;
1837 gfc_start_block (&header);
1839 forall_tmp = nested_forall_info;
1840 while (forall_tmp != NULL)
1842 /* Generate body with masks' control. */
1843 if (mask_flag)
1845 mask = forall_tmp->mask;
1846 maskindex = forall_tmp->maskindex;
1848 /* If a mask was specified make the assignment conditional. */
1849 if (mask)
1851 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1852 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1855 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1856 forall_tmp = forall_tmp->prev_nest;
1857 mask_flag = 1;
1860 gfc_add_expr_to_block (&header, body);
1861 return gfc_finish_block (&header);
1865 /* Allocate data for holding a temporary array. Returns either a local
1866 temporary array or a pointer variable. */
1868 static tree
1869 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1870 tree elem_type)
1872 tree tmpvar;
1873 tree type;
1874 tree tmp;
1876 if (INTEGER_CST_P (size))
1878 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1879 gfc_index_one_node);
1881 else
1882 tmp = NULL_TREE;
1884 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1885 type = build_array_type (elem_type, type);
1886 if (gfc_can_put_var_on_stack (bytesize))
1888 gcc_assert (INTEGER_CST_P (size));
1889 tmpvar = gfc_create_var (type, "temp");
1890 *pdata = NULL_TREE;
1892 else
1894 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1895 *pdata = convert (pvoid_type_node, tmpvar);
1897 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1898 gfc_add_modify_expr (pblock, tmpvar, tmp);
1900 return tmpvar;
1904 /* Generate codes to copy the temporary to the actual lhs. */
1906 static tree
1907 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1908 tree count1, tree wheremask, bool invert)
1910 gfc_ss *lss;
1911 gfc_se lse, rse;
1912 stmtblock_t block, body;
1913 gfc_loopinfo loop1;
1914 tree tmp;
1915 tree wheremaskexpr;
1917 /* Walk the lhs. */
1918 lss = gfc_walk_expr (expr);
1920 if (lss == gfc_ss_terminator)
1922 gfc_start_block (&block);
1924 gfc_init_se (&lse, NULL);
1926 /* Translate the expression. */
1927 gfc_conv_expr (&lse, expr);
1929 /* Form the expression for the temporary. */
1930 tmp = gfc_build_array_ref (tmp1, count1, NULL);
1932 /* Use the scalar assignment as is. */
1933 gfc_add_block_to_block (&block, &lse.pre);
1934 gfc_add_modify_expr (&block, lse.expr, tmp);
1935 gfc_add_block_to_block (&block, &lse.post);
1937 /* Increment the count1. */
1938 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1939 gfc_index_one_node);
1940 gfc_add_modify_expr (&block, count1, tmp);
1942 tmp = gfc_finish_block (&block);
1944 else
1946 gfc_start_block (&block);
1948 gfc_init_loopinfo (&loop1);
1949 gfc_init_se (&rse, NULL);
1950 gfc_init_se (&lse, NULL);
1952 /* Associate the lss with the loop. */
1953 gfc_add_ss_to_loop (&loop1, lss);
1955 /* Calculate the bounds of the scalarization. */
1956 gfc_conv_ss_startstride (&loop1);
1957 /* Setup the scalarizing loops. */
1958 gfc_conv_loop_setup (&loop1);
1960 gfc_mark_ss_chain_used (lss, 1);
1962 /* Start the scalarized loop body. */
1963 gfc_start_scalarized_body (&loop1, &body);
1965 /* Setup the gfc_se structures. */
1966 gfc_copy_loopinfo_to_se (&lse, &loop1);
1967 lse.ss = lss;
1969 /* Form the expression of the temporary. */
1970 if (lss != gfc_ss_terminator)
1971 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
1972 /* Translate expr. */
1973 gfc_conv_expr (&lse, expr);
1975 /* Use the scalar assignment. */
1976 rse.string_length = lse.string_length;
1977 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1979 /* Form the mask expression according to the mask tree list. */
1980 if (wheremask)
1982 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
1983 if (invert)
1984 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1985 TREE_TYPE (wheremaskexpr),
1986 wheremaskexpr);
1987 tmp = fold_build3 (COND_EXPR, void_type_node,
1988 wheremaskexpr, tmp, build_empty_stmt ());
1991 gfc_add_expr_to_block (&body, tmp);
1993 /* Increment count1. */
1994 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1995 count1, gfc_index_one_node);
1996 gfc_add_modify_expr (&body, count1, tmp);
1998 /* Increment count3. */
1999 if (count3)
2001 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2002 count3, gfc_index_one_node);
2003 gfc_add_modify_expr (&body, count3, tmp);
2006 /* Generate the copying loops. */
2007 gfc_trans_scalarizing_loops (&loop1, &body);
2008 gfc_add_block_to_block (&block, &loop1.pre);
2009 gfc_add_block_to_block (&block, &loop1.post);
2010 gfc_cleanup_loop (&loop1);
2012 tmp = gfc_finish_block (&block);
2014 return tmp;
2018 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2019 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2020 and should not be freed. WHEREMASK is the conditional execution mask
2021 whose sense may be inverted by INVERT. */
2023 static tree
2024 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2025 tree count1, gfc_ss *lss, gfc_ss *rss,
2026 tree wheremask, bool invert)
2028 stmtblock_t block, body1;
2029 gfc_loopinfo loop;
2030 gfc_se lse;
2031 gfc_se rse;
2032 tree tmp;
2033 tree wheremaskexpr;
2035 gfc_start_block (&block);
2037 gfc_init_se (&rse, NULL);
2038 gfc_init_se (&lse, NULL);
2040 if (lss == gfc_ss_terminator)
2042 gfc_init_block (&body1);
2043 gfc_conv_expr (&rse, expr2);
2044 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2046 else
2048 /* Initialize the loop. */
2049 gfc_init_loopinfo (&loop);
2051 /* We may need LSS to determine the shape of the expression. */
2052 gfc_add_ss_to_loop (&loop, lss);
2053 gfc_add_ss_to_loop (&loop, rss);
2055 gfc_conv_ss_startstride (&loop);
2056 gfc_conv_loop_setup (&loop);
2058 gfc_mark_ss_chain_used (rss, 1);
2059 /* Start the loop body. */
2060 gfc_start_scalarized_body (&loop, &body1);
2062 /* Translate the expression. */
2063 gfc_copy_loopinfo_to_se (&rse, &loop);
2064 rse.ss = rss;
2065 gfc_conv_expr (&rse, expr2);
2067 /* Form the expression of the temporary. */
2068 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2071 /* Use the scalar assignment. */
2072 lse.string_length = rse.string_length;
2073 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2074 expr2->expr_type == EXPR_VARIABLE);
2076 /* Form the mask expression according to the mask tree list. */
2077 if (wheremask)
2079 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2080 if (invert)
2081 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2082 TREE_TYPE (wheremaskexpr),
2083 wheremaskexpr);
2084 tmp = fold_build3 (COND_EXPR, void_type_node,
2085 wheremaskexpr, tmp, build_empty_stmt ());
2088 gfc_add_expr_to_block (&body1, tmp);
2090 if (lss == gfc_ss_terminator)
2092 gfc_add_block_to_block (&block, &body1);
2094 /* Increment count1. */
2095 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2096 gfc_index_one_node);
2097 gfc_add_modify_expr (&block, count1, tmp);
2099 else
2101 /* Increment count1. */
2102 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2103 count1, gfc_index_one_node);
2104 gfc_add_modify_expr (&body1, count1, tmp);
2106 /* Increment count3. */
2107 if (count3)
2109 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2110 count3, gfc_index_one_node);
2111 gfc_add_modify_expr (&body1, count3, tmp);
2114 /* Generate the copying loops. */
2115 gfc_trans_scalarizing_loops (&loop, &body1);
2117 gfc_add_block_to_block (&block, &loop.pre);
2118 gfc_add_block_to_block (&block, &loop.post);
2120 gfc_cleanup_loop (&loop);
2121 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2122 as tree nodes in SS may not be valid in a different scope. */
2125 tmp = gfc_finish_block (&block);
2126 return tmp;
2130 /* Calculate the size of the temporary needed for the assignment inside a
2131 FORALL construct. LSS and RSS are filled in by this function. */
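/* For example (illustrative): the loop below computes the element count as
   the product over all dimensions of (loop.to[i] - loop.from[i] + 1), so a
   4 x 3 array section needs a temporary of 4 * 3 = 12 elements.  */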
2133 static tree
2134 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2135 stmtblock_t * pblock,
2136 gfc_ss **lss, gfc_ss **rss)
2138 gfc_loopinfo loop;
2139 tree size;
2140 int i;
2141 int save_flag;
2142 tree tmp;
2144 *lss = gfc_walk_expr (expr1);
2145 *rss = NULL;
2147 size = gfc_index_one_node;
2148 if (*lss != gfc_ss_terminator)
2150 gfc_init_loopinfo (&loop);
2152 /* Walk the RHS of the expression. */
2153 *rss = gfc_walk_expr (expr2);
2154 if (*rss == gfc_ss_terminator)
2156 /* The rhs is scalar. Add a ss for the expression. */
2157 *rss = gfc_get_ss ();
2158 (*rss)->next = gfc_ss_terminator;
2159 (*rss)->type = GFC_SS_SCALAR;
2160 (*rss)->expr = expr2;
2163 /* Associate the SS with the loop. */
2164 gfc_add_ss_to_loop (&loop, *lss);
2165 /* We don't actually need to add the rhs at this point, but it might
2166 make guessing the loop bounds a bit easier. */
2167 gfc_add_ss_to_loop (&loop, *rss);
2169 /* We only want the shape of the expression, not the rest of the junk
2170 generated by the scalarizer. */
2171 loop.array_parameter = 1;
2173 /* Calculate the bounds of the scalarization. */
2174 save_flag = flag_bounds_check;
2175 flag_bounds_check = 0;
2176 gfc_conv_ss_startstride (&loop);
2177 flag_bounds_check = save_flag;
2178 gfc_conv_loop_setup (&loop);
2180 /* Figure out how many elements we need. */
2181 for (i = 0; i < loop.dimen; i++)
2183 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2184 gfc_index_one_node, loop.from[i]);
2185 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2186 tmp, loop.to[i]);
2187 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2189 gfc_add_block_to_block (pblock, &loop.pre);
2190 size = gfc_evaluate_now (size, pblock);
2191 gfc_add_block_to_block (pblock, &loop.post);
2193 /* TODO: write a function that cleans up a loopinfo without freeing
2194 the SS chains. Currently a NOP. */
2197 return size;
2201 /* Calculate the overall iteration count of the nested forall construct.
2202 This routine actually calculates the number of times the body of the
2203 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2204 that by the expression INNER_SIZE. The BLOCK argument specifies the
2205 block in which to calculate the result, and the optional INNER_SIZE_BODY
2206 argument contains any statements that need to be executed (inside the loop)
2207 to initialize or calculate INNER_SIZE. */
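/* For example (illustrative): for two unmasked FORALL levels with constant
   sizes 10 and 5, the constant-folding path below simply returns
   10 * 5 * INNER_SIZE; otherwise a temporary "num" is accumulated at run
   time inside the generated nest of loops.  */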
2209 static tree
2210 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2211 stmtblock_t *inner_size_body, stmtblock_t *block)
2213 forall_info *forall_tmp = nested_forall_info;
2214 tree tmp, number;
2215 stmtblock_t body;
2217 /* We can eliminate the innermost unconditional loops with constant
2218 array bounds. */
2219 if (INTEGER_CST_P (inner_size))
2221 while (forall_tmp
2222 && !forall_tmp->mask
2223 && INTEGER_CST_P (forall_tmp->size))
2225 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2226 inner_size, forall_tmp->size);
2227 forall_tmp = forall_tmp->prev_nest;
2230 /* If there are no loops left, we have our constant result. */
2231 if (!forall_tmp)
2232 return inner_size;
2235 /* Otherwise, create a temporary variable to compute the result. */
2236 number = gfc_create_var (gfc_array_index_type, "num");
2237 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2239 gfc_start_block (&body);
2240 if (inner_size_body)
2241 gfc_add_block_to_block (&body, inner_size_body);
2242 if (forall_tmp)
2243 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2244 number, inner_size);
2245 else
2246 tmp = inner_size;
2247 gfc_add_modify_expr (&body, number, tmp);
2248 tmp = gfc_finish_block (&body);
2250 /* Generate loops. */
2251 if (forall_tmp != NULL)
2252 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2254 gfc_add_expr_to_block (block, tmp);
2256 return number;
2260 /* Allocate a temporary for the FORALL construct. SIZE is the size of the
2261 temporary needed. PTEMP1 is returned so the space can be freed later. */
2263 static tree
2264 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2265 tree * ptemp1)
2267 tree bytesize;
2268 tree unit;
2269 tree tmp;
2271 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2272 if (!integer_onep (unit))
2273 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2274 else
2275 bytesize = size;
2277 *ptemp1 = NULL;
2278 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2280 if (*ptemp1)
2281 tmp = build_fold_indirect_ref (tmp);
2282 return tmp;
2286 /* Allocate a temporary for the FORALL construct according to the information
2287 in nested_forall_info. INNER_SIZE is the size of the temporary needed for
2288 the assignment inside the forall. PTEMP1 is returned so the space can be freed. */
2290 static tree
2291 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2292 tree inner_size, stmtblock_t * inner_size_body,
2293 stmtblock_t * block, tree * ptemp1)
2295 tree size;
2297 /* Calculate the total size of temporary needed in forall construct. */
2298 size = compute_overall_iter_number (nested_forall_info, inner_size,
2299 inner_size_body, block);
2301 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2305 /* Handle assignments inside a FORALL construct which need a temporary.
2307 forall (i=start:end:stride; maskexpr)
2308 e<i> = f<i>
2309 end forall
2310 (where e,f<i> are arbitrary expressions possibly involving i
2311 and there is a dependency between e<i> and f<i>)
2312 Translates to:
2313 masktmp(:) = maskexpr(:)
2315 maskindex = 0;
2316 count1 = 0;
2317 num = 0;
2318 for (i = start; i <= end; i += stride)
2319 num += SIZE (f<i>)
2320 count1 = 0;
2321 ALLOCATE (tmp(num))
2322 for (i = start; i <= end; i += stride)
2324 if (masktmp[maskindex++])
2325 tmp[count1++] = f<i>
2327 maskindex = 0;
2328 count1 = 0;
2329 for (i = start; i <= end; i += stride)
2331 if (masktmp[maskindex++])
2332 e<i> = tmp[count1++]
2334 DEALLOCATE (tmp)
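    For example (illustrative), the dependent assignment

      forall (i=1:n)  a(i) = a(n+1-i)

    follows this scheme: every a(n+1-i) is copied into tmp before any
    element of a is overwritten.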
2336 static void
2337 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2338 tree wheremask, bool invert,
2339 forall_info * nested_forall_info,
2340 stmtblock_t * block)
2342 tree type;
2343 tree inner_size;
2344 gfc_ss *lss, *rss;
2345 tree count, count1;
2346 tree tmp, tmp1;
2347 tree ptemp1;
2348 stmtblock_t inner_size_body;
2350 /* Create vars. count1 is the current iterator number of the nested
2351 forall. */
2352 count1 = gfc_create_var (gfc_array_index_type, "count1");
2354 /* Count is the wheremask index. */
2355 if (wheremask)
2357 count = gfc_create_var (gfc_array_index_type, "count");
2358 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2360 else
2361 count = NULL;
2363 /* Initialize count1. */
2364 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2366 /* Calculate the size of the temporary needed in the assignment. Compute lss
2367 and rss, which are used later in generate_loop_for_rhs_to_temp (). */
2368 gfc_init_block (&inner_size_body);
2369 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2370 &lss, &rss);
2372 /* The type of the LHS. Used in allocate_temp_for_forall_nest (). */
2373 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2375 if (!expr1->ts.cl->backend_decl)
2377 gfc_se tse;
2378 gfc_init_se (&tse, NULL);
2379 gfc_conv_expr (&tse, expr1->ts.cl->length);
2380 expr1->ts.cl->backend_decl = tse.expr;
2382 type = gfc_get_character_type_len (gfc_default_character_kind,
2383 expr1->ts.cl->backend_decl);
2385 else
2386 type = gfc_typenode_for_spec (&expr1->ts);
2388 /* Allocate temporary for nested forall construct according to the
2389 information in nested_forall_info and inner_size. */
2390 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2391 &inner_size_body, block, &ptemp1);
2393 /* Generate code to copy the rhs to the temporary. */
2394 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2395 wheremask, invert);
2397 /* Generate body and loops according to the information in
2398 nested_forall_info. */
2399 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2400 gfc_add_expr_to_block (block, tmp);
2402 /* Reset count1. */
2403 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2405 /* Reset count. */
2406 if (wheremask)
2407 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2409 /* Generate code to copy the temporary to the lhs. */
2410 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2411 wheremask, invert);
2413 /* Generate body and loops according to the information in
2414 nested_forall_info. */
2415 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2416 gfc_add_expr_to_block (block, tmp);
2418 if (ptemp1)
2420 /* Free the temporary. */
2421 tmp = gfc_call_free (ptemp1);
2422 gfc_add_expr_to_block (block, tmp);
2427 /* Translate a pointer assignment inside FORALL which needs a temporary. */
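/* For example (illustrative), in

     forall (i=1:n)  a(i)%p => a(n+1-i)%p

   all the target addresses (or array descriptors) are first collected into
   a temporary, and only afterwards stored into the pointer components, so
   no pointer is redefined before its target has been read.  */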
2429 static void
2430 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2431 forall_info * nested_forall_info,
2432 stmtblock_t * block)
2434 tree type;
2435 tree inner_size;
2436 gfc_ss *lss, *rss;
2437 gfc_se lse;
2438 gfc_se rse;
2439 gfc_ss_info *info;
2440 gfc_loopinfo loop;
2441 tree desc;
2442 tree parm;
2443 tree parmtype;
2444 stmtblock_t body;
2445 tree count;
2446 tree tmp, tmp1, ptemp1;
2448 count = gfc_create_var (gfc_array_index_type, "count");
2449 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2451 inner_size = integer_one_node;
2452 lss = gfc_walk_expr (expr1);
2453 rss = gfc_walk_expr (expr2);
2454 if (lss == gfc_ss_terminator)
2456 type = gfc_typenode_for_spec (&expr1->ts);
2457 type = build_pointer_type (type);
2459 /* Allocate temporary for nested forall construct according to the
2460 information in nested_forall_info and inner_size. */
2461 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2462 inner_size, NULL, block, &ptemp1);
2463 gfc_start_block (&body);
2464 gfc_init_se (&lse, NULL);
2465 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2466 gfc_init_se (&rse, NULL);
2467 rse.want_pointer = 1;
2468 gfc_conv_expr (&rse, expr2);
2469 gfc_add_block_to_block (&body, &rse.pre);
2470 gfc_add_modify_expr (&body, lse.expr,
2471 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2472 gfc_add_block_to_block (&body, &rse.post);
2474 /* Increment count. */
2475 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2476 count, gfc_index_one_node);
2477 gfc_add_modify_expr (&body, count, tmp);
2479 tmp = gfc_finish_block (&body);
2481 /* Generate body and loops according to the information in
2482 nested_forall_info. */
2483 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2484 gfc_add_expr_to_block (block, tmp);
2486 /* Reset count. */
2487 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2489 gfc_start_block (&body);
2490 gfc_init_se (&lse, NULL);
2491 gfc_init_se (&rse, NULL);
2492 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2493 lse.want_pointer = 1;
2494 gfc_conv_expr (&lse, expr1);
2495 gfc_add_block_to_block (&body, &lse.pre);
2496 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2497 gfc_add_block_to_block (&body, &lse.post);
2498 /* Increment count. */
2499 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2500 count, gfc_index_one_node);
2501 gfc_add_modify_expr (&body, count, tmp);
2502 tmp = gfc_finish_block (&body);
2504 /* Generate body and loops according to the information in
2505 nested_forall_info. */
2506 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2507 gfc_add_expr_to_block (block, tmp);
2509 else
2511 gfc_init_loopinfo (&loop);
2513 /* Associate the SS with the loop. */
2514 gfc_add_ss_to_loop (&loop, rss);
2516 /* Setup the scalarizing loops and bounds. */
2517 gfc_conv_ss_startstride (&loop);
2519 gfc_conv_loop_setup (&loop);
2521 info = &rss->data.info;
2522 desc = info->descriptor;
2524 /* Make a new descriptor. */
2525 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2526 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2527 loop.from, loop.to, 1,
2528 GFC_ARRAY_UNKNOWN);
2530 /* Allocate temporary for nested forall construct. */
2531 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2532 inner_size, NULL, block, &ptemp1);
2533 gfc_start_block (&body);
2534 gfc_init_se (&lse, NULL);
2535 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2536 lse.direct_byref = 1;
2537 rss = gfc_walk_expr (expr2);
2538 gfc_conv_expr_descriptor (&lse, expr2, rss);
2540 gfc_add_block_to_block (&body, &lse.pre);
2541 gfc_add_block_to_block (&body, &lse.post);
2543 /* Increment count. */
2544 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2545 count, gfc_index_one_node);
2546 gfc_add_modify_expr (&body, count, tmp);
2548 tmp = gfc_finish_block (&body);
2550 /* Generate body and loops according to the information in
2551 nested_forall_info. */
2552 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2553 gfc_add_expr_to_block (block, tmp);
2555 /* Reset count. */
2556 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2558 parm = gfc_build_array_ref (tmp1, count, NULL);
2559 lss = gfc_walk_expr (expr1);
2560 gfc_init_se (&lse, NULL);
2561 gfc_conv_expr_descriptor (&lse, expr1, lss);
2562 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2563 gfc_start_block (&body);
2564 gfc_add_block_to_block (&body, &lse.pre);
2565 gfc_add_block_to_block (&body, &lse.post);
2567 /* Increment count. */
2568 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2569 count, gfc_index_one_node);
2570 gfc_add_modify_expr (&body, count, tmp);
2572 tmp = gfc_finish_block (&body);
2574 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2575 gfc_add_expr_to_block (block, tmp);
2577 /* Free the temporary. */
2578 if (ptemp1)
2580 tmp = gfc_call_free (ptemp1);
2581 gfc_add_expr_to_block (block, tmp);
2586 /* FORALL and WHERE statements are really nasty, especially when you nest
2587 them. Every rhs of a forall assignment must be evaluated before the
2588 actual assignments are performed. Presumably this also applies to all the
2589 assignments in an inner where statement. */
2591 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2592 linear array, relying on the fact that we process in the same order in all
2593 loops.
2595 forall (i=start:end:stride; maskexpr)
2596 e<i> = f<i>
2597 g<i> = h<i>
2598 end forall
2599 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2600 Translates to:
2601 count = ((end + 1 - start) / stride)
2602 masktmp(:) = maskexpr(:)
2604 maskindex = 0;
2605 for (i = start; i <= end; i += stride)
2607 if (masktmp[maskindex++])
2608 e<i> = f<i>
2610 maskindex = 0;
2611 for (i = start; i <= end; i += stride)
2613 if (masktmp[maskindex++])
2614 g<i> = h<i>
2617 Note that this code only works when there are no dependencies.
2618 Forall loops with array assignments and data dependencies are a real pain,
2619 because the size of the temporary cannot always be determined before the
2620 loop is executed. This problem is compounded by the presence of nested
2621 FORALL constructs.
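    For example (illustrative), the dependence-free statement

      forall (i=1:n, b(i) > 0)  a(i) = b(i)

    follows the scheme above: the mask b(i) > 0 is evaluated once into
    masktmp, and the assignment loop then tests masktmp[maskindex++].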
2624 static tree
2625 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2627 stmtblock_t pre;
2628 stmtblock_t post;
2629 stmtblock_t block;
2630 stmtblock_t body;
2631 tree *var;
2632 tree *start;
2633 tree *end;
2634 tree *step;
2635 gfc_expr **varexpr;
2636 tree tmp;
2637 tree assign;
2638 tree size;
2639 tree maskindex;
2640 tree mask;
2641 tree pmask;
2642 int n;
2643 int nvar;
2644 int need_temp;
2645 gfc_forall_iterator *fa;
2646 gfc_se se;
2647 gfc_code *c;
2648 gfc_saved_var *saved_vars;
2649 iter_info *this_forall;
2650 forall_info *info;
2651 bool need_mask;
2653 /* Do nothing if the mask is false. */
2654 if (code->expr
2655 && code->expr->expr_type == EXPR_CONSTANT
2656 && !code->expr->value.logical)
2657 return build_empty_stmt ();
2659 n = 0;
2660 /* Count the number of FORALL indices. */
2661 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2662 n++;
2663 nvar = n;
2665 /* Allocate the space for var, start, end, step, varexpr. */
2666 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2667 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2668 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2669 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2670 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2671 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2673 /* Allocate the space for info. */
2674 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2676 gfc_start_block (&pre);
2677 gfc_init_block (&post);
2678 gfc_init_block (&block);
2680 n = 0;
2681 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2683 gfc_symbol *sym = fa->var->symtree->n.sym;
2685 /* Allocate space for this_forall. */
2686 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2688 /* Create a temporary variable for the FORALL index. */
2689 tmp = gfc_typenode_for_spec (&sym->ts);
2690 var[n] = gfc_create_var (tmp, sym->name);
2691 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2693 /* Record it in this_forall. */
2694 this_forall->var = var[n];
2696 /* Replace the index symbol's backend_decl with the temporary decl. */
2697 sym->backend_decl = var[n];
2699 /* Work out the start, end and stride for the loop. */
2700 gfc_init_se (&se, NULL);
2701 gfc_conv_expr_val (&se, fa->start);
2702 /* Record it in this_forall. */
2703 this_forall->start = se.expr;
2704 gfc_add_block_to_block (&block, &se.pre);
2705 start[n] = se.expr;
2707 gfc_init_se (&se, NULL);
2708 gfc_conv_expr_val (&se, fa->end);
2709 /* Record it in this_forall. */
2710 this_forall->end = se.expr;
2711 gfc_make_safe_expr (&se);
2712 gfc_add_block_to_block (&block, &se.pre);
2713 end[n] = se.expr;
2715 gfc_init_se (&se, NULL);
2716 gfc_conv_expr_val (&se, fa->stride);
2717 /* Record it in this_forall. */
2718 this_forall->step = se.expr;
2719 gfc_make_safe_expr (&se);
2720 gfc_add_block_to_block (&block, &se.pre);
2721 step[n] = se.expr;
2723 /* Set the NEXT field of this_forall to NULL. */
2724 this_forall->next = NULL;
2725 /* Link this_forall to the info construct. */
2726 if (info->this_loop)
2728 iter_info *iter_tmp = info->this_loop;
2729 while (iter_tmp->next != NULL)
2730 iter_tmp = iter_tmp->next;
2731 iter_tmp->next = this_forall;
2733 else
2734 info->this_loop = this_forall;
2736 n++;
2738 nvar = n;
2740 /* Calculate the size needed for the current forall level. */
2741 size = gfc_index_one_node;
2742 for (n = 0; n < nvar; n++)
2744 /* size = (end + step - start) / step. */
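         /* E.g. (illustrative): for i = 1, 10, 3 this gives
            (10 + 3 - 1) / 3 = 4 iterations (i = 1, 4, 7, 10).  */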
2745 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2746 step[n], start[n]);
2747 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2749 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2750 tmp = convert (gfc_array_index_type, tmp);
2752 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2755 /* Record the nvar and size of the current forall level. */
2756 info->nvar = nvar;
2757 info->size = size;
2759 if (code->expr)
2761 /* If the mask is .true., consider the FORALL unconditional. */
2762 if (code->expr->expr_type == EXPR_CONSTANT
2763 && code->expr->value.logical)
2764 need_mask = false;
2765 else
2766 need_mask = true;
2768 else
2769 need_mask = false;
2771 /* First we need to allocate the mask. */
2772 if (need_mask)
2774 /* As the mask array can be very big, prefer compact boolean types. */
2775 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2776 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2777 size, NULL, &block, &pmask);
2778 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2780 /* Record them in the info structure. */
2781 info->maskindex = maskindex;
2782 info->mask = mask;
2784 else
2786 /* No mask was specified. */
2787 maskindex = NULL_TREE;
2788 mask = pmask = NULL_TREE;
2791 /* Link the current forall level to nested_forall_info. */
2792 info->prev_nest = nested_forall_info;
2793 nested_forall_info = info;
2795 /* Copy the mask into a temporary variable if required.
2796 For now we assume a mask temporary is needed. */
2797 if (need_mask)
2799 /* As the mask array can be very big, prefer compact boolean types. */
2800 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2802 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2804 /* Start of mask assignment loop body. */
2805 gfc_start_block (&body);
2807 /* Evaluate the mask expression. */
2808 gfc_init_se (&se, NULL);
2809 gfc_conv_expr_val (&se, code->expr);
2810 gfc_add_block_to_block (&body, &se.pre);
2812 /* Store the mask. */
2813 se.expr = convert (mask_type, se.expr);
2815 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2816 gfc_add_modify_expr (&body, tmp, se.expr);
2818 /* Advance to the next mask element. */
2819 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2820 maskindex, gfc_index_one_node);
2821 gfc_add_modify_expr (&body, maskindex, tmp);
2823 /* Generate the loops. */
2824 tmp = gfc_finish_block (&body);
2825 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2826 gfc_add_expr_to_block (&block, tmp);
2829 c = code->block->next;
2831 /* TODO: loop merging in FORALL statements. */
2832 /* Now that we've got a copy of the mask, generate the assignment loops. */
2833 while (c)
2835 switch (c->op)
2837 case EXEC_ASSIGN:
2838 /* A scalar or array assignment. Do the simple check for
2839 lhs-to-rhs dependencies. These make a temporary for the
2840 rhs and form a second forall block to copy it to the variable. */
2841 need_temp = check_forall_dependencies(c, &pre, &post);
2843 /* Temporaries due to array assignment data dependencies introduce
2844 no end of problems. */
2845 if (need_temp)
2846 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2847 nested_forall_info, &block);
2848 else
2850 /* Use the normal assignment copying routines. */
2851 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2853 /* Generate body and loops. */
2854 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2855 assign, 1);
2856 gfc_add_expr_to_block (&block, tmp);
2859 /* Clean up any temporary symtrees that have been made to deal
2860 with dependencies. */
2861 if (new_symtree)
2862 cleanup_forall_symtrees (c);
2864 break;
2866 case EXEC_WHERE:
2867 /* Translate a WHERE statement or WHERE construct nested in FORALL. */
2868 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2869 break;
2871 /* Pointer assignment inside FORALL. */
2872 case EXEC_POINTER_ASSIGN:
2873 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2874 if (need_temp)
2875 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2876 nested_forall_info, &block);
2877 else
2879 /* Use the normal assignment copying routines. */
2880 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2882 /* Generate body and loops. */
2883 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2884 assign, 1);
2885 gfc_add_expr_to_block (&block, tmp);
2887 break;
2889 case EXEC_FORALL:
2890 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2891 gfc_add_expr_to_block (&block, tmp);
2892 break;
2894 /* Explicit subroutine calls are prevented by the frontend but interface
2895 assignments can legitimately produce them. */
2896 case EXEC_ASSIGN_CALL:
2897 assign = gfc_trans_call (c, true);
2898 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2899 gfc_add_expr_to_block (&block, tmp);
2900 break;
2902 default:
2903 gcc_unreachable ();
2906 c = c->next;
2909 /* Restore the original index variables. */
2910 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2911 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2913 /* Free the space for var, start, end, step, varexpr. */
2914 gfc_free (var);
2915 gfc_free (start);
2916 gfc_free (end);
2917 gfc_free (step);
2918 gfc_free (varexpr);
2919 gfc_free (saved_vars);
2921 /* Free the space for this forall_info. */
2922 gfc_free (info);
2924 if (pmask)
2926 /* Free the temporary for the mask. */
2927 tmp = gfc_call_free (pmask);
2928 gfc_add_expr_to_block (&block, tmp);
2930 if (maskindex)
2931 pushdecl (maskindex);
2933 gfc_add_block_to_block (&pre, &block);
2934 gfc_add_block_to_block (&pre, &post);
2936 return gfc_finish_block (&pre);
2940 /* Translate the FORALL statement or construct. */
2942 tree gfc_trans_forall (gfc_code * code)
2944 return gfc_trans_forall_1 (code, NULL);
2948 /* Evaluate the WHERE mask expression and copy its value to a temporary.
2949 If the WHERE construct is nested in FORALL, the temporary needed is the
2950 size required by the WHERE mask expression multiplied by the iteration
2951 count of the nested forall.
2952 ME is the WHERE mask expression.
2953 MASK is the current execution mask upon input, whose sense may or may
2954 not be inverted as specified by the INVERT argument.
2955 CMASK is the updated execution mask on output, or NULL if not required.
2956 PMASK is the pending execution mask on output, or NULL if not required.
2957 BLOCK is the block in which to place the condition evaluation loops. */
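/* For example (illustrative), for the clause WHERE (x > 0) nested under an
   existing control mask M, the loop below stores

     cmask[i] = M[i] .and. (x(i) > 0)
     pmask[i] = M[i] .and. .not. (x(i) > 0)

   with M[i] first complemented when INVERT is true.  */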
2959 static void
2960 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2961 tree mask, bool invert, tree cmask, tree pmask,
2962 tree mask_type, stmtblock_t * block)
2964 tree tmp, tmp1;
2965 gfc_ss *lss, *rss;
2966 gfc_loopinfo loop;
2967 stmtblock_t body, body1;
2968 tree count, cond, mtmp;
2969 gfc_se lse, rse;
2971 gfc_init_loopinfo (&loop);
2973 lss = gfc_walk_expr (me);
2974 rss = gfc_walk_expr (me);
2976 /* Variable to index the temporary. */
2977 count = gfc_create_var (gfc_array_index_type, "count");
2978 /* Initialize count. */
2979 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2981 gfc_start_block (&body);
2983 gfc_init_se (&rse, NULL);
2984 gfc_init_se (&lse, NULL);
2986 if (lss == gfc_ss_terminator)
2988 gfc_init_block (&body1);
2990 else
2992 /* Initialize the loop. */
2993 gfc_init_loopinfo (&loop);
2995 /* We may need LSS to determine the shape of the expression. */
2996 gfc_add_ss_to_loop (&loop, lss);
2997 gfc_add_ss_to_loop (&loop, rss);
2999 gfc_conv_ss_startstride (&loop);
3000 gfc_conv_loop_setup (&loop);
3002 gfc_mark_ss_chain_used (rss, 1);
3003 /* Start the loop body. */
3004 gfc_start_scalarized_body (&loop, &body1);
3006 /* Translate the expression. */
3007 gfc_copy_loopinfo_to_se (&rse, &loop);
3008 rse.ss = rss;
3009 gfc_conv_expr (&rse, me);
3012 /* Variable to evaluate mask condition. */
3013 cond = gfc_create_var (mask_type, "cond");
3014 if (mask && (cmask || pmask))
3015 mtmp = gfc_create_var (mask_type, "mask");
3016 else mtmp = NULL_TREE;
3018 gfc_add_block_to_block (&body1, &lse.pre);
3019 gfc_add_block_to_block (&body1, &rse.pre);
3021 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
3023 if (mask && (cmask || pmask))
3025 tmp = gfc_build_array_ref (mask, count, NULL);
3026 if (invert)
3027 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3028 gfc_add_modify_expr (&body1, mtmp, tmp);
3031 if (cmask)
3033 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3034 tmp = cond;
3035 if (mask)
3036 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3037 gfc_add_modify_expr (&body1, tmp1, tmp);
3040 if (pmask)
3042 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3043 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3044 if (mask)
3045 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3046 gfc_add_modify_expr (&body1, tmp1, tmp);
3049 gfc_add_block_to_block (&body1, &lse.post);
3050 gfc_add_block_to_block (&body1, &rse.post);
3052 if (lss == gfc_ss_terminator)
3054 gfc_add_block_to_block (&body, &body1);
3056 else
3058 /* Increment count. */
3059 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3060 gfc_index_one_node);
3061 gfc_add_modify_expr (&body1, count, tmp1);
3063 /* Generate the copying loops. */
3064 gfc_trans_scalarizing_loops (&loop, &body1);
3066 gfc_add_block_to_block (&body, &loop.pre);
3067 gfc_add_block_to_block (&body, &loop.post);
3069 gfc_cleanup_loop (&loop);
3070 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3071 as tree nodes in SS may not be valid in a different scope. */
3074 tmp1 = gfc_finish_block (&body);
3075 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3076 if (nested_forall_info != NULL)
3077 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3079 gfc_add_expr_to_block (block, tmp1);
3083 /* Translate an assignment statement in a WHERE statement or WHERE
3084 construct. The MASK expression is used to control which elements
3085 of EXPR1 shall be assigned. The sense of MASK is specified by
3086 INVERT. */
3088 static tree
3089 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3090 tree mask, bool invert,
3091 tree count1, tree count2,
3092 gfc_symbol *sym)
3094 gfc_se lse;
3095 gfc_se rse;
3096 gfc_ss *lss;
3097 gfc_ss *lss_section;
3098 gfc_ss *rss;
3100 gfc_loopinfo loop;
3101 tree tmp;
3102 stmtblock_t block;
3103 stmtblock_t body;
3104 tree index, maskexpr;
3106 #if 0
3107 /* TODO: handle this special case.
3108 Special case a single function returning an array. */
3109 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3111 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3112 if (tmp)
3113 return tmp;
3115 #endif
3117 /* Assignment of the form lhs = rhs. */
3118 gfc_start_block (&block);
3120 gfc_init_se (&lse, NULL);
3121 gfc_init_se (&rse, NULL);
3123 /* Walk the lhs. */
3124 lss = gfc_walk_expr (expr1);
3125 rss = NULL;
3127 /* In each where-assign-stmt, the mask-expr and the variable being
3128 defined shall be arrays of the same shape. */
3129 gcc_assert (lss != gfc_ss_terminator);
3131 /* The assignment needs scalarization. */
3132 lss_section = lss;
3134 /* Find a non-scalar SS from the lhs. */
3135 while (lss_section != gfc_ss_terminator
3136 && lss_section->type != GFC_SS_SECTION)
3137 lss_section = lss_section->next;
3139 gcc_assert (lss_section != gfc_ss_terminator);
3141 /* Initialize the scalarizer. */
3142 gfc_init_loopinfo (&loop);
3144 /* Walk the rhs. */
3145 rss = gfc_walk_expr (expr2);
3146 if (rss == gfc_ss_terminator)
3148 /* The rhs is scalar. Add a ss for the expression. */
3149 rss = gfc_get_ss ();
3150 rss->next = gfc_ss_terminator;
3151 rss->type = GFC_SS_SCALAR;
3152 rss->expr = expr2;
3155 /* Associate the SS with the loop. */
3156 gfc_add_ss_to_loop (&loop, lss);
3157 gfc_add_ss_to_loop (&loop, rss);
3159 /* Calculate the bounds of the scalarization. */
3160 gfc_conv_ss_startstride (&loop);
3162 /* Resolve any data dependencies in the statement. */
3163 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3165 /* Setup the scalarizing loops. */
3166 gfc_conv_loop_setup (&loop);
3168 /* Setup the gfc_se structures. */
3169 gfc_copy_loopinfo_to_se (&lse, &loop);
3170 gfc_copy_loopinfo_to_se (&rse, &loop);
3172 rse.ss = rss;
3173 gfc_mark_ss_chain_used (rss, 1);
3174 if (loop.temp_ss == NULL)
3176 lse.ss = lss;
3177 gfc_mark_ss_chain_used (lss, 1);
3179 else
3181 lse.ss = loop.temp_ss;
3182 gfc_mark_ss_chain_used (lss, 3);
3183 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3186 /* Start the scalarized loop body. */
3187 gfc_start_scalarized_body (&loop, &body);
3189 /* Translate the expression. */
3190 gfc_conv_expr (&rse, expr2);
3191 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3193 gfc_conv_tmp_array_ref (&lse);
3194 gfc_advance_se_ss_chain (&lse);
3196 else
3197 gfc_conv_expr (&lse, expr1);
3199 /* Form the mask expression according to the mask. */
3200 index = count1;
3201 maskexpr = gfc_build_array_ref (mask, index, NULL);
3202 if (invert)
3203 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3205 /* Use the scalar assignment as is. */
3206 if (sym == NULL)
3207 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3208 loop.temp_ss != NULL, false);
3209 else
3210 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3212 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3214 gfc_add_expr_to_block (&body, tmp);
3216 if (lss == gfc_ss_terminator)
3218 /* Increment count1. */
3219 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3220 count1, gfc_index_one_node);
3221 gfc_add_modify_expr (&body, count1, tmp);
3223 /* Use the scalar assignment as is. */
3224 gfc_add_block_to_block (&block, &body);
3226 else
3228 gcc_assert (lse.ss == gfc_ss_terminator
3229 && rse.ss == gfc_ss_terminator);
3231 if (loop.temp_ss != NULL)
3233 /* Increment count1 before finishing the main body of a scalarized
3234 expression. */
3235 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3236 count1, gfc_index_one_node);
3237 gfc_add_modify_expr (&body, count1, tmp);
3238 gfc_trans_scalarized_loop_boundary (&loop, &body);
3240 /* We need to copy the temporary to the actual lhs. */
3241 gfc_init_se (&lse, NULL);
3242 gfc_init_se (&rse, NULL);
3243 gfc_copy_loopinfo_to_se (&lse, &loop);
3244 gfc_copy_loopinfo_to_se (&rse, &loop);
3246 rse.ss = loop.temp_ss;
3247 lse.ss = lss;
3249 gfc_conv_tmp_array_ref (&rse);
3250 gfc_advance_se_ss_chain (&rse);
3251 gfc_conv_expr (&lse, expr1);
3253 gcc_assert (lse.ss == gfc_ss_terminator
3254 && rse.ss == gfc_ss_terminator);
3256 /* Form the mask expression according to the mask tree list. */
3257 index = count2;
3258 maskexpr = gfc_build_array_ref (mask, index, NULL);
3259 if (invert)
3260 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3261 maskexpr);
3263 /* Use the scalar assignment as is. */
3264 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3265 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3266 gfc_add_expr_to_block (&body, tmp);
3268 /* Increment count2. */
3269 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3270 count2, gfc_index_one_node);
3271 gfc_add_modify_expr (&body, count2, tmp);
3273 else
3275 /* Increment count1. */
3276 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3277 count1, gfc_index_one_node);
3278 gfc_add_modify_expr (&body, count1, tmp);
3281 /* Generate the copying loops. */
3282 gfc_trans_scalarizing_loops (&loop, &body);
3284 /* Wrap the whole thing up. */
3285 gfc_add_block_to_block (&block, &loop.pre);
3286 gfc_add_block_to_block (&block, &loop.post);
3287 gfc_cleanup_loop (&loop);
3290 return gfc_finish_block (&block);
3294 /* Translate the WHERE construct or statement.
3295 This function can be called recursively to translate nested WHERE
3296 constructs or statements.
3297 MASK is the control mask. */
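/* For example (illustrative), the construct

     where (c1)
       a = b          ! executed under cmask = c1
     elsewhere (c2)
       a = d          ! executed under cmask = .not. c1 .and. c2
     elsewhere
       a = e          ! executed under the remaining pending mask
     end where

   is handled clause by clause, with the pending mask (pmask) carrying the
   elements still eligible for the later ELSEWHERE branches.  */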
3299 static void
3300 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3301 forall_info * nested_forall_info, stmtblock_t * block)
3303 stmtblock_t inner_size_body;
3304 tree inner_size, size;
3305 gfc_ss *lss, *rss;
3306 tree mask_type;
3307 gfc_expr *expr1;
3308 gfc_expr *expr2;
3309 gfc_code *cblock;
3310 gfc_code *cnext;
3311 tree tmp;
3312 tree count1, count2;
3313 bool need_cmask;
3314 bool need_pmask;
3315 int need_temp;
3316 tree pcmask = NULL_TREE;
3317 tree ppmask = NULL_TREE;
3318 tree cmask = NULL_TREE;
3319 tree pmask = NULL_TREE;
3320 gfc_actual_arglist *arg;
3322 /* The WHERE statement or the WHERE construct statement. */
3323 cblock = code->block;
3325 /* As the mask array can be very big, prefer compact boolean types. */
3326 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3328 /* Determine which temporary masks are needed. */
3329 if (!cblock->block)
3331 /* One clause: No ELSEWHEREs. */
3332 need_cmask = (cblock->next != 0);
3333 need_pmask = false;
3335 else if (cblock->block->block)
3337 /* Three or more clauses: Conditional ELSEWHEREs. */
3338 need_cmask = true;
3339 need_pmask = true;
3341 else if (cblock->next)
3343 /* Two clauses, the first non-empty. */
3344 need_cmask = true;
3345 need_pmask = (mask != NULL_TREE
3346 && cblock->block->next != 0);
3348 else if (!cblock->block->next)
3350 /* Two clauses, both empty. */
3351 need_cmask = false;
3352 need_pmask = false;
3354 /* Two clauses, the first empty, the second non-empty. */
3355 else if (mask)
3357 need_cmask = (cblock->block->expr != 0);
3358 need_pmask = true;
3360 else
3362 need_cmask = true;
3363 need_pmask = false;
3366 if (need_cmask || need_pmask)
3368 /* Calculate the size of temporary needed by the mask-expr. */
3369 gfc_init_block (&inner_size_body);
3370 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3371 &inner_size_body, &lss, &rss);
3373 /* Calculate the total size of temporary needed. */
3374 size = compute_overall_iter_number (nested_forall_info, inner_size,
3375 &inner_size_body, block);
3377 /* Allocate temporary for WHERE mask if needed. */
3378 if (need_cmask)
3379 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3380 &pcmask);
3382 /* Allocate temporary for !mask if needed. */
3383 if (need_pmask)
3384 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3385 &ppmask);
3388 while (cblock)
3390 /* Each time around this loop, the where clause is conditional
3391 on the value of mask and invert, which are updated at the
3392 bottom of the loop. */
3394 /* Has mask-expr. */
3395 if (cblock->expr)
3397 /* Ensure that the WHERE mask will be evaluated exactly once.
3398 If there are no statements in this WHERE/ELSEWHERE clause,
3399 then we don't need to update the control mask (cmask).
3400 If this is the last clause of the WHERE construct, then
3401 we don't need to update the pending control mask (pmask). */
3402 if (mask)
3403 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3404 mask, invert,
3405 cblock->next ? cmask : NULL_TREE,
3406 cblock->block ? pmask : NULL_TREE,
3407 mask_type, block);
3408 else
3409 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3410 NULL_TREE, false,
3411 (cblock->next || cblock->block)
3412 ? cmask : NULL_TREE,
3413 NULL_TREE, mask_type, block);
3415 invert = false;
3417 /* It's a final elsewhere-stmt. No mask-expr is present. */
3418 else
3419 cmask = mask;
3421 /* The body of this where clause is controlled by cmask, with its
3422 sense specified by invert. */
3424 /* Get the assignment statement of a WHERE statement, or the first
3425 statement in the where-body-construct of a WHERE construct. */
3426 cnext = cblock->next;
3427 while (cnext)
3429 switch (cnext->op)
3431 /* WHERE assignment statement. */
3432 case EXEC_ASSIGN_CALL:
3434 arg = cnext->ext.actual;
3435 expr1 = expr2 = NULL;
3436 for (; arg; arg = arg->next)
3438 if (!arg->expr)
3439 continue;
3440 if (expr1 == NULL)
3441 expr1 = arg->expr;
3442 else
3443 expr2 = arg->expr;
3445 goto evaluate;
3447 case EXEC_ASSIGN:
3448 expr1 = cnext->expr;
3449 expr2 = cnext->expr2;
3450 evaluate:
3451 if (nested_forall_info != NULL)
3453 need_temp = gfc_check_dependency (expr1, expr2, 0);
3454 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3455 gfc_trans_assign_need_temp (expr1, expr2,
3456 cmask, invert,
3457 nested_forall_info, block);
3458 else
3460 /* Variables to control maskexpr. */
3461 count1 = gfc_create_var (gfc_array_index_type, "count1");
3462 count2 = gfc_create_var (gfc_array_index_type, "count2");
3463 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3464 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3466 tmp = gfc_trans_where_assign (expr1, expr2,
3467 cmask, invert,
3468 count1, count2,
3469 cnext->resolved_sym);
3471 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3472 tmp, 1);
3473 gfc_add_expr_to_block (block, tmp);
3476 else
3478 /* Variables to control maskexpr. */
3479 count1 = gfc_create_var (gfc_array_index_type, "count1");
3480 count2 = gfc_create_var (gfc_array_index_type, "count2");
3481 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3482 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3484 tmp = gfc_trans_where_assign (expr1, expr2,
3485 cmask, invert,
3486 count1, count2,
3487 cnext->resolved_sym);
3488 gfc_add_expr_to_block (block, tmp);
3491 break;
3493 /* WHERE or WHERE construct is part of a where-body-construct. */
3494 case EXEC_WHERE:
3495 gfc_trans_where_2 (cnext, cmask, invert,
3496 nested_forall_info, block);
3497 break;
3499 default:
3500 gcc_unreachable ();
3503 /* The next statement within the same where-body-construct. */
3504 cnext = cnext->next;
3506 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3507 cblock = cblock->block;
3508 if (mask == NULL_TREE)
3510 /* If we're the initial WHERE, we can simply invert the sense
3511 of the current mask to obtain the "mask" for the remaining
3512 ELSEWHEREs. */
3513 invert = true;
3514 mask = cmask;
3516 else
3518 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3519 invert = false;
3520 mask = pmask;
3524 /* If we allocated a pending mask array, deallocate it now. */
3525 if (ppmask)
3527 tmp = gfc_call_free (ppmask);
3528 gfc_add_expr_to_block (block, tmp);
3531 /* If we allocated a current mask array, deallocate it now. */
3532 if (pcmask)
3534 tmp = gfc_call_free (pcmask);
3535 gfc_add_expr_to_block (block, tmp);
3539 /* Translate a simple WHERE construct or statement without dependencies.
3540 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3541 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3542 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
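/* For example (illustrative),

     where (c)
       a = b
     elsewhere
       a = d
     end where

   with no dependencies becomes a single scalarized loop whose body is
   roughly  if (c(i)) a(i) = b(i); else a(i) = d(i);  built via the
   COND_EXPR at the end of the loop body.  */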
3544 static tree
3545 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3547 stmtblock_t block, body;
3548 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3549 tree tmp, cexpr, tstmt, estmt;
3550 gfc_ss *css, *tdss, *tsss;
3551 gfc_se cse, tdse, tsse, edse, esse;
3552 gfc_loopinfo loop;
3553 gfc_ss *edss = 0;
3554 gfc_ss *esss = 0;
3556 cond = cblock->expr;
3557 tdst = cblock->next->expr;
3558 tsrc = cblock->next->expr2;
3559 edst = eblock ? eblock->next->expr : NULL;
3560 esrc = eblock ? eblock->next->expr2 : NULL;
3562 gfc_start_block (&block);
3563 gfc_init_loopinfo (&loop);
3565 /* Handle the condition. */
3566 gfc_init_se (&cse, NULL);
3567 css = gfc_walk_expr (cond);
3568 gfc_add_ss_to_loop (&loop, css);
3570 /* Handle the then-clause. */
3571 gfc_init_se (&tdse, NULL);
3572 gfc_init_se (&tsse, NULL);
3573 tdss = gfc_walk_expr (tdst);
3574 tsss = gfc_walk_expr (tsrc);
3575 if (tsss == gfc_ss_terminator)
3577 tsss = gfc_get_ss ();
3578 tsss->next = gfc_ss_terminator;
3579 tsss->type = GFC_SS_SCALAR;
3580 tsss->expr = tsrc;
3582 gfc_add_ss_to_loop (&loop, tdss);
3583 gfc_add_ss_to_loop (&loop, tsss);
3585 if (eblock)
3587 /* Handle the else clause. */
3588 gfc_init_se (&edse, NULL);
3589 gfc_init_se (&esse, NULL);
3590 edss = gfc_walk_expr (edst);
3591 esss = gfc_walk_expr (esrc);
3592 if (esss == gfc_ss_terminator)
3594 esss = gfc_get_ss ();
3595 esss->next = gfc_ss_terminator;
3596 esss->type = GFC_SS_SCALAR;
3597 esss->expr = esrc;
3599 gfc_add_ss_to_loop (&loop, edss);
3600 gfc_add_ss_to_loop (&loop, esss);
3603 gfc_conv_ss_startstride (&loop);
3604 gfc_conv_loop_setup (&loop);
3606 gfc_mark_ss_chain_used (css, 1);
3607 gfc_mark_ss_chain_used (tdss, 1);
3608 gfc_mark_ss_chain_used (tsss, 1);
3609 if (eblock)
3611 gfc_mark_ss_chain_used (edss, 1);
3612 gfc_mark_ss_chain_used (esss, 1);
3615 gfc_start_scalarized_body (&loop, &body);
3617 gfc_copy_loopinfo_to_se (&cse, &loop);
3618 gfc_copy_loopinfo_to_se (&tdse, &loop);
3619 gfc_copy_loopinfo_to_se (&tsse, &loop);
3620 cse.ss = css;
3621 tdse.ss = tdss;
3622 tsse.ss = tsss;
3623 if (eblock)
3625 gfc_copy_loopinfo_to_se (&edse, &loop);
3626 gfc_copy_loopinfo_to_se (&esse, &loop);
3627 edse.ss = edss;
3628 esse.ss = esss;
3631 gfc_conv_expr (&cse, cond);
3632 gfc_add_block_to_block (&body, &cse.pre);
3633 cexpr = cse.expr;
3635 gfc_conv_expr (&tsse, tsrc);
3636 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3638 gfc_conv_tmp_array_ref (&tdse);
3639 gfc_advance_se_ss_chain (&tdse);
3641 else
3642 gfc_conv_expr (&tdse, tdst);
3644 if (eblock)
3646 gfc_conv_expr (&esse, esrc);
3647 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3649 gfc_conv_tmp_array_ref (&edse);
3650 gfc_advance_se_ss_chain (&edse);
3652 else
3653 gfc_conv_expr (&edse, edst);
3656 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3657 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3658 : build_empty_stmt ();
3659 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3660 gfc_add_expr_to_block (&body, tmp);
3661 gfc_add_block_to_block (&body, &cse.post);
3663 gfc_trans_scalarizing_loops (&loop, &body);
3664 gfc_add_block_to_block (&block, &loop.pre);
3665 gfc_add_block_to_block (&block, &loop.post);
3666 gfc_cleanup_loop (&loop);
3668 return gfc_finish_block (&block);
3671 /* As the WHERE or WHERE construct statement can be nested, we call
3672 gfc_trans_where_2 to do the translation, and pass the initial
3673 NULL values for both the control mask and the pending control mask. */
3675 tree
3676 gfc_trans_where (gfc_code * code)
3678 stmtblock_t block;
3679 gfc_code *cblock;
3680 gfc_code *eblock;
3682 cblock = code->block;
3683 if (cblock->next
3684 && cblock->next->op == EXEC_ASSIGN
3685 && !cblock->next->next)
3687 eblock = cblock->block;
3688 if (!eblock)
3690 /* A simple "WHERE (cond) x = y" statement or block is
3691 dependence free if cond is not dependent upon writing x,
3692 and the source y is unaffected by the destination x. */
3693 if (!gfc_check_dependency (cblock->next->expr,
3694 cblock->expr, 0)
3695 && !gfc_check_dependency (cblock->next->expr,
3696 cblock->next->expr2, 0))
3697 return gfc_trans_where_3 (cblock, NULL);
3699 else if (!eblock->expr
3700 && !eblock->block
3701 && eblock->next
3702 && eblock->next->op == EXEC_ASSIGN
3703 && !eblock->next->next)
3705 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3706 block is dependence free if cond is not dependent on writes
3707 to x1 and x2, y1 is not dependent on writes to x2, and y2
3708 is not dependent on writes to x1, and neither y is
3709 dependent upon writes to its own x. */
3710 if (!gfc_check_dependency(cblock->next->expr,
3711 cblock->expr, 0)
3712 && !gfc_check_dependency(eblock->next->expr,
3713 cblock->expr, 0)
3714 && !gfc_check_dependency(cblock->next->expr,
3715 eblock->next->expr2, 0)
3716 && !gfc_check_dependency(eblock->next->expr,
3717 cblock->next->expr2, 0)
3718 && !gfc_check_dependency(cblock->next->expr,
3719 cblock->next->expr2, 0)
3720 && !gfc_check_dependency(eblock->next->expr,
3721 eblock->next->expr2, 0))
3722 return gfc_trans_where_3 (cblock, eblock);
3726 gfc_start_block (&block);
3728 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3730 return gfc_finish_block (&block);
3734 /* CYCLE a DO loop. The label decl has already been created by
3735 gfc_trans_do (); it's in TREE_PURPOSE (backend_decl) of the gfc_code
3736 node at the head of the loop. We must mark the label as used. */
3738 tree
3739 gfc_trans_cycle (gfc_code * code)
3741 tree cycle_label;
3743 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3744 TREE_USED (cycle_label) = 1;
3745 return build1_v (GOTO_EXPR, cycle_label);
3749 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3750 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3751 loop. */
3753 tree
3754 gfc_trans_exit (gfc_code * code)
3756 tree exit_label;
3758 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3759 TREE_USED (exit_label) = 1;
3760 return build1_v (GOTO_EXPR, exit_label);
3764 /* Translate the ALLOCATE statement. */
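/* For example (illustrative), ALLOCATE (x, STAT=ierr) for a scalar X is
   translated along the lines of

     x = allocate_with_status (<size of x>, &stat);
     if (stat != 0) goto error_label;
     ...
   error_label:
     ierr = stat;

   while array objects take the gfc_array_allocate path instead.  */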
3766 tree
3767 gfc_trans_allocate (gfc_code * code)
3769 gfc_alloc *al;
3770 gfc_expr *expr;
3771 gfc_se se;
3772 tree tmp;
3773 tree parm;
3774 tree stat;
3775 tree pstat;
3776 tree error_label;
3777 stmtblock_t block;
3779 if (!code->ext.alloc_list)
3780 return NULL_TREE;
3782 gfc_start_block (&block);
3784 if (code->expr)
3786 tree gfc_int4_type_node = gfc_get_int_type (4);
3788 stat = gfc_create_var (gfc_int4_type_node, "stat");
3789 pstat = build_fold_addr_expr (stat);
3791 error_label = gfc_build_label_decl (NULL_TREE);
3792 TREE_USED (error_label) = 1;
3794 else
3795 pstat = stat = error_label = NULL_TREE;
3797 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3799 expr = al->expr;
3801 gfc_init_se (&se, NULL);
3802 gfc_start_block (&se.pre);
3804 se.want_pointer = 1;
3805 se.descriptor_only = 1;
3806 gfc_conv_expr (&se, expr);
3808 if (!gfc_array_allocate (&se, expr, pstat))
3810 /* A scalar or derived type. */
3811 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3813 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3814 tmp = se.string_length;
3816 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3817 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3818 fold_convert (TREE_TYPE (se.expr), tmp));
3819 gfc_add_expr_to_block (&se.pre, tmp);
3821 if (code->expr)
3823 tmp = build1_v (GOTO_EXPR, error_label);
3824 parm = fold_build2 (NE_EXPR, boolean_type_node,
3825 stat, build_int_cst (TREE_TYPE (stat), 0));
3826 tmp = fold_build3 (COND_EXPR, void_type_node,
3827 parm, tmp, build_empty_stmt ());
3828 gfc_add_expr_to_block (&se.pre, tmp);
3831 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3833 tmp = build_fold_indirect_ref (se.expr);
3834 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3835 gfc_add_expr_to_block (&se.pre, tmp);
3840 tmp = gfc_finish_block (&se.pre);
3841 gfc_add_expr_to_block (&block, tmp);
3844 /* Assign the value to the status variable. */
3845 if (code->expr)
3847 tmp = build1_v (LABEL_EXPR, error_label);
3848 gfc_add_expr_to_block (&block, tmp);
3850 gfc_init_se (&se, NULL);
3851 gfc_conv_expr_lhs (&se, code->expr);
3852 tmp = convert (TREE_TYPE (se.expr), stat);
3853 gfc_add_modify_expr (&block, se.expr, tmp);
3856 return gfc_finish_block (&block);
3860 /* Translate a DEALLOCATE statement.
3861 There are two cases within the for loop:
3862 (1) deallocate(a1, a2, a3) is translated into the following sequence
3863 _gfortran_deallocate(a1, 0B)
3864 _gfortran_deallocate(a2, 0B)
3865 _gfortran_deallocate(a3, 0B)
3866 where the STAT= variable is passed a NULL pointer.
3867 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3868 astat = 0
3869 _gfortran_deallocate(a1, &stat)
3870 astat = astat + stat
3871 _gfortran_deallocate(a2, &stat)
3872 astat = astat + stat
3873 _gfortran_deallocate(a3, &stat)
3874 astat = astat + stat
3875 In case (1), we simply return at the end of the for loop. In case (2)
3876 we set the STAT= variable to astat. */
3877 tree
3878 gfc_trans_deallocate (gfc_code * code)
3880 gfc_se se;
3881 gfc_alloc *al;
3882 gfc_expr *expr;
3883 tree apstat, astat, pstat, stat, tmp;
3884 stmtblock_t block;
3886 gfc_start_block (&block);
3888 /* Set up the optional STAT= variable. */
3889 if (code->expr)
3891 tree gfc_int4_type_node = gfc_get_int_type (4);
3893 /* Variable used with the library call. */
3894 stat = gfc_create_var (gfc_int4_type_node, "stat");
3895 pstat = build_fold_addr_expr (stat);
3897 /* Running total of possible deallocation failures. */
3898 astat = gfc_create_var (gfc_int4_type_node, "astat");
3899 apstat = build_fold_addr_expr (astat);
3901 /* Initialize astat to 0. */
3902 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3904 else
3905 pstat = apstat = stat = astat = NULL_TREE;
3907 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3909 expr = al->expr;
3910 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3912 gfc_init_se (&se, NULL);
3913 gfc_start_block (&se.pre);
3915 se.want_pointer = 1;
3916 se.descriptor_only = 1;
3917 gfc_conv_expr (&se, expr);
3919 if (expr->ts.type == BT_DERIVED
3920 && expr->ts.derived->attr.alloc_comp)
3922 gfc_ref *ref;
3923 gfc_ref *last = NULL;
3924 for (ref = expr->ref; ref; ref = ref->next)
3925 if (ref->type == REF_COMPONENT)
3926 last = ref;
3928 /* Do not deallocate the components of a derived type
3929 ultimate pointer component. */
3930 if (!(last && last->u.c.component->pointer)
3931 && !(!last && expr->symtree->n.sym->attr.pointer))
3933 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3934 expr->rank);
3935 gfc_add_expr_to_block (&se.pre, tmp);
3939 if (expr->rank)
3940 tmp = gfc_array_deallocate (se.expr, pstat);
3941 else
3943 tmp = gfc_deallocate_with_status (se.expr, pstat, false);
3944 gfc_add_expr_to_block (&se.pre, tmp);
3946 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
3947 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3950 gfc_add_expr_to_block (&se.pre, tmp);
3952 /* Keep track of the number of failed deallocations by adding stat
3953 of the last deallocation to the running total. */
3954 if (code->expr)
3956 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3957 gfc_add_modify_expr (&se.pre, astat, apstat);
3960 tmp = gfc_finish_block (&se.pre);
3961 gfc_add_expr_to_block (&block, tmp);
3965 /* Assign the value to the status variable. */
3966 if (code->expr)
3968 gfc_init_se (&se, NULL);
3969 gfc_conv_expr_lhs (&se, code->expr);
3970 tmp = convert (TREE_TYPE (se.expr), astat);
3971 gfc_add_modify_expr (&block, se.expr, tmp);
3974 return gfc_finish_block (&block);