gcc/fortran/trans-stmt.c
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
42 typedef struct iter_info
44 tree var;
45 tree start;
46 tree end;
47 tree step;
48 struct iter_info *next;
50 iter_info;
52 typedef struct forall_info
54 iter_info *this_loop;
55 tree mask;
56 tree maskindex;
57 int nvar;
58 tree size;
59 struct forall_info *prev_nest;
61 forall_info;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate an F95 label number to a LABEL_EXPR. */
68 tree
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
77 is a field_decl. */
79 void
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
94 tree
95 gfc_trans_label_assign (gfc_code * code)
97 tree label_tree;
98 gfc_se se;
99 tree len;
100 tree addr;
101 tree len_tree;
102 int label_len;
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr);
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label);
114 if (code->label->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
119 else
121 gfc_expr *format = code->label->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
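/* For illustration (a sketch, not a literal tree dump): for the obsolescent
   statement

       ASSIGN 100 TO ilabel

   the code above stores -1 in the auxiliary length variable and the address
   of label 100 in the auxiliary address variable when 100 is a branch
   target; for a FORMAT label it instead stores the format string and its
   length, so that a later assigned GOTO or I/O statement can tell the two
   apart.  */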
136 /* Translate a GOTO statement. */
138 tree
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
142 tree assigned_goto;
143 tree target;
144 tree tmp;
145 gfc_se se;
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
150 /* ASSIGNED GOTO. */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 code = code->block;
163 if (code == NULL)
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
180 code = code->block;
182 while (code != NULL);
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
186 return gfc_finish_block (&se.pre);
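/* Rough illustration of the assigned GOTO path above: for

       GOTO ilabel (10, 20)

   each label in the parenthesized list yields one comparison of its address
   against the stored address, roughly

       if (&&L10 == <assigned addr>) goto L10;
       if (&&L20 == <assigned addr>) goto L20;
       runtime error "Assigned label is not in the list";

   whereas a plain GOTO ilabel (no list) jumps through the stored address
   directly after the "is a target label" check.  */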
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
191 tree
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
202 static void
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg,
205 gfc_dep_check check_variable)
207 gfc_actual_arglist *arg0;
208 gfc_expr *e;
209 gfc_formal_arglist *formal;
210 gfc_loopinfo tmp_loop;
211 gfc_se parmse;
212 gfc_ss *ss;
213 gfc_ss_info *info;
214 gfc_symbol *fsym;
215 int n;
216 stmtblock_t block;
217 tree data;
218 tree offset;
219 tree size;
220 tree tmp;
222 if (loopse->ss == NULL)
223 return;
225 ss = loopse->ss;
226 arg0 = arg;
227 formal = sym->formal;
229 /* Loop over all the arguments testing for dependencies. */
230 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
232 e = arg->expr;
233 if (e == NULL)
234 continue;
236 /* Obtain the info structure for the current argument. */
237 info = NULL;
238 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
240 if (ss->expr != e)
241 continue;
242 info = &ss->data.info;
243 break;
246 /* If there is a dependency, create a temporary and use it
247 instead of the variable. */
248 fsym = formal ? formal->sym : NULL;
249 if (e->expr_type == EXPR_VARIABLE
250 && e->rank && fsym
251 && fsym->attr.intent != INTENT_IN
252 && gfc_check_fncall_dependency (e, fsym->attr.intent,
253 sym, arg0, check_variable))
255 tree initial;
256 stmtblock_t temp_post;
258 /* Make a local loopinfo for the temporary creation, so that
259 none of the other ss->info's have to be renormalized. */
260 gfc_init_loopinfo (&tmp_loop);
261 for (n = 0; n < info->dimen; n++)
263 tmp_loop.to[n] = loopse->loop->to[n];
264 tmp_loop.from[n] = loopse->loop->from[n];
265 tmp_loop.order[n] = loopse->loop->order[n];
268 /* Obtain the argument descriptor for unpacking. */
269 gfc_init_se (&parmse, NULL);
270 parmse.want_pointer = 1;
271 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
272 gfc_add_block_to_block (&se->pre, &parmse.pre);
274 /* If we've got INTENT(INOUT), initialize the array temporary with
275 a copy of the values. */
276 if (fsym->attr.intent == INTENT_INOUT)
277 initial = parmse.expr;
278 else
279 initial = NULL_TREE;
281 /* Generate the temporary. Merge the block so that the
282 declarations are put at the right binding level. Cleaning up the
283 temporary should be the very last thing done, so we add the code to
284 a new block and add it to se->post as last instructions. */
285 size = gfc_create_var (gfc_array_index_type, NULL);
286 data = gfc_create_var (pvoid_type_node, NULL);
287 gfc_start_block (&block);
288 gfc_init_block (&temp_post);
289 tmp = gfc_typenode_for_spec (&e->ts);
290 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
291 &tmp_loop, info, tmp,
292 initial,
293 false, true, false,
294 &arg->expr->where);
295 gfc_add_modify (&se->pre, size, tmp);
296 tmp = fold_convert (pvoid_type_node, info->data);
297 gfc_add_modify (&se->pre, data, tmp);
298 gfc_merge_block_scope (&block);
300 /* Calculate the offset for the temporary. */
301 offset = gfc_index_zero_node;
302 for (n = 0; n < info->dimen; n++)
304 tmp = gfc_conv_descriptor_stride (info->descriptor,
305 gfc_rank_cst[n]);
306 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
307 loopse->loop->from[n], tmp);
308 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
309 offset, tmp);
311 info->offset = gfc_create_var (gfc_array_index_type, NULL);
312 gfc_add_modify (&se->pre, info->offset, offset);
315 /* Copy the result back using unpack. */
316 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
317 gfc_add_expr_to_block (&se->post, tmp);
319 /* XXX: This is possibly not needed; but isn't it cleaner this way? */
320 gfc_add_block_to_block (&se->pre, &parmse.pre);
322 gfc_add_block_to_block (&se->post, &parmse.post);
323 gfc_add_block_to_block (&se->post, &temp_post);
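/* A sketch of the situation handled above (the names are made up for the
   example): given an elemental subroutine

       ELEMENTAL SUBROUTINE esub (x, y)
         REAL, INTENT(IN)  :: x
         REAL, INTENT(OUT) :: y

   a call such as CALL esub (a(1:n), a(n:1:-1)) has its INTENT(OUT) actual
   argument overlapping the INTENT(IN) one, so the code above substitutes a
   temporary for the second argument and copies it back to 'a' afterwards
   via internal_unpack.  */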
329 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
331 tree
332 gfc_trans_call (gfc_code * code, bool dependency_check)
334 gfc_se se;
335 gfc_ss * ss;
336 int has_alternate_specifier;
337 gfc_dep_check check_variable;
339 /* A CALL starts a new block because the actual arguments may have to
340 be evaluated first. */
341 gfc_init_se (&se, NULL);
342 gfc_start_block (&se.pre);
344 gcc_assert (code->resolved_sym);
346 ss = gfc_ss_terminator;
347 if (code->resolved_sym->attr.elemental)
348 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
350 /* This is not an elemental subroutine call with array-valued arguments. */
351 if (ss == gfc_ss_terminator)
354 /* Translate the call. */
355 has_alternate_specifier
356 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
357 NULL_TREE);
359 /* A subroutine without side effects, by definition, does nothing! */
360 TREE_SIDE_EFFECTS (se.expr) = 1;
362 /* Chain the pieces together and return the block. */
363 if (has_alternate_specifier)
365 gfc_code *select_code;
366 gfc_symbol *sym;
367 select_code = code->next;
368 gcc_assert(select_code->op == EXEC_SELECT);
369 sym = select_code->expr->symtree->n.sym;
370 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
371 if (sym->backend_decl == NULL)
372 sym->backend_decl = gfc_get_symbol_decl (sym);
373 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
375 else
376 gfc_add_expr_to_block (&se.pre, se.expr);
378 gfc_add_block_to_block (&se.pre, &se.post);
381 else
383 /* An elemental subroutine call with array valued arguments has
384 to be scalarized. */
385 gfc_loopinfo loop;
386 stmtblock_t body;
387 stmtblock_t block;
388 gfc_se loopse;
389 gfc_se depse;
391 /* gfc_walk_elemental_function_args renders the ss chain in the
392 reverse order to the actual argument order. */
393 ss = gfc_reverse_ss (ss);
395 /* Initialize the loop. */
396 gfc_init_se (&loopse, NULL);
397 gfc_init_loopinfo (&loop);
398 gfc_add_ss_to_loop (&loop, ss);
400 gfc_conv_ss_startstride (&loop);
401 /* TODO: gfc_conv_loop_setup generates a temporary for vector
402 subscripts. This could be prevented in the elemental case
403 as temporaries are handled separately
404 (below in gfc_conv_elemental_dependencies). */
405 gfc_conv_loop_setup (&loop, &code->expr->where);
406 gfc_mark_ss_chain_used (ss, 1);
408 /* Convert the arguments, checking for dependencies. */
409 gfc_copy_loopinfo_to_se (&loopse, &loop);
410 loopse.ss = ss;
412 /* For operator assignment, do dependency checking. */
413 if (dependency_check)
414 check_variable = ELEM_CHECK_VARIABLE;
415 else
416 check_variable = ELEM_DONT_CHECK_VARIABLE;
418 gfc_init_se (&depse, NULL);
419 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
420 code->ext.actual, check_variable);
422 gfc_add_block_to_block (&loop.pre, &depse.pre);
423 gfc_add_block_to_block (&loop.post, &depse.post);
425 /* Generate the loop body. */
426 gfc_start_scalarized_body (&loop, &body);
427 gfc_init_block (&block);
429 /* Add the subroutine call to the block. */
430 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
431 NULL_TREE);
432 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
434 gfc_add_block_to_block (&block, &loopse.pre);
435 gfc_add_block_to_block (&block, &loopse.post);
437 /* Finish up the loop block and the loop. */
438 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
439 gfc_trans_scalarizing_loops (&loop, &body);
440 gfc_add_block_to_block (&se.pre, &loop.pre);
441 gfc_add_block_to_block (&se.pre, &loop.post);
442 gfc_add_block_to_block (&se.pre, &se.post);
443 gfc_cleanup_loop (&loop);
446 return gfc_finish_block (&se.pre);
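/* Illustrative note on the alternate specifier handling above: for a call
   like

       CALL sub (*10, *20)

   the front end has already appended an EXEC_SELECT that branches on the
   value returned by the subroutine (hence the assertion), so all that is
   done here is to convert that return value and store it in the variable
   the following SELECT switches on.  */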
450 /* Translate the RETURN statement. */
452 tree
453 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
455 if (code->expr)
457 gfc_se se;
458 tree tmp;
459 tree result;
461 /* If code->expr is not NULL, this return statement must appear
462 in a subroutine and current_fake_result_decl has already
463 been generated. */
465 result = gfc_get_fake_result_decl (NULL, 0);
466 if (!result)
468 gfc_warning ("An alternate return at %L without a * dummy argument",
469 &code->expr->where);
470 return build1_v (GOTO_EXPR, gfc_get_return_label ());
473 /* Start a new block for this statement. */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
477 gfc_conv_expr (&se, code->expr);
479 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
480 fold_convert (TREE_TYPE (result), se.expr));
481 gfc_add_expr_to_block (&se.pre, tmp);
483 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
484 gfc_add_expr_to_block (&se.pre, tmp);
485 gfc_add_block_to_block (&se.pre, &se.post);
486 return gfc_finish_block (&se.pre);
488 else
489 return build1_v (GOTO_EXPR, gfc_get_return_label ());
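/* Roughly speaking, RETURN 2 inside SUBROUTINE s (*, *) stores 2 in the
   fake result variable and then jumps to the return label; the warning
   above fires when an alternate return expression is used although the
   subroutine has no "*" dummy argument.  */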
493 /* Translate the PAUSE statement. We have to translate this statement
494 to a runtime library call. */
496 tree
497 gfc_trans_pause (gfc_code * code)
499 tree gfc_int4_type_node = gfc_get_int_type (4);
500 gfc_se se;
501 tree tmp;
503 /* Start a new block for this statement. */
504 gfc_init_se (&se, NULL);
505 gfc_start_block (&se.pre);
508 if (code->expr == NULL)
510 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
511 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
513 else
515 gfc_conv_expr_reference (&se, code->expr);
516 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
517 se.expr, se.string_length);
520 gfc_add_expr_to_block (&se.pre, tmp);
522 gfc_add_block_to_block (&se.pre, &se.post);
524 return gfc_finish_block (&se.pre);
528 /* Translate the STOP statement. We have to translate this statement
529 to a runtime library call. */
531 tree
532 gfc_trans_stop (gfc_code * code)
534 tree gfc_int4_type_node = gfc_get_int_type (4);
535 gfc_se se;
536 tree tmp;
538 /* Start a new block for this statement. */
539 gfc_init_se (&se, NULL);
540 gfc_start_block (&se.pre);
543 if (code->expr == NULL)
545 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
546 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
548 else
550 gfc_conv_expr_reference (&se, code->expr);
551 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
552 se.expr, se.string_length);
555 gfc_add_expr_to_block (&se.pre, tmp);
557 gfc_add_block_to_block (&se.pre, &se.post);
559 return gfc_finish_block (&se.pre);
563 /* Generate GENERIC for the IF construct. This function also deals with
564 the simple IF statement, because the front end translates the IF
565 statement into an IF construct.
567 We translate:
569 IF (cond) THEN
570 then_clause
571 ELSEIF (cond2)
572 elseif_clause
573 ELSE
574 else_clause
575 ENDIF
577 into:
579 pre_cond_s;
580 if (cond_s)
581   {
582     then_clause;
583   }
584 else
585   {
586     pre_cond_s
587     if (cond_s)
588       {
589         elseif_clause
590       }
591     else
592       {
593         else_clause;
594       }
595   }
597 where COND_S is the simplified version of the predicate. PRE_COND_S
598 are the pre side-effects produced by the translation of the
599 conditional.
600 We need to build the chain recursively otherwise we run into
601 problems with folding incomplete statements. */
603 static tree
604 gfc_trans_if_1 (gfc_code * code)
606 gfc_se if_se;
607 tree stmt, elsestmt;
609 /* Check for an unconditional ELSE clause. */
610 if (!code->expr)
611 return gfc_trans_code (code->next);
613 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
614 gfc_init_se (&if_se, NULL);
615 gfc_start_block (&if_se.pre);
617 /* Calculate the IF condition expression. */
618 gfc_conv_expr_val (&if_se, code->expr);
620 /* Translate the THEN clause. */
621 stmt = gfc_trans_code (code->next);
623 /* Translate the ELSE clause. */
624 if (code->block)
625 elsestmt = gfc_trans_if_1 (code->block);
626 else
627 elsestmt = build_empty_stmt ();
629 /* Build the condition expression and add it to the condition block. */
630 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
632 gfc_add_expr_to_block (&if_se.pre, stmt);
634 /* Finish off this statement. */
635 return gfc_finish_block (&if_se.pre);
638 tree
639 gfc_trans_if (gfc_code * code)
641 /* Ignore the top EXEC_IF, it only announces an IF construct. The
642 actual code we must translate is in code->block. */
644 return gfc_trans_if_1 (code->block);
648 /* Translate an arithmetic IF expression.
650 IF (cond) label1, label2, label3 translates to
652 if (cond <= 0)
653   {
654     if (cond < 0)
655       goto label1;
656     else // cond == 0
657       goto label2;
658   }
659 else // cond > 0
660   goto label3;
662 An optimized version can be generated in case of equal labels.
663 E.g., if label1 is equal to label2, we can translate it to
665 if (cond <= 0)
666   goto label1;
667 else
668   goto label3;
669 */
671 tree
672 gfc_trans_arithmetic_if (gfc_code * code)
674 gfc_se se;
675 tree tmp;
676 tree branch1;
677 tree branch2;
678 tree zero;
680 /* Start a new block. */
681 gfc_init_se (&se, NULL);
682 gfc_start_block (&se.pre);
684 /* Pre-evaluate COND. */
685 gfc_conv_expr_val (&se, code->expr);
686 se.expr = gfc_evaluate_now (se.expr, &se.pre);
688 /* Build something to compare with. */
689 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
691 if (code->label->value != code->label2->value)
693 /* If (cond < 0) take branch1 else take branch2.
694 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
695 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
696 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
698 if (code->label->value != code->label3->value)
699 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
700 else
701 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
703 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
705 else
706 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
708 if (code->label->value != code->label3->value
709 && code->label2->value != code->label3->value)
711 /* if (cond <= 0) take branch1 else take branch2. */
712 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
713 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
714 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
717 /* Append the COND_EXPR to the evaluation of COND, and return. */
718 gfc_add_expr_to_block (&se.pre, branch1);
719 return gfc_finish_block (&se.pre);
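/* A worked example of the label merging above:  IF (expr) 10, 20, 20  has
   label2 == label3, so only the first branch is wrapped in a COND_EXPR and
   the result is simply

       if (expr < 0) goto L10; else goto L20;

   with no outer "<= 0" test.  */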
723 /* Translate the simple DO construct. This is where the loop variable has
724 integer type and step +-1. We can't use this in the general case
725 because integer overflow and floating point errors could give incorrect
726 results.
727 We translate a do loop from:
729 DO dovar = from, to, step
730 body
731 END DO
733 to:
735 [Evaluate loop bounds and step]
736 dovar = from;
737 if ((step > 0) ? (dovar <= to) : (dovar >= to))
738   {
739     for (;;)
740       {
741         body;
742 cycle_label:
743         cond = (dovar == to);
744         dovar += step;
745         if (cond) goto end_label;
746       }
747   }
748 end_label:
750 This helps the optimizers by avoiding the extra induction variable
751 used in the general case. */
753 static tree
754 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
755 tree from, tree to, tree step)
757 stmtblock_t body;
758 tree type;
759 tree cond;
760 tree tmp;
761 tree cycle_label;
762 tree exit_label;
764 type = TREE_TYPE (dovar);
766 /* Initialize the DO variable: dovar = from. */
767 gfc_add_modify (pblock, dovar, from);
769 /* Cycle and exit statements are implemented with gotos. */
770 cycle_label = gfc_build_label_decl (NULL_TREE);
771 exit_label = gfc_build_label_decl (NULL_TREE);
773 /* Put the labels where they can be found later. See gfc_trans_do(). */
774 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
776 /* Loop body. */
777 gfc_start_block (&body);
779 /* Main loop body. */
780 tmp = gfc_trans_code (code->block->next);
781 gfc_add_expr_to_block (&body, tmp);
783 /* Label for cycle statements (if needed). */
784 if (TREE_USED (cycle_label))
786 tmp = build1_v (LABEL_EXPR, cycle_label);
787 gfc_add_expr_to_block (&body, tmp);
790 /* Evaluate the loop condition. */
791 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
792 cond = gfc_evaluate_now (cond, &body);
794 /* Increment the loop variable. */
795 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
796 gfc_add_modify (&body, dovar, tmp);
798 /* The loop exit. */
799 tmp = build1_v (GOTO_EXPR, exit_label);
800 TREE_USED (exit_label) = 1;
801 tmp = fold_build3 (COND_EXPR, void_type_node,
802 cond, tmp, build_empty_stmt ());
803 gfc_add_expr_to_block (&body, tmp);
805 /* Finish the loop body. */
806 tmp = gfc_finish_block (&body);
807 tmp = build1_v (LOOP_EXPR, tmp);
809 /* Only execute the loop if the number of iterations is positive. */
810 if (tree_int_cst_sgn (step) > 0)
811 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
812 else
813 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
814 tmp = fold_build3 (COND_EXPR, void_type_node,
815 cond, tmp, build_empty_stmt ());
816 gfc_add_expr_to_block (pblock, tmp);
818 /* Add the exit label. */
819 tmp = build1_v (LABEL_EXPR, exit_label);
820 gfc_add_expr_to_block (pblock, tmp);
822 return gfc_finish_block (pblock);
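/* Note: gfc_trans_do below only dispatches to gfc_trans_simple_do when the
   DO variable has integer type and the step has folded to the constant 1 or
   -1; e.g. "DO i = n, 1, -1" takes this path, while "DO i = 1, n, 2" falls
   through to the count-based translation that follows.  */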
825 /* Translate the DO construct. This obviously is one of the most
826 important ones to get right with any compiler, but especially
827 so for Fortran.
829 We special case some loop forms as described in gfc_trans_simple_do.
830 For other cases we implement them with a separate loop count,
831 as described in the standard.
833 We translate a do loop from:
835 DO dovar = from, to, step
836 body
837 END DO
839 to:
841 [evaluate loop bounds and step]
842 empty = (step > 0 ? to < from : to > from);
843 countm1 = (to - from) / step;
844 dovar = from;
845 if (empty) goto exit_label;
846 for (;;)
847   {
848     body;
849 cycle_label:
850     dovar += step;
851     if (countm1 == 0) goto exit_label;
852     countm1--;
853   }
854 exit_label:
856 countm1 is an unsigned integer. It is equal to the loop count minus one,
857 because the loop count itself can overflow. */
859 tree
860 gfc_trans_do (gfc_code * code)
862 gfc_se se;
863 tree dovar;
864 tree from;
865 tree to;
866 tree step;
867 tree countm1;
868 tree type;
869 tree utype;
870 tree cond;
871 tree cycle_label;
872 tree exit_label;
873 tree tmp;
874 tree pos_step;
875 stmtblock_t block;
876 stmtblock_t body;
878 gfc_start_block (&block);
880 /* Evaluate all the expressions in the iterator. */
881 gfc_init_se (&se, NULL);
882 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
883 gfc_add_block_to_block (&block, &se.pre);
884 dovar = se.expr;
885 type = TREE_TYPE (dovar);
887 gfc_init_se (&se, NULL);
888 gfc_conv_expr_val (&se, code->ext.iterator->start);
889 gfc_add_block_to_block (&block, &se.pre);
890 from = gfc_evaluate_now (se.expr, &block);
892 gfc_init_se (&se, NULL);
893 gfc_conv_expr_val (&se, code->ext.iterator->end);
894 gfc_add_block_to_block (&block, &se.pre);
895 to = gfc_evaluate_now (se.expr, &block);
897 gfc_init_se (&se, NULL);
898 gfc_conv_expr_val (&se, code->ext.iterator->step);
899 gfc_add_block_to_block (&block, &se.pre);
900 step = gfc_evaluate_now (se.expr, &block);
902 /* Special case simple loops. */
903 if (TREE_CODE (type) == INTEGER_TYPE
904 && (integer_onep (step)
905 || tree_int_cst_equal (step, integer_minus_one_node)))
906 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
908 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
909 fold_convert (type, integer_zero_node));
911 if (TREE_CODE (type) == INTEGER_TYPE)
912 utype = unsigned_type_for (type);
913 else
914 utype = unsigned_type_for (gfc_array_index_type);
915 countm1 = gfc_create_var (utype, "countm1");
917 /* Cycle and exit statements are implemented with gotos. */
918 cycle_label = gfc_build_label_decl (NULL_TREE);
919 exit_label = gfc_build_label_decl (NULL_TREE);
920 TREE_USED (exit_label) = 1;
922 /* Initialize the DO variable: dovar = from. */
923 gfc_add_modify (&block, dovar, from);
925 /* Initialize loop count and jump to exit label if the loop is empty.
926 This code is executed before we enter the loop body. We generate:
927 if (step > 0)
928   {
929     if (to < from) goto exit_label;
930     countm1 = (to - from) / step;
931   }
932 else
933   {
934     if (to > from) goto exit_label;
935     countm1 = (from - to) / -step;
936   } */
937 if (TREE_CODE (type) == INTEGER_TYPE)
939 tree pos, neg;
941 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
942 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
943 build1_v (GOTO_EXPR, exit_label),
944 build_empty_stmt ());
945 tmp = fold_build2 (MINUS_EXPR, type, to, from);
946 tmp = fold_convert (utype, tmp);
947 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
948 fold_convert (utype, step));
949 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
950 pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);
952 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
953 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
954 build1_v (GOTO_EXPR, exit_label),
955 build_empty_stmt ());
956 tmp = fold_build2 (MINUS_EXPR, type, from, to);
957 tmp = fold_convert (utype, tmp);
958 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
959 fold_convert (utype, fold_build1 (NEGATE_EXPR,
960 type, step)));
961 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
962 neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);
964 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
965 gfc_add_expr_to_block (&block, tmp);
967 else
969 /* TODO: We could use the same width as the real type.
970 This would probably cause more problems than it solves
971 when we implement "long double" types. */
973 tmp = fold_build2 (MINUS_EXPR, type, to, from);
974 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
975 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
976 gfc_add_modify (&block, countm1, tmp);
978 /* We need a special check for empty loops:
979 empty = (step > 0 ? to < from : to > from); */
980 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
981 fold_build2 (LT_EXPR, boolean_type_node, to, from),
982 fold_build2 (GT_EXPR, boolean_type_node, to, from));
983 /* If the loop is empty, go directly to the exit label. */
984 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
985 build1_v (GOTO_EXPR, exit_label),
986 build_empty_stmt ());
987 gfc_add_expr_to_block (&block, tmp);
990 /* Loop body. */
991 gfc_start_block (&body);
993 /* Put these labels where they can be found later. We put the
994 labels in a TREE_LIST node (because TREE_CHAIN is already
995 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
996 label in TREE_VALUE (backend_decl). */
998 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1000 /* Main loop body. */
1001 tmp = gfc_trans_code (code->block->next);
1002 gfc_add_expr_to_block (&body, tmp);
1004 /* Label for cycle statements (if needed). */
1005 if (TREE_USED (cycle_label))
1007 tmp = build1_v (LABEL_EXPR, cycle_label);
1008 gfc_add_expr_to_block (&body, tmp);
1011 /* Increment the loop variable. */
1012 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1013 gfc_add_modify (&body, dovar, tmp);
1015 /* End with the loop condition. Loop until countm1 == 0. */
1016 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1017 build_int_cst (utype, 0));
1018 tmp = build1_v (GOTO_EXPR, exit_label);
1019 tmp = fold_build3 (COND_EXPR, void_type_node,
1020 cond, tmp, build_empty_stmt ());
1021 gfc_add_expr_to_block (&body, tmp);
1023 /* Decrement the loop count. */
1024 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1025 gfc_add_modify (&body, countm1, tmp);
1027 /* End of loop body. */
1028 tmp = gfc_finish_block (&body);
1030 /* The for loop itself. */
1031 tmp = build1_v (LOOP_EXPR, tmp);
1032 gfc_add_expr_to_block (&block, tmp);
1034 /* Add the exit label. */
1035 tmp = build1_v (LABEL_EXPR, exit_label);
1036 gfc_add_expr_to_block (&block, tmp);
1038 return gfc_finish_block (&block);
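/* Worked example of the count computation above (integer case): for
   "DO i = 1, 10, 3" the loop runs for i = 1, 4, 7, 10, and countm1 is
   computed as (10 - 1) / 3 = 3 in the unsigned type, i.e. the iteration
   count minus one, which is why the exit test compares countm1 against 0
   before the decrement.  */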
1042 /* Translate the DO WHILE construct.
1044 We translate
1046 DO WHILE (cond)
1047 body
1048 END DO
1050 to:
1052 for ( ; ; )
1053   {
1054     pre_cond;
1055     if (! cond) goto exit_label;
1056     body;
1057 cycle_label:
1058   }
1059 exit_label:
1061 Because the evaluation of the exit condition `cond' may have side
1062 effects, we can't do much for empty loop bodies. The backend optimizers
1063 should be smart enough to eliminate any dead loops. */
1065 tree
1066 gfc_trans_do_while (gfc_code * code)
1068 gfc_se cond;
1069 tree tmp;
1070 tree cycle_label;
1071 tree exit_label;
1072 stmtblock_t block;
1074 /* Everything we build here is part of the loop body. */
1075 gfc_start_block (&block);
1077 /* Cycle and exit statements are implemented with gotos. */
1078 cycle_label = gfc_build_label_decl (NULL_TREE);
1079 exit_label = gfc_build_label_decl (NULL_TREE);
1081 /* Put the labels where they can be found later. See gfc_trans_do(). */
1082 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1084 /* Create a GIMPLE version of the exit condition. */
1085 gfc_init_se (&cond, NULL);
1086 gfc_conv_expr_val (&cond, code->expr);
1087 gfc_add_block_to_block (&block, &cond.pre);
1088 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1090 /* Build "IF (! cond) GOTO exit_label". */
1091 tmp = build1_v (GOTO_EXPR, exit_label);
1092 TREE_USED (exit_label) = 1;
1093 tmp = fold_build3 (COND_EXPR, void_type_node,
1094 cond.expr, tmp, build_empty_stmt ());
1095 gfc_add_expr_to_block (&block, tmp);
1097 /* The main body of the loop. */
1098 tmp = gfc_trans_code (code->block->next);
1099 gfc_add_expr_to_block (&block, tmp);
1101 /* Label for cycle statements (if needed). */
1102 if (TREE_USED (cycle_label))
1104 tmp = build1_v (LABEL_EXPR, cycle_label);
1105 gfc_add_expr_to_block (&block, tmp);
1108 /* End of loop body. */
1109 tmp = gfc_finish_block (&block);
1111 gfc_init_block (&block);
1112 /* Build the loop. */
1113 tmp = build1_v (LOOP_EXPR, tmp);
1114 gfc_add_expr_to_block (&block, tmp);
1116 /* Add the exit label. */
1117 tmp = build1_v (LABEL_EXPR, exit_label);
1118 gfc_add_expr_to_block (&block, tmp);
1120 return gfc_finish_block (&block);
1124 /* Translate the SELECT CASE construct for INTEGER case expressions,
1125 without killing all potential optimizations. The problem is that
1126 Fortran allows unbounded cases, but the back-end does not, so we
1127 need to intercept those before we enter the equivalent SWITCH_EXPR
1128 we can build.
1130 For example, we translate this,
1132 SELECT CASE (expr)
1133 CASE (:100,101,105:115)
1134 block_1
1135 CASE (190:199,200:)
1136 block_2
1137 CASE (300)
1138 block_3
1139 CASE DEFAULT
1140 block_4
1141 END SELECT
1143 to the GENERIC equivalent,
1145 switch (expr)
1146   {
1147     case (minimum value for typeof(expr)) ... 100:
1148     case 101:
1149     case 105 ... 115:
1150       block_1;
1151       goto end_label;
1153     case 200 ... (maximum value for typeof(expr)):
1154     case 190 ... 199:
1155       block_2;
1156       goto end_label;
1158     case 300:
1159       block_3;
1160       goto end_label;
1162     default:
1163       block_4;
1164       goto end_label;
1165   }
1167 end_label: */
1169 static tree
1170 gfc_trans_integer_select (gfc_code * code)
1172 gfc_code *c;
1173 gfc_case *cp;
1174 tree end_label;
1175 tree tmp;
1176 gfc_se se;
1177 stmtblock_t block;
1178 stmtblock_t body;
1180 gfc_start_block (&block);
1182 /* Calculate the switch expression. */
1183 gfc_init_se (&se, NULL);
1184 gfc_conv_expr_val (&se, code->expr);
1185 gfc_add_block_to_block (&block, &se.pre);
1187 end_label = gfc_build_label_decl (NULL_TREE);
1189 gfc_init_block (&body);
1191 for (c = code->block; c; c = c->block)
1193 for (cp = c->ext.case_list; cp; cp = cp->next)
1195 tree low, high;
1196 tree label;
1198 /* Assume it's the default case. */
1199 low = high = NULL_TREE;
1201 if (cp->low)
1203 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1204 cp->low->ts.kind);
1206 /* If there's only a lower bound, set the high bound to the
1207 maximum value of the case expression. */
1208 if (!cp->high)
1209 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1212 if (cp->high)
1214 /* Three cases are possible here:
1216 1) There is no lower bound, e.g. CASE (:N).
1217 2) There is a lower bound .NE. high bound, that is
1218 a case range, e.g. CASE (N:M) where M>N (we make
1219 sure that M>N during type resolution).
1220 3) There is a lower bound, and it has the same value
1221 as the high bound, e.g. CASE (N:N). This is our
1222 internal representation of CASE(N).
1224 In the first and second case, we need to set a value for
1225 high. In the third case, we don't because the GCC middle
1226 end represents a single case value by just letting high be
1227 a NULL_TREE. We can't do that because we need to be able
1228 to represent unbounded cases. */
1230 if (!cp->low
1231 || (cp->low
1232 && mpz_cmp (cp->low->value.integer,
1233 cp->high->value.integer) != 0))
1234 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1235 cp->high->ts.kind);
1237 /* Unbounded case. */
1238 if (!cp->low)
1239 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1242 /* Build a label. */
1243 label = gfc_build_label_decl (NULL_TREE);
1245 /* Add this case label.
1246 Add parameter 'label', make it match GCC backend. */
1247 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1248 low, high, label);
1249 gfc_add_expr_to_block (&body, tmp);
1252 /* Add the statements for this case. */
1253 tmp = gfc_trans_code (c->next);
1254 gfc_add_expr_to_block (&body, tmp);
1256 /* Break to the end of the construct. */
1257 tmp = build1_v (GOTO_EXPR, end_label);
1258 gfc_add_expr_to_block (&body, tmp);
1261 tmp = gfc_finish_block (&body);
1262 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1263 gfc_add_expr_to_block (&block, tmp);
1265 tmp = build1_v (LABEL_EXPR, end_label);
1266 gfc_add_expr_to_block (&block, tmp);
1268 return gfc_finish_block (&block);
1272 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1274 There are only two cases possible here, even though the standard
1275 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1276 .FALSE., and DEFAULT.
1278 We never generate more than two blocks here. Instead, we always
1279 try to eliminate the DEFAULT case. This way, we can translate this
1280 kind of SELECT construct to a simple
1282 if {} else {};
1284 expression in GENERIC. */
1286 static tree
1287 gfc_trans_logical_select (gfc_code * code)
1289 gfc_code *c;
1290 gfc_code *t, *f, *d;
1291 gfc_case *cp;
1292 gfc_se se;
1293 stmtblock_t block;
1295 /* Assume we don't have any cases at all. */
1296 t = f = d = NULL;
1298 /* Now see which ones we actually do have. We can have at most two
1299 cases in a single case list: one for .TRUE. and one for .FALSE.
1300 The default case is always separate. If the cases for .TRUE. and
1301 .FALSE. are in the same case list, the block for that case list is
1302 always executed, and we don't generate a COND_EXPR. */
1303 for (c = code->block; c; c = c->block)
1305 for (cp = c->ext.case_list; cp; cp = cp->next)
1307 if (cp->low)
1309 if (cp->low->value.logical == 0) /* .FALSE. */
1310 f = c;
1311 else /* if (cp->value.logical != 0), thus .TRUE. */
1312 t = c;
1314 else
1315 d = c;
1319 /* Start a new block. */
1320 gfc_start_block (&block);
1322 /* Calculate the switch expression. We always need to do this
1323 because it may have side effects. */
1324 gfc_init_se (&se, NULL);
1325 gfc_conv_expr_val (&se, code->expr);
1326 gfc_add_block_to_block (&block, &se.pre);
1328 if (t == f && t != NULL)
1330 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1331 translate the code for these cases, append it to the current
1332 block. */
1333 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1335 else
1337 tree true_tree, false_tree, stmt;
1339 true_tree = build_empty_stmt ();
1340 false_tree = build_empty_stmt ();
1342 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1343 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1344 make the missing case the default case. */
1345 if (t != NULL && f != NULL)
1346 d = NULL;
1347 else if (d != NULL)
1349 if (t == NULL)
1350 t = d;
1351 else
1352 f = d;
1355 /* Translate the code for each of these blocks, and append it to
1356 the current block. */
1357 if (t != NULL)
1358 true_tree = gfc_trans_code (t->next);
1360 if (f != NULL)
1361 false_tree = gfc_trans_code (f->next);
1363 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1364 true_tree, false_tree);
1365 gfc_add_expr_to_block (&block, stmt);
1368 return gfc_finish_block (&block);
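/* For instance (a sketch), the construct

       SELECT CASE (flag)
       CASE (.TRUE.)
         block_t
       CASE DEFAULT
         block_d
       END SELECT

   has no explicit .FALSE. case, so the default block is reused for it and
   the whole construct collapses to roughly
   "if (flag) block_t; else block_d;".  */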
1372 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1373 Instead of generating compares and jumps, it is far simpler to
1374 generate a data structure describing the cases in order and call a
1375 library subroutine that locates the right case.
1376 This is particularly true because this is the only case where we
1377 might have to dispose of a temporary.
1378 The library subroutine returns a pointer to jump to or NULL if no
1379 branches are to be taken. */
1381 static tree
1382 gfc_trans_character_select (gfc_code *code)
1384 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1385 stmtblock_t block, body;
1386 gfc_case *cp, *d;
1387 gfc_code *c;
1388 gfc_se se;
1389 int n, k;
1391 /* The jump table types are stored in static variables to avoid
1392 constructing them from scratch every single time. */
1393 static tree select_struct[2];
1394 static tree ss_string1[2], ss_string1_len[2];
1395 static tree ss_string2[2], ss_string2_len[2];
1396 static tree ss_target[2];
1398 tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);
1400 if (code->expr->ts.kind == 1)
1401 k = 0;
1402 else if (code->expr->ts.kind == 4)
1403 k = 1;
1404 else
1405 gcc_unreachable ();
1407 if (select_struct[k] == NULL)
1409 select_struct[k] = make_node (RECORD_TYPE);
1411 if (code->expr->ts.kind == 1)
1412 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1413 else if (code->expr->ts.kind == 4)
1414 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1415 else
1416 gcc_unreachable ();
1418 #undef ADD_FIELD
1419 #define ADD_FIELD(NAME, TYPE) \
1420 ss_##NAME[k] = gfc_add_field_to_struct \
1421 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1422 get_identifier (stringize(NAME)), TYPE)
1424 ADD_FIELD (string1, pchartype);
1425 ADD_FIELD (string1_len, gfc_charlen_type_node);
1427 ADD_FIELD (string2, pchartype);
1428 ADD_FIELD (string2_len, gfc_charlen_type_node);
1430 ADD_FIELD (target, integer_type_node);
1431 #undef ADD_FIELD
1433 gfc_finish_type (select_struct[k]);
1436 cp = code->block->ext.case_list;
1437 while (cp->left != NULL)
1438 cp = cp->left;
1440 n = 0;
1441 for (d = cp; d; d = d->right)
1442 d->n = n++;
1444 end_label = gfc_build_label_decl (NULL_TREE);
1446 /* Generate the body */
1447 gfc_start_block (&block);
1448 gfc_init_block (&body);
1450 for (c = code->block; c; c = c->block)
1452 for (d = c->ext.case_list; d; d = d->next)
1454 label = gfc_build_label_decl (NULL_TREE);
1455 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1456 build_int_cst (NULL_TREE, d->n),
1457 build_int_cst (NULL_TREE, d->n), label);
1458 gfc_add_expr_to_block (&body, tmp);
1461 tmp = gfc_trans_code (c->next);
1462 gfc_add_expr_to_block (&body, tmp);
1464 tmp = build1_v (GOTO_EXPR, end_label);
1465 gfc_add_expr_to_block (&body, tmp);
1468 /* Generate the structure describing the branches */
1469 init = NULL_TREE;
1471 for(d = cp; d; d = d->right)
1473 node = NULL_TREE;
1475 gfc_init_se (&se, NULL);
1477 if (d->low == NULL)
1479 node = tree_cons (ss_string1[k], null_pointer_node, node);
1480 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1482 else
1484 gfc_conv_expr_reference (&se, d->low);
1486 node = tree_cons (ss_string1[k], se.expr, node);
1487 node = tree_cons (ss_string1_len[k], se.string_length, node);
1490 if (d->high == NULL)
1492 node = tree_cons (ss_string2[k], null_pointer_node, node);
1493 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1495 else
1497 gfc_init_se (&se, NULL);
1498 gfc_conv_expr_reference (&se, d->high);
1500 node = tree_cons (ss_string2[k], se.expr, node);
1501 node = tree_cons (ss_string2_len[k], se.string_length, node);
1504 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1505 node);
1507 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1508 init = tree_cons (NULL_TREE, tmp, init);
1511 type = build_array_type (select_struct[k],
1512 build_index_type (build_int_cst (NULL_TREE, n-1)));
1514 init = build_constructor_from_list (type, nreverse(init));
1515 TREE_CONSTANT (init) = 1;
1516 TREE_STATIC (init) = 1;
1517 /* Create a static variable to hold the jump table. */
1518 tmp = gfc_create_var (type, "jumptable");
1519 TREE_CONSTANT (tmp) = 1;
1520 TREE_STATIC (tmp) = 1;
1521 TREE_READONLY (tmp) = 1;
1522 DECL_INITIAL (tmp) = init;
1523 init = tmp;
1525 /* Build the library call */
1526 init = gfc_build_addr_expr (pvoid_type_node, init);
1528 gfc_init_se (&se, NULL);
1529 gfc_conv_expr_reference (&se, code->expr);
1531 gfc_add_block_to_block (&block, &se.pre);
1533 if (code->expr->ts.kind == 1)
1534 fndecl = gfor_fndecl_select_string;
1535 else if (code->expr->ts.kind == 4)
1536 fndecl = gfor_fndecl_select_string_char4;
1537 else
1538 gcc_unreachable ();
1540 tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
1541 se.expr, se.string_length);
1542 case_num = gfc_create_var (integer_type_node, "case_num");
1543 gfc_add_modify (&block, case_num, tmp);
1545 gfc_add_block_to_block (&block, &se.post);
1547 tmp = gfc_finish_block (&body);
1548 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1549 gfc_add_expr_to_block (&block, tmp);
1551 tmp = build1_v (LABEL_EXPR, end_label);
1552 gfc_add_expr_to_block (&block, tmp);
1554 return gfc_finish_block (&block);
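/* Sketch of the runtime structure built above: each CASE range contributes
   one jump table entry { low string, low length, high string, high length,
   case number }, with a NULL string and zero length standing for an
   unbounded side such as CASE ('m':).  The library routine behind
   gfor_fndecl_select_string then returns the matching case number, which is
   what the SWITCH_EXPR dispatches on.  */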
1558 /* Translate the three variants of the SELECT CASE construct.
1560 SELECT CASEs with INTEGER case expressions can be translated to an
1561 equivalent GENERIC switch statement, and for LOGICAL case
1562 expressions we build one or two if-else compares.
1564 SELECT CASEs with CHARACTER case expressions are a whole different
1565 story, because they don't exist in GENERIC. So we sort them and
1566 do a binary search at runtime.
1568 Fortran has no BREAK statement, and it does not allow jumps from
1569 one case block to another. That makes things a lot easier for
1570 the optimizers. */
1572 tree
1573 gfc_trans_select (gfc_code * code)
1575 gcc_assert (code && code->expr);
1577 /* Empty SELECT constructs are legal. */
1578 if (code->block == NULL)
1579 return build_empty_stmt ();
1581 /* Select the correct translation function. */
1582 switch (code->expr->ts.type)
1584 case BT_LOGICAL: return gfc_trans_logical_select (code);
1585 case BT_INTEGER: return gfc_trans_integer_select (code);
1586 case BT_CHARACTER: return gfc_trans_character_select (code);
1587 default:
1588 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1589 /* Not reached */
1594 /* Traversal function to substitute a replacement symtree if the symbol
1595 in the expression is the same as that passed. f == 2 signals that
1596 that variable itself is not to be checked - only the references.
1597 This group of functions is used when the variable expression in a
1598 FORALL assignment has internal references. For example:
1599 FORALL (i = 1:4) p(p(i)) = i
1600 The only recourse here is to store a copy of 'p' for the index
1601 expression. */
1603 static gfc_symtree *new_symtree;
1604 static gfc_symtree *old_symtree;
1606 static bool
1607 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1609 if (expr->expr_type != EXPR_VARIABLE)
1610 return false;
1612 if (*f == 2)
1613 *f = 1;
1614 else if (expr->symtree->n.sym == sym)
1615 expr->symtree = new_symtree;
1617 return false;
1620 static void
1621 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1623 gfc_traverse_expr (e, sym, forall_replace, f);
1626 static bool
1627 forall_restore (gfc_expr *expr,
1628 gfc_symbol *sym ATTRIBUTE_UNUSED,
1629 int *f ATTRIBUTE_UNUSED)
1631 if (expr->expr_type != EXPR_VARIABLE)
1632 return false;
1634 if (expr->symtree == new_symtree)
1635 expr->symtree = old_symtree;
1637 return false;
1640 static void
1641 forall_restore_symtree (gfc_expr *e)
1643 gfc_traverse_expr (e, NULL, forall_restore, 0);
1646 static void
1647 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1649 gfc_se tse;
1650 gfc_se rse;
1651 gfc_expr *e;
1652 gfc_symbol *new_sym;
1653 gfc_symbol *old_sym;
1654 gfc_symtree *root;
1655 tree tmp;
1657 /* Build a copy of the lvalue. */
1658 old_symtree = c->expr->symtree;
1659 old_sym = old_symtree->n.sym;
1660 e = gfc_lval_expr_from_sym (old_sym);
1661 if (old_sym->attr.dimension)
1663 gfc_init_se (&tse, NULL);
1664 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1665 gfc_add_block_to_block (pre, &tse.pre);
1666 gfc_add_block_to_block (post, &tse.post);
1667 tse.expr = build_fold_indirect_ref (tse.expr);
1669 if (e->ts.type != BT_CHARACTER)
1671 /* Use the variable offset for the temporary. */
1672 tmp = gfc_conv_descriptor_offset (tse.expr);
1673 gfc_add_modify (pre, tmp,
1674 gfc_conv_array_offset (old_sym->backend_decl));
1677 else
1679 gfc_init_se (&tse, NULL);
1680 gfc_init_se (&rse, NULL);
1681 gfc_conv_expr (&rse, e);
1682 if (e->ts.type == BT_CHARACTER)
1684 tse.string_length = rse.string_length;
1685 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1686 tse.string_length);
1687 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1688 rse.string_length);
1689 gfc_add_block_to_block (pre, &tse.pre);
1690 gfc_add_block_to_block (post, &tse.post);
1692 else
1694 tmp = gfc_typenode_for_spec (&e->ts);
1695 tse.expr = gfc_create_var (tmp, "temp");
1698 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1699 e->expr_type == EXPR_VARIABLE);
1700 gfc_add_expr_to_block (pre, tmp);
1702 gfc_free_expr (e);
1704 /* Create a new symbol to represent the lvalue. */
1705 new_sym = gfc_new_symbol (old_sym->name, NULL);
1706 new_sym->ts = old_sym->ts;
1707 new_sym->attr.referenced = 1;
1708 new_sym->attr.dimension = old_sym->attr.dimension;
1709 new_sym->attr.flavor = old_sym->attr.flavor;
1711 /* Use the temporary as the backend_decl. */
1712 new_sym->backend_decl = tse.expr;
1714 /* Create a fake symtree for it. */
1715 root = NULL;
1716 new_symtree = gfc_new_symtree (&root, old_sym->name);
1717 new_symtree->n.sym = new_sym;
1718 gcc_assert (new_symtree == root);
1720 /* Go through the expression reference replacing the old_symtree
1721 with the new. */
1722 forall_replace_symtree (c->expr, old_sym, 2);
1724 /* Now we have made this temporary, we might as well use it for
1725 the right hand side. */
1726 forall_replace_symtree (c->expr2, old_sym, 1);
1730 /* Handles dependencies in forall assignments. */
1731 static int
1732 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1734 gfc_ref *lref;
1735 gfc_ref *rref;
1736 int need_temp;
1737 gfc_symbol *lsym;
1739 lsym = c->expr->symtree->n.sym;
1740 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1742 /* Now check for dependencies within the 'variable'
1743 expression itself. These are treated by making a complete
1744 copy of the variable and making all the references to it
1745 point to the copy instead. Note that the shallow copy of
1746 the variable will not suffice for derived types with
1747 pointer components. We therefore leave these to their
1748 own devices. */
1749 if (lsym->ts.type == BT_DERIVED
1750 && lsym->ts.derived->attr.pointer_comp)
1751 return need_temp;
1753 new_symtree = NULL;
1754 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1756 forall_make_variable_temp (c, pre, post);
1757 need_temp = 0;
1760 /* Substrings with dependencies are treated in the same
1761 way. */
1762 if (c->expr->ts.type == BT_CHARACTER
1763 && c->expr->ref
1764 && c->expr2->expr_type == EXPR_VARIABLE
1765 && lsym == c->expr2->symtree->n.sym)
1767 for (lref = c->expr->ref; lref; lref = lref->next)
1768 if (lref->type == REF_SUBSTRING)
1769 break;
1770 for (rref = c->expr2->ref; rref; rref = rref->next)
1771 if (rref->type == REF_SUBSTRING)
1772 break;
1774 if (rref && lref
1775 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1777 forall_make_variable_temp (c, pre, post);
1778 need_temp = 0;
1781 return need_temp;
1785 static void
1786 cleanup_forall_symtrees (gfc_code *c)
1788 forall_restore_symtree (c->expr);
1789 forall_restore_symtree (c->expr2);
1790 gfc_free (new_symtree->n.sym);
1791 gfc_free (new_symtree);
1795 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1796 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1797 indicates whether we should generate code to test the FORALLs mask
1798 array. OUTER is the loop header to be used for initializing mask
1799 indices.
1801 The generated loop format is:
1802 count = (end - start + step) / step
1803 loopvar = start
1804 while (1)
1805   {
1806     if (count <= 0)
1807       goto end_of_loop
1808     <body>
1809     loopvar += step
1810     count--
1811   }
1812 end_of_loop: */
1814 static tree
1815 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1816 int mask_flag, stmtblock_t *outer)
1818 int n, nvar;
1819 tree tmp;
1820 tree cond;
1821 stmtblock_t block;
1822 tree exit_label;
1823 tree count;
1824 tree var, start, end, step;
1825 iter_info *iter;
1827 /* Initialize the mask index outside the FORALL nest. */
1828 if (mask_flag && forall_tmp->mask)
1829 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1831 iter = forall_tmp->this_loop;
1832 nvar = forall_tmp->nvar;
1833 for (n = 0; n < nvar; n++)
1835 var = iter->var;
1836 start = iter->start;
1837 end = iter->end;
1838 step = iter->step;
1840 exit_label = gfc_build_label_decl (NULL_TREE);
1841 TREE_USED (exit_label) = 1;
1843 /* The loop counter. */
1844 count = gfc_create_var (TREE_TYPE (var), "count");
1846 /* The body of the loop. */
1847 gfc_init_block (&block);
1849 /* The exit condition. */
1850 cond = fold_build2 (LE_EXPR, boolean_type_node,
1851 count, build_int_cst (TREE_TYPE (count), 0));
1852 tmp = build1_v (GOTO_EXPR, exit_label);
1853 tmp = fold_build3 (COND_EXPR, void_type_node,
1854 cond, tmp, build_empty_stmt ());
1855 gfc_add_expr_to_block (&block, tmp);
1857 /* The main loop body. */
1858 gfc_add_expr_to_block (&block, body);
1860 /* Increment the loop variable. */
1861 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1862 gfc_add_modify (&block, var, tmp);
1864 /* Advance to the next mask element. Only do this for the
1865 innermost loop. */
1866 if (n == 0 && mask_flag && forall_tmp->mask)
1868 tree maskindex = forall_tmp->maskindex;
1869 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1870 maskindex, gfc_index_one_node);
1871 gfc_add_modify (&block, maskindex, tmp);
1874 /* Decrement the loop counter. */
1875 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1876 build_int_cst (TREE_TYPE (var), 1));
1877 gfc_add_modify (&block, count, tmp);
1879 body = gfc_finish_block (&block);
1881 /* Loop var initialization. */
1882 gfc_init_block (&block);
1883 gfc_add_modify (&block, var, start);
1886 /* Initialize the loop counter. */
1887 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1888 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1889 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1890 gfc_add_modify (&block, count, tmp);
1892 /* The loop expression. */
1893 tmp = build1_v (LOOP_EXPR, body);
1894 gfc_add_expr_to_block (&block, tmp);
1896 /* The exit label. */
1897 tmp = build1_v (LABEL_EXPR, exit_label);
1898 gfc_add_expr_to_block (&block, tmp);
1900 body = gfc_finish_block (&block);
1901 iter = iter->next;
1903 return body;
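/* Worked example of the count formula in the comment above: for
   FORALL (i = 1:9:2) the loop variable takes the values 1, 3, 5, 7, 9, and
   count = (9 - 1 + 2) / 2 = 5, matching the number of iterations.  */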
1907 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1908 is nonzero, the body is controlled by all masks in the forall nest.
1909 Otherwise, the innermost loop is not controlled by its mask. This
1910 is used for initializing that mask. */
1912 static tree
1913 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1914 int mask_flag)
1916 tree tmp;
1917 stmtblock_t header;
1918 forall_info *forall_tmp;
1919 tree mask, maskindex;
1921 gfc_start_block (&header);
1923 forall_tmp = nested_forall_info;
1924 while (forall_tmp != NULL)
1926 /* Generate body with masks' control. */
1927 if (mask_flag)
1929 mask = forall_tmp->mask;
1930 maskindex = forall_tmp->maskindex;
1932 /* If a mask was specified make the assignment conditional. */
1933 if (mask)
1935 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1936 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1939 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1940 forall_tmp = forall_tmp->prev_nest;
1941 mask_flag = 1;
1944 gfc_add_expr_to_block (&header, body);
1945 return gfc_finish_block (&header);
1949 /* Allocate data for holding a temporary array. Returns either a local
1950 temporary array or a pointer variable. */
1952 static tree
1953 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1954 tree elem_type)
1956 tree tmpvar;
1957 tree type;
1958 tree tmp;
1960 if (INTEGER_CST_P (size))
1962 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1963 gfc_index_one_node);
1965 else
1966 tmp = NULL_TREE;
1968 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1969 type = build_array_type (elem_type, type);
1970 if (gfc_can_put_var_on_stack (bytesize))
1972 gcc_assert (INTEGER_CST_P (size));
1973 tmpvar = gfc_create_var (type, "temp");
1974 *pdata = NULL_TREE;
1976 else
1978 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1979 *pdata = convert (pvoid_type_node, tmpvar);
1981 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1982 gfc_add_modify (pblock, tmpvar, tmp);
1984 return tmpvar;
1988 /* Generate code to copy the temporary to the actual lhs. */
1990 static tree
1991 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1992 tree count1, tree wheremask, bool invert)
1994 gfc_ss *lss;
1995 gfc_se lse, rse;
1996 stmtblock_t block, body;
1997 gfc_loopinfo loop1;
1998 tree tmp;
1999 tree wheremaskexpr;
2001 /* Walk the lhs. */
2002 lss = gfc_walk_expr (expr);
2004 if (lss == gfc_ss_terminator)
2006 gfc_start_block (&block);
2008 gfc_init_se (&lse, NULL);
2010 /* Translate the expression. */
2011 gfc_conv_expr (&lse, expr);
2013 /* Form the expression for the temporary. */
2014 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2016 /* Use the scalar assignment as is. */
2017 gfc_add_block_to_block (&block, &lse.pre);
2018 gfc_add_modify (&block, lse.expr, tmp);
2019 gfc_add_block_to_block (&block, &lse.post);
2021 /* Increment the count1. */
2022 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2023 gfc_index_one_node);
2024 gfc_add_modify (&block, count1, tmp);
2026 tmp = gfc_finish_block (&block);
2028 else
2030 gfc_start_block (&block);
2032 gfc_init_loopinfo (&loop1);
2033 gfc_init_se (&rse, NULL);
2034 gfc_init_se (&lse, NULL);
2036 /* Associate the lss with the loop. */
2037 gfc_add_ss_to_loop (&loop1, lss);
2039 /* Calculate the bounds of the scalarization. */
2040 gfc_conv_ss_startstride (&loop1);
2041 /* Setup the scalarizing loops. */
2042 gfc_conv_loop_setup (&loop1, &expr->where);
2044 gfc_mark_ss_chain_used (lss, 1);
2046 /* Start the scalarized loop body. */
2047 gfc_start_scalarized_body (&loop1, &body);
2049 /* Setup the gfc_se structures. */
2050 gfc_copy_loopinfo_to_se (&lse, &loop1);
2051 lse.ss = lss;
2053 /* Form the expression of the temporary. */
2054 if (lss != gfc_ss_terminator)
2055 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2056 /* Translate expr. */
2057 gfc_conv_expr (&lse, expr);
2059 /* Use the scalar assignment. */
2060 rse.string_length = lse.string_length;
2061 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2063 /* Form the mask expression according to the mask tree list. */
2064 if (wheremask)
2066 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2067 if (invert)
2068 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2069 TREE_TYPE (wheremaskexpr),
2070 wheremaskexpr);
2071 tmp = fold_build3 (COND_EXPR, void_type_node,
2072 wheremaskexpr, tmp, build_empty_stmt ());
2075 gfc_add_expr_to_block (&body, tmp);
2077 /* Increment count1. */
2078 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2079 count1, gfc_index_one_node);
2080 gfc_add_modify (&body, count1, tmp);
2082 /* Increment count3. */
2083 if (count3)
2085 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2086 count3, gfc_index_one_node);
2087 gfc_add_modify (&body, count3, tmp);
2090 /* Generate the copying loops. */
2091 gfc_trans_scalarizing_loops (&loop1, &body);
2092 gfc_add_block_to_block (&block, &loop1.pre);
2093 gfc_add_block_to_block (&block, &loop1.post);
2094 gfc_cleanup_loop (&loop1);
2096 tmp = gfc_finish_block (&block);
2098 return tmp;
2102 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
2103 the temporary; LSS and RSS are formed in compute_inner_temp_size ()
2104 and should not be freed. WHEREMASK is the conditional execution mask,
2105 whose sense may be inverted by INVERT. */
2107 static tree
2108 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2109 tree count1, gfc_ss *lss, gfc_ss *rss,
2110 tree wheremask, bool invert)
2112 stmtblock_t block, body1;
2113 gfc_loopinfo loop;
2114 gfc_se lse;
2115 gfc_se rse;
2116 tree tmp;
2117 tree wheremaskexpr;
2119 gfc_start_block (&block);
2121 gfc_init_se (&rse, NULL);
2122 gfc_init_se (&lse, NULL);
2124 if (lss == gfc_ss_terminator)
2126 gfc_init_block (&body1);
2127 gfc_conv_expr (&rse, expr2);
2128 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2130 else
2132 /* Initialize the loop. */
2133 gfc_init_loopinfo (&loop);
2135 /* We may need LSS to determine the shape of the expression. */
2136 gfc_add_ss_to_loop (&loop, lss);
2137 gfc_add_ss_to_loop (&loop, rss);
2139 gfc_conv_ss_startstride (&loop);
2140 gfc_conv_loop_setup (&loop, &expr2->where);
2142 gfc_mark_ss_chain_used (rss, 1);
2143 /* Start the loop body. */
2144 gfc_start_scalarized_body (&loop, &body1);
2146 /* Translate the expression. */
2147 gfc_copy_loopinfo_to_se (&rse, &loop);
2148 rse.ss = rss;
2149 gfc_conv_expr (&rse, expr2);
2151 /* Form the expression of the temporary. */
2152 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2155 /* Use the scalar assignment. */
2156 lse.string_length = rse.string_length;
2157 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2158 expr2->expr_type == EXPR_VARIABLE);
2160 /* Form the mask expression according to the mask tree list. */
2161 if (wheremask)
2163 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2164 if (invert)
2165 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2166 TREE_TYPE (wheremaskexpr),
2167 wheremaskexpr);
2168 tmp = fold_build3 (COND_EXPR, void_type_node,
2169 wheremaskexpr, tmp, build_empty_stmt ());
2172 gfc_add_expr_to_block (&body1, tmp);
2174 if (lss == gfc_ss_terminator)
2176 gfc_add_block_to_block (&block, &body1);
2178 /* Increment count1. */
2179 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2180 gfc_index_one_node);
2181 gfc_add_modify (&block, count1, tmp);
2183 else
2185 /* Increment count1. */
2186 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2187 count1, gfc_index_one_node);
2188 gfc_add_modify (&body1, count1, tmp);
2190 /* Increment count3. */
2191 if (count3)
2193 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2194 count3, gfc_index_one_node);
2195 gfc_add_modify (&body1, count3, tmp);
2198 /* Generate the copying loops. */
2199 gfc_trans_scalarizing_loops (&loop, &body1);
2201 gfc_add_block_to_block (&block, &loop.pre);
2202 gfc_add_block_to_block (&block, &loop.post);
2204 gfc_cleanup_loop (&loop);
2205 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2206 as tree nodes in SS may not be valid in a different scope. */
2209 tmp = gfc_finish_block (&block);
2210 return tmp;
2214 /* Calculate the size of the temporary needed for the assignment inside
2215 forall. LSS and RSS are filled in by this function. */
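/* For example (illustrative), for an assignment whose lhs section is
   a(1:n, 1:m) the inner size works out to the product of the scalarizer
   extents, (to[0] - from[0] + 1) * (to[1] - from[1] + 1), evaluated once
   into PBLOCK.  */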
2217 static tree
2218 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2219 stmtblock_t * pblock,
2220 gfc_ss **lss, gfc_ss **rss)
2222 gfc_loopinfo loop;
2223 tree size;
2224 int i;
2225 int save_flag;
2226 tree tmp;
2228 *lss = gfc_walk_expr (expr1);
2229 *rss = NULL;
2231 size = gfc_index_one_node;
2232 if (*lss != gfc_ss_terminator)
2234 gfc_init_loopinfo (&loop);
2236 /* Walk the RHS of the expression. */
2237 *rss = gfc_walk_expr (expr2);
2238 if (*rss == gfc_ss_terminator)
2240 /* The rhs is scalar. Add a ss for the expression. */
2241 *rss = gfc_get_ss ();
2242 (*rss)->next = gfc_ss_terminator;
2243 (*rss)->type = GFC_SS_SCALAR;
2244 (*rss)->expr = expr2;
2247 /* Associate the SS with the loop. */
2248 gfc_add_ss_to_loop (&loop, *lss);
2249 /* We don't actually need to add the rhs at this point, but it might
2250 make guessing the loop bounds a bit easier. */
2251 gfc_add_ss_to_loop (&loop, *rss);
2253 /* We only want the shape of the expression, not the rest of the junk
2254 generated by the scalarizer. */
2255 loop.array_parameter = 1;
2257 /* Calculate the bounds of the scalarization. */
2258 save_flag = flag_bounds_check;
2259 flag_bounds_check = 0;
2260 gfc_conv_ss_startstride (&loop);
2261 flag_bounds_check = save_flag;
2262 gfc_conv_loop_setup (&loop, &expr2->where);
2264 /* Figure out how many elements we need. */
2265 for (i = 0; i < loop.dimen; i++)
2267 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2268 gfc_index_one_node, loop.from[i]);
2269 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2270 tmp, loop.to[i]);
2271 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2273 gfc_add_block_to_block (pblock, &loop.pre);
2274 size = gfc_evaluate_now (size, pblock);
2275 gfc_add_block_to_block (pblock, &loop.post);
2277 /* TODO: write a function that cleans up a loopinfo without freeing
2278 the SS chains. Currently a NOP. */
2281 return size;
2285 /* Calculate the overall iterator number of the nested forall construct.
2286 This routine actually calculates the number of times the body of the
2287 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2288 that by the expression INNER_SIZE. The BLOCK argument specifies the
2289 block in which to calculate the result, and the optional INNER_SIZE_BODY
2290 argument contains any statements that need to be executed (inside the loop)
2291 to initialize or calculate INNER_SIZE. */
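/* A small worked example (illustrative): for the nest

   forall (i = 1:10)
   forall (j = 1:5, mask)

   with INNER_SIZE = 3, the result is accumulated at run time as
   num = num + 3 under the control of the masks; with no masks and
   constant bounds it simply folds to the constant 10 * 5 * 3.  */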
2293 static tree
2294 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2295 stmtblock_t *inner_size_body, stmtblock_t *block)
2297 forall_info *forall_tmp = nested_forall_info;
2298 tree tmp, number;
2299 stmtblock_t body;
2301 /* We can eliminate the innermost unconditional loops with constant
2302 array bounds. */
2303 if (INTEGER_CST_P (inner_size))
2305 while (forall_tmp
2306 && !forall_tmp->mask
2307 && INTEGER_CST_P (forall_tmp->size))
2309 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2310 inner_size, forall_tmp->size);
2311 forall_tmp = forall_tmp->prev_nest;
2314 /* If there are no loops left, we have our constant result. */
2315 if (!forall_tmp)
2316 return inner_size;
2319 /* Otherwise, create a temporary variable to compute the result. */
2320 number = gfc_create_var (gfc_array_index_type, "num");
2321 gfc_add_modify (block, number, gfc_index_zero_node);
2323 gfc_start_block (&body);
2324 if (inner_size_body)
2325 gfc_add_block_to_block (&body, inner_size_body);
2326 if (forall_tmp)
2327 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2328 number, inner_size);
2329 else
2330 tmp = inner_size;
2331 gfc_add_modify (&body, number, tmp);
2332 tmp = gfc_finish_block (&body);
2334 /* Generate loops. */
2335 if (forall_tmp != NULL)
2336 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2338 gfc_add_expr_to_block (block, tmp);
2340 return number;
2344 /* Allocate a temporary for the FORALL construct. SIZE is the size of the
2345 temporary needed. PTEMP1 is returned so that the space can be freed. */
2347 static tree
2348 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2349 tree * ptemp1)
2351 tree bytesize;
2352 tree unit;
2353 tree tmp;
2355 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2356 if (!integer_onep (unit))
2357 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2358 else
2359 bytesize = size;
2361 *ptemp1 = NULL;
2362 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2364 if (*ptemp1)
2365 tmp = build_fold_indirect_ref (tmp);
2366 return tmp;
2370 /* Allocate a temporary for the FORALL construct according to the information
2371 in nested_forall_info. INNER_SIZE is the size of the temporary needed for
2372 the assignment inside forall. PTEMP1 is returned so that the space can be freed. */
2374 static tree
2375 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2376 tree inner_size, stmtblock_t * inner_size_body,
2377 stmtblock_t * block, tree * ptemp1)
2379 tree size;
2381 /* Calculate the total size of temporary needed in forall construct. */
2382 size = compute_overall_iter_number (nested_forall_info, inner_size,
2383 inner_size_body, block);
2385 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2389 /* Handle assignments inside forall which need a temporary.
2391 forall (i=start:end:stride; maskexpr)
2392 e<i> = f<i>
2393 end forall
2394 (where e,f<i> are arbitrary expressions possibly involving i
2395 and there is a dependency between e<i> and f<i>)
2396 Translates to:
2397 masktmp(:) = maskexpr(:)
2399 maskindex = 0;
2400 count1 = 0;
2401 num = 0;
2402 for (i = start; i <= end; i += stride)
2403 num += SIZE (f<i>)
2404 count1 = 0;
2405 ALLOCATE (tmp(num))
2406 for (i = start; i <= end; i += stride)
2408 if (masktmp[maskindex++])
2409 tmp[count1++] = f<i>
2411 maskindex = 0;
2412 count1 = 0;
2413 for (i = start; i <= end; i += stride)
2415 if (masktmp[maskindex++])
2416 e<i> = tmp[count1++]
2418 DEALLOCATE (tmp) */
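/* A minimal example (purely illustrative) that takes this path is

   forall (i = 2:n)  a(i) = a(i-1)

   where the rhs reads elements written by the same FORALL, so every
   a(i-1) is copied into tmp before any element of a is modified.  */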
2420 static void
2421 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2422 tree wheremask, bool invert,
2423 forall_info * nested_forall_info,
2424 stmtblock_t * block)
2426 tree type;
2427 tree inner_size;
2428 gfc_ss *lss, *rss;
2429 tree count, count1;
2430 tree tmp, tmp1;
2431 tree ptemp1;
2432 stmtblock_t inner_size_body;
2434 /* Create vars. count1 is the current iterator number of the nested
2435 forall. */
2436 count1 = gfc_create_var (gfc_array_index_type, "count1");
2438 /* Count is the wheremask index. */
2439 if (wheremask)
2441 count = gfc_create_var (gfc_array_index_type, "count");
2442 gfc_add_modify (block, count, gfc_index_zero_node);
2444 else
2445 count = NULL;
2447 /* Initialize count1. */
2448 gfc_add_modify (block, count1, gfc_index_zero_node);
2450 /* Calculate the size of the temporary needed for the assignment. Fill in
2451 lss and rss, which are used by generate_loop_for_rhs_to_temp (). */
2452 gfc_init_block (&inner_size_body);
2453 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2454 &lss, &rss);
2456 /* The type of the LHS. Used in allocate_temp_for_forall_nest. */
2457 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2459 if (!expr1->ts.cl->backend_decl)
2461 gfc_se tse;
2462 gfc_init_se (&tse, NULL);
2463 gfc_conv_expr (&tse, expr1->ts.cl->length);
2464 expr1->ts.cl->backend_decl = tse.expr;
2466 type = gfc_get_character_type_len (gfc_default_character_kind,
2467 expr1->ts.cl->backend_decl);
2469 else
2470 type = gfc_typenode_for_spec (&expr1->ts);
2472 /* Allocate temporary for nested forall construct according to the
2473 information in nested_forall_info and inner_size. */
2474 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2475 &inner_size_body, block, &ptemp1);
2477 /* Generate code to copy the rhs to the temporary. */
2478 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2479 wheremask, invert);
2481 /* Generate body and loops according to the information in
2482 nested_forall_info. */
2483 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2484 gfc_add_expr_to_block (block, tmp);
2486 /* Reset count1. */
2487 gfc_add_modify (block, count1, gfc_index_zero_node);
2489 /* Reset count. */
2490 if (wheremask)
2491 gfc_add_modify (block, count, gfc_index_zero_node);
2493 /* Generate code to copy the temporary to the lhs. */
2494 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2495 wheremask, invert);
2497 /* Generate body and loops according to the information in
2498 nested_forall_info. */
2499 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2500 gfc_add_expr_to_block (block, tmp);
2502 if (ptemp1)
2504 /* Free the temporary. */
2505 tmp = gfc_call_free (ptemp1);
2506 gfc_add_expr_to_block (block, tmp);
2511 /* Translate a pointer assignment inside FORALL which needs a temporary. */
2513 static void
2514 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2515 forall_info * nested_forall_info,
2516 stmtblock_t * block)
2518 tree type;
2519 tree inner_size;
2520 gfc_ss *lss, *rss;
2521 gfc_se lse;
2522 gfc_se rse;
2523 gfc_ss_info *info;
2524 gfc_loopinfo loop;
2525 tree desc;
2526 tree parm;
2527 tree parmtype;
2528 stmtblock_t body;
2529 tree count;
2530 tree tmp, tmp1, ptemp1;
2532 count = gfc_create_var (gfc_array_index_type, "count");
2533 gfc_add_modify (block, count, gfc_index_zero_node);
2535 inner_size = integer_one_node;
2536 lss = gfc_walk_expr (expr1);
2537 rss = gfc_walk_expr (expr2);
2538 if (lss == gfc_ss_terminator)
2540 type = gfc_typenode_for_spec (&expr1->ts);
2541 type = build_pointer_type (type);
2543 /* Allocate temporary for nested forall construct according to the
2544 information in nested_forall_info and inner_size. */
2545 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2546 inner_size, NULL, block, &ptemp1);
2547 gfc_start_block (&body);
2548 gfc_init_se (&lse, NULL);
2549 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2550 gfc_init_se (&rse, NULL);
2551 rse.want_pointer = 1;
2552 gfc_conv_expr (&rse, expr2);
2553 gfc_add_block_to_block (&body, &rse.pre);
2554 gfc_add_modify (&body, lse.expr,
2555 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2556 gfc_add_block_to_block (&body, &rse.post);
2558 /* Increment count. */
2559 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2560 count, gfc_index_one_node);
2561 gfc_add_modify (&body, count, tmp);
2563 tmp = gfc_finish_block (&body);
2565 /* Generate body and loops according to the information in
2566 nested_forall_info. */
2567 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2568 gfc_add_expr_to_block (block, tmp);
2570 /* Reset count. */
2571 gfc_add_modify (block, count, gfc_index_zero_node);
2573 gfc_start_block (&body);
2574 gfc_init_se (&lse, NULL);
2575 gfc_init_se (&rse, NULL);
2576 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2577 lse.want_pointer = 1;
2578 gfc_conv_expr (&lse, expr1);
2579 gfc_add_block_to_block (&body, &lse.pre);
2580 gfc_add_modify (&body, lse.expr, rse.expr);
2581 gfc_add_block_to_block (&body, &lse.post);
2582 /* Increment count. */
2583 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2584 count, gfc_index_one_node);
2585 gfc_add_modify (&body, count, tmp);
2586 tmp = gfc_finish_block (&body);
2588 /* Generate body and loops according to the information in
2589 nested_forall_info. */
2590 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2591 gfc_add_expr_to_block (block, tmp);
2593 else
2595 gfc_init_loopinfo (&loop);
2597 /* Associate the SS with the loop. */
2598 gfc_add_ss_to_loop (&loop, rss);
2600 /* Setup the scalarizing loops and bounds. */
2601 gfc_conv_ss_startstride (&loop);
2603 gfc_conv_loop_setup (&loop, &expr2->where);
2605 info = &rss->data.info;
2606 desc = info->descriptor;
2608 /* Make a new descriptor. */
2609 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2610 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2611 loop.from, loop.to, 1,
2612 GFC_ARRAY_UNKNOWN);
2614 /* Allocate temporary for nested forall construct. */
2615 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2616 inner_size, NULL, block, &ptemp1);
2617 gfc_start_block (&body);
2618 gfc_init_se (&lse, NULL);
2619 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2620 lse.direct_byref = 1;
2621 rss = gfc_walk_expr (expr2);
2622 gfc_conv_expr_descriptor (&lse, expr2, rss);
2624 gfc_add_block_to_block (&body, &lse.pre);
2625 gfc_add_block_to_block (&body, &lse.post);
2627 /* Increment count. */
2628 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2629 count, gfc_index_one_node);
2630 gfc_add_modify (&body, count, tmp);
2632 tmp = gfc_finish_block (&body);
2634 /* Generate body and loops according to the information in
2635 nested_forall_info. */
2636 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2637 gfc_add_expr_to_block (block, tmp);
2639 /* Reset count. */
2640 gfc_add_modify (block, count, gfc_index_zero_node);
2642 parm = gfc_build_array_ref (tmp1, count, NULL);
2643 lss = gfc_walk_expr (expr1);
2644 gfc_init_se (&lse, NULL);
2645 gfc_conv_expr_descriptor (&lse, expr1, lss);
2646 gfc_add_modify (&lse.pre, lse.expr, parm);
2647 gfc_start_block (&body);
2648 gfc_add_block_to_block (&body, &lse.pre);
2649 gfc_add_block_to_block (&body, &lse.post);
2651 /* Increment count. */
2652 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2653 count, gfc_index_one_node);
2654 gfc_add_modify (&body, count, tmp);
2656 tmp = gfc_finish_block (&body);
2658 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2659 gfc_add_expr_to_block (block, tmp);
2661 /* Free the temporary. */
2662 if (ptemp1)
2664 tmp = gfc_call_free (ptemp1);
2665 gfc_add_expr_to_block (block, tmp);
2670 /* FORALL and WHERE statements are really nasty, especially when you nest
2671 them. The rhs of a forall assignment must be fully evaluated, for all
2672 index values, before the actual assignments are performed. Presumably
2673 this also applies to all the assignments in an inner WHERE statement. */
2675 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2676 linear array, relying on the fact that we process in the same order in all
2677 loops.
2679 forall (i=start:end:stride; maskexpr)
2680 e<i> = f<i>
2681 g<i> = h<i>
2682 end forall
2683 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2684 Translates to:
2685 count = ((end + 1 - start) / stride)
2686 masktmp(:) = maskexpr(:)
2688 maskindex = 0;
2689 for (i = start; i <= end; i += stride)
2691 if (masktmp[maskindex++])
2692 e<i> = f<i>
2694 maskindex = 0;
2695 for (i = start; i <= end; i += stride)
2697 if (masktmp[maskindex++])
2698 g<i> = h<i>
2701 Note that this code only works when there are no dependencies.
2702 Forall loops with array assignments and data dependencies are a real pain,
2703 because the size of the temporary cannot always be determined before the
2704 loop is executed. This problem is compounded by the presence of nested
2705 FORALL constructs. */
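/* As a concrete, illustrative instance of the scheme above,

   forall (i = 1:n, a(i) /= 0)  b(i) = 1 / a(i)

   first stores the value of a(i) /= 0 for every i into masktmp and only
   then runs the assignment loop, so the mask expression is evaluated
   exactly once even though each assignment is guarded by it.  */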
2708 static tree
2709 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2711 stmtblock_t pre;
2712 stmtblock_t post;
2713 stmtblock_t block;
2714 stmtblock_t body;
2715 tree *var;
2716 tree *start;
2717 tree *end;
2718 tree *step;
2719 gfc_expr **varexpr;
2720 tree tmp;
2721 tree assign;
2722 tree size;
2723 tree maskindex;
2724 tree mask;
2725 tree pmask;
2726 int n;
2727 int nvar;
2728 int need_temp;
2729 gfc_forall_iterator *fa;
2730 gfc_se se;
2731 gfc_code *c;
2732 gfc_saved_var *saved_vars;
2733 iter_info *this_forall;
2734 forall_info *info;
2735 bool need_mask;
2737 /* Do nothing if the mask is false. */
2738 if (code->expr
2739 && code->expr->expr_type == EXPR_CONSTANT
2740 && !code->expr->value.logical)
2741 return build_empty_stmt ();
2743 n = 0;
2744 /* Count the FORALL index number. */
2745 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2746 n++;
2747 nvar = n;
2749 /* Allocate the space for var, start, end, step, varexpr. */
2750 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2751 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2752 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2753 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2754 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2755 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2757 /* Allocate the space for info. */
2758 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2760 gfc_start_block (&pre);
2761 gfc_init_block (&post);
2762 gfc_init_block (&block);
2764 n = 0;
2765 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2767 gfc_symbol *sym = fa->var->symtree->n.sym;
2769 /* Allocate space for this_forall. */
2770 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2772 /* Create a temporary variable for the FORALL index. */
2773 tmp = gfc_typenode_for_spec (&sym->ts);
2774 var[n] = gfc_create_var (tmp, sym->name);
2775 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2777 /* Record it in this_forall. */
2778 this_forall->var = var[n];
2780 /* Replace the index symbol's backend_decl with the temporary decl. */
2781 sym->backend_decl = var[n];
2783 /* Work out the start, end and stride for the loop. */
2784 gfc_init_se (&se, NULL);
2785 gfc_conv_expr_val (&se, fa->start);
2786 /* Record it in this_forall. */
2787 this_forall->start = se.expr;
2788 gfc_add_block_to_block (&block, &se.pre);
2789 start[n] = se.expr;
2791 gfc_init_se (&se, NULL);
2792 gfc_conv_expr_val (&se, fa->end);
2793 /* Record it in this_forall. */
2794 this_forall->end = se.expr;
2795 gfc_make_safe_expr (&se);
2796 gfc_add_block_to_block (&block, &se.pre);
2797 end[n] = se.expr;
2799 gfc_init_se (&se, NULL);
2800 gfc_conv_expr_val (&se, fa->stride);
2801 /* Record it in this_forall. */
2802 this_forall->step = se.expr;
2803 gfc_make_safe_expr (&se);
2804 gfc_add_block_to_block (&block, &se.pre);
2805 step[n] = se.expr;
2807 /* Set the NEXT field of this_forall to NULL. */
2808 this_forall->next = NULL;
2809 /* Link this_forall to the info construct. */
2810 if (info->this_loop)
2812 iter_info *iter_tmp = info->this_loop;
2813 while (iter_tmp->next != NULL)
2814 iter_tmp = iter_tmp->next;
2815 iter_tmp->next = this_forall;
2817 else
2818 info->this_loop = this_forall;
2820 n++;
2822 nvar = n;
2824 /* Calculate the size needed for the current forall level. */
2825 size = gfc_index_one_node;
2826 for (n = 0; n < nvar; n++)
2828 /* size = (end + step - start) / step. */
2829 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2830 step[n], start[n]);
2831 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2833 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2834 tmp = convert (gfc_array_index_type, tmp);
2836 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2839 /* Record the nvar and size of current forall level. */
2840 info->nvar = nvar;
2841 info->size = size;
2843 if (code->expr)
2845 /* If the mask is .true., consider the FORALL unconditional. */
2846 if (code->expr->expr_type == EXPR_CONSTANT
2847 && code->expr->value.logical)
2848 need_mask = false;
2849 else
2850 need_mask = true;
2852 else
2853 need_mask = false;
2855 /* First we need to allocate the mask. */
2856 if (need_mask)
2858 /* As the mask array can be very big, prefer compact boolean types. */
2859 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2860 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2861 size, NULL, &block, &pmask);
2862 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2864 /* Record them in the info structure. */
2865 info->maskindex = maskindex;
2866 info->mask = mask;
2868 else
2870 /* No mask was specified. */
2871 maskindex = NULL_TREE;
2872 mask = pmask = NULL_TREE;
2875 /* Link the current forall level to nested_forall_info. */
2876 info->prev_nest = nested_forall_info;
2877 nested_forall_info = info;
2879 /* Copy the mask into a temporary variable if required.
2880 For now we assume a mask temporary is needed. */
2881 if (need_mask)
2883 /* As the mask array can be very big, prefer compact boolean types. */
2884 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2886 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2888 /* Start of mask assignment loop body. */
2889 gfc_start_block (&body);
2891 /* Evaluate the mask expression. */
2892 gfc_init_se (&se, NULL);
2893 gfc_conv_expr_val (&se, code->expr);
2894 gfc_add_block_to_block (&body, &se.pre);
2896 /* Store the mask. */
2897 se.expr = convert (mask_type, se.expr);
2899 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2900 gfc_add_modify (&body, tmp, se.expr);
2902 /* Advance to the next mask element. */
2903 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2904 maskindex, gfc_index_one_node);
2905 gfc_add_modify (&body, maskindex, tmp);
2907 /* Generate the loops. */
2908 tmp = gfc_finish_block (&body);
2909 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2910 gfc_add_expr_to_block (&block, tmp);
2913 c = code->block->next;
2915 /* TODO: loop merging in FORALL statements. */
2916 /* Now that we've got a copy of the mask, generate the assignment loops. */
2917 while (c)
2919 switch (c->op)
2921 case EXEC_ASSIGN:
2922 /* A scalar or array assignment. Do the simple check for
2923 lhs-to-rhs dependencies. These make a temporary for the
2924 rhs and form a second forall block to copy it to the variable. */
2925 need_temp = check_forall_dependencies(c, &pre, &post);
2927 /* Temporaries due to array assignment data dependencies introduce
2928 no end of problems. */
2929 if (need_temp)
2930 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2931 nested_forall_info, &block);
2932 else
2934 /* Use the normal assignment copying routines. */
2935 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2937 /* Generate body and loops. */
2938 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2939 assign, 1);
2940 gfc_add_expr_to_block (&block, tmp);
2943 /* Cleanup any temporary symtrees that have been made to deal
2944 with dependencies. */
2945 if (new_symtree)
2946 cleanup_forall_symtrees (c);
2948 break;
2950 case EXEC_WHERE:
2951 /* Translate WHERE or WHERE construct nested in FORALL. */
2952 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2953 break;
2955 /* Pointer assignment inside FORALL. */
2956 case EXEC_POINTER_ASSIGN:
2957 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2958 if (need_temp)
2959 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2960 nested_forall_info, &block);
2961 else
2963 /* Use the normal assignment copying routines. */
2964 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2966 /* Generate body and loops. */
2967 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2968 assign, 1);
2969 gfc_add_expr_to_block (&block, tmp);
2971 break;
2973 case EXEC_FORALL:
2974 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2975 gfc_add_expr_to_block (&block, tmp);
2976 break;
2978 /* Explicit subroutine calls are prevented by the frontend but interface
2979 assignments can legitimately produce them. */
2980 case EXEC_ASSIGN_CALL:
2981 assign = gfc_trans_call (c, true);
2982 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2983 gfc_add_expr_to_block (&block, tmp);
2984 break;
2986 default:
2987 gcc_unreachable ();
2990 c = c->next;
2993 /* Restore the original index variables. */
2994 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2995 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2997 /* Free the space for var, start, end, step, varexpr. */
2998 gfc_free (var);
2999 gfc_free (start);
3000 gfc_free (end);
3001 gfc_free (step);
3002 gfc_free (varexpr);
3003 gfc_free (saved_vars);
3005 /* Free the space for this forall_info. */
3006 gfc_free (info);
3008 if (pmask)
3010 /* Free the temporary for the mask. */
3011 tmp = gfc_call_free (pmask);
3012 gfc_add_expr_to_block (&block, tmp);
3014 if (maskindex)
3015 pushdecl (maskindex);
3017 gfc_add_block_to_block (&pre, &block);
3018 gfc_add_block_to_block (&pre, &post);
3020 return gfc_finish_block (&pre);
3024 /* Translate the FORALL statement or construct. */
3026 tree gfc_trans_forall (gfc_code * code)
3028 return gfc_trans_forall_1 (code, NULL);
3032 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3033 If the WHERE construct is nested in FORALL, the overall temporary needed
3034 by the WHERE mask expression is its size multiplied by the iteration
3035 count of the nested forall.
3036 ME is the WHERE mask expression.
3037 MASK is the current execution mask upon input, whose sense may or may
3038 not be inverted as specified by the INVERT argument.
3039 CMASK is the updated execution mask on output, or NULL if not required.
3040 PMASK is the pending execution mask on output, or NULL if not required.
3041 BLOCK is the block in which to place the condition evaluation loops. */
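/* Roughly (an illustrative sketch), when this WHERE clause is already under
   a control mask M, each element receives

   cmask[i] = M[i] .and. me[i]
   pmask[i] = M[i] .and. .not. me[i]

   (with M[i] negated first if INVERT is set); for the outermost WHERE there
   is no M and cmask[i] is simply the value of the mask expression.  */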
3043 static void
3044 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3045 tree mask, bool invert, tree cmask, tree pmask,
3046 tree mask_type, stmtblock_t * block)
3048 tree tmp, tmp1;
3049 gfc_ss *lss, *rss;
3050 gfc_loopinfo loop;
3051 stmtblock_t body, body1;
3052 tree count, cond, mtmp;
3053 gfc_se lse, rse;
3055 gfc_init_loopinfo (&loop);
3057 lss = gfc_walk_expr (me);
3058 rss = gfc_walk_expr (me);
3060 /* Variable to index the temporary. */
3061 count = gfc_create_var (gfc_array_index_type, "count");
3062 /* Initialize count. */
3063 gfc_add_modify (block, count, gfc_index_zero_node);
3065 gfc_start_block (&body);
3067 gfc_init_se (&rse, NULL);
3068 gfc_init_se (&lse, NULL);
3070 if (lss == gfc_ss_terminator)
3072 gfc_init_block (&body1);
3074 else
3076 /* Initialize the loop. */
3077 gfc_init_loopinfo (&loop);
3079 /* We may need LSS to determine the shape of the expression. */
3080 gfc_add_ss_to_loop (&loop, lss);
3081 gfc_add_ss_to_loop (&loop, rss);
3083 gfc_conv_ss_startstride (&loop);
3084 gfc_conv_loop_setup (&loop, &me->where);
3086 gfc_mark_ss_chain_used (rss, 1);
3087 /* Start the loop body. */
3088 gfc_start_scalarized_body (&loop, &body1);
3090 /* Translate the expression. */
3091 gfc_copy_loopinfo_to_se (&rse, &loop);
3092 rse.ss = rss;
3093 gfc_conv_expr (&rse, me);
3096 /* Variable to evaluate mask condition. */
3097 cond = gfc_create_var (mask_type, "cond");
3098 if (mask && (cmask || pmask))
3099 mtmp = gfc_create_var (mask_type, "mask");
3100 else mtmp = NULL_TREE;
3102 gfc_add_block_to_block (&body1, &lse.pre);
3103 gfc_add_block_to_block (&body1, &rse.pre);
3105 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3107 if (mask && (cmask || pmask))
3109 tmp = gfc_build_array_ref (mask, count, NULL);
3110 if (invert)
3111 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3112 gfc_add_modify (&body1, mtmp, tmp);
3115 if (cmask)
3117 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3118 tmp = cond;
3119 if (mask)
3120 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3121 gfc_add_modify (&body1, tmp1, tmp);
3124 if (pmask)
3126 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3127 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3128 if (mask)
3129 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3130 gfc_add_modify (&body1, tmp1, tmp);
3133 gfc_add_block_to_block (&body1, &lse.post);
3134 gfc_add_block_to_block (&body1, &rse.post);
3136 if (lss == gfc_ss_terminator)
3138 gfc_add_block_to_block (&body, &body1);
3140 else
3142 /* Increment count. */
3143 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3144 gfc_index_one_node);
3145 gfc_add_modify (&body1, count, tmp1);
3147 /* Generate the copying loops. */
3148 gfc_trans_scalarizing_loops (&loop, &body1);
3150 gfc_add_block_to_block (&body, &loop.pre);
3151 gfc_add_block_to_block (&body, &loop.post);
3153 gfc_cleanup_loop (&loop);
3154 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3155 as tree nodes in SS may not be valid in a different scope. */
3158 tmp1 = gfc_finish_block (&body);
3159 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3160 if (nested_forall_info != NULL)
3161 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3163 gfc_add_expr_to_block (block, tmp1);
3167 /* Translate an assignment statement in a WHERE statement or WHERE
3168 construct. The MASK expression is used to control which elements
3169 of EXPR1 shall be assigned. The sense of MASK is specified by
3170 INVERT. */
3172 static tree
3173 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3174 tree mask, bool invert,
3175 tree count1, tree count2,
3176 gfc_symbol *sym)
3178 gfc_se lse;
3179 gfc_se rse;
3180 gfc_ss *lss;
3181 gfc_ss *lss_section;
3182 gfc_ss *rss;
3184 gfc_loopinfo loop;
3185 tree tmp;
3186 stmtblock_t block;
3187 stmtblock_t body;
3188 tree index, maskexpr;
3190 #if 0
3191 /* TODO: handle this special case.
3192 Special case a single function returning an array. */
3193 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3195 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3196 if (tmp)
3197 return tmp;
3199 #endif
3201 /* Assignment of the form lhs = rhs. */
3202 gfc_start_block (&block);
3204 gfc_init_se (&lse, NULL);
3205 gfc_init_se (&rse, NULL);
3207 /* Walk the lhs. */
3208 lss = gfc_walk_expr (expr1);
3209 rss = NULL;
3211 /* In each where-assign-stmt, the mask-expr and the variable being
3212 defined shall be arrays of the same shape. */
3213 gcc_assert (lss != gfc_ss_terminator);
3215 /* The assignment needs scalarization. */
3216 lss_section = lss;
3218 /* Find a non-scalar SS from the lhs. */
3219 while (lss_section != gfc_ss_terminator
3220 && lss_section->type != GFC_SS_SECTION)
3221 lss_section = lss_section->next;
3223 gcc_assert (lss_section != gfc_ss_terminator);
3225 /* Initialize the scalarizer. */
3226 gfc_init_loopinfo (&loop);
3228 /* Walk the rhs. */
3229 rss = gfc_walk_expr (expr2);
3230 if (rss == gfc_ss_terminator)
3232 /* The rhs is scalar. Add a ss for the expression. */
3233 rss = gfc_get_ss ();
3234 rss->where = 1;
3235 rss->next = gfc_ss_terminator;
3236 rss->type = GFC_SS_SCALAR;
3237 rss->expr = expr2;
3240 /* Associate the SS with the loop. */
3241 gfc_add_ss_to_loop (&loop, lss);
3242 gfc_add_ss_to_loop (&loop, rss);
3244 /* Calculate the bounds of the scalarization. */
3245 gfc_conv_ss_startstride (&loop);
3247 /* Resolve any data dependencies in the statement. */
3248 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3250 /* Setup the scalarizing loops. */
3251 gfc_conv_loop_setup (&loop, &expr2->where);
3253 /* Setup the gfc_se structures. */
3254 gfc_copy_loopinfo_to_se (&lse, &loop);
3255 gfc_copy_loopinfo_to_se (&rse, &loop);
3257 rse.ss = rss;
3258 gfc_mark_ss_chain_used (rss, 1);
3259 if (loop.temp_ss == NULL)
3261 lse.ss = lss;
3262 gfc_mark_ss_chain_used (lss, 1);
3264 else
3266 lse.ss = loop.temp_ss;
3267 gfc_mark_ss_chain_used (lss, 3);
3268 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3271 /* Start the scalarized loop body. */
3272 gfc_start_scalarized_body (&loop, &body);
3274 /* Translate the expression. */
3275 gfc_conv_expr (&rse, expr2);
3276 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3278 gfc_conv_tmp_array_ref (&lse);
3279 gfc_advance_se_ss_chain (&lse);
3281 else
3282 gfc_conv_expr (&lse, expr1);
3284 /* Form the mask expression according to the mask. */
3285 index = count1;
3286 maskexpr = gfc_build_array_ref (mask, index, NULL);
3287 if (invert)
3288 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3290 /* Use the scalar assignment as is. */
3291 if (sym == NULL)
3292 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3293 loop.temp_ss != NULL, false);
3294 else
3295 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3297 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3299 gfc_add_expr_to_block (&body, tmp);
3301 if (lss == gfc_ss_terminator)
3303 /* Increment count1. */
3304 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3305 count1, gfc_index_one_node);
3306 gfc_add_modify (&body, count1, tmp);
3308 /* Use the scalar assignment as is. */
3309 gfc_add_block_to_block (&block, &body);
3311 else
3313 gcc_assert (lse.ss == gfc_ss_terminator
3314 && rse.ss == gfc_ss_terminator);
3316 if (loop.temp_ss != NULL)
3318 /* Increment count1 before finishing the main body of a scalarized
3319 expression. */
3320 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3321 count1, gfc_index_one_node);
3322 gfc_add_modify (&body, count1, tmp);
3323 gfc_trans_scalarized_loop_boundary (&loop, &body);
3325 /* We need to copy the temporary to the actual lhs. */
3326 gfc_init_se (&lse, NULL);
3327 gfc_init_se (&rse, NULL);
3328 gfc_copy_loopinfo_to_se (&lse, &loop);
3329 gfc_copy_loopinfo_to_se (&rse, &loop);
3331 rse.ss = loop.temp_ss;
3332 lse.ss = lss;
3334 gfc_conv_tmp_array_ref (&rse);
3335 gfc_advance_se_ss_chain (&rse);
3336 gfc_conv_expr (&lse, expr1);
3338 gcc_assert (lse.ss == gfc_ss_terminator
3339 && rse.ss == gfc_ss_terminator);
3341 /* Form the mask expression according to the mask tree list. */
3342 index = count2;
3343 maskexpr = gfc_build_array_ref (mask, index, NULL);
3344 if (invert)
3345 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3346 maskexpr);
3348 /* Use the scalar assignment as is. */
3349 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3350 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3351 gfc_add_expr_to_block (&body, tmp);
3353 /* Increment count2. */
3354 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3355 count2, gfc_index_one_node);
3356 gfc_add_modify (&body, count2, tmp);
3358 else
3360 /* Increment count1. */
3361 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3362 count1, gfc_index_one_node);
3363 gfc_add_modify (&body, count1, tmp);
3366 /* Generate the copying loops. */
3367 gfc_trans_scalarizing_loops (&loop, &body);
3369 /* Wrap the whole thing up. */
3370 gfc_add_block_to_block (&block, &loop.pre);
3371 gfc_add_block_to_block (&block, &loop.post);
3372 gfc_cleanup_loop (&loop);
3375 return gfc_finish_block (&block);
3379 /* Translate the WHERE construct or statement.
3380 This function can be called recursively to translate nested WHERE
3381 constructs or statements.
3382 MASK is the control mask. */
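/* As an illustrative example of how the masks chain, in

   where (m1)
   a = b
   elsewhere (m2)
   a = c
   elsewhere
   a = d
   end where

   the first clause runs under m1, the masked ELSEWHERE under
   .not. m1 .and. m2, and the final ELSEWHERE under the pending mask
   .not. m1 .and. .not. m2, with each mask expression evaluated once.  */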
3384 static void
3385 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3386 forall_info * nested_forall_info, stmtblock_t * block)
3388 stmtblock_t inner_size_body;
3389 tree inner_size, size;
3390 gfc_ss *lss, *rss;
3391 tree mask_type;
3392 gfc_expr *expr1;
3393 gfc_expr *expr2;
3394 gfc_code *cblock;
3395 gfc_code *cnext;
3396 tree tmp;
3397 tree cond;
3398 tree count1, count2;
3399 bool need_cmask;
3400 bool need_pmask;
3401 int need_temp;
3402 tree pcmask = NULL_TREE;
3403 tree ppmask = NULL_TREE;
3404 tree cmask = NULL_TREE;
3405 tree pmask = NULL_TREE;
3406 gfc_actual_arglist *arg;
3408 /* The WHERE statement or the WHERE construct statement. */
3409 cblock = code->block;
3411 /* As the mask array can be very big, prefer compact boolean types. */
3412 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3414 /* Determine which temporary masks are needed. */
3415 if (!cblock->block)
3417 /* One clause: No ELSEWHEREs. */
3418 need_cmask = (cblock->next != 0);
3419 need_pmask = false;
3421 else if (cblock->block->block)
3423 /* Three or more clauses: Conditional ELSEWHEREs. */
3424 need_cmask = true;
3425 need_pmask = true;
3427 else if (cblock->next)
3429 /* Two clauses, the first non-empty. */
3430 need_cmask = true;
3431 need_pmask = (mask != NULL_TREE
3432 && cblock->block->next != 0);
3434 else if (!cblock->block->next)
3436 /* Two clauses, both empty. */
3437 need_cmask = false;
3438 need_pmask = false;
3440 /* Two clauses, the first empty, the second non-empty. */
3441 else if (mask)
3443 need_cmask = (cblock->block->expr != 0);
3444 need_pmask = true;
3446 else
3448 need_cmask = true;
3449 need_pmask = false;
3452 if (need_cmask || need_pmask)
3454 /* Calculate the size of temporary needed by the mask-expr. */
3455 gfc_init_block (&inner_size_body);
3456 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3457 &inner_size_body, &lss, &rss);
3459 /* Calculate the total size of temporary needed. */
3460 size = compute_overall_iter_number (nested_forall_info, inner_size,
3461 &inner_size_body, block);
3463 /* Clamp a non-positive size to zero. */
3464 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3465 gfc_index_zero_node);
3466 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3467 gfc_index_zero_node, size);
3468 size = gfc_evaluate_now (size, block);
3470 /* Allocate temporary for WHERE mask if needed. */
3471 if (need_cmask)
3472 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3473 &pcmask);
3475 /* Allocate temporary for !mask if needed. */
3476 if (need_pmask)
3477 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3478 &ppmask);
3481 while (cblock)
3483 /* Each time around this loop, the where clause is conditional
3484 on the value of mask and invert, which are updated at the
3485 bottom of the loop. */
3487 /* Has mask-expr. */
3488 if (cblock->expr)
3490 /* Ensure that the WHERE mask will be evaluated exactly once.
3491 If there are no statements in this WHERE/ELSEWHERE clause,
3492 then we don't need to update the control mask (cmask).
3493 If this is the last clause of the WHERE construct, then
3494 we don't need to update the pending control mask (pmask). */
3495 if (mask)
3496 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3497 mask, invert,
3498 cblock->next ? cmask : NULL_TREE,
3499 cblock->block ? pmask : NULL_TREE,
3500 mask_type, block);
3501 else
3502 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3503 NULL_TREE, false,
3504 (cblock->next || cblock->block)
3505 ? cmask : NULL_TREE,
3506 NULL_TREE, mask_type, block);
3508 invert = false;
3510 /* It's a final elsewhere-stmt. No mask-expr is present. */
3511 else
3512 cmask = mask;
3514 /* The body of this where clause is controlled by cmask, with its
3515 sense specified by invert. */
3517 /* Get the assignment statement of a WHERE statement, or the first
3518 statement in where-body-construct of a WHERE construct. */
3519 cnext = cblock->next;
3520 while (cnext)
3522 switch (cnext->op)
3524 /* WHERE assignment statement. */
3525 case EXEC_ASSIGN_CALL:
3527 arg = cnext->ext.actual;
3528 expr1 = expr2 = NULL;
3529 for (; arg; arg = arg->next)
3531 if (!arg->expr)
3532 continue;
3533 if (expr1 == NULL)
3534 expr1 = arg->expr;
3535 else
3536 expr2 = arg->expr;
3538 goto evaluate;
3540 case EXEC_ASSIGN:
3541 expr1 = cnext->expr;
3542 expr2 = cnext->expr2;
3543 evaluate:
3544 if (nested_forall_info != NULL)
3546 need_temp = gfc_check_dependency (expr1, expr2, 0);
3547 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3548 gfc_trans_assign_need_temp (expr1, expr2,
3549 cmask, invert,
3550 nested_forall_info, block);
3551 else
3553 /* Variables to control maskexpr. */
3554 count1 = gfc_create_var (gfc_array_index_type, "count1");
3555 count2 = gfc_create_var (gfc_array_index_type, "count2");
3556 gfc_add_modify (block, count1, gfc_index_zero_node);
3557 gfc_add_modify (block, count2, gfc_index_zero_node);
3559 tmp = gfc_trans_where_assign (expr1, expr2,
3560 cmask, invert,
3561 count1, count2,
3562 cnext->resolved_sym);
3564 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3565 tmp, 1);
3566 gfc_add_expr_to_block (block, tmp);
3569 else
3571 /* Variables to control maskexpr. */
3572 count1 = gfc_create_var (gfc_array_index_type, "count1");
3573 count2 = gfc_create_var (gfc_array_index_type, "count2");
3574 gfc_add_modify (block, count1, gfc_index_zero_node);
3575 gfc_add_modify (block, count2, gfc_index_zero_node);
3577 tmp = gfc_trans_where_assign (expr1, expr2,
3578 cmask, invert,
3579 count1, count2,
3580 cnext->resolved_sym);
3581 gfc_add_expr_to_block (block, tmp);
3584 break;
3586 /* WHERE or WHERE construct is part of a where-body-construct. */
3587 case EXEC_WHERE:
3588 gfc_trans_where_2 (cnext, cmask, invert,
3589 nested_forall_info, block);
3590 break;
3592 default:
3593 gcc_unreachable ();
3596 /* The next statement within the same where-body-construct. */
3597 cnext = cnext->next;
3599 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3600 cblock = cblock->block;
3601 if (mask == NULL_TREE)
3603 /* If we're the initial WHERE, we can simply invert the sense
3604 of the current mask to obtain the "mask" for the remaining
3605 ELSEWHEREs. */
3606 invert = true;
3607 mask = cmask;
3609 else
3611 /* Otherwise, for nested WHEREs we need to use the pending mask. */
3612 invert = false;
3613 mask = pmask;
3617 /* If we allocated a pending mask array, deallocate it now. */
3618 if (ppmask)
3620 tmp = gfc_call_free (ppmask);
3621 gfc_add_expr_to_block (block, tmp);
3624 /* If we allocated a current mask array, deallocate it now. */
3625 if (pcmask)
3627 tmp = gfc_call_free (pcmask);
3628 gfc_add_expr_to_block (block, tmp);
3632 /* Translate a simple WHERE construct or statement without dependencies.
3633 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3634 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3635 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
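/* For example (illustrative), the dependence-free construct

   where (c)
   a = b
   elsewhere
   a = d
   end where

   is coalesced into a single scalarized loop whose body is roughly
   if (c[i]) a[i] = b[i]; else a[i] = d[i];  */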
3637 static tree
3638 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3640 stmtblock_t block, body;
3641 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3642 tree tmp, cexpr, tstmt, estmt;
3643 gfc_ss *css, *tdss, *tsss;
3644 gfc_se cse, tdse, tsse, edse, esse;
3645 gfc_loopinfo loop;
3646 gfc_ss *edss = 0;
3647 gfc_ss *esss = 0;
3649 cond = cblock->expr;
3650 tdst = cblock->next->expr;
3651 tsrc = cblock->next->expr2;
3652 edst = eblock ? eblock->next->expr : NULL;
3653 esrc = eblock ? eblock->next->expr2 : NULL;
3655 gfc_start_block (&block);
3656 gfc_init_loopinfo (&loop);
3658 /* Handle the condition. */
3659 gfc_init_se (&cse, NULL);
3660 css = gfc_walk_expr (cond);
3661 gfc_add_ss_to_loop (&loop, css);
3663 /* Handle the then-clause. */
3664 gfc_init_se (&tdse, NULL);
3665 gfc_init_se (&tsse, NULL);
3666 tdss = gfc_walk_expr (tdst);
3667 tsss = gfc_walk_expr (tsrc);
3668 if (tsss == gfc_ss_terminator)
3670 tsss = gfc_get_ss ();
3671 tsss->where = 1;
3672 tsss->next = gfc_ss_terminator;
3673 tsss->type = GFC_SS_SCALAR;
3674 tsss->expr = tsrc;
3676 gfc_add_ss_to_loop (&loop, tdss);
3677 gfc_add_ss_to_loop (&loop, tsss);
3679 if (eblock)
3681 /* Handle the else clause. */
3682 gfc_init_se (&edse, NULL);
3683 gfc_init_se (&esse, NULL);
3684 edss = gfc_walk_expr (edst);
3685 esss = gfc_walk_expr (esrc);
3686 if (esss == gfc_ss_terminator)
3688 esss = gfc_get_ss ();
3689 esss->where = 1;
3690 esss->next = gfc_ss_terminator;
3691 esss->type = GFC_SS_SCALAR;
3692 esss->expr = esrc;
3694 gfc_add_ss_to_loop (&loop, edss);
3695 gfc_add_ss_to_loop (&loop, esss);
3698 gfc_conv_ss_startstride (&loop);
3699 gfc_conv_loop_setup (&loop, &tdst->where);
3701 gfc_mark_ss_chain_used (css, 1);
3702 gfc_mark_ss_chain_used (tdss, 1);
3703 gfc_mark_ss_chain_used (tsss, 1);
3704 if (eblock)
3706 gfc_mark_ss_chain_used (edss, 1);
3707 gfc_mark_ss_chain_used (esss, 1);
3710 gfc_start_scalarized_body (&loop, &body);
3712 gfc_copy_loopinfo_to_se (&cse, &loop);
3713 gfc_copy_loopinfo_to_se (&tdse, &loop);
3714 gfc_copy_loopinfo_to_se (&tsse, &loop);
3715 cse.ss = css;
3716 tdse.ss = tdss;
3717 tsse.ss = tsss;
3718 if (eblock)
3720 gfc_copy_loopinfo_to_se (&edse, &loop);
3721 gfc_copy_loopinfo_to_se (&esse, &loop);
3722 edse.ss = edss;
3723 esse.ss = esss;
3726 gfc_conv_expr (&cse, cond);
3727 gfc_add_block_to_block (&body, &cse.pre);
3728 cexpr = cse.expr;
3730 gfc_conv_expr (&tsse, tsrc);
3731 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3733 gfc_conv_tmp_array_ref (&tdse);
3734 gfc_advance_se_ss_chain (&tdse);
3736 else
3737 gfc_conv_expr (&tdse, tdst);
3739 if (eblock)
3741 gfc_conv_expr (&esse, esrc);
3742 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3744 gfc_conv_tmp_array_ref (&edse);
3745 gfc_advance_se_ss_chain (&edse);
3747 else
3748 gfc_conv_expr (&edse, edst);
3751 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3752 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3753 : build_empty_stmt ();
3754 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3755 gfc_add_expr_to_block (&body, tmp);
3756 gfc_add_block_to_block (&body, &cse.post);
3758 gfc_trans_scalarizing_loops (&loop, &body);
3759 gfc_add_block_to_block (&block, &loop.pre);
3760 gfc_add_block_to_block (&block, &loop.post);
3761 gfc_cleanup_loop (&loop);
3763 return gfc_finish_block (&block);
3766 /* As the WHERE or WHERE construct statement can be nested, we call
3767 gfc_trans_where_2 to do the translation, and pass the initial
3768 NULL values for both the control mask and the pending control mask. */
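/* For instance (illustrative), in

   where (a(2:n) > 0)  a(1:n-1) = b

   the mask reads elements of A that overlap those being assigned, so the
   coalesced single-loop form is rejected and gfc_trans_where_2 evaluates
   the whole mask into a temporary before any assignment is done.  */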
3770 tree
3771 gfc_trans_where (gfc_code * code)
3773 stmtblock_t block;
3774 gfc_code *cblock;
3775 gfc_code *eblock;
3777 cblock = code->block;
3778 if (cblock->next
3779 && cblock->next->op == EXEC_ASSIGN
3780 && !cblock->next->next)
3782 eblock = cblock->block;
3783 if (!eblock)
3785 /* A simple "WHERE (cond) x = y" statement or block is
3786 dependence free if cond is not dependent upon writing x,
3787 and the source y is unaffected by the destination x. */
3788 if (!gfc_check_dependency (cblock->next->expr,
3789 cblock->expr, 0)
3790 && !gfc_check_dependency (cblock->next->expr,
3791 cblock->next->expr2, 0))
3792 return gfc_trans_where_3 (cblock, NULL);
3794 else if (!eblock->expr
3795 && !eblock->block
3796 && eblock->next
3797 && eblock->next->op == EXEC_ASSIGN
3798 && !eblock->next->next)
3800 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3801 block is dependence free if cond is not dependent on writes
3802 to x1 and x2, y1 is not dependent on writes to x2, and y2
3803 is not dependent on writes to x1, and both y's are not
3804 dependent upon their own x's. In addition to this, the
3805 final two dependency checks below exclude all but the same
3806 array reference if the where and elsewhere destinations
3807 are the same. In short, this is VERY conservative, and this
3808 is needed because the two loops required by the standard
3809 are coalesced in gfc_trans_where_3. */
3810 if (!gfc_check_dependency(cblock->next->expr,
3811 cblock->expr, 0)
3812 && !gfc_check_dependency(eblock->next->expr,
3813 cblock->expr, 0)
3814 && !gfc_check_dependency(cblock->next->expr,
3815 eblock->next->expr2, 1)
3816 && !gfc_check_dependency(eblock->next->expr,
3817 cblock->next->expr2, 1)
3818 && !gfc_check_dependency(cblock->next->expr,
3819 cblock->next->expr2, 1)
3820 && !gfc_check_dependency(eblock->next->expr,
3821 eblock->next->expr2, 1)
3822 && !gfc_check_dependency(cblock->next->expr,
3823 eblock->next->expr, 0)
3824 && !gfc_check_dependency(eblock->next->expr,
3825 cblock->next->expr, 0))
3826 return gfc_trans_where_3 (cblock, eblock);
3830 gfc_start_block (&block);
3832 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3834 return gfc_finish_block (&block);
3838 /* CYCLE a DO loop. The label decl has already been created by
3839 gfc_trans_do(); it is in TREE_PURPOSE (backend_decl) of the gfc_code
3840 node at the head of the loop. We must mark the label as used. */
3842 tree
3843 gfc_trans_cycle (gfc_code * code)
3845 tree cycle_label;
3847 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3848 TREE_USED (cycle_label) = 1;
3849 return build1_v (GOTO_EXPR, cycle_label);
3853 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3854 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3855 loop. */
3857 tree
3858 gfc_trans_exit (gfc_code * code)
3860 tree exit_label;
3862 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3863 TREE_USED (exit_label) = 1;
3864 return build1_v (GOTO_EXPR, exit_label);
3868 /* Translate the ALLOCATE statement. */
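/* When a STAT= variable is present, e.g. (illustrative) ALLOCATE (a, b,
   STAT=i), each allocation is passed &stat and is followed by

   if (stat != 0) goto error;

   with a single error label emitted after the last allocation, at which
   point I is assigned the value of stat.  Without STAT= no status
   variable or label is created.  */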
3870 tree
3871 gfc_trans_allocate (gfc_code * code)
3873 gfc_alloc *al;
3874 gfc_expr *expr;
3875 gfc_se se;
3876 tree tmp;
3877 tree parm;
3878 tree stat;
3879 tree pstat;
3880 tree error_label;
3881 stmtblock_t block;
3883 if (!code->ext.alloc_list)
3884 return NULL_TREE;
3886 gfc_start_block (&block);
3888 if (code->expr)
3890 tree gfc_int4_type_node = gfc_get_int_type (4);
3892 stat = gfc_create_var (gfc_int4_type_node, "stat");
3893 pstat = build_fold_addr_expr (stat);
3895 error_label = gfc_build_label_decl (NULL_TREE);
3896 TREE_USED (error_label) = 1;
3898 else
3899 pstat = stat = error_label = NULL_TREE;
3901 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3903 expr = al->expr;
3905 gfc_init_se (&se, NULL);
3906 gfc_start_block (&se.pre);
3908 se.want_pointer = 1;
3909 se.descriptor_only = 1;
3910 gfc_conv_expr (&se, expr);
3912 if (!gfc_array_allocate (&se, expr, pstat))
3914 /* A scalar or derived type. */
3915 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3917 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3918 tmp = se.string_length;
3920 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3921 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3922 fold_convert (TREE_TYPE (se.expr), tmp));
3923 gfc_add_expr_to_block (&se.pre, tmp);
3925 if (code->expr)
3927 tmp = build1_v (GOTO_EXPR, error_label);
3928 parm = fold_build2 (NE_EXPR, boolean_type_node,
3929 stat, build_int_cst (TREE_TYPE (stat), 0));
3930 tmp = fold_build3 (COND_EXPR, void_type_node,
3931 parm, tmp, build_empty_stmt ());
3932 gfc_add_expr_to_block (&se.pre, tmp);
3935 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3937 tmp = build_fold_indirect_ref (se.expr);
3938 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3939 gfc_add_expr_to_block (&se.pre, tmp);
3944 tmp = gfc_finish_block (&se.pre);
3945 gfc_add_expr_to_block (&block, tmp);
3948 /* Assign the value to the status variable. */
3949 if (code->expr)
3951 tmp = build1_v (LABEL_EXPR, error_label);
3952 gfc_add_expr_to_block (&block, tmp);
3954 gfc_init_se (&se, NULL);
3955 gfc_conv_expr_lhs (&se, code->expr);
3956 tmp = convert (TREE_TYPE (se.expr), stat);
3957 gfc_add_modify (&block, se.expr, tmp);
3960 return gfc_finish_block (&block);
3964 /* Translate a DEALLOCATE statement.
3965 There are two cases within the for loop:
3966 (1) deallocate(a1, a2, a3) is translated into the following sequence
3967 _gfortran_deallocate(a1, 0B)
3968 _gfortran_deallocate(a2, 0B)
3969 _gfortran_deallocate(a3, 0B)
3970 where the STAT= variable is passed a NULL pointer.
3971 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3972 astat = 0
3973 _gfortran_deallocate(a1, &stat)
3974 astat = astat + stat
3975 _gfortran_deallocate(a2, &stat)
3976 astat = astat + stat
3977 _gfortran_deallocate(a3, &stat)
3978 astat = astat + stat
3979 In case (1), we simply return at the end of the for loop. In case (2)
3980 we set the STAT= variable to astat. */
3981 tree
3982 gfc_trans_deallocate (gfc_code * code)
3984 gfc_se se;
3985 gfc_alloc *al;
3986 gfc_expr *expr;
3987 tree apstat, astat, pstat, stat, tmp;
3988 stmtblock_t block;
3990 gfc_start_block (&block);
3992 /* Set up the optional STAT= variable. */
3993 if (code->expr)
3995 tree gfc_int4_type_node = gfc_get_int_type (4);
3997 /* Variable used with the library call. */
3998 stat = gfc_create_var (gfc_int4_type_node, "stat");
3999 pstat = build_fold_addr_expr (stat);
4001 /* Running total of possible deallocation failures. */
4002 astat = gfc_create_var (gfc_int4_type_node, "astat");
4003 apstat = build_fold_addr_expr (astat);
4005 /* Initialize astat to 0. */
4006 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4008 else
4009 pstat = apstat = stat = astat = NULL_TREE;
4011 for (al = code->ext.alloc_list; al != NULL; al = al->next)
4013 expr = al->expr;
4014 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4016 gfc_init_se (&se, NULL);
4017 gfc_start_block (&se.pre);
4019 se.want_pointer = 1;
4020 se.descriptor_only = 1;
4021 gfc_conv_expr (&se, expr);
4023 if (expr->ts.type == BT_DERIVED
4024 && expr->ts.derived->attr.alloc_comp)
4026 gfc_ref *ref;
4027 gfc_ref *last = NULL;
4028 for (ref = expr->ref; ref; ref = ref->next)
4029 if (ref->type == REF_COMPONENT)
4030 last = ref;
4032 /* Do not deallocate the components of a derived type
4033 ultimate pointer component. */
4034 if (!(last && last->u.c.component->attr.pointer)
4035 && !(!last && expr->symtree->n.sym->attr.pointer))
4037 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
4038 expr->rank);
4039 gfc_add_expr_to_block (&se.pre, tmp);
4043 if (expr->rank)
4044 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4045 else
4047 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4048 gfc_add_expr_to_block (&se.pre, tmp);
4050 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4051 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4054 gfc_add_expr_to_block (&se.pre, tmp);
4056 /* Keep track of the number of failed deallocations by adding stat
4057 of the last deallocation to the running total. */
4058 if (code->expr)
4060 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4061 gfc_add_modify (&se.pre, astat, apstat);
4064 tmp = gfc_finish_block (&se.pre);
4065 gfc_add_expr_to_block (&block, tmp);
4069 /* Assign the value to the status variable. */
4070 if (code->expr)
4072 gfc_init_se (&se, NULL);
4073 gfc_conv_expr_lhs (&se, code->expr);
4074 tmp = convert (TREE_TYPE (se.expr), astat);
4075 gfc_add_modify (&block, se.expr, tmp);
4078 return gfc_finish_block (&block);