gcc/fortran/trans-stmt.c
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
42 typedef struct iter_info
44 tree var;
45 tree start;
46 tree end;
47 tree step;
48 struct iter_info *next;
50 iter_info;
52 typedef struct forall_info
54 iter_info *this_loop;
55 tree mask;
56 tree maskindex;
57 int nvar;
58 tree size;
59 struct forall_info *prev_nest;
61 forall_info;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate a F95 label number to a LABEL_EXPR. */
68 tree
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
77 is a field_decl. */
79 void
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
94 tree
95 gfc_trans_label_assign (gfc_code * code)
97 tree label_tree;
98 gfc_se se;
99 tree len;
100 tree addr;
101 tree len_tree;
102 int label_len;
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr);
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label);
114 if (code->label->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
119 else
121 gfc_expr *format = code->label->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
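/* Illustrative sketch (not part of the original sources): for

       ASSIGN 100 TO IVAR        ! 100 labels a branch target
       GOTO IVAR

   the code above stores the address of label 100 in the auxiliary
   ASSIGN_ADDR variable and -1 in the STRING_LEN variable; had 100 been
   a FORMAT label, the format string and its length would be stored
   instead.  gfc_trans_goto below keys on that -1 to verify that the
   assigned label really is a branch target.  */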
136 /* Translate a GOTO statement. */
138 tree
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
142 tree assigned_goto;
143 tree target;
144 tree tmp;
145 gfc_se se;
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
150 /* ASSIGNED GOTO. */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 code = code->block;
163 if (code == NULL)
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
180 code = code->block;
182 while (code != NULL);
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
186 return gfc_finish_block (&se.pre);
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
191 tree
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
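/* Illustrative example (an assumption for exposition, not taken from the
   original sources): given an elemental subroutine

       ELEMENTAL SUBROUTINE ESUB (IN, OUT)
         INTEGER, INTENT(IN)  :: IN
         INTEGER, INTENT(OUT) :: OUT

   a call such as CALL ESUB (A(1:4), A(2:5)) overlaps the INTENT(IN) and
   INTENT(OUT) actual arguments, so the OUT argument is evaluated into a
   temporary here and copied back with internal_unpack afterwards.  */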
202 static void
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg)
206 gfc_actual_arglist *arg0;
207 gfc_expr *e;
208 gfc_formal_arglist *formal;
209 gfc_loopinfo tmp_loop;
210 gfc_se parmse;
211 gfc_ss *ss;
212 gfc_ss_info *info;
213 gfc_symbol *fsym;
214 int n;
215 stmtblock_t block;
216 tree data;
217 tree offset;
218 tree size;
219 tree tmp;
221 if (loopse->ss == NULL)
222 return;
224 ss = loopse->ss;
225 arg0 = arg;
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
231 e = arg->expr;
232 if (e == NULL)
233 continue;
235 /* Obtain the info structure for the current argument. */
236 info = NULL;
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
239 if (ss->expr != e)
240 continue;
241 info = &ss->data.info;
242 break;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
249 && e->rank && fsym
250 && fsym->attr.intent != INTENT_IN
251 && gfc_check_fncall_dependency (e, fsym->attr.intent,
252 sym, arg0))
254 /* Make a local loopinfo for the temporary creation, so that
255 none of the other ss->info's have to be renormalized. */
256 gfc_init_loopinfo (&tmp_loop);
257 for (n = 0; n < info->dimen; n++)
259 tmp_loop.to[n] = loopse->loop->to[n];
260 tmp_loop.from[n] = loopse->loop->from[n];
261 tmp_loop.order[n] = loopse->loop->order[n];
264 /* Generate the temporary. Merge the block so that the
265 declarations are put at the right binding level. */
266 size = gfc_create_var (gfc_array_index_type, NULL);
267 data = gfc_create_var (pvoid_type_node, NULL);
268 gfc_start_block (&block);
269 tmp = gfc_typenode_for_spec (&e->ts);
270 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
271 &tmp_loop, info, tmp,
272 false, true, false,
273 & arg->expr->where);
274 gfc_add_modify (&se->pre, size, tmp);
275 tmp = fold_convert (pvoid_type_node, info->data);
276 gfc_add_modify (&se->pre, data, tmp);
277 gfc_merge_block_scope (&block);
279 /* Obtain the argument descriptor for unpacking. */
280 gfc_init_se (&parmse, NULL);
281 parmse.want_pointer = 1;
282 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
283 gfc_add_block_to_block (&se->pre, &parmse.pre);
285 /* Calculate the offset for the temporary. */
286 offset = gfc_index_zero_node;
287 for (n = 0; n < info->dimen; n++)
289 tmp = gfc_conv_descriptor_stride (info->descriptor,
290 gfc_rank_cst[n]);
291 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
292 loopse->loop->from[n], tmp);
293 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
294 offset, tmp);
296 info->offset = gfc_create_var (gfc_array_index_type, NULL);
297 gfc_add_modify (&se->pre, info->offset, offset);
299 /* Copy the result back using unpack. */
300 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
301 gfc_add_expr_to_block (&se->post, tmp);
303 gfc_add_block_to_block (&se->post, &parmse.post);
309 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
311 tree
312 gfc_trans_call (gfc_code * code, bool dependency_check)
314 gfc_se se;
315 gfc_ss * ss;
316 int has_alternate_specifier;
318 /* A CALL starts a new block because the actual arguments may have to
319 be evaluated first. */
320 gfc_init_se (&se, NULL);
321 gfc_start_block (&se.pre);
323 gcc_assert (code->resolved_sym);
325 ss = gfc_ss_terminator;
326 if (code->resolved_sym->attr.elemental)
327 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
329       /* This is not an elemental subroutine call with array valued arguments.  */
330 if (ss == gfc_ss_terminator)
333 /* Translate the call. */
334 has_alternate_specifier
335 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
336 NULL_TREE);
338 /* A subroutine without side-effect, by definition, does nothing! */
339 TREE_SIDE_EFFECTS (se.expr) = 1;
341 /* Chain the pieces together and return the block. */
342 if (has_alternate_specifier)
344 gfc_code *select_code;
345 gfc_symbol *sym;
346 select_code = code->next;
347 gcc_assert(select_code->op == EXEC_SELECT);
348 sym = select_code->expr->symtree->n.sym;
349 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
350 if (sym->backend_decl == NULL)
351 sym->backend_decl = gfc_get_symbol_decl (sym);
352 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
354 else
355 gfc_add_expr_to_block (&se.pre, se.expr);
357 gfc_add_block_to_block (&se.pre, &se.post);
360 else
362 /* An elemental subroutine call with array valued arguments has
363 to be scalarized. */
364 gfc_loopinfo loop;
365 stmtblock_t body;
366 stmtblock_t block;
367 gfc_se loopse;
369 /* gfc_walk_elemental_function_args renders the ss chain in the
370 reverse order to the actual argument order. */
371 ss = gfc_reverse_ss (ss);
373 /* Initialize the loop. */
374 gfc_init_se (&loopse, NULL);
375 gfc_init_loopinfo (&loop);
376 gfc_add_ss_to_loop (&loop, ss);
378 gfc_conv_ss_startstride (&loop);
379 gfc_conv_loop_setup (&loop, &code->expr->where);
380 gfc_mark_ss_chain_used (ss, 1);
382 /* Convert the arguments, checking for dependencies. */
383 gfc_copy_loopinfo_to_se (&loopse, &loop);
384 loopse.ss = ss;
386 /* For operator assignment, do dependency checking. */
387 if (dependency_check)
389 gfc_symbol *sym;
390 sym = code->resolved_sym;
391 gfc_conv_elemental_dependencies (&se, &loopse, sym,
392 code->ext.actual);
395 /* Generate the loop body. */
396 gfc_start_scalarized_body (&loop, &body);
397 gfc_init_block (&block);
399 /* Add the subroutine call to the block. */
400 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
401 NULL_TREE);
402 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
404 gfc_add_block_to_block (&block, &loopse.pre);
405 gfc_add_block_to_block (&block, &loopse.post);
407 /* Finish up the loop block and the loop. */
408 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
409 gfc_trans_scalarizing_loops (&loop, &body);
410 gfc_add_block_to_block (&se.pre, &loop.pre);
411 gfc_add_block_to_block (&se.pre, &loop.post);
412 gfc_add_block_to_block (&se.pre, &se.post);
413 gfc_cleanup_loop (&loop);
416 return gfc_finish_block (&se.pre);
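/* A hedged sketch of the alternate-return handling above: for

       CALL SUB (*10, *20)

   the resolver arranges for the call to return an integer specifier and
   places an EXEC_SELECT immediately after the call.  The code above only
   stores the returned specifier in the SELECT variable; the actual jump
   to label 10 or 20 comes from translating that SELECT.  */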
420 /* Translate the RETURN statement. */
422 tree
423 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
425 if (code->expr)
427 gfc_se se;
428 tree tmp;
429 tree result;
431 /* If code->expr is not NULL, this return statement must appear
432 in a subroutine and current_fake_result_decl has already
433 been generated. */
435 result = gfc_get_fake_result_decl (NULL, 0);
436 if (!result)
438 gfc_warning ("An alternate return at %L without a * dummy argument",
439 &code->expr->where);
440 return build1_v (GOTO_EXPR, gfc_get_return_label ());
443 /* Start a new block for this statement. */
444 gfc_init_se (&se, NULL);
445 gfc_start_block (&se.pre);
447 gfc_conv_expr (&se, code->expr);
449 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
450 fold_convert (TREE_TYPE (result), se.expr));
451 gfc_add_expr_to_block (&se.pre, tmp);
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
458 else
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
466 tree
467 gfc_trans_pause (gfc_code * code)
469 tree gfc_int4_type_node = gfc_get_int_type (4);
470 gfc_se se;
471 tree tmp;
473 /* Start a new block for this statement. */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
478 if (code->expr == NULL)
480 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
481 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
483 else
485 gfc_conv_expr_reference (&se, code->expr);
486 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
487 se.expr, se.string_length);
490 gfc_add_expr_to_block (&se.pre, tmp);
492 gfc_add_block_to_block (&se.pre, &se.post);
494 return gfc_finish_block (&se.pre);
498 /* Translate the STOP statement. We have to translate this statement
499 to a runtime library call. */
501 tree
502 gfc_trans_stop (gfc_code * code)
504 tree gfc_int4_type_node = gfc_get_int_type (4);
505 gfc_se se;
506 tree tmp;
508 /* Start a new block for this statement. */
509 gfc_init_se (&se, NULL);
510 gfc_start_block (&se.pre);
513 if (code->expr == NULL)
515 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
516 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
518 else
520 gfc_conv_expr_reference (&se, code->expr);
521 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
522 se.expr, se.string_length);
525 gfc_add_expr_to_block (&se.pre, tmp);
527 gfc_add_block_to_block (&se.pre, &se.post);
529 return gfc_finish_block (&se.pre);
533 /* Generate GENERIC for the IF construct. This function also deals with
534 the simple IF statement, because the front end translates the IF
535 statement into an IF construct.
537 We translate:
539 IF (cond) THEN
540 then_clause
541 ELSEIF (cond2)
542 elseif_clause
543 ELSE
544 else_clause
545 ENDIF
547 into:
549 pre_cond_s;
550 if (cond_s)
552 then_clause;
554 else
556 pre_cond_s
557 if (cond_s)
559 elseif_clause
561 else
563 else_clause;
567 where COND_S is the simplified version of the predicate. PRE_COND_S
568 are the pre side-effects produced by the translation of the
569 conditional.
570 We need to build the chain recursively otherwise we run into
571 problems with folding incomplete statements. */
573 static tree
574 gfc_trans_if_1 (gfc_code * code)
576 gfc_se if_se;
577 tree stmt, elsestmt;
579 /* Check for an unconditional ELSE clause. */
580 if (!code->expr)
581 return gfc_trans_code (code->next);
583 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
584 gfc_init_se (&if_se, NULL);
585 gfc_start_block (&if_se.pre);
587 /* Calculate the IF condition expression. */
588 gfc_conv_expr_val (&if_se, code->expr);
590 /* Translate the THEN clause. */
591 stmt = gfc_trans_code (code->next);
593 /* Translate the ELSE clause. */
594 if (code->block)
595 elsestmt = gfc_trans_if_1 (code->block);
596 else
597 elsestmt = build_empty_stmt ();
599 /* Build the condition expression and add it to the condition block. */
600 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
602 gfc_add_expr_to_block (&if_se.pre, stmt);
604 /* Finish off this statement. */
605 return gfc_finish_block (&if_se.pre);
608 tree
609 gfc_trans_if (gfc_code * code)
611 /* Ignore the top EXEC_IF, it only announces an IF construct. The
612 actual code we must translate is in code->block. */
614 return gfc_trans_if_1 (code->block);
618 /* Translate an arithmetic IF expression.
620 IF (cond) label1, label2, label3 translates to
622 if (cond <= 0)
624 if (cond < 0)
625 goto label1;
626 else // cond == 0
627 goto label2;
629 else // cond > 0
630 goto label3;
632 An optimized version can be generated in case of equal labels.
633 E.g., if label1 is equal to label2, we can translate it to
635 if (cond <= 0)
636 goto label1;
637 else
638 goto label3;
641 tree
642 gfc_trans_arithmetic_if (gfc_code * code)
644 gfc_se se;
645 tree tmp;
646 tree branch1;
647 tree branch2;
648 tree zero;
650 /* Start a new block. */
651 gfc_init_se (&se, NULL);
652 gfc_start_block (&se.pre);
654 /* Pre-evaluate COND. */
655 gfc_conv_expr_val (&se, code->expr);
656 se.expr = gfc_evaluate_now (se.expr, &se.pre);
658 /* Build something to compare with. */
659 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
661 if (code->label->value != code->label2->value)
663 /* If (cond < 0) take branch1 else take branch2.
664 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
665 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
666 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
668 if (code->label->value != code->label3->value)
669 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
670 else
671 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
673 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
675 else
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
678 if (code->label->value != code->label3->value
679 && code->label2->value != code->label3->value)
681 /* if (cond <= 0) take branch1 else take branch2. */
682 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
683 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
687 /* Append the COND_EXPR to the evaluation of COND, and return. */
688 gfc_add_expr_to_block (&se.pre, branch1);
689 return gfc_finish_block (&se.pre);
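/* Worked example (illustration only): IF (X) 10, 10, 30 has
   code->label == code->label2, so the first branch collapses to a single
   GOTO and the whole statement becomes

       if (x <= 0) goto L10; else goto L30;

   while IF (X) 10, 20, 10 keeps label1 == label3 and therefore tests
   X != 0 first, as the NE_EXPR case above shows.  */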
693 /* Translate the simple DO construct. This is where the loop variable has
694 integer type and step +-1. We can't use this in the general case
695 because integer overflow and floating point errors could give incorrect
696 results.
697 We translate a do loop from:
699 DO dovar = from, to, step
700 body
701 END DO
705 [Evaluate loop bounds and step]
706 dovar = from;
707        if ((step > 0) ? (dovar <= to) : (dovar >= to))
709 for (;;)
711 body;
712 cycle_label:
713 cond = (dovar == to);
714 dovar += step;
715 if (cond) goto end_label;
718 end_label:
720 This helps the optimizers by avoiding the extra induction variable
721 used in the general case. */
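/* A concrete instance of the scheme above (illustration only):
   DO I = 1, 3 becomes roughly

       i = 1;
       if (i <= 3)
         for (;;)
           {
             body;
           cycle_label:
             cond = (i == 3);
             i = i + 1;
             if (cond) goto exit_label;
           }
     exit_label:;

   so the loop runs for i = 1, 2, 3 without a separate counter.  */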
723 static tree
724 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
725 tree from, tree to, tree step)
727 stmtblock_t body;
728 tree type;
729 tree cond;
730 tree tmp;
731 tree cycle_label;
732 tree exit_label;
734 type = TREE_TYPE (dovar);
736 /* Initialize the DO variable: dovar = from. */
737 gfc_add_modify (pblock, dovar, from);
739 /* Cycle and exit statements are implemented with gotos. */
740 cycle_label = gfc_build_label_decl (NULL_TREE);
741 exit_label = gfc_build_label_decl (NULL_TREE);
743 /* Put the labels where they can be found later. See gfc_trans_do(). */
744 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
746 /* Loop body. */
747 gfc_start_block (&body);
749 /* Main loop body. */
750 tmp = gfc_trans_code (code->block->next);
751 gfc_add_expr_to_block (&body, tmp);
753 /* Label for cycle statements (if needed). */
754 if (TREE_USED (cycle_label))
756 tmp = build1_v (LABEL_EXPR, cycle_label);
757 gfc_add_expr_to_block (&body, tmp);
760 /* Evaluate the loop condition. */
761 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
762 cond = gfc_evaluate_now (cond, &body);
764 /* Increment the loop variable. */
765 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
766 gfc_add_modify (&body, dovar, tmp);
768 /* The loop exit. */
769 tmp = build1_v (GOTO_EXPR, exit_label);
770 TREE_USED (exit_label) = 1;
771 tmp = fold_build3 (COND_EXPR, void_type_node,
772 cond, tmp, build_empty_stmt ());
773 gfc_add_expr_to_block (&body, tmp);
775 /* Finish the loop body. */
776 tmp = gfc_finish_block (&body);
777 tmp = build1_v (LOOP_EXPR, tmp);
779 /* Only execute the loop if the number of iterations is positive. */
780 if (tree_int_cst_sgn (step) > 0)
781 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
782 else
783 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (pblock, tmp);
788 /* Add the exit label. */
789 tmp = build1_v (LABEL_EXPR, exit_label);
790 gfc_add_expr_to_block (pblock, tmp);
792 return gfc_finish_block (pblock);
795 /* Translate the DO construct. This obviously is one of the most
796 important ones to get right with any compiler, but especially
797 so for Fortran.
799 We special case some loop forms as described in gfc_trans_simple_do.
800 For other cases we implement them with a separate loop count,
801 as described in the standard.
803 We translate a do loop from:
805 DO dovar = from, to, step
806 body
807 END DO
811 [evaluate loop bounds and step]
812 empty = (step > 0 ? to < from : to > from);
813 countm1 = (to - from) / step;
814 dovar = from;
815 if (empty) goto exit_label;
816 for (;;)
818 body;
819 cycle_label:
820            dovar += step;
821            if (countm1 == 0) goto exit_label;
822 countm1--;
824 exit_label:
826 countm1 is an unsigned integer. It is equal to the loop count minus one,
827 because the loop count itself can overflow. */
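/* Why COUNTM1 is unsigned (illustration, assuming 32-bit INTEGER):
   DO I = -HUGE(0), HUGE(0) runs 2**32 - 1 times, which does not fit in a
   signed 32-bit counter.  The count minus one, 2**32 - 2, still fits in
   the unsigned type chosen below, so the loop compares COUNTM1 against
   zero and decrements it afterwards.  */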
829 tree
830 gfc_trans_do (gfc_code * code)
832 gfc_se se;
833 tree dovar;
834 tree from;
835 tree to;
836 tree step;
837 tree countm1;
838 tree type;
839 tree utype;
840 tree cond;
841 tree cycle_label;
842 tree exit_label;
843 tree tmp;
844 tree pos_step;
845 stmtblock_t block;
846 stmtblock_t body;
848 gfc_start_block (&block);
850 /* Evaluate all the expressions in the iterator. */
851 gfc_init_se (&se, NULL);
852 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
853 gfc_add_block_to_block (&block, &se.pre);
854 dovar = se.expr;
855 type = TREE_TYPE (dovar);
857 gfc_init_se (&se, NULL);
858 gfc_conv_expr_val (&se, code->ext.iterator->start);
859 gfc_add_block_to_block (&block, &se.pre);
860 from = gfc_evaluate_now (se.expr, &block);
862 gfc_init_se (&se, NULL);
863 gfc_conv_expr_val (&se, code->ext.iterator->end);
864 gfc_add_block_to_block (&block, &se.pre);
865 to = gfc_evaluate_now (se.expr, &block);
867 gfc_init_se (&se, NULL);
868 gfc_conv_expr_val (&se, code->ext.iterator->step);
869 gfc_add_block_to_block (&block, &se.pre);
870 step = gfc_evaluate_now (se.expr, &block);
872 /* Special case simple loops. */
873 if (TREE_CODE (type) == INTEGER_TYPE
874 && (integer_onep (step)
875 || tree_int_cst_equal (step, integer_minus_one_node)))
876 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
878 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
879 fold_convert (type, integer_zero_node));
881 if (TREE_CODE (type) == INTEGER_TYPE)
882 utype = unsigned_type_for (type);
883 else
884 utype = unsigned_type_for (gfc_array_index_type);
885 countm1 = gfc_create_var (utype, "countm1");
887 /* Cycle and exit statements are implemented with gotos. */
888 cycle_label = gfc_build_label_decl (NULL_TREE);
889 exit_label = gfc_build_label_decl (NULL_TREE);
890 TREE_USED (exit_label) = 1;
892 /* Initialize the DO variable: dovar = from. */
893 gfc_add_modify (&block, dovar, from);
895 /* Initialize loop count and jump to exit label if the loop is empty.
896 This code is executed before we enter the loop body. We generate:
897 if (step > 0)
899 if (to < from) goto exit_label;
900 countm1 = (to - from) / step;
902 else
904 if (to > from) goto exit_label;
905 countm1 = (from - to) / -step;
906 } */
907 if (TREE_CODE (type) == INTEGER_TYPE)
909 tree pos, neg;
911 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
912 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
913 build1_v (GOTO_EXPR, exit_label),
914 build_empty_stmt ());
915 tmp = fold_build2 (MINUS_EXPR, type, to, from);
916 tmp = fold_convert (utype, tmp);
917 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
918 fold_convert (utype, step));
919 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
920 pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);
922 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
923 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
924 build1_v (GOTO_EXPR, exit_label),
925 build_empty_stmt ());
926 tmp = fold_build2 (MINUS_EXPR, type, from, to);
927 tmp = fold_convert (utype, tmp);
928 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
929 fold_convert (utype, fold_build1 (NEGATE_EXPR,
930 type, step)));
931 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
932 neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);
934 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
935 gfc_add_expr_to_block (&block, tmp);
937 else
939 /* TODO: We could use the same width as the real type.
940          This would probably cause more problems than it solves
941 when we implement "long double" types. */
943 tmp = fold_build2 (MINUS_EXPR, type, to, from);
944 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
945 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
946 gfc_add_modify (&block, countm1, tmp);
948 /* We need a special check for empty loops:
949 empty = (step > 0 ? to < from : to > from); */
950 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
951 fold_build2 (LT_EXPR, boolean_type_node, to, from),
952 fold_build2 (GT_EXPR, boolean_type_node, to, from));
953 /* If the loop is empty, go directly to the exit label. */
954 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
955 build1_v (GOTO_EXPR, exit_label),
956 build_empty_stmt ());
957 gfc_add_expr_to_block (&block, tmp);
960 /* Loop body. */
961 gfc_start_block (&body);
963 /* Put these labels where they can be found later. We put the
964 labels in a TREE_LIST node (because TREE_CHAIN is already
965 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
966 label in TREE_VALUE (backend_decl). */
968 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
970 /* Main loop body. */
971 tmp = gfc_trans_code (code->block->next);
972 gfc_add_expr_to_block (&body, tmp);
974 /* Label for cycle statements (if needed). */
975 if (TREE_USED (cycle_label))
977 tmp = build1_v (LABEL_EXPR, cycle_label);
978 gfc_add_expr_to_block (&body, tmp);
981 /* Increment the loop variable. */
982 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
983 gfc_add_modify (&body, dovar, tmp);
985 /* End with the loop condition. Loop until countm1 == 0. */
986 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
987 build_int_cst (utype, 0));
988 tmp = build1_v (GOTO_EXPR, exit_label);
989 tmp = fold_build3 (COND_EXPR, void_type_node,
990 cond, tmp, build_empty_stmt ());
991 gfc_add_expr_to_block (&body, tmp);
993 /* Decrement the loop count. */
994 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
995 gfc_add_modify (&body, countm1, tmp);
997 /* End of loop body. */
998 tmp = gfc_finish_block (&body);
1000 /* The for loop itself. */
1001 tmp = build1_v (LOOP_EXPR, tmp);
1002 gfc_add_expr_to_block (&block, tmp);
1004 /* Add the exit label. */
1005 tmp = build1_v (LABEL_EXPR, exit_label);
1006 gfc_add_expr_to_block (&block, tmp);
1008 return gfc_finish_block (&block);
1012 /* Translate the DO WHILE construct.
1014 We translate
1016 DO WHILE (cond)
1017 body
1018 END DO
1022 for ( ; ; )
1024 pre_cond;
1025 if (! cond) goto exit_label;
1026 body;
1027 cycle_label:
1029 exit_label:
1031 Because the evaluation of the exit condition `cond' may have side
1032 effects, we can't do much for empty loop bodies. The backend optimizers
1033 should be smart enough to eliminate any dead loops. */
1035 tree
1036 gfc_trans_do_while (gfc_code * code)
1038 gfc_se cond;
1039 tree tmp;
1040 tree cycle_label;
1041 tree exit_label;
1042 stmtblock_t block;
1044 /* Everything we build here is part of the loop body. */
1045 gfc_start_block (&block);
1047 /* Cycle and exit statements are implemented with gotos. */
1048 cycle_label = gfc_build_label_decl (NULL_TREE);
1049 exit_label = gfc_build_label_decl (NULL_TREE);
1051 /* Put the labels where they can be found later. See gfc_trans_do(). */
1052 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1054 /* Create a GIMPLE version of the exit condition. */
1055 gfc_init_se (&cond, NULL);
1056 gfc_conv_expr_val (&cond, code->expr);
1057 gfc_add_block_to_block (&block, &cond.pre);
1058 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1060 /* Build "IF (! cond) GOTO exit_label". */
1061 tmp = build1_v (GOTO_EXPR, exit_label);
1062 TREE_USED (exit_label) = 1;
1063 tmp = fold_build3 (COND_EXPR, void_type_node,
1064 cond.expr, tmp, build_empty_stmt ());
1065 gfc_add_expr_to_block (&block, tmp);
1067 /* The main body of the loop. */
1068 tmp = gfc_trans_code (code->block->next);
1069 gfc_add_expr_to_block (&block, tmp);
1071 /* Label for cycle statements (if needed). */
1072 if (TREE_USED (cycle_label))
1074 tmp = build1_v (LABEL_EXPR, cycle_label);
1075 gfc_add_expr_to_block (&block, tmp);
1078 /* End of loop body. */
1079 tmp = gfc_finish_block (&block);
1081 gfc_init_block (&block);
1082 /* Build the loop. */
1083 tmp = build1_v (LOOP_EXPR, tmp);
1084 gfc_add_expr_to_block (&block, tmp);
1086 /* Add the exit label. */
1087 tmp = build1_v (LABEL_EXPR, exit_label);
1088 gfc_add_expr_to_block (&block, tmp);
1090 return gfc_finish_block (&block);
1094 /* Translate the SELECT CASE construct for INTEGER case expressions,
1095 without killing all potential optimizations. The problem is that
1096 Fortran allows unbounded cases, but the back-end does not, so we
1097 need to intercept those before we enter the equivalent SWITCH_EXPR
1098 we can build.
1100 For example, we translate this,
1102 SELECT CASE (expr)
1103 CASE (:100,101,105:115)
1104 block_1
1105 CASE (190:199,200:)
1106 block_2
1107 CASE (300)
1108 block_3
1109 CASE DEFAULT
1110 block_4
1111 END SELECT
1113 to the GENERIC equivalent,
1115 switch (expr)
1117        case (minimum value for typeof(expr)) ... 100:
1118        case 101:
1119        case 105 ... 115:
1120          block_1;
1121          goto end_label;
1123        case 200 ... (maximum value for typeof(expr)):
1124        case 190 ... 199:
1125          block_2;
1126 goto end_label;
1128 case 300:
1129 block_3;
1130 goto end_label;
1132 default:
1133 block_4;
1134 goto end_label;
1137 end_label: */
1139 static tree
1140 gfc_trans_integer_select (gfc_code * code)
1142 gfc_code *c;
1143 gfc_case *cp;
1144 tree end_label;
1145 tree tmp;
1146 gfc_se se;
1147 stmtblock_t block;
1148 stmtblock_t body;
1150 gfc_start_block (&block);
1152 /* Calculate the switch expression. */
1153 gfc_init_se (&se, NULL);
1154 gfc_conv_expr_val (&se, code->expr);
1155 gfc_add_block_to_block (&block, &se.pre);
1157 end_label = gfc_build_label_decl (NULL_TREE);
1159 gfc_init_block (&body);
1161 for (c = code->block; c; c = c->block)
1163 for (cp = c->ext.case_list; cp; cp = cp->next)
1165 tree low, high;
1166 tree label;
1168 /* Assume it's the default case. */
1169 low = high = NULL_TREE;
1171 if (cp->low)
1173 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1174 cp->low->ts.kind);
1176 /* If there's only a lower bound, set the high bound to the
1177 maximum value of the case expression. */
1178 if (!cp->high)
1179 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1182 if (cp->high)
1184 /* Three cases are possible here:
1186 1) There is no lower bound, e.g. CASE (:N).
1187 2) There is a lower bound .NE. high bound, that is
1188 a case range, e.g. CASE (N:M) where M>N (we make
1189 sure that M>N during type resolution).
1190 3) There is a lower bound, and it has the same value
1191 as the high bound, e.g. CASE (N:N). This is our
1192 internal representation of CASE(N).
1194 In the first and second case, we need to set a value for
1195 high. In the third case, we don't because the GCC middle
1196 end represents a single case value by just letting high be
1197 a NULL_TREE. We can't do that because we need to be able
1198 to represent unbounded cases. */
1200 if (!cp->low
1201 || (cp->low
1202 && mpz_cmp (cp->low->value.integer,
1203 cp->high->value.integer) != 0))
1204 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1205 cp->high->ts.kind);
1207 /* Unbounded case. */
1208 if (!cp->low)
1209 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1212 /* Build a label. */
1213 label = gfc_build_label_decl (NULL_TREE);
1215 /* Add this case label.
1216             Pass the 'label' operand so the node matches what the GCC back end expects.  */
1217 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1218 low, high, label);
1219 gfc_add_expr_to_block (&body, tmp);
1222 /* Add the statements for this case. */
1223 tmp = gfc_trans_code (c->next);
1224 gfc_add_expr_to_block (&body, tmp);
1226 /* Break to the end of the construct. */
1227 tmp = build1_v (GOTO_EXPR, end_label);
1228 gfc_add_expr_to_block (&body, tmp);
1231 tmp = gfc_finish_block (&body);
1232 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1233 gfc_add_expr_to_block (&block, tmp);
1235 tmp = build1_v (LABEL_EXPR, end_label);
1236 gfc_add_expr_to_block (&block, tmp);
1238 return gfc_finish_block (&block);
1242 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1244 There are only two cases possible here, even though the standard
1245 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1246 .FALSE., and DEFAULT.
1248 We never generate more than two blocks here. Instead, we always
1249 try to eliminate the DEFAULT case. This way, we can translate this
1250 kind of SELECT construct to a simple
1252 if {} else {};
1254 expression in GENERIC. */
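/* Illustrative example (not from the original sources):

       SELECT CASE (FLAG)
       CASE (.TRUE.)
         CALL T ()
       CASE DEFAULT
         CALL D ()
       END SELECT

   has no explicit .FALSE. case, so the DEFAULT block stands in for the
   missing value and the construct becomes the equivalent of
   "if (flag) t (); else d ();".  */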
1256 static tree
1257 gfc_trans_logical_select (gfc_code * code)
1259 gfc_code *c;
1260 gfc_code *t, *f, *d;
1261 gfc_case *cp;
1262 gfc_se se;
1263 stmtblock_t block;
1265 /* Assume we don't have any cases at all. */
1266 t = f = d = NULL;
1268 /* Now see which ones we actually do have. We can have at most two
1269 cases in a single case list: one for .TRUE. and one for .FALSE.
1270 The default case is always separate. If the cases for .TRUE. and
1271 .FALSE. are in the same case list, the block for that case list
1272    is always executed, and we don't generate a COND_EXPR.  */
1273 for (c = code->block; c; c = c->block)
1275 for (cp = c->ext.case_list; cp; cp = cp->next)
1277 if (cp->low)
1279 if (cp->low->value.logical == 0) /* .FALSE. */
1280 f = c;
1281 else /* if (cp->value.logical != 0), thus .TRUE. */
1282 t = c;
1284 else
1285 d = c;
1289 /* Start a new block. */
1290 gfc_start_block (&block);
1292 /* Calculate the switch expression. We always need to do this
1293 because it may have side effects. */
1294 gfc_init_se (&se, NULL);
1295 gfc_conv_expr_val (&se, code->expr);
1296 gfc_add_block_to_block (&block, &se.pre);
1298 if (t == f && t != NULL)
1300 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1301 translate the code for these cases, append it to the current
1302 block. */
1303 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1305 else
1307 tree true_tree, false_tree, stmt;
1309 true_tree = build_empty_stmt ();
1310 false_tree = build_empty_stmt ();
1312 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1313 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1314         use the default case for the missing value.  */
1315 if (t != NULL && f != NULL)
1316 d = NULL;
1317 else if (d != NULL)
1319 if (t == NULL)
1320 t = d;
1321 else
1322 f = d;
1325 /* Translate the code for each of these blocks, and append it to
1326 the current block. */
1327 if (t != NULL)
1328 true_tree = gfc_trans_code (t->next);
1330 if (f != NULL)
1331 false_tree = gfc_trans_code (f->next);
1333 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1334 true_tree, false_tree);
1335 gfc_add_expr_to_block (&block, stmt);
1338 return gfc_finish_block (&block);
1342 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1343 Instead of generating compares and jumps, it is far simpler to
1344 generate a data structure describing the cases in order and call a
1345 library subroutine that locates the right case.
1346 This is particularly true because this is the only case where we
1347 might have to dispose of a temporary.
1348 The library subroutine returns a pointer to jump to or NULL if no
1349 branches are to be taken. */
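/* A hedged sketch of the shape of the generated code (illustration only;
   the real trees are built below):

       static const struct ... jumptable[n] = { {str1, len1, str2, len2, 0}, ... };
       case_num = select_string (&jumptable, n, expr, expr_len);
       switch (case_num)
         {
         case 0: block_0; goto end_label;
         case 1: block_1; goto end_label;
         ...
         }
     end_label:;

   where select_string stands for the routine behind
   gfor_fndecl_select_string (or its char4 variant), which returns the
   index of the selected case.  */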
1351 static tree
1352 gfc_trans_character_select (gfc_code *code)
1354 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1355 stmtblock_t block, body;
1356 gfc_case *cp, *d;
1357 gfc_code *c;
1358 gfc_se se;
1359 int n, k;
1361 /* The jump table types are stored in static variables to avoid
1362 constructing them from scratch every single time. */
1363 static tree select_struct[2];
1364 static tree ss_string1[2], ss_string1_len[2];
1365 static tree ss_string2[2], ss_string2_len[2];
1366 static tree ss_target[2];
1368 tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);
1370 if (code->expr->ts.kind == 1)
1371 k = 0;
1372 else if (code->expr->ts.kind == 4)
1373 k = 1;
1374 else
1375 gcc_unreachable ();
1377 if (select_struct[k] == NULL)
1379 select_struct[k] = make_node (RECORD_TYPE);
1381 if (code->expr->ts.kind == 1)
1382 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1383 else if (code->expr->ts.kind == 4)
1384 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1385 else
1386 gcc_unreachable ();
1388 #undef ADD_FIELD
1389 #define ADD_FIELD(NAME, TYPE) \
1390 ss_##NAME[k] = gfc_add_field_to_struct \
1391 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1392 get_identifier (stringize(NAME)), TYPE)
1394 ADD_FIELD (string1, pchartype);
1395 ADD_FIELD (string1_len, gfc_charlen_type_node);
1397 ADD_FIELD (string2, pchartype);
1398 ADD_FIELD (string2_len, gfc_charlen_type_node);
1400 ADD_FIELD (target, integer_type_node);
1401 #undef ADD_FIELD
1403 gfc_finish_type (select_struct[k]);
1406 cp = code->block->ext.case_list;
1407 while (cp->left != NULL)
1408 cp = cp->left;
1410 n = 0;
1411 for (d = cp; d; d = d->right)
1412 d->n = n++;
1414 end_label = gfc_build_label_decl (NULL_TREE);
1416 /* Generate the body */
1417 gfc_start_block (&block);
1418 gfc_init_block (&body);
1420 for (c = code->block; c; c = c->block)
1422 for (d = c->ext.case_list; d; d = d->next)
1424 label = gfc_build_label_decl (NULL_TREE);
1425 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1426 build_int_cst (NULL_TREE, d->n),
1427 build_int_cst (NULL_TREE, d->n), label);
1428 gfc_add_expr_to_block (&body, tmp);
1431 tmp = gfc_trans_code (c->next);
1432 gfc_add_expr_to_block (&body, tmp);
1434 tmp = build1_v (GOTO_EXPR, end_label);
1435 gfc_add_expr_to_block (&body, tmp);
1438 /* Generate the structure describing the branches */
1439 init = NULL_TREE;
1441 for(d = cp; d; d = d->right)
1443 node = NULL_TREE;
1445 gfc_init_se (&se, NULL);
1447 if (d->low == NULL)
1449 node = tree_cons (ss_string1[k], null_pointer_node, node);
1450 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1452 else
1454 gfc_conv_expr_reference (&se, d->low);
1456 node = tree_cons (ss_string1[k], se.expr, node);
1457 node = tree_cons (ss_string1_len[k], se.string_length, node);
1460 if (d->high == NULL)
1462 node = tree_cons (ss_string2[k], null_pointer_node, node);
1463 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1465 else
1467 gfc_init_se (&se, NULL);
1468 gfc_conv_expr_reference (&se, d->high);
1470 node = tree_cons (ss_string2[k], se.expr, node);
1471 node = tree_cons (ss_string2_len[k], se.string_length, node);
1474 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1475 node);
1477 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1478 init = tree_cons (NULL_TREE, tmp, init);
1481 type = build_array_type (select_struct[k],
1482 build_index_type (build_int_cst (NULL_TREE, n-1)));
1484 init = build_constructor_from_list (type, nreverse(init));
1485 TREE_CONSTANT (init) = 1;
1486 TREE_STATIC (init) = 1;
1487 /* Create a static variable to hold the jump table. */
1488 tmp = gfc_create_var (type, "jumptable");
1489 TREE_CONSTANT (tmp) = 1;
1490 TREE_STATIC (tmp) = 1;
1491 TREE_READONLY (tmp) = 1;
1492 DECL_INITIAL (tmp) = init;
1493 init = tmp;
1495 /* Build the library call */
1496 init = gfc_build_addr_expr (pvoid_type_node, init);
1498 gfc_init_se (&se, NULL);
1499 gfc_conv_expr_reference (&se, code->expr);
1501 gfc_add_block_to_block (&block, &se.pre);
1503 if (code->expr->ts.kind == 1)
1504 fndecl = gfor_fndecl_select_string;
1505 else if (code->expr->ts.kind == 4)
1506 fndecl = gfor_fndecl_select_string_char4;
1507 else
1508 gcc_unreachable ();
1510 tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
1511 se.expr, se.string_length);
1512 case_num = gfc_create_var (integer_type_node, "case_num");
1513 gfc_add_modify (&block, case_num, tmp);
1515 gfc_add_block_to_block (&block, &se.post);
1517 tmp = gfc_finish_block (&body);
1518 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1519 gfc_add_expr_to_block (&block, tmp);
1521 tmp = build1_v (LABEL_EXPR, end_label);
1522 gfc_add_expr_to_block (&block, tmp);
1524 return gfc_finish_block (&block);
1528 /* Translate the three variants of the SELECT CASE construct.
1530 SELECT CASEs with INTEGER case expressions can be translated to an
1531 equivalent GENERIC switch statement, and for LOGICAL case
1532 expressions we build one or two if-else compares.
1534 SELECT CASEs with CHARACTER case expressions are a whole different
1535 story, because they don't exist in GENERIC. So we sort them and
1536 do a binary search at runtime.
1538 Fortran has no BREAK statement, and it does not allow jumps from
1539 one case block to another. That makes things a lot easier for
1540 the optimizers. */
1542 tree
1543 gfc_trans_select (gfc_code * code)
1545 gcc_assert (code && code->expr);
1547 /* Empty SELECT constructs are legal. */
1548 if (code->block == NULL)
1549 return build_empty_stmt ();
1551 /* Select the correct translation function. */
1552 switch (code->expr->ts.type)
1554 case BT_LOGICAL: return gfc_trans_logical_select (code);
1555 case BT_INTEGER: return gfc_trans_integer_select (code);
1556 case BT_CHARACTER: return gfc_trans_character_select (code);
1557 default:
1558 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1559 /* Not reached */
1564 /* Traversal function to substitute a replacement symtree if the symbol
1565 in the expression is the same as that passed. f == 2 signals that
1566 that variable itself is not to be checked - only the references.
1567 This group of functions is used when the variable expression in a
1568 FORALL assignment has internal references. For example:
1569 FORALL (i = 1:4) p(p(i)) = i
1570 The only recourse here is to store a copy of 'p' for the index
1571 expression. */
1573 static gfc_symtree *new_symtree;
1574 static gfc_symtree *old_symtree;
1576 static bool
1577 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1579 if (expr->expr_type != EXPR_VARIABLE)
1580 return false;
1582 if (*f == 2)
1583 *f = 1;
1584 else if (expr->symtree->n.sym == sym)
1585 expr->symtree = new_symtree;
1587 return false;
1590 static void
1591 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1593 gfc_traverse_expr (e, sym, forall_replace, f);
1596 static bool
1597 forall_restore (gfc_expr *expr,
1598 gfc_symbol *sym ATTRIBUTE_UNUSED,
1599 int *f ATTRIBUTE_UNUSED)
1601 if (expr->expr_type != EXPR_VARIABLE)
1602 return false;
1604 if (expr->symtree == new_symtree)
1605 expr->symtree = old_symtree;
1607 return false;
1610 static void
1611 forall_restore_symtree (gfc_expr *e)
1613 gfc_traverse_expr (e, NULL, forall_restore, 0);
1616 static void
1617 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1619 gfc_se tse;
1620 gfc_se rse;
1621 gfc_expr *e;
1622 gfc_symbol *new_sym;
1623 gfc_symbol *old_sym;
1624 gfc_symtree *root;
1625 tree tmp;
1627 /* Build a copy of the lvalue. */
1628 old_symtree = c->expr->symtree;
1629 old_sym = old_symtree->n.sym;
1630 e = gfc_lval_expr_from_sym (old_sym);
1631 if (old_sym->attr.dimension)
1633 gfc_init_se (&tse, NULL);
1634 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1635 gfc_add_block_to_block (pre, &tse.pre);
1636 gfc_add_block_to_block (post, &tse.post);
1637 tse.expr = build_fold_indirect_ref (tse.expr);
1639 if (e->ts.type != BT_CHARACTER)
1641 /* Use the variable offset for the temporary. */
1642 tmp = gfc_conv_descriptor_offset (tse.expr);
1643 gfc_add_modify (pre, tmp,
1644 gfc_conv_array_offset (old_sym->backend_decl));
1647 else
1649 gfc_init_se (&tse, NULL);
1650 gfc_init_se (&rse, NULL);
1651 gfc_conv_expr (&rse, e);
1652 if (e->ts.type == BT_CHARACTER)
1654 tse.string_length = rse.string_length;
1655 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1656 tse.string_length);
1657 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1658 rse.string_length);
1659 gfc_add_block_to_block (pre, &tse.pre);
1660 gfc_add_block_to_block (post, &tse.post);
1662 else
1664 tmp = gfc_typenode_for_spec (&e->ts);
1665 tse.expr = gfc_create_var (tmp, "temp");
1668 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1669 e->expr_type == EXPR_VARIABLE);
1670 gfc_add_expr_to_block (pre, tmp);
1672 gfc_free_expr (e);
1674 /* Create a new symbol to represent the lvalue. */
1675 new_sym = gfc_new_symbol (old_sym->name, NULL);
1676 new_sym->ts = old_sym->ts;
1677 new_sym->attr.referenced = 1;
1678 new_sym->attr.dimension = old_sym->attr.dimension;
1679 new_sym->attr.flavor = old_sym->attr.flavor;
1681 /* Use the temporary as the backend_decl. */
1682 new_sym->backend_decl = tse.expr;
1684 /* Create a fake symtree for it. */
1685 root = NULL;
1686 new_symtree = gfc_new_symtree (&root, old_sym->name);
1687 new_symtree->n.sym = new_sym;
1688 gcc_assert (new_symtree == root);
1690 /* Go through the expression reference replacing the old_symtree
1691 with the new. */
1692 forall_replace_symtree (c->expr, old_sym, 2);
1694 /* Now we have made this temporary, we might as well use it for
1695 the right hand side. */
1696 forall_replace_symtree (c->expr2, old_sym, 1);
1700 /* Handles dependencies in forall assignments. */
1701 static int
1702 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1704 gfc_ref *lref;
1705 gfc_ref *rref;
1706 int need_temp;
1707 gfc_symbol *lsym;
1709 lsym = c->expr->symtree->n.sym;
1710 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1712 /* Now check for dependencies within the 'variable'
1713 expression itself. These are treated by making a complete
1714 copy of variable and changing all the references to it
1715 point to the copy instead. Note that the shallow copy of
1716 the variable will not suffice for derived types with
1717 pointer components. We therefore leave these to their
1718 own devices. */
1719 if (lsym->ts.type == BT_DERIVED
1720 && lsym->ts.derived->attr.pointer_comp)
1721 return need_temp;
1723 new_symtree = NULL;
1724 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1726 forall_make_variable_temp (c, pre, post);
1727 need_temp = 0;
1730 /* Substrings with dependencies are treated in the same
1731 way. */
1732 if (c->expr->ts.type == BT_CHARACTER
1733 && c->expr->ref
1734 && c->expr2->expr_type == EXPR_VARIABLE
1735 && lsym == c->expr2->symtree->n.sym)
1737 for (lref = c->expr->ref; lref; lref = lref->next)
1738 if (lref->type == REF_SUBSTRING)
1739 break;
1740 for (rref = c->expr2->ref; rref; rref = rref->next)
1741 if (rref->type == REF_SUBSTRING)
1742 break;
1744 if (rref && lref
1745 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1747 forall_make_variable_temp (c, pre, post);
1748 need_temp = 0;
1751 return need_temp;
1755 static void
1756 cleanup_forall_symtrees (gfc_code *c)
1758 forall_restore_symtree (c->expr);
1759 forall_restore_symtree (c->expr2);
1760 gfc_free (new_symtree->n.sym);
1761 gfc_free (new_symtree);
1765 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1766 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1767 indicates whether we should generate code to test the FORALLs mask
1768 array. OUTER is the loop header to be used for initializing mask
1769 indices.
1771 The generated loop format is:
1772 count = (end - start + step) / step
1773 loopvar = start
1774 while (1)
1776          if (count <= 0)
1777            goto end_of_loop;
1778          <body>
1779          loopvar += step;
1780          count--;
1782 end_of_loop: */
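/* Worked example for the count above (illustration only):
   FORALL (I = 1:10:3) gives count = (10 - 1 + 3) / 3 = 4, i.e. the
   iterations I = 1, 4, 7, 10; a descending triplet such as 10:1:-3
   gives (1 - 10 + (-3)) / (-3) = 4 as well.  */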
1784 static tree
1785 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1786 int mask_flag, stmtblock_t *outer)
1788 int n, nvar;
1789 tree tmp;
1790 tree cond;
1791 stmtblock_t block;
1792 tree exit_label;
1793 tree count;
1794 tree var, start, end, step;
1795 iter_info *iter;
1797 /* Initialize the mask index outside the FORALL nest. */
1798 if (mask_flag && forall_tmp->mask)
1799 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1801 iter = forall_tmp->this_loop;
1802 nvar = forall_tmp->nvar;
1803 for (n = 0; n < nvar; n++)
1805 var = iter->var;
1806 start = iter->start;
1807 end = iter->end;
1808 step = iter->step;
1810 exit_label = gfc_build_label_decl (NULL_TREE);
1811 TREE_USED (exit_label) = 1;
1813 /* The loop counter. */
1814 count = gfc_create_var (TREE_TYPE (var), "count");
1816 /* The body of the loop. */
1817 gfc_init_block (&block);
1819 /* The exit condition. */
1820 cond = fold_build2 (LE_EXPR, boolean_type_node,
1821 count, build_int_cst (TREE_TYPE (count), 0));
1822 tmp = build1_v (GOTO_EXPR, exit_label);
1823 tmp = fold_build3 (COND_EXPR, void_type_node,
1824 cond, tmp, build_empty_stmt ());
1825 gfc_add_expr_to_block (&block, tmp);
1827 /* The main loop body. */
1828 gfc_add_expr_to_block (&block, body);
1830 /* Increment the loop variable. */
1831 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1832 gfc_add_modify (&block, var, tmp);
1834 /* Advance to the next mask element. Only do this for the
1835 innermost loop. */
1836 if (n == 0 && mask_flag && forall_tmp->mask)
1838 tree maskindex = forall_tmp->maskindex;
1839 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1840 maskindex, gfc_index_one_node);
1841 gfc_add_modify (&block, maskindex, tmp);
1844 /* Decrement the loop counter. */
1845 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1846 build_int_cst (TREE_TYPE (var), 1));
1847 gfc_add_modify (&block, count, tmp);
1849 body = gfc_finish_block (&block);
1851 /* Loop var initialization. */
1852 gfc_init_block (&block);
1853 gfc_add_modify (&block, var, start);
1856 /* Initialize the loop counter. */
1857 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1858 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1859 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1860 gfc_add_modify (&block, count, tmp);
1862 /* The loop expression. */
1863 tmp = build1_v (LOOP_EXPR, body);
1864 gfc_add_expr_to_block (&block, tmp);
1866 /* The exit label. */
1867 tmp = build1_v (LABEL_EXPR, exit_label);
1868 gfc_add_expr_to_block (&block, tmp);
1870 body = gfc_finish_block (&block);
1871 iter = iter->next;
1873 return body;
1877 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1878 is nonzero, the body is controlled by all masks in the forall nest.
1879    Otherwise, the innermost loop is not controlled by its mask.  This
1880 is used for initializing that mask. */
1882 static tree
1883 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1884 int mask_flag)
1886 tree tmp;
1887 stmtblock_t header;
1888 forall_info *forall_tmp;
1889 tree mask, maskindex;
1891 gfc_start_block (&header);
1893 forall_tmp = nested_forall_info;
1894 while (forall_tmp != NULL)
1896 /* Generate body with masks' control. */
1897 if (mask_flag)
1899 mask = forall_tmp->mask;
1900 maskindex = forall_tmp->maskindex;
1902 /* If a mask was specified make the assignment conditional. */
1903 if (mask)
1905 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1906 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1909 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1910 forall_tmp = forall_tmp->prev_nest;
1911 mask_flag = 1;
1914 gfc_add_expr_to_block (&header, body);
1915 return gfc_finish_block (&header);
1919 /* Allocate data for holding a temporary array. Returns either a local
1920 temporary array or a pointer variable. */
1922 static tree
1923 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1924 tree elem_type)
1926 tree tmpvar;
1927 tree type;
1928 tree tmp;
1930 if (INTEGER_CST_P (size))
1932 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1933 gfc_index_one_node);
1935 else
1936 tmp = NULL_TREE;
1938 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1939 type = build_array_type (elem_type, type);
1940 if (gfc_can_put_var_on_stack (bytesize))
1942 gcc_assert (INTEGER_CST_P (size));
1943 tmpvar = gfc_create_var (type, "temp");
1944 *pdata = NULL_TREE;
1946 else
1948 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1949 *pdata = convert (pvoid_type_node, tmpvar);
1951 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1952 gfc_add_modify (pblock, tmpvar, tmp);
1954 return tmpvar;
1958 /* Generate code to copy the temporary to the actual lhs.  */
1960 static tree
1961 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1962 tree count1, tree wheremask, bool invert)
1964 gfc_ss *lss;
1965 gfc_se lse, rse;
1966 stmtblock_t block, body;
1967 gfc_loopinfo loop1;
1968 tree tmp;
1969 tree wheremaskexpr;
1971 /* Walk the lhs. */
1972 lss = gfc_walk_expr (expr);
1974 if (lss == gfc_ss_terminator)
1976 gfc_start_block (&block);
1978 gfc_init_se (&lse, NULL);
1980 /* Translate the expression. */
1981 gfc_conv_expr (&lse, expr);
1983 /* Form the expression for the temporary. */
1984 tmp = gfc_build_array_ref (tmp1, count1, NULL);
1986 /* Use the scalar assignment as is. */
1987 gfc_add_block_to_block (&block, &lse.pre);
1988 gfc_add_modify (&block, lse.expr, tmp);
1989 gfc_add_block_to_block (&block, &lse.post);
1991 /* Increment the count1. */
1992 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1993 gfc_index_one_node);
1994 gfc_add_modify (&block, count1, tmp);
1996 tmp = gfc_finish_block (&block);
1998 else
2000 gfc_start_block (&block);
2002 gfc_init_loopinfo (&loop1);
2003 gfc_init_se (&rse, NULL);
2004 gfc_init_se (&lse, NULL);
2006 /* Associate the lss with the loop. */
2007 gfc_add_ss_to_loop (&loop1, lss);
2009 /* Calculate the bounds of the scalarization. */
2010 gfc_conv_ss_startstride (&loop1);
2011 /* Setup the scalarizing loops. */
2012 gfc_conv_loop_setup (&loop1, &expr->where);
2014 gfc_mark_ss_chain_used (lss, 1);
2016 /* Start the scalarized loop body. */
2017 gfc_start_scalarized_body (&loop1, &body);
2019 /* Setup the gfc_se structures. */
2020 gfc_copy_loopinfo_to_se (&lse, &loop1);
2021 lse.ss = lss;
2023 /* Form the expression of the temporary. */
2024 if (lss != gfc_ss_terminator)
2025 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2026 /* Translate expr. */
2027 gfc_conv_expr (&lse, expr);
2029 /* Use the scalar assignment. */
2030 rse.string_length = lse.string_length;
2031 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2033 /* Form the mask expression according to the mask tree list. */
2034 if (wheremask)
2036 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2037 if (invert)
2038 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2039 TREE_TYPE (wheremaskexpr),
2040 wheremaskexpr);
2041 tmp = fold_build3 (COND_EXPR, void_type_node,
2042 wheremaskexpr, tmp, build_empty_stmt ());
2045 gfc_add_expr_to_block (&body, tmp);
2047 /* Increment count1. */
2048 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2049 count1, gfc_index_one_node);
2050 gfc_add_modify (&body, count1, tmp);
2052 /* Increment count3. */
2053 if (count3)
2055 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2056 count3, gfc_index_one_node);
2057 gfc_add_modify (&body, count3, tmp);
2060 /* Generate the copying loops. */
2061 gfc_trans_scalarizing_loops (&loop1, &body);
2062 gfc_add_block_to_block (&block, &loop1.pre);
2063 gfc_add_block_to_block (&block, &loop1.post);
2064 gfc_cleanup_loop (&loop1);
2066 tmp = gfc_finish_block (&block);
2068 return tmp;
2072 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
2073 the temporary; LSS and RSS are formed in compute_inner_temp_size ()
2074 and should not be freed. WHEREMASK is the conditional execution mask
2075 whose sense may be inverted by INVERT. */
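/* In outline, assuming a non-scalar rhs inside a single FORALL level and
   that a WHEREMASK is present, the copy loop built below looks like:

       do <scalarized loop over the rhs>
         if (wheremask[count3])      ! sense inverted when INVERT is set
           tmp1[count1] = f<i>
         count1 = count1 + 1
         count3 = count3 + 1
       end do
 */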
2077 static tree
2078 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2079 tree count1, gfc_ss *lss, gfc_ss *rss,
2080 tree wheremask, bool invert)
2082 stmtblock_t block, body1;
2083 gfc_loopinfo loop;
2084 gfc_se lse;
2085 gfc_se rse;
2086 tree tmp;
2087 tree wheremaskexpr;
2089 gfc_start_block (&block);
2091 gfc_init_se (&rse, NULL);
2092 gfc_init_se (&lse, NULL);
2094 if (lss == gfc_ss_terminator)
2096 gfc_init_block (&body1);
2097 gfc_conv_expr (&rse, expr2);
2098 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2100 else
2102 /* Initialize the loop. */
2103 gfc_init_loopinfo (&loop);
2105 /* We may need LSS to determine the shape of the expression. */
2106 gfc_add_ss_to_loop (&loop, lss);
2107 gfc_add_ss_to_loop (&loop, rss);
2109 gfc_conv_ss_startstride (&loop);
2110 gfc_conv_loop_setup (&loop, &expr2->where);
2112 gfc_mark_ss_chain_used (rss, 1);
2113 /* Start the loop body. */
2114 gfc_start_scalarized_body (&loop, &body1);
2116 /* Translate the expression. */
2117 gfc_copy_loopinfo_to_se (&rse, &loop);
2118 rse.ss = rss;
2119 gfc_conv_expr (&rse, expr2);
2121 /* Form the expression of the temporary. */
2122 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2125 /* Use the scalar assignment. */
2126 lse.string_length = rse.string_length;
2127 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2128 expr2->expr_type == EXPR_VARIABLE);
2130 /* Form the mask expression according to the mask tree list. */
2131 if (wheremask)
2133 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2134 if (invert)
2135 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2136 TREE_TYPE (wheremaskexpr),
2137 wheremaskexpr);
2138 tmp = fold_build3 (COND_EXPR, void_type_node,
2139 wheremaskexpr, tmp, build_empty_stmt ());
2142 gfc_add_expr_to_block (&body1, tmp);
2144 if (lss == gfc_ss_terminator)
2146 gfc_add_block_to_block (&block, &body1);
2148 /* Increment count1. */
2149 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2150 gfc_index_one_node);
2151 gfc_add_modify (&block, count1, tmp);
2153 else
2155 /* Increment count1. */
2156 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2157 count1, gfc_index_one_node);
2158 gfc_add_modify (&body1, count1, tmp);
2160 /* Increment count3. */
2161 if (count3)
2163 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2164 count3, gfc_index_one_node);
2165 gfc_add_modify (&body1, count3, tmp);
2168 /* Generate the copying loops. */
2169 gfc_trans_scalarizing_loops (&loop, &body1);
2171 gfc_add_block_to_block (&block, &loop.pre);
2172 gfc_add_block_to_block (&block, &loop.post);
2174 gfc_cleanup_loop (&loop);
2175 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2176 as tree nodes in SS may not be valid in a different scope. */
2179 tmp = gfc_finish_block (&block);
2180 return tmp;
2184 /* Calculate the size of the temporary needed for the assignment inside a
2185 FORALL. LSS and RSS are filled in by this function. */
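/* For example, for an assignment whose lhs is a(1:n, 1:m) the inner size
   evaluates to n * m, i.e. the product over all scalarization dimensions
   of (to - from + 1) as computed below.  */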
2187 static tree
2188 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2189 stmtblock_t * pblock,
2190 gfc_ss **lss, gfc_ss **rss)
2192 gfc_loopinfo loop;
2193 tree size;
2194 int i;
2195 int save_flag;
2196 tree tmp;
2198 *lss = gfc_walk_expr (expr1);
2199 *rss = NULL;
2201 size = gfc_index_one_node;
2202 if (*lss != gfc_ss_terminator)
2204 gfc_init_loopinfo (&loop);
2206 /* Walk the RHS of the expression. */
2207 *rss = gfc_walk_expr (expr2);
2208 if (*rss == gfc_ss_terminator)
2210 /* The rhs is scalar. Add a ss for the expression. */
2211 *rss = gfc_get_ss ();
2212 (*rss)->next = gfc_ss_terminator;
2213 (*rss)->type = GFC_SS_SCALAR;
2214 (*rss)->expr = expr2;
2217 /* Associate the SS with the loop. */
2218 gfc_add_ss_to_loop (&loop, *lss);
2219 /* We don't actually need to add the rhs at this point, but it might
2220 make guessing the loop bounds a bit easier. */
2221 gfc_add_ss_to_loop (&loop, *rss);
2223 /* We only want the shape of the expression, not the rest of the junk
2224 generated by the scalarizer. */
2225 loop.array_parameter = 1;
2227 /* Calculate the bounds of the scalarization. */
2228 save_flag = flag_bounds_check;
2229 flag_bounds_check = 0;
2230 gfc_conv_ss_startstride (&loop);
2231 flag_bounds_check = save_flag;
2232 gfc_conv_loop_setup (&loop, &expr2->where);
2234 /* Figure out how many elements we need. */
2235 for (i = 0; i < loop.dimen; i++)
2237 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2238 gfc_index_one_node, loop.from[i]);
2239 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2240 tmp, loop.to[i]);
2241 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2243 gfc_add_block_to_block (pblock, &loop.pre);
2244 size = gfc_evaluate_now (size, pblock);
2245 gfc_add_block_to_block (pblock, &loop.post);
2247 /* TODO: write a function that cleans up a loopinfo without freeing
2248 the SS chains. Currently a NOP. */
2251 return size;
2255 /* Calculate the overall iterator number of the nested forall construct.
2256 This routine actually calculates the number of times the body of the
2257 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2258 that by the expression INNER_SIZE. The BLOCK argument specifies the
2259 block in which to calculate the result, and the optional INNER_SIZE_BODY
2260 argument contains any statements that need to be executed (inside the loop)
2261 to initialize or calculate INNER_SIZE. */
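/* For example, a nest

       forall (i = 1:4)
         forall (j = 1:3)
           <assignment needing INNER_SIZE elements per iteration>

   requires 4 * 3 * INNER_SIZE elements in total.  When INNER_SIZE and all
   the forall sizes are constant and no masks are present, that product is
   folded at compile time; otherwise it is accumulated at run time by the
   loops generated below.  */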
2263 static tree
2264 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2265 stmtblock_t *inner_size_body, stmtblock_t *block)
2267 forall_info *forall_tmp = nested_forall_info;
2268 tree tmp, number;
2269 stmtblock_t body;
2271 /* We can eliminate the innermost unconditional loops with constant
2272 array bounds. */
2273 if (INTEGER_CST_P (inner_size))
2275 while (forall_tmp
2276 && !forall_tmp->mask
2277 && INTEGER_CST_P (forall_tmp->size))
2279 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2280 inner_size, forall_tmp->size);
2281 forall_tmp = forall_tmp->prev_nest;
2284 /* If there are no loops left, we have our constant result. */
2285 if (!forall_tmp)
2286 return inner_size;
2289 /* Otherwise, create a temporary variable to compute the result. */
2290 number = gfc_create_var (gfc_array_index_type, "num");
2291 gfc_add_modify (block, number, gfc_index_zero_node);
2293 gfc_start_block (&body);
2294 if (inner_size_body)
2295 gfc_add_block_to_block (&body, inner_size_body);
2296 if (forall_tmp)
2297 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2298 number, inner_size);
2299 else
2300 tmp = inner_size;
2301 gfc_add_modify (&body, number, tmp);
2302 tmp = gfc_finish_block (&body);
2304 /* Generate loops. */
2305 if (forall_tmp != NULL)
2306 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2308 gfc_add_expr_to_block (block, tmp);
2310 return number;
2314 /* Allocate a temporary for a FORALL construct. SIZE is the size of the
2315 temporary needed. PTEMP1 is returned so that the space can be freed later. */
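/* Roughly: a temporary of SIZE elements of a 4-byte type is requested as
   bytesize = SIZE * 4; if that is a small compile-time constant the array
   lives on the stack, otherwise gfc_do_allocate mallocs it and PTEMP1
   receives the pointer to free.  */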
2317 static tree
2318 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2319 tree * ptemp1)
2321 tree bytesize;
2322 tree unit;
2323 tree tmp;
2325 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2326 if (!integer_onep (unit))
2327 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2328 else
2329 bytesize = size;
2331 *ptemp1 = NULL;
2332 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2334 if (*ptemp1)
2335 tmp = build_fold_indirect_ref (tmp);
2336 return tmp;
2340 /* Allocate a temporary for a FORALL construct according to the information
2341 in nested_forall_info. INNER_SIZE is the size of the temporary needed by the
2342 assignment inside the FORALL. PTEMP1 is returned so the space can be freed. */
2344 static tree
2345 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2346 tree inner_size, stmtblock_t * inner_size_body,
2347 stmtblock_t * block, tree * ptemp1)
2349 tree size;
2351 /* Calculate the total size of temporary needed in forall construct. */
2352 size = compute_overall_iter_number (nested_forall_info, inner_size,
2353 inner_size_body, block);
2355 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2359 /* Handle assignments inside a FORALL which need a temporary.
2361 forall (i=start:end:stride; maskexpr)
2362 e<i> = f<i>
2363 end forall
2364 (where e,f<i> are arbitrary expressions possibly involving i
2365 and there is a dependency between e<i> and f<i>)
2366 Translates to:
2367 masktmp(:) = maskexpr(:)
2369 maskindex = 0;
2370 count1 = 0;
2371 num = 0;
2372 for (i = start; i <= end; i += stride)
2373 num += SIZE (f<i>)
2374 count1 = 0;
2375 ALLOCATE (tmp(num))
2376 for (i = start; i <= end; i += stride)
2378 if (masktmp[maskindex++])
2379 tmp[count1++] = f<i>
2381 maskindex = 0;
2382 count1 = 0;
2383 for (i = start; i <= end; i += stride)
2385 if (masktmp[maskindex++])
2386 e<i> = tmp[count1++]
2388 DEALLOCATE (tmp)
2390 static void
2391 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2392 tree wheremask, bool invert,
2393 forall_info * nested_forall_info,
2394 stmtblock_t * block)
2396 tree type;
2397 tree inner_size;
2398 gfc_ss *lss, *rss;
2399 tree count, count1;
2400 tree tmp, tmp1;
2401 tree ptemp1;
2402 stmtblock_t inner_size_body;
2404 /* Create vars. count1 is the current iterator number of the nested
2405 forall. */
2406 count1 = gfc_create_var (gfc_array_index_type, "count1");
2408 /* Count is the wheremask index. */
2409 if (wheremask)
2411 count = gfc_create_var (gfc_array_index_type, "count");
2412 gfc_add_modify (block, count, gfc_index_zero_node);
2414 else
2415 count = NULL;
2417 /* Initialize count1. */
2418 gfc_add_modify (block, count1, gfc_index_zero_node);
2420 /* Calculate the size of the temporary needed for the assignment. Fill in
2421 LSS and RSS, which are used by generate_loop_for_rhs_to_temp (). */
2422 gfc_init_block (&inner_size_body);
2423 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2424 &lss, &rss);
2426 /* The type of the LHS. Used by allocate_temp_for_forall_nest. */
2427 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2429 if (!expr1->ts.cl->backend_decl)
2431 gfc_se tse;
2432 gfc_init_se (&tse, NULL);
2433 gfc_conv_expr (&tse, expr1->ts.cl->length);
2434 expr1->ts.cl->backend_decl = tse.expr;
2436 type = gfc_get_character_type_len (gfc_default_character_kind,
2437 expr1->ts.cl->backend_decl);
2439 else
2440 type = gfc_typenode_for_spec (&expr1->ts);
2442 /* Allocate temporary for nested forall construct according to the
2443 information in nested_forall_info and inner_size. */
2444 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2445 &inner_size_body, block, &ptemp1);
2447 /* Generate code to copy the rhs to the temporary. */
2448 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2449 wheremask, invert);
2451 /* Generate body and loops according to the information in
2452 nested_forall_info. */
2453 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2454 gfc_add_expr_to_block (block, tmp);
2456 /* Reset count1. */
2457 gfc_add_modify (block, count1, gfc_index_zero_node);
2459 /* Reset count. */
2460 if (wheremask)
2461 gfc_add_modify (block, count, gfc_index_zero_node);
2463 /* Generate code to copy the temporary to the lhs. */
2464 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2465 wheremask, invert);
2467 /* Generate body and loops according to the information in
2468 nested_forall_info. */
2469 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2470 gfc_add_expr_to_block (block, tmp);
2472 if (ptemp1)
2474 /* Free the temporary. */
2475 tmp = gfc_call_free (ptemp1);
2476 gfc_add_expr_to_block (block, tmp);
2481 /* Translate a pointer assignment inside FORALL which needs a temporary. */
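/* The scheme mirrors gfc_trans_assign_need_temp above: a first pass over
   the forall iterations saves every target (a pointer for scalar targets,
   a full descriptor for array sections) into a linear temporary, and a
   second pass assigns the saved values to the lhs pointers.  Roughly:

       do <forall iterations>
         tmp[count++] = <target of iteration i>
       count = 0
       do <forall iterations>
         p<i> => tmp[count++]
 */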
2483 static void
2484 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2485 forall_info * nested_forall_info,
2486 stmtblock_t * block)
2488 tree type;
2489 tree inner_size;
2490 gfc_ss *lss, *rss;
2491 gfc_se lse;
2492 gfc_se rse;
2493 gfc_ss_info *info;
2494 gfc_loopinfo loop;
2495 tree desc;
2496 tree parm;
2497 tree parmtype;
2498 stmtblock_t body;
2499 tree count;
2500 tree tmp, tmp1, ptemp1;
2502 count = gfc_create_var (gfc_array_index_type, "count");
2503 gfc_add_modify (block, count, gfc_index_zero_node);
2505 inner_size = integer_one_node;
2506 lss = gfc_walk_expr (expr1);
2507 rss = gfc_walk_expr (expr2);
2508 if (lss == gfc_ss_terminator)
2510 type = gfc_typenode_for_spec (&expr1->ts);
2511 type = build_pointer_type (type);
2513 /* Allocate temporary for nested forall construct according to the
2514 information in nested_forall_info and inner_size. */
2515 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2516 inner_size, NULL, block, &ptemp1);
2517 gfc_start_block (&body);
2518 gfc_init_se (&lse, NULL);
2519 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2520 gfc_init_se (&rse, NULL);
2521 rse.want_pointer = 1;
2522 gfc_conv_expr (&rse, expr2);
2523 gfc_add_block_to_block (&body, &rse.pre);
2524 gfc_add_modify (&body, lse.expr,
2525 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2526 gfc_add_block_to_block (&body, &rse.post);
2528 /* Increment count. */
2529 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2530 count, gfc_index_one_node);
2531 gfc_add_modify (&body, count, tmp);
2533 tmp = gfc_finish_block (&body);
2535 /* Generate body and loops according to the information in
2536 nested_forall_info. */
2537 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2538 gfc_add_expr_to_block (block, tmp);
2540 /* Reset count. */
2541 gfc_add_modify (block, count, gfc_index_zero_node);
2543 gfc_start_block (&body);
2544 gfc_init_se (&lse, NULL);
2545 gfc_init_se (&rse, NULL);
2546 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2547 lse.want_pointer = 1;
2548 gfc_conv_expr (&lse, expr1);
2549 gfc_add_block_to_block (&body, &lse.pre);
2550 gfc_add_modify (&body, lse.expr, rse.expr);
2551 gfc_add_block_to_block (&body, &lse.post);
2552 /* Increment count. */
2553 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2554 count, gfc_index_one_node);
2555 gfc_add_modify (&body, count, tmp);
2556 tmp = gfc_finish_block (&body);
2558 /* Generate body and loops according to the information in
2559 nested_forall_info. */
2560 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2561 gfc_add_expr_to_block (block, tmp);
2563 else
2565 gfc_init_loopinfo (&loop);
2567 /* Associate the SS with the loop. */
2568 gfc_add_ss_to_loop (&loop, rss);
2570 /* Setup the scalarizing loops and bounds. */
2571 gfc_conv_ss_startstride (&loop);
2573 gfc_conv_loop_setup (&loop, &expr2->where);
2575 info = &rss->data.info;
2576 desc = info->descriptor;
2578 /* Make a new descriptor. */
2579 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2580 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2581 loop.from, loop.to, 1,
2582 GFC_ARRAY_UNKNOWN);
2584 /* Allocate temporary for nested forall construct. */
2585 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2586 inner_size, NULL, block, &ptemp1);
2587 gfc_start_block (&body);
2588 gfc_init_se (&lse, NULL);
2589 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2590 lse.direct_byref = 1;
2591 rss = gfc_walk_expr (expr2);
2592 gfc_conv_expr_descriptor (&lse, expr2, rss);
2594 gfc_add_block_to_block (&body, &lse.pre);
2595 gfc_add_block_to_block (&body, &lse.post);
2597 /* Increment count. */
2598 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2599 count, gfc_index_one_node);
2600 gfc_add_modify (&body, count, tmp);
2602 tmp = gfc_finish_block (&body);
2604 /* Generate body and loops according to the information in
2605 nested_forall_info. */
2606 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2607 gfc_add_expr_to_block (block, tmp);
2609 /* Reset count. */
2610 gfc_add_modify (block, count, gfc_index_zero_node);
2612 parm = gfc_build_array_ref (tmp1, count, NULL);
2613 lss = gfc_walk_expr (expr1);
2614 gfc_init_se (&lse, NULL);
2615 gfc_conv_expr_descriptor (&lse, expr1, lss);
2616 gfc_add_modify (&lse.pre, lse.expr, parm);
2617 gfc_start_block (&body);
2618 gfc_add_block_to_block (&body, &lse.pre);
2619 gfc_add_block_to_block (&body, &lse.post);
2621 /* Increment count. */
2622 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2623 count, gfc_index_one_node);
2624 gfc_add_modify (&body, count, tmp);
2626 tmp = gfc_finish_block (&body);
2628 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2629 gfc_add_expr_to_block (block, tmp);
2631 /* Free the temporary. */
2632 if (ptemp1)
2634 tmp = gfc_call_free (ptemp1);
2635 gfc_add_expr_to_block (block, tmp);
2640 /* FORALL and WHERE statements are really nasty, especially when you nest
2641 them. All of the rhs values of a forall assignment must be evaluated
2642 before any of the actual assignments are performed. Presumably this also
2643 applies to all the assignments in an inner where statement. */
2645 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2646 linear array, relying on the fact that we process in the same order in all
2647 loops.
2649 forall (i=start:end:stride; maskexpr)
2650 e<i> = f<i>
2651 g<i> = h<i>
2652 end forall
2653 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2654 Translates to:
2655 count = ((end + 1 - start) / stride)
2656 masktmp(:) = maskexpr(:)
2658 maskindex = 0;
2659 for (i = start; i <= end; i += stride)
2661 if (masktmp[maskindex++])
2662 e<i> = f<i>
2664 maskindex = 0;
2665 for (i = start; i <= end; i += stride)
2667 if (masktmp[maskindex++])
2668 g<i> = h<i>
2671 Note that this code only works when there are no dependencies.
2672 FORALL loops with array assignments and data dependencies are a real pain,
2673 because the size of the temporary cannot always be determined before the
2674 loop is executed. This problem is compounded by the presence of nested
2675 FORALL constructs. */
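/* For example,

       forall (i = 1:n)  a(i) = a(n + 1 - i)

   has such a dependency: every a(n + 1 - i) must be fetched before any
   a(i) is stored, so the rhs values are first copied into a temporary
   (see gfc_trans_assign_need_temp above).  */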
2678 static tree
2679 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2681 stmtblock_t pre;
2682 stmtblock_t post;
2683 stmtblock_t block;
2684 stmtblock_t body;
2685 tree *var;
2686 tree *start;
2687 tree *end;
2688 tree *step;
2689 gfc_expr **varexpr;
2690 tree tmp;
2691 tree assign;
2692 tree size;
2693 tree maskindex;
2694 tree mask;
2695 tree pmask;
2696 int n;
2697 int nvar;
2698 int need_temp;
2699 gfc_forall_iterator *fa;
2700 gfc_se se;
2701 gfc_code *c;
2702 gfc_saved_var *saved_vars;
2703 iter_info *this_forall;
2704 forall_info *info;
2705 bool need_mask;
2707 /* Do nothing if the mask is false. */
2708 if (code->expr
2709 && code->expr->expr_type == EXPR_CONSTANT
2710 && !code->expr->value.logical)
2711 return build_empty_stmt ();
2713 n = 0;
2714 /* Count the FORALL index number. */
2715 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2716 n++;
2717 nvar = n;
2719 /* Allocate the space for var, start, end, step, varexpr. */
2720 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2721 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2722 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2723 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2724 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2725 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2727 /* Allocate the space for info. */
2728 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2730 gfc_start_block (&pre);
2731 gfc_init_block (&post);
2732 gfc_init_block (&block);
2734 n = 0;
2735 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2737 gfc_symbol *sym = fa->var->symtree->n.sym;
2739 /* Allocate space for this_forall. */
2740 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2742 /* Create a temporary variable for the FORALL index. */
2743 tmp = gfc_typenode_for_spec (&sym->ts);
2744 var[n] = gfc_create_var (tmp, sym->name);
2745 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2747 /* Record it in this_forall. */
2748 this_forall->var = var[n];
2750 /* Replace the index symbol's backend_decl with the temporary decl. */
2751 sym->backend_decl = var[n];
2753 /* Work out the start, end and stride for the loop. */
2754 gfc_init_se (&se, NULL);
2755 gfc_conv_expr_val (&se, fa->start);
2756 /* Record it in this_forall. */
2757 this_forall->start = se.expr;
2758 gfc_add_block_to_block (&block, &se.pre);
2759 start[n] = se.expr;
2761 gfc_init_se (&se, NULL);
2762 gfc_conv_expr_val (&se, fa->end);
2763 /* Record it in this_forall. */
2764 this_forall->end = se.expr;
2765 gfc_make_safe_expr (&se);
2766 gfc_add_block_to_block (&block, &se.pre);
2767 end[n] = se.expr;
2769 gfc_init_se (&se, NULL);
2770 gfc_conv_expr_val (&se, fa->stride);
2771 /* Record it in this_forall. */
2772 this_forall->step = se.expr;
2773 gfc_make_safe_expr (&se);
2774 gfc_add_block_to_block (&block, &se.pre);
2775 step[n] = se.expr;
2777 /* Set the NEXT field of this_forall to NULL. */
2778 this_forall->next = NULL;
2779 /* Link this_forall to the info construct. */
2780 if (info->this_loop)
2782 iter_info *iter_tmp = info->this_loop;
2783 while (iter_tmp->next != NULL)
2784 iter_tmp = iter_tmp->next;
2785 iter_tmp->next = this_forall;
2787 else
2788 info->this_loop = this_forall;
2790 n++;
2792 nvar = n;
2794 /* Calculate the size needed for the current forall level. */
2795 size = gfc_index_one_node;
2796 for (n = 0; n < nvar; n++)
2798 /* size = (end + step - start) / step. */
2799 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2800 step[n], start[n]);
2801 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2803 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2804 tmp = convert (gfc_array_index_type, tmp);
2806 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
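/* For example, the triplet  i = 1, 10, 3  yields
   (10 + 3 - 1) / 3 = 4 iterations (i = 1, 4, 7, 10).  */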
2809 /* Record the nvar and size of current forall level. */
2810 info->nvar = nvar;
2811 info->size = size;
2813 if (code->expr)
2815 /* If the mask is .true., consider the FORALL unconditional. */
2816 if (code->expr->expr_type == EXPR_CONSTANT
2817 && code->expr->value.logical)
2818 need_mask = false;
2819 else
2820 need_mask = true;
2822 else
2823 need_mask = false;
2825 /* First we need to allocate the mask. */
2826 if (need_mask)
2828 /* As the mask array can be very big, prefer compact boolean types. */
2829 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2830 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2831 size, NULL, &block, &pmask);
2832 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2834 /* Record them in the info structure. */
2835 info->maskindex = maskindex;
2836 info->mask = mask;
2838 else
2840 /* No mask was specified. */
2841 maskindex = NULL_TREE;
2842 mask = pmask = NULL_TREE;
2845 /* Link the current forall level to nested_forall_info. */
2846 info->prev_nest = nested_forall_info;
2847 nested_forall_info = info;
2849 /* Copy the mask into a temporary variable if required.
2850 For now we assume a mask temporary is needed. */
2851 if (need_mask)
2853 /* As the mask array can be very big, prefer compact boolean types. */
2854 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2856 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2858 /* Start of mask assignment loop body. */
2859 gfc_start_block (&body);
2861 /* Evaluate the mask expression. */
2862 gfc_init_se (&se, NULL);
2863 gfc_conv_expr_val (&se, code->expr);
2864 gfc_add_block_to_block (&body, &se.pre);
2866 /* Store the mask. */
2867 se.expr = convert (mask_type, se.expr);
2869 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2870 gfc_add_modify (&body, tmp, se.expr);
2872 /* Advance to the next mask element. */
2873 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2874 maskindex, gfc_index_one_node);
2875 gfc_add_modify (&body, maskindex, tmp);
2877 /* Generate the loops. */
2878 tmp = gfc_finish_block (&body);
2879 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2880 gfc_add_expr_to_block (&block, tmp);
2883 c = code->block->next;
2885 /* TODO: loop merging in FORALL statements. */
2886 /* Now that we've got a copy of the mask, generate the assignment loops. */
2887 while (c)
2889 switch (c->op)
2891 case EXEC_ASSIGN:
2892 /* A scalar or array assignment. Do the simple check for
2893 lhs-to-rhs dependencies. These make a temporary for the
2894 rhs and form a second forall block to copy it to the variable. */
2895 need_temp = check_forall_dependencies(c, &pre, &post);
2897 /* Temporaries due to array assignment data dependencies introduce
2898 no end of problems. */
2899 if (need_temp)
2900 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2901 nested_forall_info, &block);
2902 else
2904 /* Use the normal assignment copying routines. */
2905 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2907 /* Generate body and loops. */
2908 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2909 assign, 1);
2910 gfc_add_expr_to_block (&block, tmp);
2913 /* Clean up any temporary symtrees that have been made to deal
2914 with dependencies. */
2915 if (new_symtree)
2916 cleanup_forall_symtrees (c);
2918 break;
2920 case EXEC_WHERE:
2921 /* Translate WHERE or WHERE construct nested in FORALL. */
2922 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2923 break;
2925 /* Pointer assignment inside FORALL. */
2926 case EXEC_POINTER_ASSIGN:
2927 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2928 if (need_temp)
2929 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2930 nested_forall_info, &block);
2931 else
2933 /* Use the normal assignment copying routines. */
2934 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2936 /* Generate body and loops. */
2937 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2938 assign, 1);
2939 gfc_add_expr_to_block (&block, tmp);
2941 break;
2943 case EXEC_FORALL:
2944 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2945 gfc_add_expr_to_block (&block, tmp);
2946 break;
2948 /* Explicit subroutine calls are prevented by the frontend but interface
2949 assignments can legitimately produce them. */
2950 case EXEC_ASSIGN_CALL:
2951 assign = gfc_trans_call (c, true);
2952 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2953 gfc_add_expr_to_block (&block, tmp);
2954 break;
2956 default:
2957 gcc_unreachable ();
2960 c = c->next;
2963 /* Restore the original index variables. */
2964 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2965 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2967 /* Free the space for var, start, end, step, varexpr. */
2968 gfc_free (var);
2969 gfc_free (start);
2970 gfc_free (end);
2971 gfc_free (step);
2972 gfc_free (varexpr);
2973 gfc_free (saved_vars);
2975 /* Free the space for this forall_info. */
2976 gfc_free (info);
2978 if (pmask)
2980 /* Free the temporary for the mask. */
2981 tmp = gfc_call_free (pmask);
2982 gfc_add_expr_to_block (&block, tmp);
2984 if (maskindex)
2985 pushdecl (maskindex);
2987 gfc_add_block_to_block (&pre, &block);
2988 gfc_add_block_to_block (&pre, &post);
2990 return gfc_finish_block (&pre);
2994 /* Translate the FORALL statement or construct. */
2996 tree gfc_trans_forall (gfc_code * code)
2998 return gfc_trans_forall_1 (code, NULL);
3002 /* Evaluate the WHERE mask expression and copy its value to a temporary.
3003 If the WHERE construct is nested in FORALL, the temporary must hold the
3004 size needed by the WHERE mask expression multiplied by the iteration
3005 count of the nested forall.
3006 ME is the WHERE mask expression.
3007 MASK is the current execution mask upon input, whose sense may or may
3008 not be inverted as specified by the INVERT argument.
3009 CMASK is the updated execution mask on output, or NULL if not required.
3010 PMASK is the pending execution mask on output, or NULL if not required.
3011 BLOCK is the block in which to place the condition evaluation loops. */
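/* Conceptually, the loop generated below computes, per element,

       cond     = <ME>
       cmask[i] = mask[i] .and. cond
       pmask[i] = mask[i] .and. .not. cond

   where mask[i] is taken as .true. when no enclosing mask exists and is
   negated first when INVERT is set.  */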
3013 static void
3014 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3015 tree mask, bool invert, tree cmask, tree pmask,
3016 tree mask_type, stmtblock_t * block)
3018 tree tmp, tmp1;
3019 gfc_ss *lss, *rss;
3020 gfc_loopinfo loop;
3021 stmtblock_t body, body1;
3022 tree count, cond, mtmp;
3023 gfc_se lse, rse;
3025 gfc_init_loopinfo (&loop);
3027 lss = gfc_walk_expr (me);
3028 rss = gfc_walk_expr (me);
3030 /* Variable to index the temporary. */
3031 count = gfc_create_var (gfc_array_index_type, "count");
3032 /* Initialize count. */
3033 gfc_add_modify (block, count, gfc_index_zero_node);
3035 gfc_start_block (&body);
3037 gfc_init_se (&rse, NULL);
3038 gfc_init_se (&lse, NULL);
3040 if (lss == gfc_ss_terminator)
3042 gfc_init_block (&body1);
3044 else
3046 /* Initialize the loop. */
3047 gfc_init_loopinfo (&loop);
3049 /* We may need LSS to determine the shape of the expression. */
3050 gfc_add_ss_to_loop (&loop, lss);
3051 gfc_add_ss_to_loop (&loop, rss);
3053 gfc_conv_ss_startstride (&loop);
3054 gfc_conv_loop_setup (&loop, &me->where);
3056 gfc_mark_ss_chain_used (rss, 1);
3057 /* Start the loop body. */
3058 gfc_start_scalarized_body (&loop, &body1);
3060 /* Translate the expression. */
3061 gfc_copy_loopinfo_to_se (&rse, &loop);
3062 rse.ss = rss;
3063 gfc_conv_expr (&rse, me);
3066 /* Variable to evaluate mask condition. */
3067 cond = gfc_create_var (mask_type, "cond");
3068 if (mask && (cmask || pmask))
3069 mtmp = gfc_create_var (mask_type, "mask");
3070 else mtmp = NULL_TREE;
3072 gfc_add_block_to_block (&body1, &lse.pre);
3073 gfc_add_block_to_block (&body1, &rse.pre);
3075 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3077 if (mask && (cmask || pmask))
3079 tmp = gfc_build_array_ref (mask, count, NULL);
3080 if (invert)
3081 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3082 gfc_add_modify (&body1, mtmp, tmp);
3085 if (cmask)
3087 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3088 tmp = cond;
3089 if (mask)
3090 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3091 gfc_add_modify (&body1, tmp1, tmp);
3094 if (pmask)
3096 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3097 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3098 if (mask)
3099 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3100 gfc_add_modify (&body1, tmp1, tmp);
3103 gfc_add_block_to_block (&body1, &lse.post);
3104 gfc_add_block_to_block (&body1, &rse.post);
3106 if (lss == gfc_ss_terminator)
3108 gfc_add_block_to_block (&body, &body1);
3110 else
3112 /* Increment count. */
3113 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3114 gfc_index_one_node);
3115 gfc_add_modify (&body1, count, tmp1);
3117 /* Generate the copying loops. */
3118 gfc_trans_scalarizing_loops (&loop, &body1);
3120 gfc_add_block_to_block (&body, &loop.pre);
3121 gfc_add_block_to_block (&body, &loop.post);
3123 gfc_cleanup_loop (&loop);
3124 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3125 as tree nodes in SS may not be valid in a different scope. */
3128 tmp1 = gfc_finish_block (&body);
3129 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3130 if (nested_forall_info != NULL)
3131 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3133 gfc_add_expr_to_block (block, tmp1);
3137 /* Translate an assignment statement in a WHERE statement or construct
3138 statement. The MASK expression is used to control which elements
3139 of EXPR1 shall be assigned. The sense of MASK is specified by
3140 INVERT. */
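/* In outline, the scalarized loop generated below performs, per element,

       if (mask[count1])        ! sense inverted when INVERT is set
         expr1 = expr2
       count1 = count1 + 1

   plus a second masked copy (indexed by count2) from a temporary to the
   real lhs when the scalarizer had to create one to break dependencies.  */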
3142 static tree
3143 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3144 tree mask, bool invert,
3145 tree count1, tree count2,
3146 gfc_symbol *sym)
3148 gfc_se lse;
3149 gfc_se rse;
3150 gfc_ss *lss;
3151 gfc_ss *lss_section;
3152 gfc_ss *rss;
3154 gfc_loopinfo loop;
3155 tree tmp;
3156 stmtblock_t block;
3157 stmtblock_t body;
3158 tree index, maskexpr;
3160 #if 0
3161 /* TODO: handle this special case.
3162 Special case a single function returning an array. */
3163 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3165 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3166 if (tmp)
3167 return tmp;
3169 #endif
3171 /* Assignment of the form lhs = rhs. */
3172 gfc_start_block (&block);
3174 gfc_init_se (&lse, NULL);
3175 gfc_init_se (&rse, NULL);
3177 /* Walk the lhs. */
3178 lss = gfc_walk_expr (expr1);
3179 rss = NULL;
3181 /* In each where-assign-stmt, the mask-expr and the variable being
3182 defined shall be arrays of the same shape. */
3183 gcc_assert (lss != gfc_ss_terminator);
3185 /* The assignment needs scalarization. */
3186 lss_section = lss;
3188 /* Find a non-scalar SS from the lhs. */
3189 while (lss_section != gfc_ss_terminator
3190 && lss_section->type != GFC_SS_SECTION)
3191 lss_section = lss_section->next;
3193 gcc_assert (lss_section != gfc_ss_terminator);
3195 /* Initialize the scalarizer. */
3196 gfc_init_loopinfo (&loop);
3198 /* Walk the rhs. */
3199 rss = gfc_walk_expr (expr2);
3200 if (rss == gfc_ss_terminator)
3202 /* The rhs is scalar. Add a ss for the expression. */
3203 rss = gfc_get_ss ();
3204 rss->where = 1;
3205 rss->next = gfc_ss_terminator;
3206 rss->type = GFC_SS_SCALAR;
3207 rss->expr = expr2;
3210 /* Associate the SS with the loop. */
3211 gfc_add_ss_to_loop (&loop, lss);
3212 gfc_add_ss_to_loop (&loop, rss);
3214 /* Calculate the bounds of the scalarization. */
3215 gfc_conv_ss_startstride (&loop);
3217 /* Resolve any data dependencies in the statement. */
3218 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3220 /* Setup the scalarizing loops. */
3221 gfc_conv_loop_setup (&loop, &expr2->where);
3223 /* Setup the gfc_se structures. */
3224 gfc_copy_loopinfo_to_se (&lse, &loop);
3225 gfc_copy_loopinfo_to_se (&rse, &loop);
3227 rse.ss = rss;
3228 gfc_mark_ss_chain_used (rss, 1);
3229 if (loop.temp_ss == NULL)
3231 lse.ss = lss;
3232 gfc_mark_ss_chain_used (lss, 1);
3234 else
3236 lse.ss = loop.temp_ss;
3237 gfc_mark_ss_chain_used (lss, 3);
3238 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3241 /* Start the scalarized loop body. */
3242 gfc_start_scalarized_body (&loop, &body);
3244 /* Translate the expression. */
3245 gfc_conv_expr (&rse, expr2);
3246 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3248 gfc_conv_tmp_array_ref (&lse);
3249 gfc_advance_se_ss_chain (&lse);
3251 else
3252 gfc_conv_expr (&lse, expr1);
3254 /* Form the mask expression according to the mask. */
3255 index = count1;
3256 maskexpr = gfc_build_array_ref (mask, index, NULL);
3257 if (invert)
3258 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3260 /* Use the scalar assignment as is. */
3261 if (sym == NULL)
3262 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3263 loop.temp_ss != NULL, false);
3264 else
3265 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3267 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3269 gfc_add_expr_to_block (&body, tmp);
3271 if (lss == gfc_ss_terminator)
3273 /* Increment count1. */
3274 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3275 count1, gfc_index_one_node);
3276 gfc_add_modify (&body, count1, tmp);
3278 /* Use the scalar assignment as is. */
3279 gfc_add_block_to_block (&block, &body);
3281 else
3283 gcc_assert (lse.ss == gfc_ss_terminator
3284 && rse.ss == gfc_ss_terminator);
3286 if (loop.temp_ss != NULL)
3288 /* Increment count1 before finishing the main body of a scalarized
3289 expression. */
3290 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3291 count1, gfc_index_one_node);
3292 gfc_add_modify (&body, count1, tmp);
3293 gfc_trans_scalarized_loop_boundary (&loop, &body);
3295 /* We need to copy the temporary to the actual lhs. */
3296 gfc_init_se (&lse, NULL);
3297 gfc_init_se (&rse, NULL);
3298 gfc_copy_loopinfo_to_se (&lse, &loop);
3299 gfc_copy_loopinfo_to_se (&rse, &loop);
3301 rse.ss = loop.temp_ss;
3302 lse.ss = lss;
3304 gfc_conv_tmp_array_ref (&rse);
3305 gfc_advance_se_ss_chain (&rse);
3306 gfc_conv_expr (&lse, expr1);
3308 gcc_assert (lse.ss == gfc_ss_terminator
3309 && rse.ss == gfc_ss_terminator);
3311 /* Form the mask expression according to the mask tree list. */
3312 index = count2;
3313 maskexpr = gfc_build_array_ref (mask, index, NULL);
3314 if (invert)
3315 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3316 maskexpr);
3318 /* Use the scalar assignment as is. */
3319 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3320 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3321 gfc_add_expr_to_block (&body, tmp);
3323 /* Increment count2. */
3324 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3325 count2, gfc_index_one_node);
3326 gfc_add_modify (&body, count2, tmp);
3328 else
3330 /* Increment count1. */
3331 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3332 count1, gfc_index_one_node);
3333 gfc_add_modify (&body, count1, tmp);
3336 /* Generate the copying loops. */
3337 gfc_trans_scalarizing_loops (&loop, &body);
3339 /* Wrap the whole thing up. */
3340 gfc_add_block_to_block (&block, &loop.pre);
3341 gfc_add_block_to_block (&block, &loop.post);
3342 gfc_cleanup_loop (&loop);
3345 return gfc_finish_block (&block);
3349 /* Translate the WHERE construct or statement.
3350 This function can be called iteratively to translate the nested WHERE
3351 construct or statement.
3352 MASK is the control mask. */
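/* As an illustration,

       where (a > 0)
         x = y
       elsewhere
         x = z
       end where

   executes the first clause under cmask = (a > 0), restricted by any
   incoming MASK, and the final ELSEWHERE under the remaining elements,
   obtained either by inverting cmask (outermost WHERE) or from the
   pending mask pmask (nested WHERE).  */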
3354 static void
3355 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3356 forall_info * nested_forall_info, stmtblock_t * block)
3358 stmtblock_t inner_size_body;
3359 tree inner_size, size;
3360 gfc_ss *lss, *rss;
3361 tree mask_type;
3362 gfc_expr *expr1;
3363 gfc_expr *expr2;
3364 gfc_code *cblock;
3365 gfc_code *cnext;
3366 tree tmp;
3367 tree cond;
3368 tree count1, count2;
3369 bool need_cmask;
3370 bool need_pmask;
3371 int need_temp;
3372 tree pcmask = NULL_TREE;
3373 tree ppmask = NULL_TREE;
3374 tree cmask = NULL_TREE;
3375 tree pmask = NULL_TREE;
3376 gfc_actual_arglist *arg;
3378 /* The WHERE statement or the WHERE construct statement. */
3379 cblock = code->block;
3381 /* As the mask array can be very big, prefer compact boolean types. */
3382 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3384 /* Determine which temporary masks are needed. */
3385 if (!cblock->block)
3387 /* One clause: No ELSEWHEREs. */
3388 need_cmask = (cblock->next != 0);
3389 need_pmask = false;
3391 else if (cblock->block->block)
3393 /* Three or more clauses: Conditional ELSEWHEREs. */
3394 need_cmask = true;
3395 need_pmask = true;
3397 else if (cblock->next)
3399 /* Two clauses, the first non-empty. */
3400 need_cmask = true;
3401 need_pmask = (mask != NULL_TREE
3402 && cblock->block->next != 0);
3404 else if (!cblock->block->next)
3406 /* Two clauses, both empty. */
3407 need_cmask = false;
3408 need_pmask = false;
3410 /* Two clauses, the first empty, the second non-empty. */
3411 else if (mask)
3413 need_cmask = (cblock->block->expr != 0);
3414 need_pmask = true;
3416 else
3418 need_cmask = true;
3419 need_pmask = false;
3422 if (need_cmask || need_pmask)
3424 /* Calculate the size of temporary needed by the mask-expr. */
3425 gfc_init_block (&inner_size_body);
3426 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3427 &inner_size_body, &lss, &rss);
3429 /* Calculate the total size of temporary needed. */
3430 size = compute_overall_iter_number (nested_forall_info, inner_size,
3431 &inner_size_body, block);
3433 /* Check whether the size is negative. */
3434 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3435 gfc_index_zero_node);
3436 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3437 gfc_index_zero_node, size);
3438 size = gfc_evaluate_now (size, block);
3440 /* Allocate temporary for WHERE mask if needed. */
3441 if (need_cmask)
3442 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3443 &pcmask);
3445 /* Allocate temporary for !mask if needed. */
3446 if (need_pmask)
3447 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3448 &ppmask);
3451 while (cblock)
3453 /* Each time around this loop, the where clause is conditional
3454 on the value of mask and invert, which are updated at the
3455 bottom of the loop. */
3457 /* Has mask-expr. */
3458 if (cblock->expr)
3460 /* Ensure that the WHERE mask will be evaluated exactly once.
3461 If there are no statements in this WHERE/ELSEWHERE clause,
3462 then we don't need to update the control mask (cmask).
3463 If this is the last clause of the WHERE construct, then
3464 we don't need to update the pending control mask (pmask). */
3465 if (mask)
3466 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3467 mask, invert,
3468 cblock->next ? cmask : NULL_TREE,
3469 cblock->block ? pmask : NULL_TREE,
3470 mask_type, block);
3471 else
3472 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3473 NULL_TREE, false,
3474 (cblock->next || cblock->block)
3475 ? cmask : NULL_TREE,
3476 NULL_TREE, mask_type, block);
3478 invert = false;
3480 /* It's a final elsewhere-stmt. No mask-expr is present. */
3481 else
3482 cmask = mask;
3484 /* The body of this where clause is controlled by cmask, with its
3485 sense specified by invert. */
3487 /* Get the assignment statement of a WHERE statement, or the first
3488 statement in where-body-construct of a WHERE construct. */
3489 cnext = cblock->next;
3490 while (cnext)
3492 switch (cnext->op)
3494 /* WHERE assignment statement. */
3495 case EXEC_ASSIGN_CALL:
3497 arg = cnext->ext.actual;
3498 expr1 = expr2 = NULL;
3499 for (; arg; arg = arg->next)
3501 if (!arg->expr)
3502 continue;
3503 if (expr1 == NULL)
3504 expr1 = arg->expr;
3505 else
3506 expr2 = arg->expr;
3508 goto evaluate;
3510 case EXEC_ASSIGN:
3511 expr1 = cnext->expr;
3512 expr2 = cnext->expr2;
3513 evaluate:
3514 if (nested_forall_info != NULL)
3516 need_temp = gfc_check_dependency (expr1, expr2, 0);
3517 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3518 gfc_trans_assign_need_temp (expr1, expr2,
3519 cmask, invert,
3520 nested_forall_info, block);
3521 else
3523 /* Variables to control maskexpr. */
3524 count1 = gfc_create_var (gfc_array_index_type, "count1");
3525 count2 = gfc_create_var (gfc_array_index_type, "count2");
3526 gfc_add_modify (block, count1, gfc_index_zero_node);
3527 gfc_add_modify (block, count2, gfc_index_zero_node);
3529 tmp = gfc_trans_where_assign (expr1, expr2,
3530 cmask, invert,
3531 count1, count2,
3532 cnext->resolved_sym);
3534 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3535 tmp, 1);
3536 gfc_add_expr_to_block (block, tmp);
3539 else
3541 /* Variables to control maskexpr. */
3542 count1 = gfc_create_var (gfc_array_index_type, "count1");
3543 count2 = gfc_create_var (gfc_array_index_type, "count2");
3544 gfc_add_modify (block, count1, gfc_index_zero_node);
3545 gfc_add_modify (block, count2, gfc_index_zero_node);
3547 tmp = gfc_trans_where_assign (expr1, expr2,
3548 cmask, invert,
3549 count1, count2,
3550 cnext->resolved_sym);
3551 gfc_add_expr_to_block (block, tmp);
3554 break;
3556 /* WHERE or WHERE construct is part of a where-body-construct. */
3557 case EXEC_WHERE:
3558 gfc_trans_where_2 (cnext, cmask, invert,
3559 nested_forall_info, block);
3560 break;
3562 default:
3563 gcc_unreachable ();
3566 /* The next statement within the same where-body-construct. */
3567 cnext = cnext->next;
3569 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3570 cblock = cblock->block;
3571 if (mask == NULL_TREE)
3573 /* If we're the initial WHERE, we can simply invert the sense
3574 of the current mask to obtain the "mask" for the remaining
3575 ELSEWHEREs. */
3576 invert = true;
3577 mask = cmask;
3579 else
3581 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3582 invert = false;
3583 mask = pmask;
3587 /* If we allocated a pending mask array, deallocate it now. */
3588 if (ppmask)
3590 tmp = gfc_call_free (ppmask);
3591 gfc_add_expr_to_block (block, tmp);
3594 /* If we allocated a current mask array, deallocate it now. */
3595 if (pcmask)
3597 tmp = gfc_call_free (pcmask);
3598 gfc_add_expr_to_block (block, tmp);
3602 /* Translate a simple WHERE construct or statement without dependencies.
3603 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3604 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3605 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
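/* In effect,

       where (c)  x = y
       elsewhere  x = z
       end where

   becomes one scalarized loop whose body is the element-wise conditional
   x = merge (y, z, c), built with a single COND_EXPR below instead of two
   separate masked loops.  */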
3607 static tree
3608 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3610 stmtblock_t block, body;
3611 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3612 tree tmp, cexpr, tstmt, estmt;
3613 gfc_ss *css, *tdss, *tsss;
3614 gfc_se cse, tdse, tsse, edse, esse;
3615 gfc_loopinfo loop;
3616 gfc_ss *edss = 0;
3617 gfc_ss *esss = 0;
3619 cond = cblock->expr;
3620 tdst = cblock->next->expr;
3621 tsrc = cblock->next->expr2;
3622 edst = eblock ? eblock->next->expr : NULL;
3623 esrc = eblock ? eblock->next->expr2 : NULL;
3625 gfc_start_block (&block);
3626 gfc_init_loopinfo (&loop);
3628 /* Handle the condition. */
3629 gfc_init_se (&cse, NULL);
3630 css = gfc_walk_expr (cond);
3631 gfc_add_ss_to_loop (&loop, css);
3633 /* Handle the then-clause. */
3634 gfc_init_se (&tdse, NULL);
3635 gfc_init_se (&tsse, NULL);
3636 tdss = gfc_walk_expr (tdst);
3637 tsss = gfc_walk_expr (tsrc);
3638 if (tsss == gfc_ss_terminator)
3640 tsss = gfc_get_ss ();
3641 tsss->where = 1;
3642 tsss->next = gfc_ss_terminator;
3643 tsss->type = GFC_SS_SCALAR;
3644 tsss->expr = tsrc;
3646 gfc_add_ss_to_loop (&loop, tdss);
3647 gfc_add_ss_to_loop (&loop, tsss);
3649 if (eblock)
3651 /* Handle the else clause. */
3652 gfc_init_se (&edse, NULL);
3653 gfc_init_se (&esse, NULL);
3654 edss = gfc_walk_expr (edst);
3655 esss = gfc_walk_expr (esrc);
3656 if (esss == gfc_ss_terminator)
3658 esss = gfc_get_ss ();
3659 esss->where = 1;
3660 esss->next = gfc_ss_terminator;
3661 esss->type = GFC_SS_SCALAR;
3662 esss->expr = esrc;
3664 gfc_add_ss_to_loop (&loop, edss);
3665 gfc_add_ss_to_loop (&loop, esss);
3668 gfc_conv_ss_startstride (&loop);
3669 gfc_conv_loop_setup (&loop, &tdst->where);
3671 gfc_mark_ss_chain_used (css, 1);
3672 gfc_mark_ss_chain_used (tdss, 1);
3673 gfc_mark_ss_chain_used (tsss, 1);
3674 if (eblock)
3676 gfc_mark_ss_chain_used (edss, 1);
3677 gfc_mark_ss_chain_used (esss, 1);
3680 gfc_start_scalarized_body (&loop, &body);
3682 gfc_copy_loopinfo_to_se (&cse, &loop);
3683 gfc_copy_loopinfo_to_se (&tdse, &loop);
3684 gfc_copy_loopinfo_to_se (&tsse, &loop);
3685 cse.ss = css;
3686 tdse.ss = tdss;
3687 tsse.ss = tsss;
3688 if (eblock)
3690 gfc_copy_loopinfo_to_se (&edse, &loop);
3691 gfc_copy_loopinfo_to_se (&esse, &loop);
3692 edse.ss = edss;
3693 esse.ss = esss;
3696 gfc_conv_expr (&cse, cond);
3697 gfc_add_block_to_block (&body, &cse.pre);
3698 cexpr = cse.expr;
3700 gfc_conv_expr (&tsse, tsrc);
3701 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3703 gfc_conv_tmp_array_ref (&tdse);
3704 gfc_advance_se_ss_chain (&tdse);
3706 else
3707 gfc_conv_expr (&tdse, tdst);
3709 if (eblock)
3711 gfc_conv_expr (&esse, esrc);
3712 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3714 gfc_conv_tmp_array_ref (&edse);
3715 gfc_advance_se_ss_chain (&edse);
3717 else
3718 gfc_conv_expr (&edse, edst);
3721 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3722 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3723 : build_empty_stmt ();
3724 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3725 gfc_add_expr_to_block (&body, tmp);
3726 gfc_add_block_to_block (&body, &cse.post);
3728 gfc_trans_scalarizing_loops (&loop, &body);
3729 gfc_add_block_to_block (&block, &loop.pre);
3730 gfc_add_block_to_block (&block, &loop.post);
3731 gfc_cleanup_loop (&loop);
3733 return gfc_finish_block (&block);
3736 /* As the WHERE or WHERE construct statement can be nested, we call
3737 gfc_trans_where_2 to do the translation, and pass the initial
3738 NULL values for both the control mask and the pending control mask. */
3740 tree
3741 gfc_trans_where (gfc_code * code)
3743 stmtblock_t block;
3744 gfc_code *cblock;
3745 gfc_code *eblock;
3747 cblock = code->block;
3748 if (cblock->next
3749 && cblock->next->op == EXEC_ASSIGN
3750 && !cblock->next->next)
3752 eblock = cblock->block;
3753 if (!eblock)
3755 /* A simple "WHERE (cond) x = y" statement or block is
3756 dependence free if cond is not dependent upon writing x,
3757 and the source y is unaffected by the destination x. */
3758 if (!gfc_check_dependency (cblock->next->expr,
3759 cblock->expr, 0)
3760 && !gfc_check_dependency (cblock->next->expr,
3761 cblock->next->expr2, 0))
3762 return gfc_trans_where_3 (cblock, NULL);
3764 else if (!eblock->expr
3765 && !eblock->block
3766 && eblock->next
3767 && eblock->next->op == EXEC_ASSIGN
3768 && !eblock->next->next)
3770 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3771 block is dependence free if cond is not dependent on writes
3772 to x1 and x2, y1 is not dependent on writes to x2, and y2
3773 is not dependent on writes to x1, and both y's are not
3774 dependent upon their own x's. In addition to this, the
3775 final two dependency checks below exclude all but the same
3776 array reference if the where and elsewhere destinations
3777 are the same. In short, this is VERY conservative, and this
3778 is needed because the two loops, required by the standard,
3779 are coalesced in gfc_trans_where_3. */
3780 if (!gfc_check_dependency(cblock->next->expr,
3781 cblock->expr, 0)
3782 && !gfc_check_dependency(eblock->next->expr,
3783 cblock->expr, 0)
3784 && !gfc_check_dependency(cblock->next->expr,
3785 eblock->next->expr2, 1)
3786 && !gfc_check_dependency(eblock->next->expr,
3787 cblock->next->expr2, 1)
3788 && !gfc_check_dependency(cblock->next->expr,
3789 cblock->next->expr2, 1)
3790 && !gfc_check_dependency(eblock->next->expr,
3791 eblock->next->expr2, 1)
3792 && !gfc_check_dependency(cblock->next->expr,
3793 eblock->next->expr, 0)
3794 && !gfc_check_dependency(eblock->next->expr,
3795 cblock->next->expr, 0))
3796 return gfc_trans_where_3 (cblock, eblock);
3800 gfc_start_block (&block);
3802 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3804 return gfc_finish_block (&block);
3808 /* CYCLE a DO loop. The label decl has already been created by
3809 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
3810 node at the head of the loop. We must mark the label as used. */
3812 tree
3813 gfc_trans_cycle (gfc_code * code)
3815 tree cycle_label;
3817 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3818 TREE_USED (cycle_label) = 1;
3819 return build1_v (GOTO_EXPR, cycle_label);
3823 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3824 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3825 loop. */
3827 tree
3828 gfc_trans_exit (gfc_code * code)
3830 tree exit_label;
3832 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3833 TREE_USED (exit_label) = 1;
3834 return build1_v (GOTO_EXPR, exit_label);
3838 /* Translate the ALLOCATE statement. */
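/* When a STAT= variable is present, each allocation is followed by a
   status check that branches to a label at the end of the statement,
   roughly:

       stat = <allocate a1>
       if (stat != 0) goto error_label
       stat = <allocate a2>
       if (stat != 0) goto error_label
       ...
     error_label:
       <STAT= variable> = stat
 */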
3840 tree
3841 gfc_trans_allocate (gfc_code * code)
3843 gfc_alloc *al;
3844 gfc_expr *expr;
3845 gfc_se se;
3846 tree tmp;
3847 tree parm;
3848 tree stat;
3849 tree pstat;
3850 tree error_label;
3851 stmtblock_t block;
3853 if (!code->ext.alloc_list)
3854 return NULL_TREE;
3856 gfc_start_block (&block);
3858 if (code->expr)
3860 tree gfc_int4_type_node = gfc_get_int_type (4);
3862 stat = gfc_create_var (gfc_int4_type_node, "stat");
3863 pstat = build_fold_addr_expr (stat);
3865 error_label = gfc_build_label_decl (NULL_TREE);
3866 TREE_USED (error_label) = 1;
3868 else
3869 pstat = stat = error_label = NULL_TREE;
3871 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3873 expr = al->expr;
3875 gfc_init_se (&se, NULL);
3876 gfc_start_block (&se.pre);
3878 se.want_pointer = 1;
3879 se.descriptor_only = 1;
3880 gfc_conv_expr (&se, expr);
3882 if (!gfc_array_allocate (&se, expr, pstat))
3884 /* A scalar or derived type. */
3885 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3887 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3888 tmp = se.string_length;
3890 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3891 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3892 fold_convert (TREE_TYPE (se.expr), tmp));
3893 gfc_add_expr_to_block (&se.pre, tmp);
3895 if (code->expr)
3897 tmp = build1_v (GOTO_EXPR, error_label);
3898 parm = fold_build2 (NE_EXPR, boolean_type_node,
3899 stat, build_int_cst (TREE_TYPE (stat), 0));
3900 tmp = fold_build3 (COND_EXPR, void_type_node,
3901 parm, tmp, build_empty_stmt ());
3902 gfc_add_expr_to_block (&se.pre, tmp);
3905 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3907 tmp = build_fold_indirect_ref (se.expr);
3908 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3909 gfc_add_expr_to_block (&se.pre, tmp);
3914 tmp = gfc_finish_block (&se.pre);
3915 gfc_add_expr_to_block (&block, tmp);
3918 /* Assign the value to the status variable. */
3919 if (code->expr)
3921 tmp = build1_v (LABEL_EXPR, error_label);
3922 gfc_add_expr_to_block (&block, tmp);
3924 gfc_init_se (&se, NULL);
3925 gfc_conv_expr_lhs (&se, code->expr);
3926 tmp = convert (TREE_TYPE (se.expr), stat);
3927 gfc_add_modify (&block, se.expr, tmp);
3930 return gfc_finish_block (&block);
3934 /* Translate a DEALLOCATE statement.
3935 There are two cases within the for loop:
3936 (1) deallocate(a1, a2, a3) is translated into the following sequence
3937 _gfortran_deallocate(a1, 0B)
3938 _gfortran_deallocate(a2, 0B)
3939 _gfortran_deallocate(a3, 0B)
3940 where the STAT= variable is passed a NULL pointer.
3941 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3942 astat = 0
3943 _gfortran_deallocate(a1, &stat)
3944 astat = astat + stat
3945 _gfortran_deallocate(a2, &stat)
3946 astat = astat + stat
3947 _gfortran_deallocate(a3, &stat)
3948 astat = astat + stat
3949 In case (1), we simply return at the end of the for loop. In case (2)
3950 we set STAT= astat. */
3951 tree
3952 gfc_trans_deallocate (gfc_code * code)
3954 gfc_se se;
3955 gfc_alloc *al;
3956 gfc_expr *expr;
3957 tree apstat, astat, pstat, stat, tmp;
3958 stmtblock_t block;
3960 gfc_start_block (&block);
3962 /* Set up the optional STAT= variable. */
3963 if (code->expr)
3965 tree gfc_int4_type_node = gfc_get_int_type (4);
3967 /* Variable used with the library call. */
3968 stat = gfc_create_var (gfc_int4_type_node, "stat");
3969 pstat = build_fold_addr_expr (stat);
3971 /* Running total of possible deallocation failures. */
3972 astat = gfc_create_var (gfc_int4_type_node, "astat");
3973 apstat = build_fold_addr_expr (astat);
3975 /* Initialize astat to 0. */
3976 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3978 else
3979 pstat = apstat = stat = astat = NULL_TREE;
3981 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3983 expr = al->expr;
3984 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3986 gfc_init_se (&se, NULL);
3987 gfc_start_block (&se.pre);
3989 se.want_pointer = 1;
3990 se.descriptor_only = 1;
3991 gfc_conv_expr (&se, expr);
3993 if (expr->ts.type == BT_DERIVED
3994 && expr->ts.derived->attr.alloc_comp)
3996 gfc_ref *ref;
3997 gfc_ref *last = NULL;
3998 for (ref = expr->ref; ref; ref = ref->next)
3999 if (ref->type == REF_COMPONENT)
4000 last = ref;
4002 /* Do not deallocate the components of a derived type
4003 ultimate pointer component. */
4004 if (!(last && last->u.c.component->attr.pointer)
4005 && !(!last && expr->symtree->n.sym->attr.pointer))
4007 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
4008 expr->rank);
4009 gfc_add_expr_to_block (&se.pre, tmp);
4013 if (expr->rank)
4014 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4015 else
4017 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4018 gfc_add_expr_to_block (&se.pre, tmp);
4020 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4021 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4024 gfc_add_expr_to_block (&se.pre, tmp);
4026 /* Keep track of the number of failed deallocations by adding the stat
4027 of the last deallocation to the running total. */
4028 if (code->expr)
4030 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4031 gfc_add_modify (&se.pre, astat, apstat);
4034 tmp = gfc_finish_block (&se.pre);
4035 gfc_add_expr_to_block (&block, tmp);
4039 /* Assign the value to the status variable. */
4040 if (code->expr)
4042 gfc_init_se (&se, NULL);
4043 gfc_conv_expr_lhs (&se, code->expr);
4044 tmp = convert (TREE_TYPE (se.expr), astat);
4045 gfc_add_modify (&block, se.expr, tmp);
4048 return gfc_finish_block (&block);