/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"

typedef struct iter_info
{
  tree var;
  tree start;
  tree end;
  tree step;
  struct iter_info *next;
}
iter_info;
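
/* Descriptive note (inferred from how the fields are used below): each
   forall_info describes one level of a FORALL nest.  THIS_LOOP is the
   chain of iterators declared at this level and NVAR their number;
   MASK/MASKINDEX hold the optional FORALL mask and its running index;
   SIZE is the extent of the iteration space; PREV_NEST links to the
   enclosing FORALL level.  */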

typedef struct forall_info
{
  iter_info *this_loop;
  tree mask;
  tree maskindex;
  int nvar;
  tree size;
  struct forall_info *prev_nest;
}
forall_info;

static void gfc_trans_where_2 (gfc_code *, tree, bool,
			       forall_info *, stmtblock_t *);

/* Translate a F95 label number to a LABEL_EXPR.  */

tree
gfc_trans_label_here (gfc_code * code)
{
  return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
}

/* Given a variable expression which has been ASSIGNed to, find the decl
   containing the auxiliary variables.  For variables in common blocks this
   is a field_decl.  */

void
gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
{
  gcc_assert (expr->symtree->n.sym->attr.assign == 1);
  gfc_conv_expr (se, expr);
  /* Deals with variable in common block.  Get the field declaration.  */
  if (TREE_CODE (se->expr) == COMPONENT_REF)
    se->expr = TREE_OPERAND (se->expr, 1);
  /* Deals with dummy argument.  Get the parameter declaration.  */
  else if (TREE_CODE (se->expr) == INDIRECT_REF)
    se->expr = TREE_OPERAND (se->expr, 0);
}

/* Translate a label assignment statement.  */

tree
gfc_trans_label_assign (gfc_code * code)
{
  tree label_tree;
  gfc_se se;
  tree len;
  tree addr;
  tree len_tree;
  int label_len;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr);

  len = GFC_DECL_STRING_LEN (se.expr);
  addr = GFC_DECL_ASSIGN_ADDR (se.expr);

  label_tree = gfc_get_label_decl (code->label);

  if (code->label->defined == ST_LABEL_TARGET)
    {
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
      len_tree = integer_minus_one_node;
    }
  else
    {
      gfc_expr *format = code->label->format;

      label_len = format->value.character.length;
      len_tree = build_int_cst (NULL_TREE, label_len);
      label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
						format->value.character.string);
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
    }

  gfc_add_modify (&se.pre, len, len_tree);
  gfc_add_modify (&se.pre, addr, label_tree);

  return gfc_finish_block (&se.pre);
}
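
/* For example (illustrative only), "ASSIGN 100 TO L", where label 100 is
   a branch target, stores roughly
       L.len  = -1;
       L.addr = &&__label_100;
   in the auxiliary variables, whereas assigning a FORMAT label stores the
   format string and its length instead, so a non-negative length marks
   the FORMAT case later on.  The actual fields are accessed through
   GFC_DECL_STRING_LEN and GFC_DECL_ASSIGN_ADDR.  */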

/* Translate a GOTO statement.  */

tree
gfc_trans_goto (gfc_code * code)
{
  locus loc = code->loc;
  tree assigned_goto;
  tree target;
  tree tmp;
  gfc_se se;

  if (code->label != NULL)
    return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));

  /* ASSIGNED GOTO.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr);
  tmp = GFC_DECL_STRING_LEN (se.expr);
  tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
		     build_int_cst (TREE_TYPE (tmp), -1));
  gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
			   "Assigned label is not a target label");

  assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);

  code = code->block;
  if (code == NULL)
    {
      target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
      gfc_add_expr_to_block (&se.pre, target);
      return gfc_finish_block (&se.pre);
    }

  /* Check the label list.  */
  do
    {
      target = gfc_get_label_decl (code->label);
      tmp = gfc_build_addr_expr (pvoid_type_node, target);
      tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
      tmp = build3_v (COND_EXPR, tmp,
		      fold_build1 (GOTO_EXPR, void_type_node, target),
		      build_empty_stmt ());
      gfc_add_expr_to_block (&se.pre, tmp);
      code = code->block;
    }
  while (code != NULL);
  gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
			   "Assigned label is not in the list");

  return gfc_finish_block (&se.pre);
}

/* Translate an ENTRY statement.  Just adds a label for this entry point.  */

tree
gfc_trans_entry (gfc_code * code)
{
  return build1_v (LABEL_EXPR, code->ext.entry->label);
}

/* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
   elemental subroutines.  Make temporaries for output arguments if any such
   dependencies are found.  Output arguments are chosen because internal_unpack
   can be used, as is, to copy the result back to the variable.  */
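
/* For example, in "CALL esub (a(2:6), a(1:5))", where ESUB is elemental and
   its first dummy is INTENT(OUT), the two array sections overlap, so the
   result is written to a temporary and unpacked back into A after the call.
   (Illustrative example; the exact test is gfc_check_fncall_dependency
   below.)  */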

static void
gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
				 gfc_symbol * sym, gfc_actual_arglist * arg,
				 gfc_dep_check check_variable)
{
  gfc_actual_arglist *arg0;
  gfc_expr *e;
  gfc_formal_arglist *formal;
  gfc_loopinfo tmp_loop;
  gfc_se parmse;
  gfc_ss *ss;
  gfc_ss_info *info;
  gfc_symbol *fsym;
  int n;
  tree data;
  tree offset;
  tree size;
  tree tmp;

  if (loopse->ss == NULL)
    return;

  ss = loopse->ss;
  arg0 = arg;
  formal = sym->formal;

  /* Loop over all the arguments testing for dependencies.  */
  for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
    {
      e = arg->expr;
      if (e == NULL)
	continue;

      /* Obtain the info structure for the current argument.  */
      info = NULL;
      for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
	{
	  if (ss->expr != e)
	    continue;
	  info = &ss->data.info;
	  break;
	}

      /* If there is a dependency, create a temporary and use it
	 instead of the variable.  */
      fsym = formal ? formal->sym : NULL;
      if (e->expr_type == EXPR_VARIABLE
	    && e->rank && fsym
	    && fsym->attr.intent != INTENT_IN
	    && gfc_check_fncall_dependency (e, fsym->attr.intent,
					    sym, arg0, check_variable))
	{
	  tree initial, temptype;
	  stmtblock_t temp_post;

	  /* Make a local loopinfo for the temporary creation, so that
	     none of the other ss->info's have to be renormalized.  */
	  gfc_init_loopinfo (&tmp_loop);
	  for (n = 0; n < info->dimen; n++)
	    {
	      tmp_loop.to[n] = loopse->loop->to[n];
	      tmp_loop.from[n] = loopse->loop->from[n];
	      tmp_loop.order[n] = loopse->loop->order[n];
	    }

	  /* Obtain the argument descriptor for unpacking.  */
	  gfc_init_se (&parmse, NULL);
	  parmse.want_pointer = 1;
	  gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
	  gfc_add_block_to_block (&se->pre, &parmse.pre);

	  /* If we've got INTENT(INOUT), initialize the array temporary with
	     a copy of the values.  */
	  if (fsym->attr.intent == INTENT_INOUT)
	    initial = parmse.expr;
	  else
	    initial = NULL_TREE;

	  /* Find the type of the temporary to create; we don't use the type
	     of e itself as this breaks for subcomponent-references in e (where
	     the type of e is that of the final reference, but parmse.expr's
	     type corresponds to the full derived-type).  */
	  /* TODO: Fix this somehow so we don't need a temporary of the whole
	     array but instead only the components referenced.  */
	  temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor.  */
	  gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
	  temptype = TREE_TYPE (temptype);
	  temptype = gfc_get_element_type (temptype);

	  /* Generate the temporary.  Cleaning up the temporary should be the
	     very last thing done, so we add the code to a new block and add it
	     to se->post as last instructions.  */
	  size = gfc_create_var (gfc_array_index_type, NULL);
	  data = gfc_create_var (pvoid_type_node, NULL);
	  gfc_init_block (&temp_post);
	  tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
					     &tmp_loop, info, temptype,
					     initial,
					     false, true, false,
					     &arg->expr->where);
	  gfc_add_modify (&se->pre, size, tmp);
	  tmp = fold_convert (pvoid_type_node, info->data);
	  gfc_add_modify (&se->pre, data, tmp);

	  /* Calculate the offset for the temporary.  */
	  offset = gfc_index_zero_node;
	  for (n = 0; n < info->dimen; n++)
	    {
	      tmp = gfc_conv_descriptor_stride (info->descriptor,
						gfc_rank_cst[n]);
	      tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
				 loopse->loop->from[n], tmp);
	      offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
				    offset, tmp);
	    }
	  info->offset = gfc_create_var (gfc_array_index_type, NULL);
	  gfc_add_modify (&se->pre, info->offset, offset);

	  /* Copy the result back using unpack.  */
	  tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
	  gfc_add_expr_to_block (&se->post, tmp);

	  /* parmse.pre is already added above.  */
	  gfc_add_block_to_block (&se->post, &parmse.post);
	  gfc_add_block_to_block (&se->post, &temp_post);
	}
    }
}

/* Translate the CALL statement.  Builds a call to an F95 subroutine.  */

tree
gfc_trans_call (gfc_code * code, bool dependency_check)
{
  gfc_se se;
  gfc_ss * ss;
  int has_alternate_specifier;
  gfc_dep_check check_variable;

  /* A CALL starts a new block because the actual arguments may have to
     be evaluated first.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  gcc_assert (code->resolved_sym);

  ss = gfc_ss_terminator;
  if (code->resolved_sym->attr.elemental)
    ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);

  /* Is not an elemental subroutine call with array valued arguments.  */
  if (ss == gfc_ss_terminator)
    {
      /* Translate the call.  */
      has_alternate_specifier
	= gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
				  NULL_TREE);

      /* A subroutine without side-effect, by definition, does nothing!  */
      TREE_SIDE_EFFECTS (se.expr) = 1;

      /* Chain the pieces together and return the block.  */
      if (has_alternate_specifier)
	{
	  gfc_code *select_code;
	  gfc_symbol *sym;
	  select_code = code->next;
	  gcc_assert(select_code->op == EXEC_SELECT);
	  sym = select_code->expr->symtree->n.sym;
	  se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
	  if (sym->backend_decl == NULL)
	    sym->backend_decl = gfc_get_symbol_decl (sym);
	  gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
	}
      else
	gfc_add_expr_to_block (&se.pre, se.expr);

      gfc_add_block_to_block (&se.pre, &se.post);
    }
  else
    {
      /* An elemental subroutine call with array valued arguments has
	 to be scalarized.  */
      gfc_loopinfo loop;
      stmtblock_t body;
      stmtblock_t block;
      gfc_se loopse;
      gfc_se depse;

      /* gfc_walk_elemental_function_args renders the ss chain in the
	 reverse order to the actual argument order.  */
      ss = gfc_reverse_ss (ss);

      /* Initialize the loop.  */
      gfc_init_se (&loopse, NULL);
      gfc_init_loopinfo (&loop);
      gfc_add_ss_to_loop (&loop, ss);

      gfc_conv_ss_startstride (&loop);
      /* TODO: gfc_conv_loop_setup generates a temporary for vector
	 subscripts.  This could be prevented in the elemental case
	 as temporaries are handled separately
	 (below in gfc_conv_elemental_dependencies).  */
      gfc_conv_loop_setup (&loop, &code->expr->where);
      gfc_mark_ss_chain_used (ss, 1);

      /* Convert the arguments, checking for dependencies.  */
      gfc_copy_loopinfo_to_se (&loopse, &loop);
      loopse.ss = ss;

      /* For operator assignment, do dependency checking.  */
      if (dependency_check)
	check_variable = ELEM_CHECK_VARIABLE;
      else
	check_variable = ELEM_DONT_CHECK_VARIABLE;

      gfc_init_se (&depse, NULL);
      gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
				       code->ext.actual, check_variable);

      gfc_add_block_to_block (&loop.pre,  &depse.pre);
      gfc_add_block_to_block (&loop.post, &depse.post);

      /* Generate the loop body.  */
      gfc_start_scalarized_body (&loop, &body);
      gfc_init_block (&block);

      /* Add the subroutine call to the block.  */
      gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
			      NULL_TREE);
      gfc_add_expr_to_block (&loopse.pre, loopse.expr);

      gfc_add_block_to_block (&block, &loopse.pre);
      gfc_add_block_to_block (&block, &loopse.post);

      /* Finish up the loop block and the loop.  */
      gfc_add_expr_to_block (&body, gfc_finish_block (&block));
      gfc_trans_scalarizing_loops (&loop, &body);
      gfc_add_block_to_block (&se.pre, &loop.pre);
      gfc_add_block_to_block (&se.pre, &loop.post);
      gfc_add_block_to_block (&se.pre, &se.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&se.pre);
}

/* Translate the RETURN statement.  */

tree
gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
{
  if (code->expr)
    {
      gfc_se se;
      tree tmp;
      tree result;

      /* If code->expr is not NULL, this return statement must appear
	 in a subroutine and current_fake_result_decl has already
	 been generated.  */

      result = gfc_get_fake_result_decl (NULL, 0);
      if (!result)
	{
	  gfc_warning ("An alternate return at %L without a * dummy argument",
		       &code->expr->where);
	  return build1_v (GOTO_EXPR, gfc_get_return_label ());
	}

      /* Start a new block for this statement.  */
      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      gfc_conv_expr (&se, code->expr);

      tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
			 fold_convert (TREE_TYPE (result), se.expr));
      gfc_add_expr_to_block (&se.pre, tmp);

      tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
      gfc_add_expr_to_block (&se.pre, tmp);
      gfc_add_block_to_block (&se.pre, &se.post);
      return gfc_finish_block (&se.pre);
    }
  else
    return build1_v (GOTO_EXPR, gfc_get_return_label ());
}

/* Translate the PAUSE statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_pause (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
      tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr);
      tmp = build_call_expr (gfor_fndecl_pause_string, 2,
			     se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}

/* Translate the STOP statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_stop (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
      tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr);
      tmp = build_call_expr (gfor_fndecl_stop_string, 2,
			     se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}

/* Generate GENERIC for the IF construct.  This function also deals with
   the simple IF statement, because the front end translates the IF
   statement into an IF construct.

   We translate:

   IF (cond) THEN
      then_clause
   ELSEIF (cond2)
      elseif_clause
   ELSE
      else_clause
   ENDIF

   into:

   pre_cond_s;
   if (cond_s)
     {
       then_clause;
     }
   else
     {
       pre_cond_s
       if (cond_s)
	 {
	   elseif_clause
	 }
       else
	 {
	   else_clause;
	 }
     }

   where COND_S is the simplified version of the predicate.  PRE_COND_S
   are the pre side-effects produced by the translation of the
   conditional.
   We need to build the chain recursively otherwise we run into
   problems with folding incomplete statements.  */

static tree
gfc_trans_if_1 (gfc_code * code)
{
  gfc_se if_se;
  tree stmt, elsestmt;

  /* Check for an unconditional ELSE clause.  */
  if (!code->expr)
    return gfc_trans_code (code->next);

  /* Initialize a statement builder for each block.  Puts in NULL_TREEs.  */
  gfc_init_se (&if_se, NULL);
  gfc_start_block (&if_se.pre);

  /* Calculate the IF condition expression.  */
  gfc_conv_expr_val (&if_se, code->expr);

  /* Translate the THEN clause.  */
  stmt = gfc_trans_code (code->next);

  /* Translate the ELSE clause.  */
  if (code->block)
    elsestmt = gfc_trans_if_1 (code->block);
  else
    elsestmt = build_empty_stmt ();

  /* Build the condition expression and add it to the condition block.  */
  stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);

  gfc_add_expr_to_block (&if_se.pre, stmt);

  /* Finish off this statement.  */
  return gfc_finish_block (&if_se.pre);
}

tree
gfc_trans_if (gfc_code * code)
{
  /* Ignore the top EXEC_IF, it only announces an IF construct.  The
     actual code we must translate is in code->block.  */

  return gfc_trans_if_1 (code->block);
}

/* Translate an arithmetic IF expression.

   IF (cond) label1, label2, label3 translates to

     if (cond <= 0)
       {
	 if (cond < 0)
	   goto label1;
	 else // cond == 0
	   goto label2;
       }
     else // cond > 0
       goto label3;

   An optimized version can be generated in case of equal labels.
   E.g., if label1 is equal to label2, we can translate it to

     if (cond <= 0)
       goto label1;
     else
       goto label3;
*/

tree
gfc_trans_arithmetic_if (gfc_code * code)
{
  gfc_se se;
  tree tmp;
  tree branch1;
  tree branch2;
  tree zero;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  /* Pre-evaluate COND.  */
  gfc_conv_expr_val (&se, code->expr);
  se.expr = gfc_evaluate_now (se.expr, &se.pre);

  /* Build something to compare with.  */
  zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);

  if (code->label->value != code->label2->value)
    {
      /* If (cond < 0) take branch1 else take branch2.
	 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases.  */
      branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));

      if (code->label->value != code->label3->value)
	tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
      else
	tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);

      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }
  else
    branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));

  if (code->label->value != code->label3->value
      && code->label2->value != code->label3->value)
    {
      /* if (cond <= 0) take branch1 else take branch2.  */
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
      tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }

  /* Append the COND_EXPR to the evaluation of COND, and return.  */
  gfc_add_expr_to_block (&se.pre, branch1);
  return gfc_finish_block (&se.pre);
}

/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect
   results.
   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [Evaluate loop bounds and step]
   dovar = from;
   if ((step > 0) ? (dovar <= to) : (dovar >= to))
     {
       for (;;)
	 {
	   body;
   cycle_label:
	   cond = (dovar == to);
	   dovar += step;
	   if (cond) goto end_label;
	 }
     }
   end_label:

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */

static tree
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
		     tree from, tree to, tree step)
{
  stmtblock_t body;
  tree type;
  tree cond;
  tree tmp;
  tree saved_dovar = NULL;
  tree cycle_label;
  tree exit_label;

  type = TREE_TYPE (dovar);

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (pblock, dovar, from);

  /* Save value for do-tinkering checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify (pblock, saved_dovar, dovar);
    }

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
			       "Loop variable has been modified");
    }

  /* Evaluate the loop condition.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
  cond = gfc_evaluate_now (cond, &body);

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify (&body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify (&body, saved_dovar, dovar);

  /* The loop exit.  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&body, tmp);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = build1_v (LOOP_EXPR, tmp);

  /* Only execute the loop if the number of iterations is positive.  */
  if (tree_int_cst_sgn (step) > 0)
    cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
  else
    cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
}

/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially
   so for Fortran.

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;
   dovar = from;
   if (empty) goto exit_label;
   for (;;)
     {
       body;
cycle_label:
       dovar += step
       if (countm1 == 0) goto exit_label;
       countm1--;
     }
exit_label:

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */
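
/* For example, with 32-bit integers "DO i = -HUGE(i), HUGE(i)" iterates
   about 2**32 - 1 times; that count overflows the signed type, but
   countm1 (the count minus one) always fits in the corresponding
   unsigned type.  */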

tree
gfc_trans_do (gfc_code * code)
{
  gfc_se se;
  tree dovar;
  tree saved_dovar = NULL;
  tree from;
  tree to;
  tree step;
  tree countm1;
  tree type;
  tree utype;
  tree cond;
  tree cycle_label;
  tree exit_label;
  tree tmp;
  tree pos_step;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (&block, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (&block, &se.pre);
  from = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (&block, &se.pre);
  to = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (&block, &se.pre);
  step = gfc_evaluate_now (se.expr, &block);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
			 fold_convert (type, integer_zero_node));
      gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
			       "DO step value is zero");
    }

  /* Special case simple loops.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (integer_onep (step)
	  || tree_int_cst_equal (step, integer_minus_one_node)))
    return gfc_trans_simple_do (code, &block, dovar, from, to, step);

  pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
			  fold_convert (type, integer_zero_node));

  if (TREE_CODE (type) == INTEGER_TYPE)
    utype = unsigned_type_for (type);
  else
    utype = unsigned_type_for (gfc_array_index_type);
  countm1 = gfc_create_var (utype, "countm1");

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);
  TREE_USED (exit_label) = 1;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (&block, dovar, from);

  /* Save value for do-tinkering checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify (&block, saved_dovar, dovar);
    }

  /* Initialize loop count and jump to exit label if the loop is empty.
     This code is executed before we enter the loop body.  We generate:
     if (step > 0)
       {
	 if (to < from) goto exit_label;
	 countm1 = (to - from) / step;
       }
     else
       {
	 if (to > from) goto exit_label;
	 countm1 = (from - to) / -step;
       }  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      tree pos, neg;

      tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
      pos = fold_build3 (COND_EXPR, void_type_node, tmp,
			 build1_v (GOTO_EXPR, exit_label),
			 build_empty_stmt ());
      tmp = fold_build2 (MINUS_EXPR, type, to, from);
      tmp = fold_convert (utype, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
			 fold_convert (utype, step));
      tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
      pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);

      tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
      neg = fold_build3 (COND_EXPR, void_type_node, tmp,
			 build1_v (GOTO_EXPR, exit_label),
			 build_empty_stmt ());
      tmp = fold_build2 (MINUS_EXPR, type, from, to);
      tmp = fold_convert (utype, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
			 fold_convert (utype, fold_build1 (NEGATE_EXPR,
							   type, step)));
      tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
      neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);

      tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
      gfc_add_expr_to_block (&block, tmp);
    }
  else
    {
      /* TODO: We could use the same width as the real type.
	 This would probably cause more problems than it solves
	 when we implement "long double" types.  */
      tmp = fold_build2 (MINUS_EXPR, type, to, from);
      tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
      tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
      gfc_add_modify (&block, countm1, tmp);

      /* We need a special check for empty loops:
	 empty = (step > 0 ? to < from : to > from);  */
      tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
			 fold_build2 (LT_EXPR, boolean_type_node, to, from),
			 fold_build2 (GT_EXPR, boolean_type_node, to, from));
      /* If the loop is empty, go directly to the exit label.  */
      tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
			 build1_v (GOTO_EXPR, exit_label),
			 build_empty_stmt ());
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Loop body.  */
  gfc_start_block (&body);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used).  cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
			       "Loop variable has been modified");
    }

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify (&body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify (&body, saved_dovar, dovar);

  /* End with the loop condition.  Loop until countm1 == 0.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
		      build_int_cst (utype, 0));
  tmp = build1_v (GOTO_EXPR, exit_label);
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&body, tmp);

  /* Decrement the loop count.  */
  tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
  gfc_add_modify (&body, countm1, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&body);

  /* The for loop itself.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

/* Translate the DO WHILE construct.

   We translate

   DO WHILE (cond)
      body
   END DO

   to:

   for ( ; ; )
     {
       pre_cond;
       if (! cond) goto exit_label;
       body;
cycle_label:
     }
exit_label:

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */

tree
gfc_trans_do_while (gfc_code * code)
{
  gfc_se cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;
  stmtblock_t block;

  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Create a GIMPLE version of the exit condition.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond.expr, tmp, build_empty_stmt ());
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the loop.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

/* Translate the SELECT CASE construct for INTEGER case expressions,
   without killing all potential optimizations.  The problem is that
   Fortran allows unbounded cases, but the back-end does not, so we
   need to intercept those before we enter the equivalent SWITCH_EXPR
   we can build.

   For example, we translate this,

       SELECT CASE (expr)
       CASE (:100,101,105:115)
	  block_1
       CASE (190:199,200:)
	  block_2
       CASE (300)
	  block_3
       CASE DEFAULT
	  block_4
       END SELECT

   to the GENERIC equivalent,

     switch (expr)
       {
	 case (minimum value for typeof(expr)) ... 100:
	 case 101:
	 case 105 ... 115:
	   block_1;
	   goto end_label;

	 case 190 ... 199:
	 case 200 ... (maximum value for typeof(expr)):
	   block_2;
	   goto end_label;

	 case 300:
	   block_3;
	   goto end_label;

	 default:
	   block_4;
	   goto end_label;
       }

     end_label:  */

static tree
gfc_trans_integer_select (gfc_code * code)
{
  gfc_code *c;
  gfc_case *cp;
  tree end_label;
  tree tmp;
  gfc_se se;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Calculate the switch expression.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr);
  gfc_add_block_to_block (&block, &se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
	{
	  tree low, high;
	  tree label;

	  /* Assume it's the default case.  */
	  low = high = NULL_TREE;

	  if (cp->low)
	    {
	      low = gfc_conv_mpz_to_tree (cp->low->value.integer,
					  cp->low->ts.kind);

	      /* If there's only a lower bound, set the high bound to the
		 maximum value of the case expression.  */
	      if (!cp->high)
		high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
	    }

	  if (cp->high)
	    {
	      /* Three cases are possible here:

		 1) There is no lower bound, e.g. CASE (:N).
		 2) There is a lower bound .NE. high bound, that is
		    a case range, e.g. CASE (N:M) where M>N (we make
		    sure that M>N during type resolution).
		 3) There is a lower bound, and it has the same value
		    as the high bound, e.g. CASE (N:N).  This is our
		    internal representation of CASE(N).

		 In the first and second case, we need to set a value for
		 high.  In the third case, we don't because the GCC middle
		 end represents a single case value by just letting high be
		 a NULL_TREE.  We can't do that because we need to be able
		 to represent unbounded cases.  */

	      if (!cp->low
		  || (cp->low
		      && mpz_cmp (cp->low->value.integer,
				  cp->high->value.integer) != 0))
		high = gfc_conv_mpz_to_tree (cp->high->value.integer,
					     cp->high->ts.kind);

	      /* Unbounded case.  */
	      if (!cp->low)
		low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
	    }

	  /* Build a label.  */
	  label = gfc_build_label_decl (NULL_TREE);

	  /* Add this case label.
	     Add parameter 'label', make it match GCC backend.  */
	  tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
			     low, high, label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

/* Translate the SELECT CASE construct for LOGICAL case expressions.

   There are only two cases possible here, even though the standard
   does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
   .FALSE., and DEFAULT.

   We never generate more than two blocks here.  Instead, we always
   try to eliminate the DEFAULT case.  This way, we can translate this
   kind of SELECT construct to a simple

   if {} else {};

   expression in GENERIC.  */

static tree
gfc_trans_logical_select (gfc_code * code)
{
  gfc_code *c;
  gfc_code *t, *f, *d;
  gfc_case *cp;
  gfc_se se;
  stmtblock_t block;

  /* Assume we don't have any cases at all.  */
  t = f = d = NULL;

  /* Now see which ones we actually do have.  We can have at most two
     cases in a single case list: one for .TRUE. and one for .FALSE.
     The default case is always separate.  If the cases for .TRUE. and
     .FALSE. are in the same case list, the block for that case list is
     always executed, and we don't generate a COND_EXPR.  */
  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
	{
	  if (cp->low)
	    {
	      if (cp->low->value.logical == 0) /* .FALSE.  */
		f = c;
	      else /* if (cp->value.logical != 0), thus .TRUE.  */
		t = c;
	    }
	  else
	    d = c;
	}
    }

  /* Start a new block.  */
  gfc_start_block (&block);

  /* Calculate the switch expression.  We always need to do this
     because it may have side effects.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr);
  gfc_add_block_to_block (&block, &se.pre);

  if (t == f && t != NULL)
    {
      /* Cases for .TRUE. and .FALSE. are in the same block.  Just
	 translate the code for these cases, append it to the current
	 block.  */
      gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
    }
  else
    {
      tree true_tree, false_tree, stmt;

      true_tree = build_empty_stmt ();
      false_tree = build_empty_stmt ();

      /* If we have a case for .TRUE. and for .FALSE., discard the default case.
	 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
	 make the missing case the default case.  */
      if (t != NULL && f != NULL)
	d = NULL;
      else if (d != NULL)
	{
	  if (t == NULL)
	    t = d;
	  else
	    f = d;
	}

      /* Translate the code for each of these blocks, and append it to
	 the current block.  */
      if (t != NULL)
	true_tree = gfc_trans_code (t->next);

      if (f != NULL)
	false_tree = gfc_trans_code (f->next);

      stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
			  true_tree, false_tree);
      gfc_add_expr_to_block (&block, stmt);
    }

  return gfc_finish_block (&block);
}

/* Translate the SELECT CASE construct for CHARACTER case expressions.
   Instead of generating compares and jumps, it is far simpler to
   generate a data structure describing the cases in order and call a
   library subroutine that locates the right case.
   This is particularly true because this is the only case where we
   might have to dispose of a temporary.
   The library subroutine returns a pointer to jump to or NULL if no
   branches are to be taken.  */

static tree
gfc_trans_character_select (gfc_code *code)
{
  tree init, node, end_label, tmp, type, case_num, label, fndecl;
  stmtblock_t block, body;
  gfc_case *cp, *d;
  gfc_code *c;
  gfc_se se;
  int n, k;

  /* The jump table types are stored in static variables to avoid
     constructing them from scratch every single time.  */
  static tree select_struct[2];
  static tree ss_string1[2], ss_string1_len[2];
  static tree ss_string2[2], ss_string2_len[2];
  static tree ss_target[2];

  tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);

  if (code->expr->ts.kind == 1)
    k = 0;
  else if (code->expr->ts.kind == 4)
    k = 1;
  else
    gcc_unreachable ();

  if (select_struct[k] == NULL)
    {
      select_struct[k] = make_node (RECORD_TYPE);

      if (code->expr->ts.kind == 1)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
      else if (code->expr->ts.kind == 4)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
      else
	gcc_unreachable ();

#undef ADD_FIELD
#define ADD_FIELD(NAME, TYPE)						\
  ss_##NAME[k] = gfc_add_field_to_struct				\
     (&(TYPE_FIELDS (select_struct[k])), select_struct[k],		\
      get_identifier (stringize(NAME)), TYPE)

      ADD_FIELD (string1, pchartype);
      ADD_FIELD (string1_len, gfc_charlen_type_node);

      ADD_FIELD (string2, pchartype);
      ADD_FIELD (string2_len, gfc_charlen_type_node);

      ADD_FIELD (target, integer_type_node);
#undef ADD_FIELD

      gfc_finish_type (select_struct[k]);
    }
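
  /* Assign each case a sequential number; the runtime search returns this
     number for the matching case, and it is what the SWITCH_EXPR built
     below dispatches on.  */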
  cp = code->block->ext.case_list;
  while (cp->left != NULL)
    cp = cp->left;

  n = 0;
  for (d = cp; d; d = d->right)
    d->n = n++;

  end_label = gfc_build_label_decl (NULL_TREE);

  /* Generate the body */
  gfc_start_block (&block);
  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (d = c->ext.case_list; d; d = d->next)
	{
	  label = gfc_build_label_decl (NULL_TREE);
	  tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
			     build_int_cst (NULL_TREE, d->n),
			     build_int_cst (NULL_TREE, d->n), label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Generate the structure describing the branches */
  init = NULL_TREE;

  for(d = cp; d; d = d->right)
    {
      node = NULL_TREE;

      gfc_init_se (&se, NULL);

      if (d->low == NULL)
	{
	  node = tree_cons (ss_string1[k], null_pointer_node, node);
	  node = tree_cons (ss_string1_len[k], integer_zero_node, node);
	}
      else
	{
	  gfc_conv_expr_reference (&se, d->low);

	  node = tree_cons (ss_string1[k], se.expr, node);
	  node = tree_cons (ss_string1_len[k], se.string_length, node);
	}

      if (d->high == NULL)
	{
	  node = tree_cons (ss_string2[k], null_pointer_node, node);
	  node = tree_cons (ss_string2_len[k], integer_zero_node, node);
	}
      else
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_reference (&se, d->high);

	  node = tree_cons (ss_string2[k], se.expr, node);
	  node = tree_cons (ss_string2_len[k], se.string_length, node);
	}

      node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
			node);

      tmp = build_constructor_from_list (select_struct[k], nreverse (node));
      init = tree_cons (NULL_TREE, tmp, init);
    }

  type = build_array_type (select_struct[k],
			   build_index_type (build_int_cst (NULL_TREE, n-1)));

  init = build_constructor_from_list (type, nreverse(init));
  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;
  /* Create a static variable to hold the jump table.  */
  tmp = gfc_create_var (type, "jumptable");
  TREE_CONSTANT (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;
  init = tmp;

  /* Build the library call */
  init = gfc_build_addr_expr (pvoid_type_node, init);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_reference (&se, code->expr);

  gfc_add_block_to_block (&block, &se.pre);

  if (code->expr->ts.kind == 1)
    fndecl = gfor_fndecl_select_string;
  else if (code->expr->ts.kind == 4)
    fndecl = gfor_fndecl_select_string_char4;
  else
    gcc_unreachable ();

  tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
			 se.expr, se.string_length);
  case_num = gfc_create_var (integer_type_node, "case_num");
  gfc_add_modify (&block, case_num, tmp);

  gfc_add_block_to_block (&block, &se.post);

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

/* Translate the three variants of the SELECT CASE construct.

   SELECT CASEs with INTEGER case expressions can be translated to an
   equivalent GENERIC switch statement, and for LOGICAL case
   expressions we build one or two if-else compares.

   SELECT CASEs with CHARACTER case expressions are a whole different
   story, because they don't exist in GENERIC.  So we sort them and
   do a binary search at runtime.

   Fortran has no BREAK statement, and it does not allow jumps from
   one case block to another.  That makes things a lot easier for
   the optimizers.  */

tree
gfc_trans_select (gfc_code * code)
{
  gcc_assert (code && code->expr);

  /* Empty SELECT constructs are legal.  */
  if (code->block == NULL)
    return build_empty_stmt ();

  /* Select the correct translation function.  */
  switch (code->expr->ts.type)
    {
    case BT_LOGICAL:	return gfc_trans_logical_select (code);
    case BT_INTEGER:	return gfc_trans_integer_select (code);
    case BT_CHARACTER:	return gfc_trans_character_select (code);
    default:
      gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
      /* Not reached */
    }
}

/* Traversal function to substitute a replacement symtree if the symbol
   in the expression is the same as that passed.  f == 2 signals that
   that variable itself is not to be checked - only the references.
   This group of functions is used when the variable expression in a
   FORALL assignment has internal references.  For example:
		FORALL (i = 1:4) p(p(i)) = i
   The only recourse here is to store a copy of 'p' for the index
   expression.  */

static gfc_symtree *new_symtree;
static gfc_symtree *old_symtree;

static bool
forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
{
  if (expr->expr_type != EXPR_VARIABLE)
    return false;

  if (*f == 2)
    *f = 1;
  else if (expr->symtree->n.sym == sym)
    expr->symtree = new_symtree;

  return false;
}

static void
forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
{
  gfc_traverse_expr (e, sym, forall_replace, f);
}

static bool
forall_restore (gfc_expr *expr,
		gfc_symbol *sym ATTRIBUTE_UNUSED,
		int *f ATTRIBUTE_UNUSED)
{
  if (expr->expr_type != EXPR_VARIABLE)
    return false;

  if (expr->symtree == new_symtree)
    expr->symtree = old_symtree;

  return false;
}

static void
forall_restore_symtree (gfc_expr *e)
{
  gfc_traverse_expr (e, NULL, forall_restore, 0);
}

static void
forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_se tse;
  gfc_se rse;
  gfc_expr *e;
  gfc_symbol *new_sym;
  gfc_symbol *old_sym;
  gfc_symtree *root;
  tree tmp;

  /* Build a copy of the lvalue.  */
  old_symtree = c->expr->symtree;
  old_sym = old_symtree->n.sym;
  e = gfc_lval_expr_from_sym (old_sym);
  if (old_sym->attr.dimension)
    {
      gfc_init_se (&tse, NULL);
      gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
      gfc_add_block_to_block (pre, &tse.pre);
      gfc_add_block_to_block (post, &tse.post);
      tse.expr = build_fold_indirect_ref (tse.expr);

      if (e->ts.type != BT_CHARACTER)
	{
	  /* Use the variable offset for the temporary.  */
	  tmp = gfc_conv_descriptor_offset (tse.expr);
	  gfc_add_modify (pre, tmp,
			  gfc_conv_array_offset (old_sym->backend_decl));
	}
    }
  else
    {
      gfc_init_se (&tse, NULL);
      gfc_init_se (&rse, NULL);
      gfc_conv_expr (&rse, e);
      if (e->ts.type == BT_CHARACTER)
	{
	  tse.string_length = rse.string_length;
	  tmp = gfc_get_character_type_len (gfc_default_character_kind,
					    tse.string_length);
	  tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
					  rse.string_length);
	  gfc_add_block_to_block (pre, &tse.pre);
	  gfc_add_block_to_block (post, &tse.post);
	}
      else
	{
	  tmp = gfc_typenode_for_spec (&e->ts);
	  tse.expr = gfc_create_var (tmp, "temp");
	}

      tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
				     e->expr_type == EXPR_VARIABLE);
      gfc_add_expr_to_block (pre, tmp);
    }
  gfc_free_expr (e);

  /* Create a new symbol to represent the lvalue.  */
  new_sym = gfc_new_symbol (old_sym->name, NULL);
  new_sym->ts = old_sym->ts;
  new_sym->attr.referenced = 1;
  new_sym->attr.temporary = 1;
  new_sym->attr.dimension = old_sym->attr.dimension;
  new_sym->attr.flavor = old_sym->attr.flavor;

  /* Use the temporary as the backend_decl.  */
  new_sym->backend_decl = tse.expr;

  /* Create a fake symtree for it.  */
  root = NULL;
  new_symtree = gfc_new_symtree (&root, old_sym->name);
  new_symtree->n.sym = new_sym;
  gcc_assert (new_symtree == root);

  /* Go through the expression reference replacing the old_symtree
     with the new.  */
  forall_replace_symtree (c->expr, old_sym, 2);

  /* Now we have made this temporary, we might as well use it for
     the right hand side.  */
  forall_replace_symtree (c->expr2, old_sym, 1);
}

/* Handles dependencies in forall assignments.  */
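/* For example, "FORALL (i = 1:n) a(i) = a(i + 1)" needs a temporary for
   the right hand side, while "FORALL (i = 1:4) p(p(i)) = i" needs a copy
   of P for the index expression (see forall_make_variable_temp above).  */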
static int
check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_ref *lref;
  gfc_ref *rref;
  int need_temp;
  gfc_symbol *lsym;

  lsym = c->expr->symtree->n.sym;
  need_temp = gfc_check_dependency (c->expr, c->expr2, 0);

  /* Now check for dependencies within the 'variable'
     expression itself.  These are treated by making a complete
     copy of variable and changing all the references to it
     point to the copy instead.  Note that the shallow copy of
     the variable will not suffice for derived types with
     pointer components.  We therefore leave these to their
     own devices.  */
  if (lsym->ts.type == BT_DERIVED
	&& lsym->ts.derived->attr.pointer_comp)
    return need_temp;

  new_symtree = NULL;
  if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
    {
      forall_make_variable_temp (c, pre, post);
      need_temp = 0;
    }

  /* Substrings with dependencies are treated in the same
     way.  */
  if (c->expr->ts.type == BT_CHARACTER
	&& c->expr->ref
	&& c->expr2->expr_type == EXPR_VARIABLE
	&& lsym == c->expr2->symtree->n.sym)
    {
      for (lref = c->expr->ref; lref; lref = lref->next)
	if (lref->type == REF_SUBSTRING)
	  break;
      for (rref = c->expr2->ref; rref; rref = rref->next)
	if (rref->type == REF_SUBSTRING)
	  break;

      if (rref && lref
	    && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
	{
	  forall_make_variable_temp (c, pre, post);
	  need_temp = 0;
	}
    }
  return need_temp;
}

static void
cleanup_forall_symtrees (gfc_code *c)
{
  forall_restore_symtree (c->expr);
  forall_restore_symtree (c->expr2);
  gfc_free (new_symtree->n.sym);
  gfc_free (new_symtree);
}

/* Generate the loops for a FORALL block, specified by FORALL_TMP.  BODY
   is the contents of the FORALL block/stmt to be iterated.  MASK_FLAG
   indicates whether we should generate code to test the FORALLs mask
   array.  OUTER is the loop header to be used for initializing mask
   indices.

   The generated loop format is:
    count = (end - start + step) / step
    loopvar = start
    while (1)
      {
        if (count <= 0)
          goto end_of_loop
        <body>
        loopvar += step
        count--
      }
    end_of_loop:  */

static tree
gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
		       int mask_flag, stmtblock_t *outer)
{
  int n, nvar;
  tree tmp;
  tree cond;
  stmtblock_t block;
  tree exit_label;
  tree count;
  tree var, start, end, step;
  iter_info *iter;

  /* Initialize the mask index outside the FORALL nest.  */
  if (mask_flag && forall_tmp->mask)
    gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);

  iter = forall_tmp->this_loop;
  nvar = forall_tmp->nvar;
  for (n = 0; n < nvar; n++)
    {
      var = iter->var;
      start = iter->start;
      end = iter->end;
      step = iter->step;

      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* The loop counter.  */
      count = gfc_create_var (TREE_TYPE (var), "count");

      /* The body of the loop.  */
      gfc_init_block (&block);

      /* The exit condition.  */
      cond = fold_build2 (LE_EXPR, boolean_type_node,
			  count, build_int_cst (TREE_TYPE (count), 0));
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3 (COND_EXPR, void_type_node,
			 cond, tmp, build_empty_stmt ());
      gfc_add_expr_to_block (&block, tmp);

      /* The main loop body.  */
      gfc_add_expr_to_block (&block, body);

      /* Increment the loop variable.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
      gfc_add_modify (&block, var, tmp);

      /* Advance to the next mask element.  Only do this for the
	 innermost loop.  */
      if (n == 0 && mask_flag && forall_tmp->mask)
	{
	  tree maskindex = forall_tmp->maskindex;
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     maskindex, gfc_index_one_node);
	  gfc_add_modify (&block, maskindex, tmp);
	}

      /* Decrement the loop counter.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
			 build_int_cst (TREE_TYPE (var), 1));
      gfc_add_modify (&block, count, tmp);

      body = gfc_finish_block (&block);

      /* Loop var initialization.  */
      gfc_init_block (&block);
      gfc_add_modify (&block, var, start);

      /* Initialize the loop counter.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
      gfc_add_modify (&block, count, tmp);

      /* The loop expression.  */
      tmp = build1_v (LOOP_EXPR, body);
      gfc_add_expr_to_block (&block, tmp);

      /* The exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&block, tmp);

      body = gfc_finish_block (&block);
      iter = iter->next;
    }
  return body;
}

/* Generate the body and loops according to MASK_FLAG.  If MASK_FLAG
   is nonzero, the body is controlled by all masks in the forall nest.
   Otherwise, the innermost loop is not controlled by its mask.  This
   is used for initializing that mask.  */

static tree
gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
			      int mask_flag)
{
  tree tmp;
  stmtblock_t header;
  forall_info *forall_tmp;
  tree mask, maskindex;

  gfc_start_block (&header);

  forall_tmp = nested_forall_info;
  while (forall_tmp != NULL)
    {
      /* Generate body with masks' control.  */
      if (mask_flag)
	{
	  mask = forall_tmp->mask;
	  maskindex = forall_tmp->maskindex;

	  /* If a mask was specified make the assignment conditional.  */
	  if (mask)
	    {
	      tmp = gfc_build_array_ref (mask, maskindex, NULL);
	      body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
	    }
	}
      body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
      forall_tmp = forall_tmp->prev_nest;
      mask_flag = 1;
    }

  gfc_add_expr_to_block (&header, body);
  return gfc_finish_block (&header);
}

/* Allocate data for holding a temporary array.  Returns either a local
   temporary array or a pointer variable.  */

static tree
gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
		 tree elem_type)
{
  tree tmpvar;
  tree type;
  tree tmp;

  if (INTEGER_CST_P (size))
    {
      tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
			 gfc_index_one_node);
    }
  else
    tmp = NULL_TREE;

  type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
  type = build_array_type (elem_type, type);
  if (gfc_can_put_var_on_stack (bytesize))
    {
      gcc_assert (INTEGER_CST_P (size));
      tmpvar = gfc_create_var (type, "temp");
      *pdata = NULL_TREE;
    }
  else
    {
      tmpvar = gfc_create_var (build_pointer_type (type), "temp");
      *pdata = convert (pvoid_type_node, tmpvar);

      tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
      gfc_add_modify (pblock, tmpvar, tmp);
    }
  return tmpvar;
}
2038 /* Generate code to copy the temporary to the actual lhs. */
2040 static tree
2041 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2042 tree count1, tree wheremask, bool invert)
2044 gfc_ss *lss;
2045 gfc_se lse, rse;
2046 stmtblock_t block, body;
2047 gfc_loopinfo loop1;
2048 tree tmp;
2049 tree wheremaskexpr;
2051 /* Walk the lhs. */
2052 lss = gfc_walk_expr (expr);
2054 if (lss == gfc_ss_terminator)
2056 gfc_start_block (&block);
2058 gfc_init_se (&lse, NULL);
2060 /* Translate the expression. */
2061 gfc_conv_expr (&lse, expr);
2063 /* Form the expression for the temporary. */
2064 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2066 /* Use the scalar assignment as is. */
2067 gfc_add_block_to_block (&block, &lse.pre);
2068 gfc_add_modify (&block, lse.expr, tmp);
2069 gfc_add_block_to_block (&block, &lse.post);
2071 /* Increment count1. */
2072 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2073 gfc_index_one_node);
2074 gfc_add_modify (&block, count1, tmp);
2076 tmp = gfc_finish_block (&block);
2078 else
2080 gfc_start_block (&block);
2082 gfc_init_loopinfo (&loop1);
2083 gfc_init_se (&rse, NULL);
2084 gfc_init_se (&lse, NULL);
2086 /* Associate the lss with the loop. */
2087 gfc_add_ss_to_loop (&loop1, lss);
2089 /* Calculate the bounds of the scalarization. */
2090 gfc_conv_ss_startstride (&loop1);
2091 /* Setup the scalarizing loops. */
2092 gfc_conv_loop_setup (&loop1, &expr->where);
2094 gfc_mark_ss_chain_used (lss, 1);
2096 /* Start the scalarized loop body. */
2097 gfc_start_scalarized_body (&loop1, &body);
2099 /* Setup the gfc_se structures. */
2100 gfc_copy_loopinfo_to_se (&lse, &loop1);
2101 lse.ss = lss;
2103 /* Form the expression of the temporary. */
2104 if (lss != gfc_ss_terminator)
2105 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2106 /* Translate expr. */
2107 gfc_conv_expr (&lse, expr);
2109 /* Use the scalar assignment. */
2110 rse.string_length = lse.string_length;
2111 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2113 /* Form the mask expression according to the mask tree list. */
2114 if (wheremask)
2116 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2117 if (invert)
2118 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2119 TREE_TYPE (wheremaskexpr),
2120 wheremaskexpr);
2121 tmp = fold_build3 (COND_EXPR, void_type_node,
2122 wheremaskexpr, tmp, build_empty_stmt ());
2125 gfc_add_expr_to_block (&body, tmp);
2127 /* Increment count1. */
2128 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2129 count1, gfc_index_one_node);
2130 gfc_add_modify (&body, count1, tmp);
2132 /* Increment count3. */
2133 if (count3)
2135 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2136 count3, gfc_index_one_node);
2137 gfc_add_modify (&body, count3, tmp);
2140 /* Generate the copying loops. */
2141 gfc_trans_scalarizing_loops (&loop1, &body);
2142 gfc_add_block_to_block (&block, &loop1.pre);
2143 gfc_add_block_to_block (&block, &loop1.post);
2144 gfc_cleanup_loop (&loop1);
2146 tmp = gfc_finish_block (&block);
2148 return tmp;
2152 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
2153 the temporary; LSS and RSS are formed in function compute_inner_temp_size()
2154 and should not be freed. WHEREMASK is the conditional execution mask
2155 whose sense may be inverted by INVERT. */
2157 static tree
2158 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2159 tree count1, gfc_ss *lss, gfc_ss *rss,
2160 tree wheremask, bool invert)
2162 stmtblock_t block, body1;
2163 gfc_loopinfo loop;
2164 gfc_se lse;
2165 gfc_se rse;
2166 tree tmp;
2167 tree wheremaskexpr;
2169 gfc_start_block (&block);
2171 gfc_init_se (&rse, NULL);
2172 gfc_init_se (&lse, NULL);
2174 if (lss == gfc_ss_terminator)
2176 gfc_init_block (&body1);
2177 gfc_conv_expr (&rse, expr2);
2178 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2180 else
2182 /* Initialize the loop. */
2183 gfc_init_loopinfo (&loop);
2185 /* We may need LSS to determine the shape of the expression. */
2186 gfc_add_ss_to_loop (&loop, lss);
2187 gfc_add_ss_to_loop (&loop, rss);
2189 gfc_conv_ss_startstride (&loop);
2190 gfc_conv_loop_setup (&loop, &expr2->where);
2192 gfc_mark_ss_chain_used (rss, 1);
2193 /* Start the loop body. */
2194 gfc_start_scalarized_body (&loop, &body1);
2196 /* Translate the expression. */
2197 gfc_copy_loopinfo_to_se (&rse, &loop);
2198 rse.ss = rss;
2199 gfc_conv_expr (&rse, expr2);
2201 /* Form the expression of the temporary. */
2202 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2205 /* Use the scalar assignment. */
2206 lse.string_length = rse.string_length;
2207 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2208 expr2->expr_type == EXPR_VARIABLE);
2210 /* Form the mask expression according to the mask tree list. */
2211 if (wheremask)
2213 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2214 if (invert)
2215 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2216 TREE_TYPE (wheremaskexpr),
2217 wheremaskexpr);
2218 tmp = fold_build3 (COND_EXPR, void_type_node,
2219 wheremaskexpr, tmp, build_empty_stmt ());
2222 gfc_add_expr_to_block (&body1, tmp);
2224 if (lss == gfc_ss_terminator)
2226 gfc_add_block_to_block (&block, &body1);
2228 /* Increment count1. */
2229 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2230 gfc_index_one_node);
2231 gfc_add_modify (&block, count1, tmp);
2233 else
2235 /* Increment count1. */
2236 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2237 count1, gfc_index_one_node);
2238 gfc_add_modify (&body1, count1, tmp);
2240 /* Increment count3. */
2241 if (count3)
2243 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2244 count3, gfc_index_one_node);
2245 gfc_add_modify (&body1, count3, tmp);
2248 /* Generate the copying loops. */
2249 gfc_trans_scalarizing_loops (&loop, &body1);
2251 gfc_add_block_to_block (&block, &loop.pre);
2252 gfc_add_block_to_block (&block, &loop.post);
2254 gfc_cleanup_loop (&loop);
2255 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2256 as tree nodes in SS may not be valid in a different scope. */
2259 tmp = gfc_finish_block (&block);
2260 return tmp;
2264 /* Calculate the size of the temporary needed in the assignment inside forall.
2265 LSS and RSS are filled in this function. */
2267 static tree
2268 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2269 stmtblock_t * pblock,
2270 gfc_ss **lss, gfc_ss **rss)
2272 gfc_loopinfo loop;
2273 tree size;
2274 int i;
2275 int save_flag;
2276 tree tmp;
2278 *lss = gfc_walk_expr (expr1);
2279 *rss = NULL;
2281 size = gfc_index_one_node;
2282 if (*lss != gfc_ss_terminator)
2284 gfc_init_loopinfo (&loop);
2286 /* Walk the RHS of the expression. */
2287 *rss = gfc_walk_expr (expr2);
2288 if (*rss == gfc_ss_terminator)
2290 /* The rhs is scalar. Add a ss for the expression. */
2291 *rss = gfc_get_ss ();
2292 (*rss)->next = gfc_ss_terminator;
2293 (*rss)->type = GFC_SS_SCALAR;
2294 (*rss)->expr = expr2;
2297 /* Associate the SS with the loop. */
2298 gfc_add_ss_to_loop (&loop, *lss);
2299 /* We don't actually need to add the rhs at this point, but it might
2300 make guessing the loop bounds a bit easier. */
2301 gfc_add_ss_to_loop (&loop, *rss);
2303 /* We only want the shape of the expression, not the rest of the junk
2304 generated by the scalarizer. */
2305 loop.array_parameter = 1;
2307 /* Calculate the bounds of the scalarization. */
2308 save_flag = gfc_option.rtcheck;
2309 gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS;
2310 gfc_conv_ss_startstride (&loop);
2311 gfc_option.rtcheck = save_flag;
2312 gfc_conv_loop_setup (&loop, &expr2->where);
2314 /* Figure out how many elements we need. */
2315 for (i = 0; i < loop.dimen; i++)
2317 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2318 gfc_index_one_node, loop.from[i]);
2319 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2320 tmp, loop.to[i]);
2321 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2323 gfc_add_block_to_block (pblock, &loop.pre);
2324 size = gfc_evaluate_now (size, pblock);
2325 gfc_add_block_to_block (pblock, &loop.post);
2327 /* TODO: write a function that cleans up a loopinfo without freeing
2328 the SS chains. Currently a NOP. */
2331 return size;
2335 /* Calculate the overall iterator number of the nested forall construct.
2336 This routine actually calculates the number of times the body of the
2337 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2338 that by the expression INNER_SIZE. The BLOCK argument specifies the
2339 block in which to calculate the result, and the optional INNER_SIZE_BODY
2340 argument contains any statements that need to be executed (inside the loop)
2341 to initialize or calculate INNER_SIZE. */
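/* A worked example (illustrative only): for

     forall (i=1:10)
       forall (j=1:20, mask(i,j))
         ...

   with INNER_SIZE == 5, the innermost (j) level carries a mask, so
   nothing can be folded; a variable NUM is accumulated at run time by
   executing num = num + 5 inside the generated loops, with the
   increment guarded by the mask copy.  Without any masks the whole
   product 10 * 20 * 5 == 1000 folds to a constant and no loop is
   emitted at all.  */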
2343 static tree
2344 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2345 stmtblock_t *inner_size_body, stmtblock_t *block)
2347 forall_info *forall_tmp = nested_forall_info;
2348 tree tmp, number;
2349 stmtblock_t body;
2351 /* We can eliminate the innermost unconditional loops with constant
2352 array bounds. */
2353 if (INTEGER_CST_P (inner_size))
2355 while (forall_tmp
2356 && !forall_tmp->mask
2357 && INTEGER_CST_P (forall_tmp->size))
2359 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2360 inner_size, forall_tmp->size);
2361 forall_tmp = forall_tmp->prev_nest;
2364 /* If there are no loops left, we have our constant result. */
2365 if (!forall_tmp)
2366 return inner_size;
2369 /* Otherwise, create a temporary variable to compute the result. */
2370 number = gfc_create_var (gfc_array_index_type, "num");
2371 gfc_add_modify (block, number, gfc_index_zero_node);
2373 gfc_start_block (&body);
2374 if (inner_size_body)
2375 gfc_add_block_to_block (&body, inner_size_body);
2376 if (forall_tmp)
2377 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2378 number, inner_size);
2379 else
2380 tmp = inner_size;
2381 gfc_add_modify (&body, number, tmp);
2382 tmp = gfc_finish_block (&body);
2384 /* Generate loops. */
2385 if (forall_tmp != NULL)
2386 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2388 gfc_add_expr_to_block (block, tmp);
2390 return number;
2394 /* Allocate a temporary for the forall construct. SIZE is the size of the
2395 temporary needed. PTEMP1 is returned so the space can be freed later. */
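/* For example (hypothetical numbers): a SIZE of 100 elements of a
   4-byte TYPE gives a BYTESIZE of 400, while for a one-byte element
   type the multiplication is skipped and BYTESIZE is simply SIZE.  */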
2397 static tree
2398 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2399 tree * ptemp1)
2401 tree bytesize;
2402 tree unit;
2403 tree tmp;
2405 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2406 if (!integer_onep (unit))
2407 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2408 else
2409 bytesize = size;
2411 *ptemp1 = NULL;
2412 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2414 if (*ptemp1)
2415 tmp = build_fold_indirect_ref (tmp);
2416 return tmp;
2420 /* Allocate a temporary for the forall construct according to the information
2421 in nested_forall_info. INNER_SIZE is the size of the temporary needed in the
2422 assignment inside forall. PTEMP1 is returned so the space can be freed later. */
2424 static tree
2425 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2426 tree inner_size, stmtblock_t * inner_size_body,
2427 stmtblock_t * block, tree * ptemp1)
2429 tree size;
2431 /* Calculate the total size of temporary needed in forall construct. */
2432 size = compute_overall_iter_number (nested_forall_info, inner_size,
2433 inner_size_body, block);
2435 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2439 /* Handle assignments inside forall which need a temporary.
2441 forall (i=start:end:stride; maskexpr)
2442 e<i> = f<i>
2443 end forall
2444 (where e,f<i> are arbitrary expressions possibly involving i
2445 and there is a dependency between e<i> and f<i>)
2446 Translates to:
2447 masktmp(:) = maskexpr(:)
2449 maskindex = 0;
2450 count1 = 0;
2451 num = 0;
2452 for (i = start; i <= end; i += stride)
2453 num += SIZE (f<i>)
2454 count1 = 0;
2455 ALLOCATE (tmp(num))
2456 for (i = start; i <= end; i += stride)
2458 if (masktmp[maskindex++])
2459 tmp[count1++] = f<i>
2461 maskindex = 0;
2462 count1 = 0;
2463 for (i = start; i <= end; i += stride)
2465 if (masktmp[maskindex++])
2466 e<i> = tmp[count1++]
2468 DEALLOCATE (tmp) */
2470 static void
2471 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2472 tree wheremask, bool invert,
2473 forall_info * nested_forall_info,
2474 stmtblock_t * block)
2476 tree type;
2477 tree inner_size;
2478 gfc_ss *lss, *rss;
2479 tree count, count1;
2480 tree tmp, tmp1;
2481 tree ptemp1;
2482 stmtblock_t inner_size_body;
2484 /* Create vars. count1 is the current iterator number of the nested
2485 forall. */
2486 count1 = gfc_create_var (gfc_array_index_type, "count1");
2488 /* Count is the wheremask index. */
2489 if (wheremask)
2491 count = gfc_create_var (gfc_array_index_type, "count");
2492 gfc_add_modify (block, count, gfc_index_zero_node);
2494 else
2495 count = NULL;
2497 /* Initialize count1. */
2498 gfc_add_modify (block, count1, gfc_index_zero_node);
2500 /* Calculate the size of the temporary needed in the assignment. Return lss
2501 and rss, which are used in function generate_loop_for_rhs_to_temp(). */
2502 gfc_init_block (&inner_size_body);
2503 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2504 &lss, &rss);
2506 /* The type of the LHS. Used in function allocate_temp_for_forall_nest. */
2507 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2509 if (!expr1->ts.cl->backend_decl)
2511 gfc_se tse;
2512 gfc_init_se (&tse, NULL);
2513 gfc_conv_expr (&tse, expr1->ts.cl->length);
2514 expr1->ts.cl->backend_decl = tse.expr;
2516 type = gfc_get_character_type_len (gfc_default_character_kind,
2517 expr1->ts.cl->backend_decl);
2519 else
2520 type = gfc_typenode_for_spec (&expr1->ts);
2522 /* Allocate temporary for nested forall construct according to the
2523 information in nested_forall_info and inner_size. */
2524 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2525 &inner_size_body, block, &ptemp1);
2527 /* Generate code to copy the rhs to the temporary. */
2528 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2529 wheremask, invert);
2531 /* Generate body and loops according to the information in
2532 nested_forall_info. */
2533 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2534 gfc_add_expr_to_block (block, tmp);
2536 /* Reset count1. */
2537 gfc_add_modify (block, count1, gfc_index_zero_node);
2539 /* Reset count. */
2540 if (wheremask)
2541 gfc_add_modify (block, count, gfc_index_zero_node);
2543 /* Generate code to copy the temporary to the lhs. */
2544 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2545 wheremask, invert);
2547 /* Generate body and loops according to the information in
2548 nested_forall_info. */
2549 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2550 gfc_add_expr_to_block (block, tmp);
2552 if (ptemp1)
2554 /* Free the temporary. */
2555 tmp = gfc_call_free (ptemp1);
2556 gfc_add_expr_to_block (block, tmp);
2561 /* Translate a pointer assignment inside FORALL which needs a temporary. */
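/* An illustrative case (not taken from the testsuite):

     forall (i=1:n)
       p(i)%link => p(n+1-i)%link
     end forall

   Because the targets on the rhs may themselves be pointer-assigned by
   other iterations, every target address (or array descriptor, in the
   array section case below) is first collected into a temporary in one
   pass over the forall loops, and only then copied into the lhs
   pointers in a second pass.  */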
2563 static void
2564 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2565 forall_info * nested_forall_info,
2566 stmtblock_t * block)
2568 tree type;
2569 tree inner_size;
2570 gfc_ss *lss, *rss;
2571 gfc_se lse;
2572 gfc_se rse;
2573 gfc_ss_info *info;
2574 gfc_loopinfo loop;
2575 tree desc;
2576 tree parm;
2577 tree parmtype;
2578 stmtblock_t body;
2579 tree count;
2580 tree tmp, tmp1, ptemp1;
2582 count = gfc_create_var (gfc_array_index_type, "count");
2583 gfc_add_modify (block, count, gfc_index_zero_node);
2585 inner_size = integer_one_node;
2586 lss = gfc_walk_expr (expr1);
2587 rss = gfc_walk_expr (expr2);
2588 if (lss == gfc_ss_terminator)
2590 type = gfc_typenode_for_spec (&expr1->ts);
2591 type = build_pointer_type (type);
2593 /* Allocate temporary for nested forall construct according to the
2594 information in nested_forall_info and inner_size. */
2595 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2596 inner_size, NULL, block, &ptemp1);
2597 gfc_start_block (&body);
2598 gfc_init_se (&lse, NULL);
2599 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2600 gfc_init_se (&rse, NULL);
2601 rse.want_pointer = 1;
2602 gfc_conv_expr (&rse, expr2);
2603 gfc_add_block_to_block (&body, &rse.pre);
2604 gfc_add_modify (&body, lse.expr,
2605 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2606 gfc_add_block_to_block (&body, &rse.post);
2608 /* Increment count. */
2609 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2610 count, gfc_index_one_node);
2611 gfc_add_modify (&body, count, tmp);
2613 tmp = gfc_finish_block (&body);
2615 /* Generate body and loops according to the information in
2616 nested_forall_info. */
2617 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2618 gfc_add_expr_to_block (block, tmp);
2620 /* Reset count. */
2621 gfc_add_modify (block, count, gfc_index_zero_node);
2623 gfc_start_block (&body);
2624 gfc_init_se (&lse, NULL);
2625 gfc_init_se (&rse, NULL);
2626 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2627 lse.want_pointer = 1;
2628 gfc_conv_expr (&lse, expr1);
2629 gfc_add_block_to_block (&body, &lse.pre);
2630 gfc_add_modify (&body, lse.expr, rse.expr);
2631 gfc_add_block_to_block (&body, &lse.post);
2632 /* Increment count. */
2633 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2634 count, gfc_index_one_node);
2635 gfc_add_modify (&body, count, tmp);
2636 tmp = gfc_finish_block (&body);
2638 /* Generate body and loops according to the information in
2639 nested_forall_info. */
2640 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2641 gfc_add_expr_to_block (block, tmp);
2643 else
2645 gfc_init_loopinfo (&loop);
2647 /* Associate the SS with the loop. */
2648 gfc_add_ss_to_loop (&loop, rss);
2650 /* Setup the scalarizing loops and bounds. */
2651 gfc_conv_ss_startstride (&loop);
2653 gfc_conv_loop_setup (&loop, &expr2->where);
2655 info = &rss->data.info;
2656 desc = info->descriptor;
2658 /* Make a new descriptor. */
2659 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2660 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2661 loop.from, loop.to, 1,
2662 GFC_ARRAY_UNKNOWN);
2664 /* Allocate temporary for nested forall construct. */
2665 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2666 inner_size, NULL, block, &ptemp1);
2667 gfc_start_block (&body);
2668 gfc_init_se (&lse, NULL);
2669 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2670 lse.direct_byref = 1;
2671 rss = gfc_walk_expr (expr2);
2672 gfc_conv_expr_descriptor (&lse, expr2, rss);
2674 gfc_add_block_to_block (&body, &lse.pre);
2675 gfc_add_block_to_block (&body, &lse.post);
2677 /* Increment count. */
2678 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2679 count, gfc_index_one_node);
2680 gfc_add_modify (&body, count, tmp);
2682 tmp = gfc_finish_block (&body);
2684 /* Generate body and loops according to the information in
2685 nested_forall_info. */
2686 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2687 gfc_add_expr_to_block (block, tmp);
2689 /* Reset count. */
2690 gfc_add_modify (block, count, gfc_index_zero_node);
2692 parm = gfc_build_array_ref (tmp1, count, NULL);
2693 lss = gfc_walk_expr (expr1);
2694 gfc_init_se (&lse, NULL);
2695 gfc_conv_expr_descriptor (&lse, expr1, lss);
2696 gfc_add_modify (&lse.pre, lse.expr, parm);
2697 gfc_start_block (&body);
2698 gfc_add_block_to_block (&body, &lse.pre);
2699 gfc_add_block_to_block (&body, &lse.post);
2701 /* Increment count. */
2702 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2703 count, gfc_index_one_node);
2704 gfc_add_modify (&body, count, tmp);
2706 tmp = gfc_finish_block (&body);
2708 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2709 gfc_add_expr_to_block (block, tmp);
2711 /* Free the temporary. */
2712 if (ptemp1)
2714 tmp = gfc_call_free (ptemp1);
2715 gfc_add_expr_to_block (block, tmp);
2720 /* FORALL and WHERE statements are really nasty, especially when you nest
2721 them. All the rhs values of a forall assignment must be evaluated before the
2722 actual assignments are performed. Presumably this also applies to all the
2723 assignments in an inner where statement. */
2725 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2726 linear array, relying on the fact that we process in the same order in all
2727 loops.
2729 forall (i=start:end:stride; maskexpr)
2730 e<i> = f<i>
2731 g<i> = h<i>
2732 end forall
2733 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2734 Translates to:
2735 count = ((end + 1 - start) / stride)
2736 masktmp(:) = maskexpr(:)
2738 maskindex = 0;
2739 for (i = start; i <= end; i += stride)
2741 if (masktmp[maskindex++])
2742 e<i> = f<i>
2744 maskindex = 0;
2745 for (i = start; i <= end; i += stride)
2747 if (masktmp[maskindex++])
2748 g<i> = h<i>
2751 Note that this code only works when there are no dependencies.
2752 Forall loops with array assignments and data dependencies are a real pain,
2753 because the size of the temporary cannot always be determined before the
2754 loop is executed. This problem is compounded by the presence of nested
2755 FORALL constructs. */
2758 static tree
2759 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2761 stmtblock_t pre;
2762 stmtblock_t post;
2763 stmtblock_t block;
2764 stmtblock_t body;
2765 tree *var;
2766 tree *start;
2767 tree *end;
2768 tree *step;
2769 gfc_expr **varexpr;
2770 tree tmp;
2771 tree assign;
2772 tree size;
2773 tree maskindex;
2774 tree mask;
2775 tree pmask;
2776 int n;
2777 int nvar;
2778 int need_temp;
2779 gfc_forall_iterator *fa;
2780 gfc_se se;
2781 gfc_code *c;
2782 gfc_saved_var *saved_vars;
2783 iter_info *this_forall;
2784 forall_info *info;
2785 bool need_mask;
2787 /* Do nothing if the mask is false. */
2788 if (code->expr
2789 && code->expr->expr_type == EXPR_CONSTANT
2790 && !code->expr->value.logical)
2791 return build_empty_stmt ();
2793 n = 0;
2794 /* Count the number of FORALL indices. */
2795 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2796 n++;
2797 nvar = n;
2799 /* Allocate the space for var, start, end, step, varexpr. */
2800 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2801 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2802 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2803 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2804 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2805 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2807 /* Allocate the space for info. */
2808 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2810 gfc_start_block (&pre);
2811 gfc_init_block (&post);
2812 gfc_init_block (&block);
2814 n = 0;
2815 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2817 gfc_symbol *sym = fa->var->symtree->n.sym;
2819 /* Allocate space for this_forall. */
2820 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2822 /* Create a temporary variable for the FORALL index. */
2823 tmp = gfc_typenode_for_spec (&sym->ts);
2824 var[n] = gfc_create_var (tmp, sym->name);
2825 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2827 /* Record it in this_forall. */
2828 this_forall->var = var[n];
2830 /* Replace the index symbol's backend_decl with the temporary decl. */
2831 sym->backend_decl = var[n];
2833 /* Work out the start, end and stride for the loop. */
2834 gfc_init_se (&se, NULL);
2835 gfc_conv_expr_val (&se, fa->start);
2836 /* Record it in this_forall. */
2837 this_forall->start = se.expr;
2838 gfc_add_block_to_block (&block, &se.pre);
2839 start[n] = se.expr;
2841 gfc_init_se (&se, NULL);
2842 gfc_conv_expr_val (&se, fa->end);
2843 /* Record it in this_forall. */
2844 this_forall->end = se.expr;
2845 gfc_make_safe_expr (&se);
2846 gfc_add_block_to_block (&block, &se.pre);
2847 end[n] = se.expr;
2849 gfc_init_se (&se, NULL);
2850 gfc_conv_expr_val (&se, fa->stride);
2851 /* Record it in this_forall. */
2852 this_forall->step = se.expr;
2853 gfc_make_safe_expr (&se);
2854 gfc_add_block_to_block (&block, &se.pre);
2855 step[n] = se.expr;
2857 /* Set the NEXT field of this_forall to NULL. */
2858 this_forall->next = NULL;
2859 /* Link this_forall to the info construct. */
2860 if (info->this_loop)
2862 iter_info *iter_tmp = info->this_loop;
2863 while (iter_tmp->next != NULL)
2864 iter_tmp = iter_tmp->next;
2865 iter_tmp->next = this_forall;
2867 else
2868 info->this_loop = this_forall;
2870 n++;
2872 nvar = n;
2874 /* Calculate the size needed for the current forall level. */
2875 size = gfc_index_one_node;
2876 for (n = 0; n < nvar; n++)
2878 /* size = (end + step - start) / step. */
2879 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2880 step[n], start[n]);
2881 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2883 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2884 tmp = convert (gfc_array_index_type, tmp);
2886 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2889 /* Record the nvar and size of current forall level. */
2890 info->nvar = nvar;
2891 info->size = size;
2893 if (code->expr)
2895 /* If the mask is .true., consider the FORALL unconditional. */
2896 if (code->expr->expr_type == EXPR_CONSTANT
2897 && code->expr->value.logical)
2898 need_mask = false;
2899 else
2900 need_mask = true;
2902 else
2903 need_mask = false;
2905 /* First we need to allocate the mask. */
2906 if (need_mask)
2908 /* As the mask array can be very big, prefer compact boolean types. */
2909 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2910 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2911 size, NULL, &block, &pmask);
2912 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2914 /* Record them in the info structure. */
2915 info->maskindex = maskindex;
2916 info->mask = mask;
2918 else
2920 /* No mask was specified. */
2921 maskindex = NULL_TREE;
2922 mask = pmask = NULL_TREE;
2925 /* Link the current forall level to nested_forall_info. */
2926 info->prev_nest = nested_forall_info;
2927 nested_forall_info = info;
2929 /* Copy the mask into a temporary variable if required.
2930 For now we assume a mask temporary is needed. */
2931 if (need_mask)
2933 /* As the mask array can be very big, prefer compact boolean types. */
2934 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2936 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2938 /* Start of mask assignment loop body. */
2939 gfc_start_block (&body);
2941 /* Evaluate the mask expression. */
2942 gfc_init_se (&se, NULL);
2943 gfc_conv_expr_val (&se, code->expr);
2944 gfc_add_block_to_block (&body, &se.pre);
2946 /* Store the mask. */
2947 se.expr = convert (mask_type, se.expr);
2949 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2950 gfc_add_modify (&body, tmp, se.expr);
2952 /* Advance to the next mask element. */
2953 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2954 maskindex, gfc_index_one_node);
2955 gfc_add_modify (&body, maskindex, tmp);
2957 /* Generate the loops. */
2958 tmp = gfc_finish_block (&body);
2959 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2960 gfc_add_expr_to_block (&block, tmp);
2963 c = code->block->next;
2965 /* TODO: loop merging in FORALL statements. */
2966 /* Now that we've got a copy of the mask, generate the assignment loops. */
2967 while (c)
2969 switch (c->op)
2971 case EXEC_ASSIGN:
2972 /* A scalar or array assignment. Do the simple check for
2973 lhs-to-rhs dependencies. These make a temporary for the
2974 rhs and form a second forall block to copy it to the variable. */
2975 need_temp = check_forall_dependencies(c, &pre, &post);
2977 /* Temporaries due to array assignment data dependencies introduce
2978 no end of problems. */
2979 if (need_temp)
2980 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2981 nested_forall_info, &block);
2982 else
2984 /* Use the normal assignment copying routines. */
2985 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2987 /* Generate body and loops. */
2988 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2989 assign, 1);
2990 gfc_add_expr_to_block (&block, tmp);
2993 /* Clean up any temporary symtrees that have been made to deal
2994 with dependencies. */
2995 if (new_symtree)
2996 cleanup_forall_symtrees (c);
2998 break;
3000 case EXEC_WHERE:
3001 /* Translate WHERE or WHERE construct nested in FORALL. */
3002 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3003 break;
3005 /* Pointer assignment inside FORALL. */
3006 case EXEC_POINTER_ASSIGN:
3007 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
3008 if (need_temp)
3009 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
3010 nested_forall_info, &block);
3011 else
3013 /* Use the normal assignment copying routines. */
3014 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
3016 /* Generate body and loops. */
3017 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3018 assign, 1);
3019 gfc_add_expr_to_block (&block, tmp);
3021 break;
3023 case EXEC_FORALL:
3024 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3025 gfc_add_expr_to_block (&block, tmp);
3026 break;
3028 /* Explicit subroutine calls are prevented by the frontend but interface
3029 assignments can legitimately produce them. */
3030 case EXEC_ASSIGN_CALL:
3031 assign = gfc_trans_call (c, true);
3032 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3033 gfc_add_expr_to_block (&block, tmp);
3034 break;
3036 default:
3037 gcc_unreachable ();
3040 c = c->next;
3043 /* Restore the original index variables. */
3044 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3045 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3047 /* Free the space for var, start, end, step, varexpr. */
3048 gfc_free (var);
3049 gfc_free (start);
3050 gfc_free (end);
3051 gfc_free (step);
3052 gfc_free (varexpr);
3053 gfc_free (saved_vars);
3055 /* Free the space for this forall_info. */
3056 gfc_free (info);
3058 if (pmask)
3060 /* Free the temporary for the mask. */
3061 tmp = gfc_call_free (pmask);
3062 gfc_add_expr_to_block (&block, tmp);
3064 if (maskindex)
3065 pushdecl (maskindex);
3067 gfc_add_block_to_block (&pre, &block);
3068 gfc_add_block_to_block (&pre, &post);
3070 return gfc_finish_block (&pre);
3074 /* Translate the FORALL statement or construct. */
3076 tree gfc_trans_forall (gfc_code * code)
3078 return gfc_trans_forall_1 (code, NULL);
3082 /* Evaluate the WHERE mask expression and copy its value to a temporary.
3083 If the WHERE construct is nested in FORALL, compute the overall temporary
3084 needed by the WHERE mask expression multiplied by the iterator number of
3085 the nested forall.
3086 ME is the WHERE mask expression.
3087 MASK is the current execution mask upon input, whose sense may or may
3088 not be inverted as specified by the INVERT argument.
3089 CMASK is the updated execution mask on output, or NULL if not required.
3090 PMASK is the pending execution mask on output, or NULL if not required.
3091 BLOCK is the block in which to place the condition evaluation loops. */
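/* Element-wise, and only as a sketch, this computes

     cmask = mask .and. me
     pmask = mask .and. .not. me

   where MASK is first inverted if INVERT is set, and either store is
   skipped when the corresponding argument is NULL.  For an outermost
   WHERE (a > 0) with no incoming mask this simply yields
   cmask = (a > 0).  */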
3093 static void
3094 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3095 tree mask, bool invert, tree cmask, tree pmask,
3096 tree mask_type, stmtblock_t * block)
3098 tree tmp, tmp1;
3099 gfc_ss *lss, *rss;
3100 gfc_loopinfo loop;
3101 stmtblock_t body, body1;
3102 tree count, cond, mtmp;
3103 gfc_se lse, rse;
3105 gfc_init_loopinfo (&loop);
3107 lss = gfc_walk_expr (me);
3108 rss = gfc_walk_expr (me);
3110 /* Variable to index the temporary. */
3111 count = gfc_create_var (gfc_array_index_type, "count");
3112 /* Initialize count. */
3113 gfc_add_modify (block, count, gfc_index_zero_node);
3115 gfc_start_block (&body);
3117 gfc_init_se (&rse, NULL);
3118 gfc_init_se (&lse, NULL);
3120 if (lss == gfc_ss_terminator)
3122 gfc_init_block (&body1);
3124 else
3126 /* Initialize the loop. */
3127 gfc_init_loopinfo (&loop);
3129 /* We may need LSS to determine the shape of the expression. */
3130 gfc_add_ss_to_loop (&loop, lss);
3131 gfc_add_ss_to_loop (&loop, rss);
3133 gfc_conv_ss_startstride (&loop);
3134 gfc_conv_loop_setup (&loop, &me->where);
3136 gfc_mark_ss_chain_used (rss, 1);
3137 /* Start the loop body. */
3138 gfc_start_scalarized_body (&loop, &body1);
3140 /* Translate the expression. */
3141 gfc_copy_loopinfo_to_se (&rse, &loop);
3142 rse.ss = rss;
3143 gfc_conv_expr (&rse, me);
3146 /* Variable to evaluate mask condition. */
3147 cond = gfc_create_var (mask_type, "cond");
3148 if (mask && (cmask || pmask))
3149 mtmp = gfc_create_var (mask_type, "mask");
3150 else mtmp = NULL_TREE;
3152 gfc_add_block_to_block (&body1, &lse.pre);
3153 gfc_add_block_to_block (&body1, &rse.pre);
3155 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3157 if (mask && (cmask || pmask))
3159 tmp = gfc_build_array_ref (mask, count, NULL);
3160 if (invert)
3161 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3162 gfc_add_modify (&body1, mtmp, tmp);
3165 if (cmask)
3167 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3168 tmp = cond;
3169 if (mask)
3170 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3171 gfc_add_modify (&body1, tmp1, tmp);
3174 if (pmask)
3176 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3177 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3178 if (mask)
3179 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3180 gfc_add_modify (&body1, tmp1, tmp);
3183 gfc_add_block_to_block (&body1, &lse.post);
3184 gfc_add_block_to_block (&body1, &rse.post);
3186 if (lss == gfc_ss_terminator)
3188 gfc_add_block_to_block (&body, &body1);
3190 else
3192 /* Increment count. */
3193 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3194 gfc_index_one_node);
3195 gfc_add_modify (&body1, count, tmp1);
3197 /* Generate the copying loops. */
3198 gfc_trans_scalarizing_loops (&loop, &body1);
3200 gfc_add_block_to_block (&body, &loop.pre);
3201 gfc_add_block_to_block (&body, &loop.post);
3203 gfc_cleanup_loop (&loop);
3204 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3205 as tree nodes in SS may not be valid in a different scope. */
3208 tmp1 = gfc_finish_block (&body);
3209 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3210 if (nested_forall_info != NULL)
3211 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3213 gfc_add_expr_to_block (block, tmp1);
3217 /* Translate an assignment statement in a WHERE statement or WHERE
3218 construct. The MASK expression is used to control which elements
3219 of EXPR1 shall be assigned. The sense of MASK is specified by
3220 INVERT. */
3222 static tree
3223 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3224 tree mask, bool invert,
3225 tree count1, tree count2,
3226 gfc_symbol *sym)
3228 gfc_se lse;
3229 gfc_se rse;
3230 gfc_ss *lss;
3231 gfc_ss *lss_section;
3232 gfc_ss *rss;
3234 gfc_loopinfo loop;
3235 tree tmp;
3236 stmtblock_t block;
3237 stmtblock_t body;
3238 tree index, maskexpr;
3240 #if 0
3241 /* TODO: handle this special case.
3242 Special case a single function returning an array. */
3243 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3245 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3246 if (tmp)
3247 return tmp;
3249 #endif
3251 /* Assignment of the form lhs = rhs. */
3252 gfc_start_block (&block);
3254 gfc_init_se (&lse, NULL);
3255 gfc_init_se (&rse, NULL);
3257 /* Walk the lhs. */
3258 lss = gfc_walk_expr (expr1);
3259 rss = NULL;
3261 /* In each where-assign-stmt, the mask-expr and the variable being
3262 defined shall be arrays of the same shape. */
3263 gcc_assert (lss != gfc_ss_terminator);
3265 /* The assignment needs scalarization. */
3266 lss_section = lss;
3268 /* Find a non-scalar SS from the lhs. */
3269 while (lss_section != gfc_ss_terminator
3270 && lss_section->type != GFC_SS_SECTION)
3271 lss_section = lss_section->next;
3273 gcc_assert (lss_section != gfc_ss_terminator);
3275 /* Initialize the scalarizer. */
3276 gfc_init_loopinfo (&loop);
3278 /* Walk the rhs. */
3279 rss = gfc_walk_expr (expr2);
3280 if (rss == gfc_ss_terminator)
3282 /* The rhs is scalar. Add a ss for the expression. */
3283 rss = gfc_get_ss ();
3284 rss->where = 1;
3285 rss->next = gfc_ss_terminator;
3286 rss->type = GFC_SS_SCALAR;
3287 rss->expr = expr2;
3290 /* Associate the SS with the loop. */
3291 gfc_add_ss_to_loop (&loop, lss);
3292 gfc_add_ss_to_loop (&loop, rss);
3294 /* Calculate the bounds of the scalarization. */
3295 gfc_conv_ss_startstride (&loop);
3297 /* Resolve any data dependencies in the statement. */
3298 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3300 /* Setup the scalarizing loops. */
3301 gfc_conv_loop_setup (&loop, &expr2->where);
3303 /* Setup the gfc_se structures. */
3304 gfc_copy_loopinfo_to_se (&lse, &loop);
3305 gfc_copy_loopinfo_to_se (&rse, &loop);
3307 rse.ss = rss;
3308 gfc_mark_ss_chain_used (rss, 1);
3309 if (loop.temp_ss == NULL)
3311 lse.ss = lss;
3312 gfc_mark_ss_chain_used (lss, 1);
3314 else
3316 lse.ss = loop.temp_ss;
3317 gfc_mark_ss_chain_used (lss, 3);
3318 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3321 /* Start the scalarized loop body. */
3322 gfc_start_scalarized_body (&loop, &body);
3324 /* Translate the expression. */
3325 gfc_conv_expr (&rse, expr2);
3326 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3328 gfc_conv_tmp_array_ref (&lse);
3329 gfc_advance_se_ss_chain (&lse);
3331 else
3332 gfc_conv_expr (&lse, expr1);
3334 /* Form the mask expression according to the mask. */
3335 index = count1;
3336 maskexpr = gfc_build_array_ref (mask, index, NULL);
3337 if (invert)
3338 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3340 /* Use the scalar assignment as is. */
3341 if (sym == NULL)
3342 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3343 loop.temp_ss != NULL, false);
3344 else
3345 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3347 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3349 gfc_add_expr_to_block (&body, tmp);
3351 if (lss == gfc_ss_terminator)
3353 /* Increment count1. */
3354 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3355 count1, gfc_index_one_node);
3356 gfc_add_modify (&body, count1, tmp);
3358 /* Use the scalar assignment as is. */
3359 gfc_add_block_to_block (&block, &body);
3361 else
3363 gcc_assert (lse.ss == gfc_ss_terminator
3364 && rse.ss == gfc_ss_terminator);
3366 if (loop.temp_ss != NULL)
3368 /* Increment count1 before finishing the main body of a scalarized
3369 expression. */
3370 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3371 count1, gfc_index_one_node);
3372 gfc_add_modify (&body, count1, tmp);
3373 gfc_trans_scalarized_loop_boundary (&loop, &body);
3375 /* We need to copy the temporary to the actual lhs. */
3376 gfc_init_se (&lse, NULL);
3377 gfc_init_se (&rse, NULL);
3378 gfc_copy_loopinfo_to_se (&lse, &loop);
3379 gfc_copy_loopinfo_to_se (&rse, &loop);
3381 rse.ss = loop.temp_ss;
3382 lse.ss = lss;
3384 gfc_conv_tmp_array_ref (&rse);
3385 gfc_advance_se_ss_chain (&rse);
3386 gfc_conv_expr (&lse, expr1);
3388 gcc_assert (lse.ss == gfc_ss_terminator
3389 && rse.ss == gfc_ss_terminator);
3391 /* Form the mask expression according to the mask tree list. */
3392 index = count2;
3393 maskexpr = gfc_build_array_ref (mask, index, NULL);
3394 if (invert)
3395 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3396 maskexpr);
3398 /* Use the scalar assignment as is. */
3399 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3400 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3401 gfc_add_expr_to_block (&body, tmp);
3403 /* Increment count2. */
3404 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3405 count2, gfc_index_one_node);
3406 gfc_add_modify (&body, count2, tmp);
3408 else
3410 /* Increment count1. */
3411 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3412 count1, gfc_index_one_node);
3413 gfc_add_modify (&body, count1, tmp);
3416 /* Generate the copying loops. */
3417 gfc_trans_scalarizing_loops (&loop, &body);
3419 /* Wrap the whole thing up. */
3420 gfc_add_block_to_block (&block, &loop.pre);
3421 gfc_add_block_to_block (&block, &loop.post);
3422 gfc_cleanup_loop (&loop);
3425 return gfc_finish_block (&block);
3429 /* Translate the WHERE construct or statement.
3430 This function is called recursively to translate nested WHERE
3431 constructs or statements.
3432 MASK is the control mask. */
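/* As a sketch of how the masks chain, consider a two-clause construct

     where (c1)
       a = b
     elsewhere (c2)
       a = d
     end where

   The first clause evaluates c1 into cmask and performs a = b under
   cmask; since this is the outermost WHERE, the elsewhere clause then
   combines the inverted cmask with c2 (overwriting cmask in place) and
   performs a = d under the result.  Nested WHEREs instead receive the
   pending mask pmask as their control mask.  */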
3434 static void
3435 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3436 forall_info * nested_forall_info, stmtblock_t * block)
3438 stmtblock_t inner_size_body;
3439 tree inner_size, size;
3440 gfc_ss *lss, *rss;
3441 tree mask_type;
3442 gfc_expr *expr1;
3443 gfc_expr *expr2;
3444 gfc_code *cblock;
3445 gfc_code *cnext;
3446 tree tmp;
3447 tree cond;
3448 tree count1, count2;
3449 bool need_cmask;
3450 bool need_pmask;
3451 int need_temp;
3452 tree pcmask = NULL_TREE;
3453 tree ppmask = NULL_TREE;
3454 tree cmask = NULL_TREE;
3455 tree pmask = NULL_TREE;
3456 gfc_actual_arglist *arg;
3458 /* The WHERE statement or the WHERE construct statement. */
3459 cblock = code->block;
3461 /* As the mask array can be very big, prefer compact boolean types. */
3462 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3464 /* Determine which temporary masks are needed. */
3465 if (!cblock->block)
3467 /* One clause: No ELSEWHEREs. */
3468 need_cmask = (cblock->next != 0);
3469 need_pmask = false;
3471 else if (cblock->block->block)
3473 /* Three or more clauses: Conditional ELSEWHEREs. */
3474 need_cmask = true;
3475 need_pmask = true;
3477 else if (cblock->next)
3479 /* Two clauses, the first non-empty. */
3480 need_cmask = true;
3481 need_pmask = (mask != NULL_TREE
3482 && cblock->block->next != 0);
3484 else if (!cblock->block->next)
3486 /* Two clauses, both empty. */
3487 need_cmask = false;
3488 need_pmask = false;
3490 /* Two clauses, the first empty, the second non-empty. */
3491 else if (mask)
3493 need_cmask = (cblock->block->expr != 0);
3494 need_pmask = true;
3496 else
3498 need_cmask = true;
3499 need_pmask = false;
3502 if (need_cmask || need_pmask)
3504 /* Calculate the size of temporary needed by the mask-expr. */
3505 gfc_init_block (&inner_size_body);
3506 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3507 &inner_size_body, &lss, &rss);
3509 /* Calculate the total size of temporary needed. */
3510 size = compute_overall_iter_number (nested_forall_info, inner_size,
3511 &inner_size_body, block);
3513 /* Check whether the size is negative. */
3514 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3515 gfc_index_zero_node);
3516 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3517 gfc_index_zero_node, size);
3518 size = gfc_evaluate_now (size, block);
3520 /* Allocate temporary for WHERE mask if needed. */
3521 if (need_cmask)
3522 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3523 &pcmask);
3525 /* Allocate temporary for !mask if needed. */
3526 if (need_pmask)
3527 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3528 &ppmask);
3531 while (cblock)
3533 /* Each time around this loop, the where clause is conditional
3534 on the value of mask and invert, which are updated at the
3535 bottom of the loop. */
3537 /* Has mask-expr. */
3538 if (cblock->expr)
3540 /* Ensure that the WHERE mask will be evaluated exactly once.
3541 If there are no statements in this WHERE/ELSEWHERE clause,
3542 then we don't need to update the control mask (cmask).
3543 If this is the last clause of the WHERE construct, then
3544 we don't need to update the pending control mask (pmask). */
3545 if (mask)
3546 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3547 mask, invert,
3548 cblock->next ? cmask : NULL_TREE,
3549 cblock->block ? pmask : NULL_TREE,
3550 mask_type, block);
3551 else
3552 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3553 NULL_TREE, false,
3554 (cblock->next || cblock->block)
3555 ? cmask : NULL_TREE,
3556 NULL_TREE, mask_type, block);
3558 invert = false;
3560 /* It's a final elsewhere-stmt. No mask-expr is present. */
3561 else
3562 cmask = mask;
3564 /* The body of this where clause is controlled by cmask, with its
3565 sense specified by invert. */
3567 /* Get the assignment statement of a WHERE statement, or the first
3568 statement in the where-body-construct of a WHERE construct. */
3569 cnext = cblock->next;
3570 while (cnext)
3572 switch (cnext->op)
3574 /* WHERE assignment statement. */
3575 case EXEC_ASSIGN_CALL:
3577 arg = cnext->ext.actual;
3578 expr1 = expr2 = NULL;
3579 for (; arg; arg = arg->next)
3581 if (!arg->expr)
3582 continue;
3583 if (expr1 == NULL)
3584 expr1 = arg->expr;
3585 else
3586 expr2 = arg->expr;
3588 goto evaluate;
3590 case EXEC_ASSIGN:
3591 expr1 = cnext->expr;
3592 expr2 = cnext->expr2;
3593 evaluate:
3594 if (nested_forall_info != NULL)
3596 need_temp = gfc_check_dependency (expr1, expr2, 0);
3597 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3598 gfc_trans_assign_need_temp (expr1, expr2,
3599 cmask, invert,
3600 nested_forall_info, block);
3601 else
3603 /* Variables to control maskexpr. */
3604 count1 = gfc_create_var (gfc_array_index_type, "count1");
3605 count2 = gfc_create_var (gfc_array_index_type, "count2");
3606 gfc_add_modify (block, count1, gfc_index_zero_node);
3607 gfc_add_modify (block, count2, gfc_index_zero_node);
3609 tmp = gfc_trans_where_assign (expr1, expr2,
3610 cmask, invert,
3611 count1, count2,
3612 cnext->resolved_sym);
3614 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3615 tmp, 1);
3616 gfc_add_expr_to_block (block, tmp);
3619 else
3621 /* Variables to control maskexpr. */
3622 count1 = gfc_create_var (gfc_array_index_type, "count1");
3623 count2 = gfc_create_var (gfc_array_index_type, "count2");
3624 gfc_add_modify (block, count1, gfc_index_zero_node);
3625 gfc_add_modify (block, count2, gfc_index_zero_node);
3627 tmp = gfc_trans_where_assign (expr1, expr2,
3628 cmask, invert,
3629 count1, count2,
3630 cnext->resolved_sym);
3631 gfc_add_expr_to_block (block, tmp);
3634 break;
3636 /* WHERE or WHERE construct is part of a where-body-construct. */
3637 case EXEC_WHERE:
3638 gfc_trans_where_2 (cnext, cmask, invert,
3639 nested_forall_info, block);
3640 break;
3642 default:
3643 gcc_unreachable ();
3646 /* The next statement within the same where-body-construct. */
3647 cnext = cnext->next;
3649 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3650 cblock = cblock->block;
3651 if (mask == NULL_TREE)
3653 /* If we're the initial WHERE, we can simply invert the sense
3654 of the current mask to obtain the "mask" for the remaining
3655 ELSEWHEREs. */
3656 invert = true;
3657 mask = cmask;
3659 else
3661 /* Otherwise, for nested WHEREs we need to use the pending mask. */
3662 invert = false;
3663 mask = pmask;
3667 /* If we allocated a pending mask array, deallocate it now. */
3668 if (ppmask)
3670 tmp = gfc_call_free (ppmask);
3671 gfc_add_expr_to_block (block, tmp);
3674 /* If we allocated a current mask array, deallocate it now. */
3675 if (pcmask)
3677 tmp = gfc_call_free (pcmask);
3678 gfc_add_expr_to_block (block, tmp);
3682 /* Translate a simple WHERE construct or statement without dependencies.
3683 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3684 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3685 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
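/* For instance (illustrative only), a dependence-free

     where (c)
       a = b
     elsewhere
       a = d
     end where

   collapses into a single scalarized loop whose per-element body is in
   effect a(i) = c(i) ? b(i) : d(i), i.e. one COND_EXPR per element and
   no mask temporary at all.  */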
3687 static tree
3688 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3690 stmtblock_t block, body;
3691 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3692 tree tmp, cexpr, tstmt, estmt;
3693 gfc_ss *css, *tdss, *tsss;
3694 gfc_se cse, tdse, tsse, edse, esse;
3695 gfc_loopinfo loop;
3696 gfc_ss *edss = 0;
3697 gfc_ss *esss = 0;
3699 cond = cblock->expr;
3700 tdst = cblock->next->expr;
3701 tsrc = cblock->next->expr2;
3702 edst = eblock ? eblock->next->expr : NULL;
3703 esrc = eblock ? eblock->next->expr2 : NULL;
3705 gfc_start_block (&block);
3706 gfc_init_loopinfo (&loop);
3708 /* Handle the condition. */
3709 gfc_init_se (&cse, NULL);
3710 css = gfc_walk_expr (cond);
3711 gfc_add_ss_to_loop (&loop, css);
3713 /* Handle the then-clause. */
3714 gfc_init_se (&tdse, NULL);
3715 gfc_init_se (&tsse, NULL);
3716 tdss = gfc_walk_expr (tdst);
3717 tsss = gfc_walk_expr (tsrc);
3718 if (tsss == gfc_ss_terminator)
3720 tsss = gfc_get_ss ();
3721 tsss->where = 1;
3722 tsss->next = gfc_ss_terminator;
3723 tsss->type = GFC_SS_SCALAR;
3724 tsss->expr = tsrc;
3726 gfc_add_ss_to_loop (&loop, tdss);
3727 gfc_add_ss_to_loop (&loop, tsss);
3729 if (eblock)
3731 /* Handle the else clause. */
3732 gfc_init_se (&edse, NULL);
3733 gfc_init_se (&esse, NULL);
3734 edss = gfc_walk_expr (edst);
3735 esss = gfc_walk_expr (esrc);
3736 if (esss == gfc_ss_terminator)
3738 esss = gfc_get_ss ();
3739 esss->where = 1;
3740 esss->next = gfc_ss_terminator;
3741 esss->type = GFC_SS_SCALAR;
3742 esss->expr = esrc;
3744 gfc_add_ss_to_loop (&loop, edss);
3745 gfc_add_ss_to_loop (&loop, esss);
3748 gfc_conv_ss_startstride (&loop);
3749 gfc_conv_loop_setup (&loop, &tdst->where);
3751 gfc_mark_ss_chain_used (css, 1);
3752 gfc_mark_ss_chain_used (tdss, 1);
3753 gfc_mark_ss_chain_used (tsss, 1);
3754 if (eblock)
3756 gfc_mark_ss_chain_used (edss, 1);
3757 gfc_mark_ss_chain_used (esss, 1);
3760 gfc_start_scalarized_body (&loop, &body);
3762 gfc_copy_loopinfo_to_se (&cse, &loop);
3763 gfc_copy_loopinfo_to_se (&tdse, &loop);
3764 gfc_copy_loopinfo_to_se (&tsse, &loop);
3765 cse.ss = css;
3766 tdse.ss = tdss;
3767 tsse.ss = tsss;
3768 if (eblock)
3770 gfc_copy_loopinfo_to_se (&edse, &loop);
3771 gfc_copy_loopinfo_to_se (&esse, &loop);
3772 edse.ss = edss;
3773 esse.ss = esss;
3776 gfc_conv_expr (&cse, cond);
3777 gfc_add_block_to_block (&body, &cse.pre);
3778 cexpr = cse.expr;
3780 gfc_conv_expr (&tsse, tsrc);
3781 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3783 gfc_conv_tmp_array_ref (&tdse);
3784 gfc_advance_se_ss_chain (&tdse);
3786 else
3787 gfc_conv_expr (&tdse, tdst);
3789 if (eblock)
3791 gfc_conv_expr (&esse, esrc);
3792 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3794 gfc_conv_tmp_array_ref (&edse);
3795 gfc_advance_se_ss_chain (&edse);
3797 else
3798 gfc_conv_expr (&edse, edst);
3801 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3802 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3803 : build_empty_stmt ();
3804 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3805 gfc_add_expr_to_block (&body, tmp);
3806 gfc_add_block_to_block (&body, &cse.post);
3808 gfc_trans_scalarizing_loops (&loop, &body);
3809 gfc_add_block_to_block (&block, &loop.pre);
3810 gfc_add_block_to_block (&block, &loop.post);
3811 gfc_cleanup_loop (&loop);
3813 return gfc_finish_block (&block);
3816 /* As the WHERE or WHERE construct statement can be nested, we call
3817 gfc_trans_where_2 to do the translation, and pass the initial
3818 NULL values for both the control mask and the pending control mask. */
3820 tree
3821 gfc_trans_where (gfc_code * code)
3823 stmtblock_t block;
3824 gfc_code *cblock;
3825 gfc_code *eblock;
3827 cblock = code->block;
3828 if (cblock->next
3829 && cblock->next->op == EXEC_ASSIGN
3830 && !cblock->next->next)
3832 eblock = cblock->block;
3833 if (!eblock)
3835 /* A simple "WHERE (cond) x = y" statement or block is
3836 dependence free if cond is not dependent upon writing x,
3837 and the source y is unaffected by the destination x. */
3838 if (!gfc_check_dependency (cblock->next->expr,
3839 cblock->expr, 0)
3840 && !gfc_check_dependency (cblock->next->expr,
3841 cblock->next->expr2, 0))
3842 return gfc_trans_where_3 (cblock, NULL);
3844 else if (!eblock->expr
3845 && !eblock->block
3846 && eblock->next
3847 && eblock->next->op == EXEC_ASSIGN
3848 && !eblock->next->next)
3850 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3851 block is dependence free if cond is not dependent on writes
3852 to x1 and x2, y1 is not dependent on writes to x2, and y2
3853 is not dependent on writes to x1, and both y's are not
3854 dependent upon their own x's. In addition to this, the
3855 final two dependency checks below exclude all but the same
3856 array reference if the where and elsewhere destinations
3857 are the same. In short, this is VERY conservative, and this
3858 is needed because the two loops required by the standard
3859 are coalesced in gfc_trans_where_3. */
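/* For instance (a hypothetical example),

     where (c)
       a(1:n-1) = b
     elsewhere
       a(2:n) = b
     end where

   fails these cross checks because the two destinations are
   overlapping sections of the same array, and therefore falls
   through to the general gfc_trans_where_2 machinery below.  */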
3860 if (!gfc_check_dependency(cblock->next->expr,
3861 cblock->expr, 0)
3862 && !gfc_check_dependency(eblock->next->expr,
3863 cblock->expr, 0)
3864 && !gfc_check_dependency(cblock->next->expr,
3865 eblock->next->expr2, 1)
3866 && !gfc_check_dependency(eblock->next->expr,
3867 cblock->next->expr2, 1)
3868 && !gfc_check_dependency(cblock->next->expr,
3869 cblock->next->expr2, 1)
3870 && !gfc_check_dependency(eblock->next->expr,
3871 eblock->next->expr2, 1)
3872 && !gfc_check_dependency(cblock->next->expr,
3873 eblock->next->expr, 0)
3874 && !gfc_check_dependency(eblock->next->expr,
3875 cblock->next->expr, 0))
3876 return gfc_trans_where_3 (cblock, eblock);
3880 gfc_start_block (&block);
3882 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3884 return gfc_finish_block (&block);
3888 /* CYCLE a DO loop. The label decl has already been created by
3889 gfc_trans_do(); it's in TREE_PURPOSE (backend_decl) of the gfc_code
3890 node at the head of the loop. We must mark the label as used. */
3892 tree
3893 gfc_trans_cycle (gfc_code * code)
3895 tree cycle_label;
3897 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3898 TREE_USED (cycle_label) = 1;
3899 return build1_v (GOTO_EXPR, cycle_label);
3903 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3904 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3905 loop. */
3907 tree
3908 gfc_trans_exit (gfc_code * code)
3910 tree exit_label;
3912 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3913 TREE_USED (exit_label) = 1;
3914 return build1_v (GOTO_EXPR, exit_label);
3918 /* Translate the ALLOCATE statement. */
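/* A sketch of the shape of the generated code for something like

     allocate (x(n), y, stat=ierr, errmsg=msg)

   Each object is allocated in turn via the library allocation helpers,
   passing the address of a local STAT variable; after each allocation
   a nonzero STAT branches to a shared error label.  At the end STAT is
   copied into IERR, and when ERRMSG was given a short message is
   memcpy'd into MSG if STAT is nonzero.  */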
3920 tree
3921 gfc_trans_allocate (gfc_code * code)
3923 gfc_alloc *al;
3924 gfc_expr *expr;
3925 gfc_se se;
3926 tree tmp;
3927 tree parm;
3928 tree stat;
3929 tree pstat;
3930 tree error_label;
3931 stmtblock_t block;
3933 if (!code->ext.alloc_list)
3934 return NULL_TREE;
3936 pstat = stat = error_label = tmp = NULL_TREE;
3938 gfc_start_block (&block);
3940 /* STAT= and/or ERRMSG is present. */
3941 if (code->expr || code->expr2)
3943 tree gfc_int4_type_node = gfc_get_int_type (4);
3945 stat = gfc_create_var (gfc_int4_type_node, "stat");
3946 pstat = gfc_build_addr_expr (NULL_TREE, stat);
3948 error_label = gfc_build_label_decl (NULL_TREE);
3949 TREE_USED (error_label) = 1;
3952 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3954 expr = al->expr;
3956 gfc_init_se (&se, NULL);
3957 gfc_start_block (&se.pre);
3959 se.want_pointer = 1;
3960 se.descriptor_only = 1;
3961 gfc_conv_expr (&se, expr);
3963 if (!gfc_array_allocate (&se, expr, pstat))
3965 /* A scalar or derived type. */
3966 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3968 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3969 tmp = se.string_length;
3971 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3972 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3973 fold_convert (TREE_TYPE (se.expr), tmp));
3974 gfc_add_expr_to_block (&se.pre, tmp);
3976 if (code->expr || code->expr2)
3978 tmp = build1_v (GOTO_EXPR, error_label);
3979 parm = fold_build2 (NE_EXPR, boolean_type_node,
3980 stat, build_int_cst (TREE_TYPE (stat), 0));
3981 tmp = fold_build3 (COND_EXPR, void_type_node,
3982 parm, tmp, build_empty_stmt ());
3983 gfc_add_expr_to_block (&se.pre, tmp);
3986 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3988 tmp = build_fold_indirect_ref (se.expr);
3989 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3990 gfc_add_expr_to_block (&se.pre, tmp);
3995 tmp = gfc_finish_block (&se.pre);
3996 gfc_add_expr_to_block (&block, tmp);
3999 /* STAT block. */
4000 if (code->expr)
4002 tmp = build1_v (LABEL_EXPR, error_label);
4003 gfc_add_expr_to_block (&block, tmp);
4005 gfc_init_se (&se, NULL);
4006 gfc_conv_expr_lhs (&se, code->expr);
4007 tmp = convert (TREE_TYPE (se.expr), stat);
4008 gfc_add_modify (&block, se.expr, tmp);
4011 /* ERRMSG block. */
4012 if (code->expr2)
4014 /* A better error message may be possible, but not required. */
4015 const char *msg = "Attempt to allocate an allocated object";
4016 tree errmsg, slen, dlen;
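/* Point a temporary at the message text; if the status is nonzero,
   copy at most LEN (errmsg-variable) characters of it into the user's
   ERRMSG= variable.  */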
4018 gfc_init_se (&se, NULL);
4019 gfc_conv_expr_lhs (&se, code->expr2);
4021 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4023 gfc_add_modify (&block, errmsg,
4024 gfc_build_addr_expr (pchar_type_node,
4025 gfc_build_localized_cstring_const (msg)));
4027 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4028 dlen = gfc_get_expr_charlen (code->expr2);
4029 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4031 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4032 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4034 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4035 build_int_cst (TREE_TYPE (stat), 0));
4037 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt ());
4039 gfc_add_expr_to_block (&block, tmp);
4042 return gfc_finish_block (&block);
4046 /* Translate a DEALLOCATE statement. */
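/* For a statement such as

       deallocate (x, y, stat=istat, errmsg=emsg)

   (names illustrative), each object is deallocated in turn and the
   per-object status is accumulated into a running total.  Roughly:

       astat = 0;
       <deallocate x, status in stat>;  astat = astat + stat;
       <deallocate y, status in stat>;  astat = astat + stat;
       istat = astat;
       if (astat != 0)
         <copy "Attempt to deallocate an unallocated object" into emsg,
          truncated to LEN (emsg)>;
*/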
4048 tree
4049 gfc_trans_deallocate (gfc_code *code)
4051 gfc_se se;
4052 gfc_alloc *al;
4053 gfc_expr *expr;
4054 tree apstat, astat, pstat, stat, tmp;
4055 stmtblock_t block;
4057 pstat = apstat = stat = astat = tmp = NULL_TREE;
4059 gfc_start_block (&block);
4061 /* Count the number of failed deallocations. If deallocate () was
4062 called with STAT=, then set STAT to the count. If deallocate was
4063 called with ERRMSG=, then set ERRMSG to a string. */
4064 if (code->expr || code->expr2)
4066 tree gfc_int4_type_node = gfc_get_int_type (4);
4068 stat = gfc_create_var (gfc_int4_type_node, "stat");
4069 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4071 /* Running total of possible deallocation failures. */
4072 astat = gfc_create_var (gfc_int4_type_node, "astat");
4073 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4075 /* Initialize astat to 0. */
4076 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4079 for (al = code->ext.alloc_list; al != NULL; al = al->next)
4081 expr = al->expr;
4082 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4084 gfc_init_se (&se, NULL);
4085 gfc_start_block (&se.pre);
4087 se.want_pointer = 1;
4088 se.descriptor_only = 1;
4089 gfc_conv_expr (&se, expr);
4091 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
4093 gfc_ref *ref;
4094 gfc_ref *last = NULL;
4095 for (ref = expr->ref; ref; ref = ref->next)
4096 if (ref->type == REF_COMPONENT)
4097 last = ref;
4099 /* Do not deallocate the components of a derived type
4100 ultimate pointer component. */
4101 if (!(last && last->u.c.component->attr.pointer)
4102 && !(!last && expr->symtree->n.sym->attr.pointer))
4104 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
4105 expr->rank);
4106 gfc_add_expr_to_block (&se.pre, tmp);
4110 if (expr->rank)
4111 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4112 else
4114 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4115 gfc_add_expr_to_block (&se.pre, tmp);
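/* Clear the pointer so the object is subsequently seen as
   unallocated / disassociated.  */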
4117 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4118 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4121 gfc_add_expr_to_block (&se.pre, tmp);
4123 /* Keep track of the number of failed deallocations by adding stat
4124 of the last deallocation to the running total. */
4125 if (code->expr || code->expr2)
4127 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4128 gfc_add_modify (&se.pre, astat, apstat);
4131 tmp = gfc_finish_block (&se.pre);
4132 gfc_add_expr_to_block (&block, tmp);
4136 /* Set STAT. */
4137 if (code->expr)
4139 gfc_init_se (&se, NULL);
4140 gfc_conv_expr_lhs (&se, code->expr);
4141 tmp = convert (TREE_TYPE (se.expr), astat);
4142 gfc_add_modify (&block, se.expr, tmp);
4145 /* Set ERRMSG. */
4146 if (code->expr2)
4148 /* A better error message may be possible, but not required. */
4149 const char *msg = "Attempt to deallocate an unallocated object";
4150 tree errmsg, slen, dlen;
4152 gfc_init_se (&se, NULL);
4153 gfc_conv_expr_lhs (&se, code->expr2);
4155 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4157 gfc_add_modify (&block, errmsg,
4158 gfc_build_addr_expr (pchar_type_node,
4159 gfc_build_localized_cstring_const (msg)));
4161 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4162 dlen = gfc_get_expr_charlen (code->expr2);
4163 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4165 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4166 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4168 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4169 build_int_cst (TREE_TYPE (astat), 0));
4171 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt ());
4173 gfc_add_expr_to_block (&block, tmp);
4176 return gfc_finish_block (&block);