2009-07-17 Richard Guenther <rguenther@suse.de>
[official-gcc.git] / gcc / fortran / trans-stmt.c
blob 0e8ce67c443c485ca0c3e5d3de98594d8b54fbd0
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
42 typedef struct iter_info
44 tree var;
45 tree start;
46 tree end;
47 tree step;
48 struct iter_info *next;
50 iter_info;
52 typedef struct forall_info
54 iter_info *this_loop;
55 tree mask;
56 tree maskindex;
57 int nvar;
58 tree size;
59 struct forall_info *prev_nest;
61 forall_info;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate an F95 label number to a LABEL_EXPR. */
68 tree
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
77 is a field_decl. */
79 void
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with a variable in a common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with a dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
94 tree
95 gfc_trans_label_assign (gfc_code * code)
97 tree label_tree;
98 gfc_se se;
99 tree len;
100 tree addr;
101 tree len_tree;
102 int label_len;
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr1);
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label1);
114 if (code->label1->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
119 else
121 gfc_expr *format = code->label1->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
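/* For illustration only (the variable names below are invented): the
   obsolescent ASSIGN statement can bind either a branch target or a FORMAT
   label to an integer variable, e.g.

         ASSIGN 10 TO lab          ! 10 labels an executable statement
         GOTO lab
   10    CONTINUE

         ASSIGN 20 TO fmt          ! 20 labels a FORMAT statement
         WRITE (*, fmt) x
   20    FORMAT (F8.3)

   For a branch target the auxiliary string length is set to -1 above, while
   for a FORMAT label it receives the length of the format text, whose
   address is stored in the auxiliary address variable.  */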
136 /* Translate a GOTO statement. */
138 tree
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
142 tree assigned_goto;
143 tree target;
144 tree tmp;
145 gfc_se se;
147 if (code->label1 != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
150 /* ASSIGNED GOTO. */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr1);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 code = code->block;
163 if (code == NULL)
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
170 /* Check the label list. */
173 target = gfc_get_label_decl (code->label1);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt (input_location));
179 gfc_add_expr_to_block (&se.pre, tmp);
180 code = code->block;
182 while (code != NULL);
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
186 return gfc_finish_block (&se.pre);
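/* A small invented example of the list form handled above:

         ASSIGN 20 TO lab
         GOTO lab, (10, 20, 30)

   produces a chain of address comparisons against the labels 10, 20 and 30,
   each guarding a GOTO to that label, followed by the "Assigned label is not
   in the list" runtime error if none of the addresses matched.  */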
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
191 tree
192 gfc_trans_entry (gfc_code * code)
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
202 static void
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg,
205 gfc_dep_check check_variable)
207 gfc_actual_arglist *arg0;
208 gfc_expr *e;
209 gfc_formal_arglist *formal;
210 gfc_loopinfo tmp_loop;
211 gfc_se parmse;
212 gfc_ss *ss;
213 gfc_ss_info *info;
214 gfc_symbol *fsym;
215 int n;
216 tree data;
217 tree offset;
218 tree size;
219 tree tmp;
221 if (loopse->ss == NULL)
222 return;
224 ss = loopse->ss;
225 arg0 = arg;
226 formal = sym->formal;
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
231 e = arg->expr;
232 if (e == NULL)
233 continue;
235 /* Obtain the info structure for the current argument. */
236 info = NULL;
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
239 if (ss->expr != e)
240 continue;
241 info = &ss->data.info;
242 break;
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
249 && e->rank && fsym
250 && fsym->attr.intent != INTENT_IN
251 && gfc_check_fncall_dependency (e, fsym->attr.intent,
252 sym, arg0, check_variable))
254 tree initial, temptype;
255 stmtblock_t temp_post;
257 /* Make a local loopinfo for the temporary creation, so that
258 none of the other ss->info's have to be renormalized. */
259 gfc_init_loopinfo (&tmp_loop);
260 for (n = 0; n < info->dimen; n++)
262 tmp_loop.to[n] = loopse->loop->to[n];
263 tmp_loop.from[n] = loopse->loop->from[n];
264 tmp_loop.order[n] = loopse->loop->order[n];
267 /* Obtain the argument descriptor for unpacking. */
268 gfc_init_se (&parmse, NULL);
269 parmse.want_pointer = 1;
270 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
271 gfc_add_block_to_block (&se->pre, &parmse.pre);
273 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
274 initialize the array temporary with a copy of the values. */
275 if (fsym->attr.intent == INTENT_INOUT
276 || (fsym->ts.type == BT_DERIVED
277 && fsym->attr.intent == INTENT_OUT))
278 initial = parmse.expr;
279 else
280 initial = NULL_TREE;
282 /* Find the type of the temporary to create; we don't use the type
283 of e itself as this breaks for subcomponent-references in e (where
284 the type of e is that of the final reference, but parmse.expr's
285 type corresponds to the full derived-type). */
286 /* TODO: Fix this somehow so we don't need a temporary of the whole
287 array but instead only the components referenced. */
288 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
289 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
290 temptype = TREE_TYPE (temptype);
291 temptype = gfc_get_element_type (temptype);
293 /* Generate the temporary. Cleaning up the temporary should be the
294 very last thing done, so we add the code to a new block and add it
295 to se->post as last instructions. */
296 size = gfc_create_var (gfc_array_index_type, NULL);
297 data = gfc_create_var (pvoid_type_node, NULL);
298 gfc_init_block (&temp_post);
299 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
300 &tmp_loop, info, temptype,
301 initial,
302 false, true, false,
303 &arg->expr->where);
304 gfc_add_modify (&se->pre, size, tmp);
305 tmp = fold_convert (pvoid_type_node, info->data);
306 gfc_add_modify (&se->pre, data, tmp);
308 /* Calculate the offset for the temporary. */
309 offset = gfc_index_zero_node;
310 for (n = 0; n < info->dimen; n++)
312 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
313 gfc_rank_cst[n]);
314 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
315 loopse->loop->from[n], tmp);
316 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
317 offset, tmp);
319 info->offset = gfc_create_var (gfc_array_index_type, NULL);
320 gfc_add_modify (&se->pre, info->offset, offset);
322 /* Copy the result back using unpack. */
323 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
324 gfc_add_expr_to_block (&se->post, tmp);
326 /* parmse.pre is already added above. */
327 gfc_add_block_to_block (&se->post, &parmse.post);
328 gfc_add_block_to_block (&se->post, &temp_post);
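/* A hypothetical example of the situation handled here (the subroutine and
   array names are invented):

         elemental subroutine esub (x, y)
           integer, intent(in)  :: x
           integer, intent(out) :: y
         end subroutine

         call esub (a(1:n), a(n:1:-1))

   The INTENT(OUT) actual argument overlaps the INTENT(IN) one, so the result
   is built in an array temporary and copied back into 'a' with
   internal_unpack once the scalarized loop has run.  */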
334 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
336 tree
337 gfc_trans_call (gfc_code * code, bool dependency_check,
338 tree mask, tree count1, bool invert)
340 gfc_se se;
341 gfc_ss * ss;
342 int has_alternate_specifier;
343 gfc_dep_check check_variable;
344 tree index = NULL_TREE;
345 tree maskexpr = NULL_TREE;
346 tree tmp;
348 /* A CALL starts a new block because the actual arguments may have to
349 be evaluated first. */
350 gfc_init_se (&se, NULL);
351 gfc_start_block (&se.pre);
353 gcc_assert (code->resolved_sym);
355 ss = gfc_ss_terminator;
356 if (code->resolved_sym->attr.elemental)
357 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
359 /* This is not an elemental subroutine call with array-valued arguments. */
360 if (ss == gfc_ss_terminator)
363 /* Translate the call. */
364 has_alternate_specifier
365 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
366 code->expr1, NULL_TREE);
368 /* A subroutine without side-effect, by definition, does nothing! */
369 TREE_SIDE_EFFECTS (se.expr) = 1;
371 /* Chain the pieces together and return the block. */
372 if (has_alternate_specifier)
374 gfc_code *select_code;
375 gfc_symbol *sym;
376 select_code = code->next;
377 gcc_assert(select_code->op == EXEC_SELECT);
378 sym = select_code->expr1->symtree->n.sym;
379 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
380 if (sym->backend_decl == NULL)
381 sym->backend_decl = gfc_get_symbol_decl (sym);
382 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
384 else
385 gfc_add_expr_to_block (&se.pre, se.expr);
387 gfc_add_block_to_block (&se.pre, &se.post);
390 else
392 /* An elemental subroutine call with array valued arguments has
393 to be scalarized. */
394 gfc_loopinfo loop;
395 stmtblock_t body;
396 stmtblock_t block;
397 gfc_se loopse;
398 gfc_se depse;
400 /* gfc_walk_elemental_function_args renders the ss chain in the
401 reverse order to the actual argument order. */
402 ss = gfc_reverse_ss (ss);
404 /* Initialize the loop. */
405 gfc_init_se (&loopse, NULL);
406 gfc_init_loopinfo (&loop);
407 gfc_add_ss_to_loop (&loop, ss);
409 gfc_conv_ss_startstride (&loop);
410 /* TODO: gfc_conv_loop_setup generates a temporary for vector
411 subscripts. This could be prevented in the elemental case
412 as temporaries are handled separately
413 (below in gfc_conv_elemental_dependencies). */
414 gfc_conv_loop_setup (&loop, &code->expr1->where);
415 gfc_mark_ss_chain_used (ss, 1);
417 /* Convert the arguments, checking for dependencies. */
418 gfc_copy_loopinfo_to_se (&loopse, &loop);
419 loopse.ss = ss;
421 /* For operator assignment, do dependency checking. */
422 if (dependency_check)
423 check_variable = ELEM_CHECK_VARIABLE;
424 else
425 check_variable = ELEM_DONT_CHECK_VARIABLE;
427 gfc_init_se (&depse, NULL);
428 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
429 code->ext.actual, check_variable);
431 gfc_add_block_to_block (&loop.pre, &depse.pre);
432 gfc_add_block_to_block (&loop.post, &depse.post);
434 /* Generate the loop body. */
435 gfc_start_scalarized_body (&loop, &body);
436 gfc_init_block (&block);
438 if (mask && count1)
440 /* Form the mask expression according to the mask. */
441 index = count1;
442 maskexpr = gfc_build_array_ref (mask, index, NULL);
443 if (invert)
444 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
445 maskexpr);
448 /* Add the subroutine call to the block. */
449 gfc_conv_procedure_call (&loopse, code->resolved_sym,
450 code->ext.actual, code->expr1,
451 NULL_TREE);
453 if (mask && count1)
455 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
456 build_empty_stmt (input_location));
457 gfc_add_expr_to_block (&loopse.pre, tmp);
458 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
459 count1, gfc_index_one_node);
460 gfc_add_modify (&loopse.pre, count1, tmp);
462 else
463 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
465 gfc_add_block_to_block (&block, &loopse.pre);
466 gfc_add_block_to_block (&block, &loopse.post);
468 /* Finish up the loop block and the loop. */
469 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
470 gfc_trans_scalarizing_loops (&loop, &body);
471 gfc_add_block_to_block (&se.pre, &loop.pre);
472 gfc_add_block_to_block (&se.pre, &loop.post);
473 gfc_add_block_to_block (&se.pre, &se.post);
474 gfc_cleanup_loop (&loop);
477 return gfc_finish_block (&se.pre);
481 /* Translate the RETURN statement. */
483 tree
484 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
486 if (code->expr1)
488 gfc_se se;
489 tree tmp;
490 tree result;
492 /* If code->expr1 is not NULL, this return statement must appear
493 in a subroutine and current_fake_result_decl has already
494 been generated. */
496 result = gfc_get_fake_result_decl (NULL, 0);
497 if (!result)
499 gfc_warning ("An alternate return at %L without a * dummy argument",
500 &code->expr1->where);
501 return build1_v (GOTO_EXPR, gfc_get_return_label ());
504 /* Start a new block for this statement. */
505 gfc_init_se (&se, NULL);
506 gfc_start_block (&se.pre);
508 gfc_conv_expr (&se, code->expr1);
510 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
511 fold_convert (TREE_TYPE (result), se.expr));
512 gfc_add_expr_to_block (&se.pre, tmp);
514 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
515 gfc_add_expr_to_block (&se.pre, tmp);
516 gfc_add_block_to_block (&se.pre, &se.post);
517 return gfc_finish_block (&se.pre);
519 else
520 return build1_v (GOTO_EXPR, gfc_get_return_label ());
524 /* Translate the PAUSE statement. We have to translate this statement
525 to a runtime library call. */
527 tree
528 gfc_trans_pause (gfc_code * code)
530 tree gfc_int4_type_node = gfc_get_int_type (4);
531 gfc_se se;
532 tree tmp;
534 /* Start a new block for this statement. */
535 gfc_init_se (&se, NULL);
536 gfc_start_block (&se.pre);
539 if (code->expr1 == NULL)
541 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
542 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
544 else
546 gfc_conv_expr_reference (&se, code->expr1);
547 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
548 se.expr, se.string_length);
551 gfc_add_expr_to_block (&se.pre, tmp);
553 gfc_add_block_to_block (&se.pre, &se.post);
555 return gfc_finish_block (&se.pre);
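/* As a concrete sketch, assuming the usual libgfortran entry points behind
   gfor_fndecl_pause_numeric and gfor_fndecl_pause_string:

         PAUSE 1234        ! -> _gfortran_pause_numeric (1234)
         PAUSE 'checking'  ! -> _gfortran_pause_string ("checking", 8)

   STOP is handled the same way just below, using the corresponding stop
   entry points.  */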
559 /* Translate the STOP statement. We have to translate this statement
560 to a runtime library call. */
562 tree
563 gfc_trans_stop (gfc_code * code)
565 tree gfc_int4_type_node = gfc_get_int_type (4);
566 gfc_se se;
567 tree tmp;
569 /* Start a new block for this statement. */
570 gfc_init_se (&se, NULL);
571 gfc_start_block (&se.pre);
574 if (code->expr1 == NULL)
576 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
577 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
579 else
581 gfc_conv_expr_reference (&se, code->expr1);
582 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
583 se.expr, se.string_length);
586 gfc_add_expr_to_block (&se.pre, tmp);
588 gfc_add_block_to_block (&se.pre, &se.post);
590 return gfc_finish_block (&se.pre);
594 /* Generate GENERIC for the IF construct. This function also deals with
595 the simple IF statement, because the front end translates the IF
596 statement into an IF construct.
598 We translate:
600 IF (cond) THEN
601 then_clause
602 ELSEIF (cond2) THEN
603 elseif_clause
604 ELSE
605 else_clause
606 ENDIF
608 into:
610 pre_cond_s;
611 if (cond_s)
613 then_clause;
615 else
617 pre_cond_s
618 if (cond_s)
620 elseif_clause
622 else
624 else_clause;
628 where COND_S is the simplified version of the predicate. PRE_COND_S
629 are the pre side-effects produced by the translation of the
630 conditional.
631 We need to build the chain recursively otherwise we run into
632 problems with folding incomplete statements. */
634 static tree
635 gfc_trans_if_1 (gfc_code * code)
637 gfc_se if_se;
638 tree stmt, elsestmt;
640 /* Check for an unconditional ELSE clause. */
641 if (!code->expr1)
642 return gfc_trans_code (code->next);
644 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
645 gfc_init_se (&if_se, NULL);
646 gfc_start_block (&if_se.pre);
648 /* Calculate the IF condition expression. */
649 gfc_conv_expr_val (&if_se, code->expr1);
651 /* Translate the THEN clause. */
652 stmt = gfc_trans_code (code->next);
654 /* Translate the ELSE clause. */
655 if (code->block)
656 elsestmt = gfc_trans_if_1 (code->block);
657 else
658 elsestmt = build_empty_stmt (input_location);
660 /* Build the condition expression and add it to the condition block. */
661 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
663 gfc_add_expr_to_block (&if_se.pre, stmt);
665 /* Finish off this statement. */
666 return gfc_finish_block (&if_se.pre);
669 tree
670 gfc_trans_if (gfc_code * code)
672 /* Ignore the top EXEC_IF, it only announces an IF construct. The
673 actual code we must translate is in code->block. */
675 return gfc_trans_if_1 (code->block);
679 /* Translate an arithmetic IF expression.
681 IF (cond) label1, label2, label3 translates to
683 if (cond <= 0)
685 if (cond < 0)
686 goto label1;
687 else // cond == 0
688 goto label2;
690 else // cond > 0
691 goto label3;
693 An optimized version can be generated in case of equal labels.
694 E.g., if label1 is equal to label2, we can translate it to
696 if (cond <= 0)
697 goto label1;
698 else
699 goto label3;
702 tree
703 gfc_trans_arithmetic_if (gfc_code * code)
705 gfc_se se;
706 tree tmp;
707 tree branch1;
708 tree branch2;
709 tree zero;
711 /* Start a new block. */
712 gfc_init_se (&se, NULL);
713 gfc_start_block (&se.pre);
715 /* Pre-evaluate COND. */
716 gfc_conv_expr_val (&se, code->expr1);
717 se.expr = gfc_evaluate_now (se.expr, &se.pre);
719 /* Build something to compare with. */
720 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
722 if (code->label1->value != code->label2->value)
724 /* If (cond < 0) take branch1 else take branch2.
725 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
726 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
727 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
729 if (code->label1->value != code->label3->value)
730 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
731 else
732 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
734 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
736 else
737 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
739 if (code->label1->value != code->label3->value
740 && code->label2->value != code->label3->value)
742 /* if (cond <= 0) take branch1 else take branch2. */
743 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
744 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
745 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
748 /* Append the COND_EXPR to the evaluation of COND, and return. */
749 gfc_add_expr_to_block (&se.pre, branch1);
750 return gfc_finish_block (&se.pre);
754 /* Translate the simple DO construct. This is where the loop variable has
755 integer type and step +-1. We can't use this in the general case
756 because integer overflow and floating point errors could give incorrect
757 results.
758 We translate a do loop from:
760 DO dovar = from, to, step
761 body
762 END DO
766 [Evaluate loop bounds and step]
767 dovar = from;
768 if ((step > 0) ? (dovar <= to) : (dovar >= to))
770 for (;;)
772 body;
773 cycle_label:
774 cond = (dovar == to);
775 dovar += step;
776 if (cond) goto end_label;
779 end_label:
781 This helps the optimizers by avoiding the extra induction variable
782 used in the general case. */
784 static tree
785 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
786 tree from, tree to, tree step)
788 stmtblock_t body;
789 tree type;
790 tree cond;
791 tree tmp;
792 tree saved_dovar = NULL;
793 tree cycle_label;
794 tree exit_label;
796 type = TREE_TYPE (dovar);
798 /* Initialize the DO variable: dovar = from. */
799 gfc_add_modify (pblock, dovar, from);
801 /* Save value for do-tinkering checking. */
802 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
804 saved_dovar = gfc_create_var (type, ".saved_dovar");
805 gfc_add_modify (pblock, saved_dovar, dovar);
808 /* Cycle and exit statements are implemented with gotos. */
809 cycle_label = gfc_build_label_decl (NULL_TREE);
810 exit_label = gfc_build_label_decl (NULL_TREE);
812 /* Put the labels where they can be found later. See gfc_trans_do(). */
813 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
815 /* Loop body. */
816 gfc_start_block (&body);
818 /* Main loop body. */
819 tmp = gfc_trans_code (code->block->next);
820 gfc_add_expr_to_block (&body, tmp);
822 /* Label for cycle statements (if needed). */
823 if (TREE_USED (cycle_label))
825 tmp = build1_v (LABEL_EXPR, cycle_label);
826 gfc_add_expr_to_block (&body, tmp);
829 /* Check whether someone has modified the loop variable. */
830 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
832 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
833 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
834 "Loop variable has been modified");
837 /* Evaluate the loop condition. */
838 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
839 cond = gfc_evaluate_now (cond, &body);
841 /* Increment the loop variable. */
842 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
843 gfc_add_modify (&body, dovar, tmp);
845 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
846 gfc_add_modify (&body, saved_dovar, dovar);
848 /* The loop exit. */
849 tmp = build1_v (GOTO_EXPR, exit_label);
850 TREE_USED (exit_label) = 1;
851 tmp = fold_build3 (COND_EXPR, void_type_node,
852 cond, tmp, build_empty_stmt (input_location));
853 gfc_add_expr_to_block (&body, tmp);
855 /* Finish the loop body. */
856 tmp = gfc_finish_block (&body);
857 tmp = build1_v (LOOP_EXPR, tmp);
859 /* Only execute the loop if the number of iterations is positive. */
860 if (tree_int_cst_sgn (step) > 0)
861 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
862 else
863 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
864 tmp = fold_build3 (COND_EXPR, void_type_node,
865 cond, tmp, build_empty_stmt (input_location));
866 gfc_add_expr_to_block (pblock, tmp);
868 /* Add the exit label. */
869 tmp = build1_v (LABEL_EXPR, exit_label);
870 gfc_add_expr_to_block (pblock, tmp);
872 return gfc_finish_block (pblock);
875 /* Translate the DO construct. This obviously is one of the most
876 important ones to get right with any compiler, but especially
877 so for Fortran.
879 We special case some loop forms as described in gfc_trans_simple_do.
880 For other cases we implement them with a separate loop count,
881 as described in the standard.
883 We translate a do loop from:
885 DO dovar = from, to, step
886 body
887 END DO
891 [evaluate loop bounds and step]
892 empty = (step > 0 ? to < from : to > from);
893 countm1 = (to - from) / step;
894 dovar = from;
895 if (empty) goto exit_label;
896 for (;;)
898 body;
899 cycle_label:
900 dovar += step
901 if (countm1 == 0) goto exit_label;
902 countm1--;
904 exit_label:
906 countm1 is an unsigned integer. It is equal to the loop count minus one,
907 because the loop count itself can overflow. */
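/* A worked example of the countm1 computation below: for DO I = 1, 10, 3 the
   iterations are I = 1, 4, 7, 10, so countm1 = (10 - 1) / 3 = 3, i.e. the
   loop count 4 minus one.  Keeping the value unsigned and as count-minus-one
   matters because DO I = -HUGE(I)-1, HUGE(I) has a trip count of
   2**BIT_SIZE(I), which would overflow even the unsigned type, whereas the
   count minus one just fits.  */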
909 tree
910 gfc_trans_do (gfc_code * code)
912 gfc_se se;
913 tree dovar;
914 tree saved_dovar = NULL;
915 tree from;
916 tree to;
917 tree step;
918 tree countm1;
919 tree type;
920 tree utype;
921 tree cond;
922 tree cycle_label;
923 tree exit_label;
924 tree tmp;
925 tree pos_step;
926 stmtblock_t block;
927 stmtblock_t body;
929 gfc_start_block (&block);
931 /* Evaluate all the expressions in the iterator. */
932 gfc_init_se (&se, NULL);
933 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
934 gfc_add_block_to_block (&block, &se.pre);
935 dovar = se.expr;
936 type = TREE_TYPE (dovar);
938 gfc_init_se (&se, NULL);
939 gfc_conv_expr_val (&se, code->ext.iterator->start);
940 gfc_add_block_to_block (&block, &se.pre);
941 from = gfc_evaluate_now (se.expr, &block);
943 gfc_init_se (&se, NULL);
944 gfc_conv_expr_val (&se, code->ext.iterator->end);
945 gfc_add_block_to_block (&block, &se.pre);
946 to = gfc_evaluate_now (se.expr, &block);
948 gfc_init_se (&se, NULL);
949 gfc_conv_expr_val (&se, code->ext.iterator->step);
950 gfc_add_block_to_block (&block, &se.pre);
951 step = gfc_evaluate_now (se.expr, &block);
953 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
955 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
956 fold_convert (type, integer_zero_node));
957 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
958 "DO step value is zero");
961 /* Special case simple loops. */
962 if (TREE_CODE (type) == INTEGER_TYPE
963 && (integer_onep (step)
964 || tree_int_cst_equal (step, integer_minus_one_node)))
965 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
967 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
968 fold_convert (type, integer_zero_node));
970 if (TREE_CODE (type) == INTEGER_TYPE)
971 utype = unsigned_type_for (type);
972 else
973 utype = unsigned_type_for (gfc_array_index_type);
974 countm1 = gfc_create_var (utype, "countm1");
976 /* Cycle and exit statements are implemented with gotos. */
977 cycle_label = gfc_build_label_decl (NULL_TREE);
978 exit_label = gfc_build_label_decl (NULL_TREE);
979 TREE_USED (exit_label) = 1;
981 /* Initialize the DO variable: dovar = from. */
982 gfc_add_modify (&block, dovar, from);
984 /* Save value for do-tinkering checking. */
985 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
987 saved_dovar = gfc_create_var (type, ".saved_dovar");
988 gfc_add_modify (&block, saved_dovar, dovar);
991 /* Initialize loop count and jump to exit label if the loop is empty.
992 This code is executed before we enter the loop body. We generate:
993 if (step > 0)
995 if (to < from) goto exit_label;
996 countm1 = (to - from) / step;
998 else
1000 if (to > from) goto exit_label;
1001 countm1 = (from - to) / -step;
1002 } */
1003 if (TREE_CODE (type) == INTEGER_TYPE)
1005 tree pos, neg;
1007 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1008 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1009 build1_v (GOTO_EXPR, exit_label),
1010 build_empty_stmt (input_location));
1011 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1012 tmp = fold_convert (utype, tmp);
1013 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
1014 fold_convert (utype, step));
1015 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1016 pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);
1018 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1019 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1020 build1_v (GOTO_EXPR, exit_label),
1021 build_empty_stmt (input_location));
1022 tmp = fold_build2 (MINUS_EXPR, type, from, to);
1023 tmp = fold_convert (utype, tmp);
1024 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
1025 fold_convert (utype, fold_build1 (NEGATE_EXPR,
1026 type, step)));
1027 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1028 neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);
1030 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1031 gfc_add_expr_to_block (&block, tmp);
1033 else
1035 /* TODO: We could use the same width as the real type.
1036 This would probably cause more problems than it solves
1037 when we implement "long double" types. */
1039 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1040 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1041 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1042 gfc_add_modify (&block, countm1, tmp);
1044 /* We need a special check for empty loops:
1045 empty = (step > 0 ? to < from : to > from); */
1046 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1047 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1048 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1049 /* If the loop is empty, go directly to the exit label. */
1050 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1051 build1_v (GOTO_EXPR, exit_label),
1052 build_empty_stmt (input_location));
1053 gfc_add_expr_to_block (&block, tmp);
1056 /* Loop body. */
1057 gfc_start_block (&body);
1059 /* Put these labels where they can be found later. We put the
1060 labels in a TREE_LIST node (because TREE_CHAIN is already
1061 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1062 label in TREE_VALUE (backend_decl). */
1064 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1066 /* Main loop body. */
1067 tmp = gfc_trans_code (code->block->next);
1068 gfc_add_expr_to_block (&body, tmp);
1070 /* Label for cycle statements (if needed). */
1071 if (TREE_USED (cycle_label))
1073 tmp = build1_v (LABEL_EXPR, cycle_label);
1074 gfc_add_expr_to_block (&body, tmp);
1077 /* Check whether someone has modified the loop variable. */
1078 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1080 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1081 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1082 "Loop variable has been modified");
1085 /* Increment the loop variable. */
1086 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1087 gfc_add_modify (&body, dovar, tmp);
1089 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1090 gfc_add_modify (&body, saved_dovar, dovar);
1092 /* End with the loop condition. Loop until countm1 == 0. */
1093 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1094 build_int_cst (utype, 0));
1095 tmp = build1_v (GOTO_EXPR, exit_label);
1096 tmp = fold_build3 (COND_EXPR, void_type_node,
1097 cond, tmp, build_empty_stmt (input_location));
1098 gfc_add_expr_to_block (&body, tmp);
1100 /* Decrement the loop count. */
1101 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1102 gfc_add_modify (&body, countm1, tmp);
1104 /* End of loop body. */
1105 tmp = gfc_finish_block (&body);
1107 /* The for loop itself. */
1108 tmp = build1_v (LOOP_EXPR, tmp);
1109 gfc_add_expr_to_block (&block, tmp);
1111 /* Add the exit label. */
1112 tmp = build1_v (LABEL_EXPR, exit_label);
1113 gfc_add_expr_to_block (&block, tmp);
1115 return gfc_finish_block (&block);
1119 /* Translate the DO WHILE construct.
1121 We translate
1123 DO WHILE (cond)
1124 body
1125 END DO
1129 for ( ; ; )
1131 pre_cond;
1132 if (! cond) goto exit_label;
1133 body;
1134 cycle_label:
1136 exit_label:
1138 Because the evaluation of the exit condition `cond' may have side
1139 effects, we can't do much for empty loop bodies. The backend optimizers
1140 should be smart enough to eliminate any dead loops. */
1142 tree
1143 gfc_trans_do_while (gfc_code * code)
1145 gfc_se cond;
1146 tree tmp;
1147 tree cycle_label;
1148 tree exit_label;
1149 stmtblock_t block;
1151 /* Everything we build here is part of the loop body. */
1152 gfc_start_block (&block);
1154 /* Cycle and exit statements are implemented with gotos. */
1155 cycle_label = gfc_build_label_decl (NULL_TREE);
1156 exit_label = gfc_build_label_decl (NULL_TREE);
1158 /* Put the labels where they can be found later. See gfc_trans_do(). */
1159 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1161 /* Create a GIMPLE version of the exit condition. */
1162 gfc_init_se (&cond, NULL);
1163 gfc_conv_expr_val (&cond, code->expr1);
1164 gfc_add_block_to_block (&block, &cond.pre);
1165 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1167 /* Build "IF (! cond) GOTO exit_label". */
1168 tmp = build1_v (GOTO_EXPR, exit_label);
1169 TREE_USED (exit_label) = 1;
1170 tmp = fold_build3 (COND_EXPR, void_type_node,
1171 cond.expr, tmp, build_empty_stmt (input_location));
1172 gfc_add_expr_to_block (&block, tmp);
1174 /* The main body of the loop. */
1175 tmp = gfc_trans_code (code->block->next);
1176 gfc_add_expr_to_block (&block, tmp);
1178 /* Label for cycle statements (if needed). */
1179 if (TREE_USED (cycle_label))
1181 tmp = build1_v (LABEL_EXPR, cycle_label);
1182 gfc_add_expr_to_block (&block, tmp);
1185 /* End of loop body. */
1186 tmp = gfc_finish_block (&block);
1188 gfc_init_block (&block);
1189 /* Build the loop. */
1190 tmp = build1_v (LOOP_EXPR, tmp);
1191 gfc_add_expr_to_block (&block, tmp);
1193 /* Add the exit label. */
1194 tmp = build1_v (LABEL_EXPR, exit_label);
1195 gfc_add_expr_to_block (&block, tmp);
1197 return gfc_finish_block (&block);
1201 /* Translate the SELECT CASE construct for INTEGER case expressions,
1202 without killing all potential optimizations. The problem is that
1203 Fortran allows unbounded cases, but the back-end does not, so we
1204 need to intercept those before we enter the equivalent SWITCH_EXPR
1205 we can build.
1207 For example, we translate this,
1209 SELECT CASE (expr)
1210 CASE (:100,101,105:115)
1211 block_1
1212 CASE (190:199,200:)
1213 block_2
1214 CASE (300)
1215 block_3
1216 CASE DEFAULT
1217 block_4
1218 END SELECT
1220 to the GENERIC equivalent,
1222 switch (expr)
1224 case (minimum value for typeof(expr)) ... 100:
1225 case 101:
1226 case 105 ... 115:
1227 block_1;
1228 goto end_label;
1230 case 200 ... (maximum value for typeof(expr)):
1231 case 190 ... 199:
1232 block_2;
1233 goto end_label;
1235 case 300:
1236 block_3;
1237 goto end_label;
1239 default:
1240 block_4;
1241 goto end_label;
1244 end_label: */
1246 static tree
1247 gfc_trans_integer_select (gfc_code * code)
1249 gfc_code *c;
1250 gfc_case *cp;
1251 tree end_label;
1252 tree tmp;
1253 gfc_se se;
1254 stmtblock_t block;
1255 stmtblock_t body;
1257 gfc_start_block (&block);
1259 /* Calculate the switch expression. */
1260 gfc_init_se (&se, NULL);
1261 gfc_conv_expr_val (&se, code->expr1);
1262 gfc_add_block_to_block (&block, &se.pre);
1264 end_label = gfc_build_label_decl (NULL_TREE);
1266 gfc_init_block (&body);
1268 for (c = code->block; c; c = c->block)
1270 for (cp = c->ext.case_list; cp; cp = cp->next)
1272 tree low, high;
1273 tree label;
1275 /* Assume it's the default case. */
1276 low = high = NULL_TREE;
1278 if (cp->low)
1280 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1281 cp->low->ts.kind);
1283 /* If there's only a lower bound, set the high bound to the
1284 maximum value of the case expression. */
1285 if (!cp->high)
1286 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1289 if (cp->high)
1291 /* Three cases are possible here:
1293 1) There is no lower bound, e.g. CASE (:N).
1294 2) There is a lower bound .NE. high bound, that is
1295 a case range, e.g. CASE (N:M) where M>N (we make
1296 sure that M>N during type resolution).
1297 3) There is a lower bound, and it has the same value
1298 as the high bound, e.g. CASE (N:N). This is our
1299 internal representation of CASE(N).
1301 In the first and second case, we need to set a value for
1302 high. In the third case, we don't because the GCC middle
1303 end represents a single case value by just letting high be
1304 a NULL_TREE. We can't do that because we need to be able
1305 to represent unbounded cases. */
1307 if (!cp->low
1308 || (cp->low
1309 && mpz_cmp (cp->low->value.integer,
1310 cp->high->value.integer) != 0))
1311 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1312 cp->high->ts.kind);
1314 /* Unbounded case. */
1315 if (!cp->low)
1316 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1319 /* Build a label. */
1320 label = gfc_build_label_decl (NULL_TREE);
1322 /* Add this case label.
1323 Add parameter 'label', make it match GCC backend. */
1324 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1325 low, high, label);
1326 gfc_add_expr_to_block (&body, tmp);
1329 /* Add the statements for this case. */
1330 tmp = gfc_trans_code (c->next);
1331 gfc_add_expr_to_block (&body, tmp);
1333 /* Break to the end of the construct. */
1334 tmp = build1_v (GOTO_EXPR, end_label);
1335 gfc_add_expr_to_block (&body, tmp);
1338 tmp = gfc_finish_block (&body);
1339 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1340 gfc_add_expr_to_block (&block, tmp);
1342 tmp = build1_v (LABEL_EXPR, end_label);
1343 gfc_add_expr_to_block (&block, tmp);
1345 return gfc_finish_block (&block);
1349 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1351 There are only two cases possible here, even though the standard
1352 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1353 .FALSE., and DEFAULT.
1355 We never generate more than two blocks here. Instead, we always
1356 try to eliminate the DEFAULT case. This way, we can translate this
1357 kind of SELECT construct to a simple
1359 if {} else {};
1361 expression in GENERIC. */
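/* A small invented example of this transformation:

         SELECT CASE (flag)
         CASE (.TRUE.)
           code_t
         CASE DEFAULT
           code_d
         END SELECT

   Since the .FALSE. case is missing, the DEFAULT block takes its place and
   the construct becomes roughly  if (flag) { code_t } else { code_d }.  */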
1363 static tree
1364 gfc_trans_logical_select (gfc_code * code)
1366 gfc_code *c;
1367 gfc_code *t, *f, *d;
1368 gfc_case *cp;
1369 gfc_se se;
1370 stmtblock_t block;
1372 /* Assume we don't have any cases at all. */
1373 t = f = d = NULL;
1375 /* Now see which ones we actually do have. We can have at most two
1376 cases in a single case list: one for .TRUE. and one for .FALSE.
1377 The default case is always separate. If the cases for .TRUE. and
1378 .FALSE. are in the same case list, the block for that case list
1379 is always executed, and we don't generate a COND_EXPR. */
1380 for (c = code->block; c; c = c->block)
1382 for (cp = c->ext.case_list; cp; cp = cp->next)
1384 if (cp->low)
1386 if (cp->low->value.logical == 0) /* .FALSE. */
1387 f = c;
1388 else /* if (cp->low->value.logical != 0), thus .TRUE. */
1389 t = c;
1391 else
1392 d = c;
1396 /* Start a new block. */
1397 gfc_start_block (&block);
1399 /* Calculate the switch expression. We always need to do this
1400 because it may have side effects. */
1401 gfc_init_se (&se, NULL);
1402 gfc_conv_expr_val (&se, code->expr1);
1403 gfc_add_block_to_block (&block, &se.pre);
1405 if (t == f && t != NULL)
1407 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1408 translate the code for these cases, append it to the current
1409 block. */
1410 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1412 else
1414 tree true_tree, false_tree, stmt;
1416 true_tree = build_empty_stmt (input_location);
1417 false_tree = build_empty_stmt (input_location);
1419 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1420 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1421 make the missing case the default case. */
1422 if (t != NULL && f != NULL)
1423 d = NULL;
1424 else if (d != NULL)
1426 if (t == NULL)
1427 t = d;
1428 else
1429 f = d;
1432 /* Translate the code for each of these blocks, and append it to
1433 the current block. */
1434 if (t != NULL)
1435 true_tree = gfc_trans_code (t->next);
1437 if (f != NULL)
1438 false_tree = gfc_trans_code (f->next);
1440 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1441 true_tree, false_tree);
1442 gfc_add_expr_to_block (&block, stmt);
1445 return gfc_finish_block (&block);
1449 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1450 Instead of generating compares and jumps, it is far simpler to
1451 generate a data structure describing the cases in order and call a
1452 library subroutine that locates the right case.
1453 This is particularly true because this is the only case where we
1454 might have to dispose of a temporary.
1455 The library subroutine returns a pointer to jump to or NULL if no
1456 branches are to be taken. */
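/* A rough sketch of the scheme implemented below (no additional behaviour is
   implied): each CASE range becomes a record { string1, string1_len,
   string2, string2_len, target } in a static, ordered jump table, where an
   unbounded end is represented by a NULL string of length 0 and 'target' is
   the ordinal number of the case.  The runtime routine behind
   gfor_fndecl_select_string (or its char4 variant) is called with the table,
   its size and the selector string, and the integer it returns is used as
   the selector of an ordinary SWITCH_EXPR whose case labels are those
   ordinals.  */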
1458 static tree
1459 gfc_trans_character_select (gfc_code *code)
1461 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1462 stmtblock_t block, body;
1463 gfc_case *cp, *d;
1464 gfc_code *c;
1465 gfc_se se;
1466 int n, k;
1468 /* The jump table types are stored in static variables to avoid
1469 constructing them from scratch every single time. */
1470 static tree select_struct[2];
1471 static tree ss_string1[2], ss_string1_len[2];
1472 static tree ss_string2[2], ss_string2_len[2];
1473 static tree ss_target[2];
1475 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1477 if (code->expr1->ts.kind == 1)
1478 k = 0;
1479 else if (code->expr1->ts.kind == 4)
1480 k = 1;
1481 else
1482 gcc_unreachable ();
1484 if (select_struct[k] == NULL)
1486 select_struct[k] = make_node (RECORD_TYPE);
1488 if (code->expr1->ts.kind == 1)
1489 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1490 else if (code->expr1->ts.kind == 4)
1491 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1492 else
1493 gcc_unreachable ();
1495 #undef ADD_FIELD
1496 #define ADD_FIELD(NAME, TYPE) \
1497 ss_##NAME[k] = gfc_add_field_to_struct \
1498 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1499 get_identifier (stringize(NAME)), TYPE)
1501 ADD_FIELD (string1, pchartype);
1502 ADD_FIELD (string1_len, gfc_charlen_type_node);
1504 ADD_FIELD (string2, pchartype);
1505 ADD_FIELD (string2_len, gfc_charlen_type_node);
1507 ADD_FIELD (target, integer_type_node);
1508 #undef ADD_FIELD
1510 gfc_finish_type (select_struct[k]);
1513 cp = code->block->ext.case_list;
1514 while (cp->left != NULL)
1515 cp = cp->left;
1517 n = 0;
1518 for (d = cp; d; d = d->right)
1519 d->n = n++;
1521 end_label = gfc_build_label_decl (NULL_TREE);
1523 /* Generate the body */
1524 gfc_start_block (&block);
1525 gfc_init_block (&body);
1527 for (c = code->block; c; c = c->block)
1529 for (d = c->ext.case_list; d; d = d->next)
1531 label = gfc_build_label_decl (NULL_TREE);
1532 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1533 build_int_cst (NULL_TREE, d->n),
1534 build_int_cst (NULL_TREE, d->n), label);
1535 gfc_add_expr_to_block (&body, tmp);
1538 tmp = gfc_trans_code (c->next);
1539 gfc_add_expr_to_block (&body, tmp);
1541 tmp = build1_v (GOTO_EXPR, end_label);
1542 gfc_add_expr_to_block (&body, tmp);
1545 /* Generate the structure describing the branches */
1546 init = NULL_TREE;
1548 for(d = cp; d; d = d->right)
1550 node = NULL_TREE;
1552 gfc_init_se (&se, NULL);
1554 if (d->low == NULL)
1556 node = tree_cons (ss_string1[k], null_pointer_node, node);
1557 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1559 else
1561 gfc_conv_expr_reference (&se, d->low);
1563 node = tree_cons (ss_string1[k], se.expr, node);
1564 node = tree_cons (ss_string1_len[k], se.string_length, node);
1567 if (d->high == NULL)
1569 node = tree_cons (ss_string2[k], null_pointer_node, node);
1570 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1572 else
1574 gfc_init_se (&se, NULL);
1575 gfc_conv_expr_reference (&se, d->high);
1577 node = tree_cons (ss_string2[k], se.expr, node);
1578 node = tree_cons (ss_string2_len[k], se.string_length, node);
1581 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1582 node);
1584 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1585 init = tree_cons (NULL_TREE, tmp, init);
1588 type = build_array_type (select_struct[k],
1589 build_index_type (build_int_cst (NULL_TREE, n-1)));
1591 init = build_constructor_from_list (type, nreverse(init));
1592 TREE_CONSTANT (init) = 1;
1593 TREE_STATIC (init) = 1;
1594 /* Create a static variable to hold the jump table. */
1595 tmp = gfc_create_var (type, "jumptable");
1596 TREE_CONSTANT (tmp) = 1;
1597 TREE_STATIC (tmp) = 1;
1598 TREE_READONLY (tmp) = 1;
1599 DECL_INITIAL (tmp) = init;
1600 init = tmp;
1602 /* Build the library call */
1603 init = gfc_build_addr_expr (pvoid_type_node, init);
1605 gfc_init_se (&se, NULL);
1606 gfc_conv_expr_reference (&se, code->expr1);
1608 gfc_add_block_to_block (&block, &se.pre);
1610 if (code->expr1->ts.kind == 1)
1611 fndecl = gfor_fndecl_select_string;
1612 else if (code->expr1->ts.kind == 4)
1613 fndecl = gfor_fndecl_select_string_char4;
1614 else
1615 gcc_unreachable ();
1617 tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
1618 se.expr, se.string_length);
1619 case_num = gfc_create_var (integer_type_node, "case_num");
1620 gfc_add_modify (&block, case_num, tmp);
1622 gfc_add_block_to_block (&block, &se.post);
1624 tmp = gfc_finish_block (&body);
1625 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1626 gfc_add_expr_to_block (&block, tmp);
1628 tmp = build1_v (LABEL_EXPR, end_label);
1629 gfc_add_expr_to_block (&block, tmp);
1631 return gfc_finish_block (&block);
1635 /* Translate the three variants of the SELECT CASE construct.
1637 SELECT CASEs with INTEGER case expressions can be translated to an
1638 equivalent GENERIC switch statement, and for LOGICAL case
1639 expressions we build one or two if-else compares.
1641 SELECT CASEs with CHARACTER case expressions are a whole different
1642 story, because they don't exist in GENERIC. So we sort them and
1643 do a binary search at runtime.
1645 Fortran has no BREAK statement, and it does not allow jumps from
1646 one case block to another. That makes things a lot easier for
1647 the optimizers. */
1649 tree
1650 gfc_trans_select (gfc_code * code)
1652 gcc_assert (code && code->expr1);
1654 /* Empty SELECT constructs are legal. */
1655 if (code->block == NULL)
1656 return build_empty_stmt (input_location);
1658 /* Select the correct translation function. */
1659 switch (code->expr1->ts.type)
1661 case BT_LOGICAL: return gfc_trans_logical_select (code);
1662 case BT_INTEGER: return gfc_trans_integer_select (code);
1663 case BT_CHARACTER: return gfc_trans_character_select (code);
1664 default:
1665 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1666 /* Not reached */
1671 /* Traversal function to substitute a replacement symtree if the symbol
1672 in the expression is the same as that passed. f == 2 signals that
1673 the variable itself is not to be checked - only the references.
1674 This group of functions is used when the variable expression in a
1675 FORALL assignment has internal references. For example:
1676 FORALL (i = 1:4) p(p(i)) = i
1677 The only recourse here is to store a copy of 'p' for the index
1678 expression. */
1680 static gfc_symtree *new_symtree;
1681 static gfc_symtree *old_symtree;
1683 static bool
1684 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1686 if (expr->expr_type != EXPR_VARIABLE)
1687 return false;
1689 if (*f == 2)
1690 *f = 1;
1691 else if (expr->symtree->n.sym == sym)
1692 expr->symtree = new_symtree;
1694 return false;
1697 static void
1698 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1700 gfc_traverse_expr (e, sym, forall_replace, f);
1703 static bool
1704 forall_restore (gfc_expr *expr,
1705 gfc_symbol *sym ATTRIBUTE_UNUSED,
1706 int *f ATTRIBUTE_UNUSED)
1708 if (expr->expr_type != EXPR_VARIABLE)
1709 return false;
1711 if (expr->symtree == new_symtree)
1712 expr->symtree = old_symtree;
1714 return false;
1717 static void
1718 forall_restore_symtree (gfc_expr *e)
1720 gfc_traverse_expr (e, NULL, forall_restore, 0);
1723 static void
1724 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1726 gfc_se tse;
1727 gfc_se rse;
1728 gfc_expr *e;
1729 gfc_symbol *new_sym;
1730 gfc_symbol *old_sym;
1731 gfc_symtree *root;
1732 tree tmp;
1734 /* Build a copy of the lvalue. */
1735 old_symtree = c->expr1->symtree;
1736 old_sym = old_symtree->n.sym;
1737 e = gfc_lval_expr_from_sym (old_sym);
1738 if (old_sym->attr.dimension)
1740 gfc_init_se (&tse, NULL);
1741 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1742 gfc_add_block_to_block (pre, &tse.pre);
1743 gfc_add_block_to_block (post, &tse.post);
1744 tse.expr = build_fold_indirect_ref (tse.expr);
1746 if (e->ts.type != BT_CHARACTER)
1748 /* Use the variable offset for the temporary. */
1749 tmp = gfc_conv_array_offset (old_sym->backend_decl);
1750 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
1753 else
1755 gfc_init_se (&tse, NULL);
1756 gfc_init_se (&rse, NULL);
1757 gfc_conv_expr (&rse, e);
1758 if (e->ts.type == BT_CHARACTER)
1760 tse.string_length = rse.string_length;
1761 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1762 tse.string_length);
1763 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1764 rse.string_length);
1765 gfc_add_block_to_block (pre, &tse.pre);
1766 gfc_add_block_to_block (post, &tse.post);
1768 else
1770 tmp = gfc_typenode_for_spec (&e->ts);
1771 tse.expr = gfc_create_var (tmp, "temp");
1774 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1775 e->expr_type == EXPR_VARIABLE);
1776 gfc_add_expr_to_block (pre, tmp);
1778 gfc_free_expr (e);
1780 /* Create a new symbol to represent the lvalue. */
1781 new_sym = gfc_new_symbol (old_sym->name, NULL);
1782 new_sym->ts = old_sym->ts;
1783 new_sym->attr.referenced = 1;
1784 new_sym->attr.temporary = 1;
1785 new_sym->attr.dimension = old_sym->attr.dimension;
1786 new_sym->attr.flavor = old_sym->attr.flavor;
1788 /* Use the temporary as the backend_decl. */
1789 new_sym->backend_decl = tse.expr;
1791 /* Create a fake symtree for it. */
1792 root = NULL;
1793 new_symtree = gfc_new_symtree (&root, old_sym->name);
1794 new_symtree->n.sym = new_sym;
1795 gcc_assert (new_symtree == root);
1797 /* Go through the expression reference replacing the old_symtree
1798 with the new. */
1799 forall_replace_symtree (c->expr1, old_sym, 2);
1801 /* Now we have made this temporary, we might as well use it for
1802 the right hand side. */
1803 forall_replace_symtree (c->expr2, old_sym, 1);
1807 /* Handles dependencies in forall assignments. */
1808 static int
1809 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1811 gfc_ref *lref;
1812 gfc_ref *rref;
1813 int need_temp;
1814 gfc_symbol *lsym;
1816 lsym = c->expr1->symtree->n.sym;
1817 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
1819 /* Now check for dependencies within the 'variable'
1820 expression itself. These are treated by making a complete
1821 copy of variable and changing all the references to it
1822 point to the copy instead. Note that the shallow copy of
1823 the variable will not suffice for derived types with
1824 pointer components. We therefore leave these to their
1825 own devices. */
1826 if (lsym->ts.type == BT_DERIVED
1827 && lsym->ts.derived->attr.pointer_comp)
1828 return need_temp;
1830 new_symtree = NULL;
1831 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
1833 forall_make_variable_temp (c, pre, post);
1834 need_temp = 0;
1837 /* Substrings with dependencies are treated in the same
1838 way. */
1839 if (c->expr1->ts.type == BT_CHARACTER
1840 && c->expr1->ref
1841 && c->expr2->expr_type == EXPR_VARIABLE
1842 && lsym == c->expr2->symtree->n.sym)
1844 for (lref = c->expr1->ref; lref; lref = lref->next)
1845 if (lref->type == REF_SUBSTRING)
1846 break;
1847 for (rref = c->expr2->ref; rref; rref = rref->next)
1848 if (rref->type == REF_SUBSTRING)
1849 break;
1851 if (rref && lref
1852 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1854 forall_make_variable_temp (c, pre, post);
1855 need_temp = 0;
1858 return need_temp;
1862 static void
1863 cleanup_forall_symtrees (gfc_code *c)
1865 forall_restore_symtree (c->expr1);
1866 forall_restore_symtree (c->expr2);
1867 gfc_free (new_symtree->n.sym);
1868 gfc_free (new_symtree);
1872 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1873 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1874 indicates whether we should generate code to test the FORALL's mask
1875 array. OUTER is the loop header to be used for initializing mask
1876 indices.
1878 The generated loop format is:
1879 count = (end - start + step) / step
1880 loopvar = start
1881 while (1)
1883 if (count <= 0)
1884 goto end_of_loop
1885 <body>
1886 loopvar += step
1887 count --
1889 end_of_loop: */
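/* For example, FORALL (I = 1:9:2) gives count = (9 - 1 + 2) / 2 = 5, matching
   the five index values I = 1, 3, 5, 7, 9 that the generated loop visits.  */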
1891 static tree
1892 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1893 int mask_flag, stmtblock_t *outer)
1895 int n, nvar;
1896 tree tmp;
1897 tree cond;
1898 stmtblock_t block;
1899 tree exit_label;
1900 tree count;
1901 tree var, start, end, step;
1902 iter_info *iter;
1904 /* Initialize the mask index outside the FORALL nest. */
1905 if (mask_flag && forall_tmp->mask)
1906 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1908 iter = forall_tmp->this_loop;
1909 nvar = forall_tmp->nvar;
1910 for (n = 0; n < nvar; n++)
1912 var = iter->var;
1913 start = iter->start;
1914 end = iter->end;
1915 step = iter->step;
1917 exit_label = gfc_build_label_decl (NULL_TREE);
1918 TREE_USED (exit_label) = 1;
1920 /* The loop counter. */
1921 count = gfc_create_var (TREE_TYPE (var), "count");
1923 /* The body of the loop. */
1924 gfc_init_block (&block);
1926 /* The exit condition. */
1927 cond = fold_build2 (LE_EXPR, boolean_type_node,
1928 count, build_int_cst (TREE_TYPE (count), 0));
1929 tmp = build1_v (GOTO_EXPR, exit_label);
1930 tmp = fold_build3 (COND_EXPR, void_type_node,
1931 cond, tmp, build_empty_stmt (input_location));
1932 gfc_add_expr_to_block (&block, tmp);
1934 /* The main loop body. */
1935 gfc_add_expr_to_block (&block, body);
1937 /* Increment the loop variable. */
1938 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1939 gfc_add_modify (&block, var, tmp);
1941 /* Advance to the next mask element. Only do this for the
1942 innermost loop. */
1943 if (n == 0 && mask_flag && forall_tmp->mask)
1945 tree maskindex = forall_tmp->maskindex;
1946 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1947 maskindex, gfc_index_one_node);
1948 gfc_add_modify (&block, maskindex, tmp);
1951 /* Decrement the loop counter. */
1952 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1953 build_int_cst (TREE_TYPE (var), 1));
1954 gfc_add_modify (&block, count, tmp);
1956 body = gfc_finish_block (&block);
1958 /* Loop var initialization. */
1959 gfc_init_block (&block);
1960 gfc_add_modify (&block, var, start);
1963 /* Initialize the loop counter. */
1964 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1965 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1966 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1967 gfc_add_modify (&block, count, tmp);
1969 /* The loop expression. */
1970 tmp = build1_v (LOOP_EXPR, body);
1971 gfc_add_expr_to_block (&block, tmp);
1973 /* The exit label. */
1974 tmp = build1_v (LABEL_EXPR, exit_label);
1975 gfc_add_expr_to_block (&block, tmp);
1977 body = gfc_finish_block (&block);
1978 iter = iter->next;
1980 return body;
1984 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1985 is nonzero, the body is controlled by all masks in the forall nest.
1986 Otherwise, the innermost loop is not controlled by its mask. This
1987 is used for initializing that mask. */
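/* An invented example of why MASK_FLAG can be zero:

         FORALL (I = 1:N, A(I) /= 0)  X(I) = 1.0 / A(I)

   The mask expression A(I) /= 0 must itself be evaluated in a loop over I;
   that loop is generated with mask_flag == 0 so that the mask being filled
   does not guard its own initialization.  The assignment is then generated
   with mask_flag == 1 and ends up wrapped in the COND_EXPR built below.  */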
1989 static tree
1990 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1991 int mask_flag)
1993 tree tmp;
1994 stmtblock_t header;
1995 forall_info *forall_tmp;
1996 tree mask, maskindex;
1998 gfc_start_block (&header);
2000 forall_tmp = nested_forall_info;
2001 while (forall_tmp != NULL)
2003 /* Generate body with masks' control. */
2004 if (mask_flag)
2006 mask = forall_tmp->mask;
2007 maskindex = forall_tmp->maskindex;
2009 /* If a mask was specified make the assignment conditional. */
2010 if (mask)
2012 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2013 body = build3_v (COND_EXPR, tmp, body,
2014 build_empty_stmt (input_location));
2017 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2018 forall_tmp = forall_tmp->prev_nest;
2019 mask_flag = 1;
2022 gfc_add_expr_to_block (&header, body);
2023 return gfc_finish_block (&header);
2027 /* Allocate data for holding a temporary array. Returns either a local
2028 temporary array or a pointer variable. */
2030 static tree
2031 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2032 tree elem_type)
2034 tree tmpvar;
2035 tree type;
2036 tree tmp;
2038 if (INTEGER_CST_P (size))
2040 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2041 gfc_index_one_node);
2043 else
2044 tmp = NULL_TREE;
2046 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2047 type = build_array_type (elem_type, type);
2048 if (gfc_can_put_var_on_stack (bytesize))
2050 gcc_assert (INTEGER_CST_P (size));
2051 tmpvar = gfc_create_var (type, "temp");
2052 *pdata = NULL_TREE;
2054 else
2056 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2057 *pdata = convert (pvoid_type_node, tmpvar);
2059 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2060 gfc_add_modify (pblock, tmpvar, tmp);
2062 return tmpvar;
2066 /* Generate code to copy the temporary to the actual lhs. */
2068 static tree
2069 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2070 tree count1, tree wheremask, bool invert)
2072 gfc_ss *lss;
2073 gfc_se lse, rse;
2074 stmtblock_t block, body;
2075 gfc_loopinfo loop1;
2076 tree tmp;
2077 tree wheremaskexpr;
2079 /* Walk the lhs. */
2080 lss = gfc_walk_expr (expr);
2082 if (lss == gfc_ss_terminator)
2084 gfc_start_block (&block);
2086 gfc_init_se (&lse, NULL);
2088 /* Translate the expression. */
2089 gfc_conv_expr (&lse, expr);
2091 /* Form the expression for the temporary. */
2092 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2094 /* Use the scalar assignment as is. */
2095 gfc_add_block_to_block (&block, &lse.pre);
2096 gfc_add_modify (&block, lse.expr, tmp);
2097 gfc_add_block_to_block (&block, &lse.post);
2099 /* Increment the count1. */
2100 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2101 gfc_index_one_node);
2102 gfc_add_modify (&block, count1, tmp);
2104 tmp = gfc_finish_block (&block);
2106 else
2108 gfc_start_block (&block);
2110 gfc_init_loopinfo (&loop1);
2111 gfc_init_se (&rse, NULL);
2112 gfc_init_se (&lse, NULL);
2114 /* Associate the lss with the loop. */
2115 gfc_add_ss_to_loop (&loop1, lss);
2117 /* Calculate the bounds of the scalarization. */
2118 gfc_conv_ss_startstride (&loop1);
2119 /* Setup the scalarizing loops. */
2120 gfc_conv_loop_setup (&loop1, &expr->where);
2122 gfc_mark_ss_chain_used (lss, 1);
2124 /* Start the scalarized loop body. */
2125 gfc_start_scalarized_body (&loop1, &body);
2127 /* Setup the gfc_se structures. */
2128 gfc_copy_loopinfo_to_se (&lse, &loop1);
2129 lse.ss = lss;
2131 /* Form the expression of the temporary. */
2132 if (lss != gfc_ss_terminator)
2133 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2134 /* Translate expr. */
2135 gfc_conv_expr (&lse, expr);
2137 /* Use the scalar assignment. */
2138 rse.string_length = lse.string_length;
2139 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2141 /* Form the mask expression according to the mask tree list. */
2142 if (wheremask)
2144 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2145 if (invert)
2146 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2147 TREE_TYPE (wheremaskexpr),
2148 wheremaskexpr);
2149 tmp = fold_build3 (COND_EXPR, void_type_node,
2150 wheremaskexpr, tmp,
2151 build_empty_stmt (input_location));
2154 gfc_add_expr_to_block (&body, tmp);
2156 /* Increment count1. */
2157 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2158 count1, gfc_index_one_node);
2159 gfc_add_modify (&body, count1, tmp);
2161 /* Increment count3. */
2162 if (count3)
2164 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2165 count3, gfc_index_one_node);
2166 gfc_add_modify (&body, count3, tmp);
2169 /* Generate the copying loops. */
2170 gfc_trans_scalarizing_loops (&loop1, &body);
2171 gfc_add_block_to_block (&block, &loop1.pre);
2172 gfc_add_block_to_block (&block, &loop1.post);
2173 gfc_cleanup_loop (&loop1);
2175 tmp = gfc_finish_block (&block);
2177 return tmp;
2181 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
2182 the temporary; LSS and RSS are formed in function compute_inner_temp_size()
2183 and should not be freed. WHEREMASK is the conditional execution mask
2184 whose sense may be inverted by INVERT. */
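/* Sketch of the per-element code generated below (illustration only):

       if (wheremask[count3])      <- only when WHEREMASK is present
         tmp1[count1] = expr2
       count1 = count1 + 1
       count3 = count3 + 1         <- only when COUNT3 is given

   wrapped in the scalarization loops when EXPR2 is not scalar.  */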
2186 static tree
2187 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2188 tree count1, gfc_ss *lss, gfc_ss *rss,
2189 tree wheremask, bool invert)
2191 stmtblock_t block, body1;
2192 gfc_loopinfo loop;
2193 gfc_se lse;
2194 gfc_se rse;
2195 tree tmp;
2196 tree wheremaskexpr;
2198 gfc_start_block (&block);
2200 gfc_init_se (&rse, NULL);
2201 gfc_init_se (&lse, NULL);
2203 if (lss == gfc_ss_terminator)
2205 gfc_init_block (&body1);
2206 gfc_conv_expr (&rse, expr2);
2207 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2209 else
2211 /* Initialize the loop. */
2212 gfc_init_loopinfo (&loop);
2214 /* We may need LSS to determine the shape of the expression. */
2215 gfc_add_ss_to_loop (&loop, lss);
2216 gfc_add_ss_to_loop (&loop, rss);
2218 gfc_conv_ss_startstride (&loop);
2219 gfc_conv_loop_setup (&loop, &expr2->where);
2221 gfc_mark_ss_chain_used (rss, 1);
2222 /* Start the loop body. */
2223 gfc_start_scalarized_body (&loop, &body1);
2225 /* Translate the expression. */
2226 gfc_copy_loopinfo_to_se (&rse, &loop);
2227 rse.ss = rss;
2228 gfc_conv_expr (&rse, expr2);
2230 /* Form the expression of the temporary. */
2231 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2234 /* Use the scalar assignment. */
2235 lse.string_length = rse.string_length;
2236 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2237 expr2->expr_type == EXPR_VARIABLE);
2239 /* Form the mask expression according to the mask tree list. */
2240 if (wheremask)
2242 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2243 if (invert)
2244 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2245 TREE_TYPE (wheremaskexpr),
2246 wheremaskexpr);
2247 tmp = fold_build3 (COND_EXPR, void_type_node,
2248 wheremaskexpr, tmp, build_empty_stmt (input_location));
2251 gfc_add_expr_to_block (&body1, tmp);
2253 if (lss == gfc_ss_terminator)
2255 gfc_add_block_to_block (&block, &body1);
2257 /* Increment count1. */
2258 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2259 gfc_index_one_node);
2260 gfc_add_modify (&block, count1, tmp);
2262 else
2264 /* Increment count1. */
2265 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2266 count1, gfc_index_one_node);
2267 gfc_add_modify (&body1, count1, tmp);
2269 /* Increment count3. */
2270 if (count3)
2272 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2273 count3, gfc_index_one_node);
2274 gfc_add_modify (&body1, count3, tmp);
2277 /* Generate the copying loops. */
2278 gfc_trans_scalarizing_loops (&loop, &body1);
2280 gfc_add_block_to_block (&block, &loop.pre);
2281 gfc_add_block_to_block (&block, &loop.post);
2283 gfc_cleanup_loop (&loop);
2284 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2285 as tree nodes in SS may not be valid in a different scope. */
2288 tmp = gfc_finish_block (&block);
2289 return tmp;
2293 /* Calculate the size of the temporary needed in the assignment inside forall.
2294 LSS and RSS are filled in this function. */
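/* For example (illustration only), for an assignment such as
   a(1:n,1:m) = b(1:n,1:m) + 1 inside a FORALL, the inner temporary size is
   n*m, computed below as the product of (to - from + 1) over the
   scalarizer's dimensions.  */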
2296 static tree
2297 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2298 stmtblock_t * pblock,
2299 gfc_ss **lss, gfc_ss **rss)
2301 gfc_loopinfo loop;
2302 tree size;
2303 int i;
2304 int save_flag;
2305 tree tmp;
2307 *lss = gfc_walk_expr (expr1);
2308 *rss = NULL;
2310 size = gfc_index_one_node;
2311 if (*lss != gfc_ss_terminator)
2313 gfc_init_loopinfo (&loop);
2315 /* Walk the RHS of the expression. */
2316 *rss = gfc_walk_expr (expr2);
2317 if (*rss == gfc_ss_terminator)
2319 /* The rhs is scalar. Add a ss for the expression. */
2320 *rss = gfc_get_ss ();
2321 (*rss)->next = gfc_ss_terminator;
2322 (*rss)->type = GFC_SS_SCALAR;
2323 (*rss)->expr = expr2;
2326 /* Associate the SS with the loop. */
2327 gfc_add_ss_to_loop (&loop, *lss);
2328 /* We don't actually need to add the rhs at this point, but it might
2329 make guessing the loop bounds a bit easier. */
2330 gfc_add_ss_to_loop (&loop, *rss);
2332 /* We only want the shape of the expression, not the rest of the junk
2333 generated by the scalarizer. */
2334 loop.array_parameter = 1;
2336 /* Calculate the bounds of the scalarization. */
2337 save_flag = gfc_option.rtcheck;
2338 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2339 gfc_conv_ss_startstride (&loop);
2340 gfc_option.rtcheck = save_flag;
2341 gfc_conv_loop_setup (&loop, &expr2->where);
2343 /* Figure out how many elements we need. */
2344 for (i = 0; i < loop.dimen; i++)
2346 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2347 gfc_index_one_node, loop.from[i]);
2348 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2349 tmp, loop.to[i]);
2350 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2352 gfc_add_block_to_block (pblock, &loop.pre);
2353 size = gfc_evaluate_now (size, pblock);
2354 gfc_add_block_to_block (pblock, &loop.post);
2356 /* TODO: write a function that cleans up a loopinfo without freeing
2357 the SS chains. Currently a NOP. */
2360 return size;
2364 /* Calculate the overall iterator number of the nested forall construct.
2365 This routine actually calculates the number of times the body of the
2366 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2367 that by the expression INNER_SIZE. The BLOCK argument specifies the
2368 block in which to calculate the result, and the optional INNER_SIZE_BODY
2369 argument contains any statements that need to be executed (inside the loop)
2370 to initialize or calculate INNER_SIZE. */
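/* For example (illustration only), for forall (i=1:4, j=1:3) with no mask
   and a constant INNER_SIZE of 5, the result folds to the constant
   4*3*5 = 60; otherwise a runtime accumulation loop is generated below.  */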
2372 static tree
2373 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2374 stmtblock_t *inner_size_body, stmtblock_t *block)
2376 forall_info *forall_tmp = nested_forall_info;
2377 tree tmp, number;
2378 stmtblock_t body;
2380 /* We can eliminate the innermost unconditional loops with constant
2381 array bounds. */
2382 if (INTEGER_CST_P (inner_size))
2384 while (forall_tmp
2385 && !forall_tmp->mask
2386 && INTEGER_CST_P (forall_tmp->size))
2388 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2389 inner_size, forall_tmp->size);
2390 forall_tmp = forall_tmp->prev_nest;
2393 /* If there are no loops left, we have our constant result. */
2394 if (!forall_tmp)
2395 return inner_size;
2398 /* Otherwise, create a temporary variable to compute the result. */
2399 number = gfc_create_var (gfc_array_index_type, "num");
2400 gfc_add_modify (block, number, gfc_index_zero_node);
2402 gfc_start_block (&body);
2403 if (inner_size_body)
2404 gfc_add_block_to_block (&body, inner_size_body);
2405 if (forall_tmp)
2406 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2407 number, inner_size);
2408 else
2409 tmp = inner_size;
2410 gfc_add_modify (&body, number, tmp);
2411 tmp = gfc_finish_block (&body);
2413 /* Generate loops. */
2414 if (forall_tmp != NULL)
2415 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2417 gfc_add_expr_to_block (block, tmp);
2419 return number;
2423 /* Allocate a temporary for the forall construct. SIZE is the size of the
2424 temporary needed. PTEMP1 is returned so that the space can be freed later. */
2426 static tree
2427 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2428 tree * ptemp1)
2430 tree bytesize;
2431 tree unit;
2432 tree tmp;
2434 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2435 if (!integer_onep (unit))
2436 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2437 else
2438 bytesize = size;
2440 *ptemp1 = NULL;
2441 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2443 if (*ptemp1)
2444 tmp = build_fold_indirect_ref (tmp);
2445 return tmp;
2449 /* Allocate a temporary for the forall construct according to the information
2450 in nested_forall_info. INNER_SIZE is the size of the temporary needed in the
2451 assignment inside forall. PTEMP1 is returned so that the space can be freed later. */
2453 static tree
2454 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2455 tree inner_size, stmtblock_t * inner_size_body,
2456 stmtblock_t * block, tree * ptemp1)
2458 tree size;
2460 /* Calculate the total size of temporary needed in forall construct. */
2461 size = compute_overall_iter_number (nested_forall_info, inner_size,
2462 inner_size_body, block);
2464 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2468 /* Handle assignments inside forall which need a temporary.
2470 forall (i=start:end:stride; maskexpr)
2471 e<i> = f<i>
2472 end forall
2473 (where e,f<i> are arbitrary expressions possibly involving i
2474 and there is a dependency between e<i> and f<i>)
2475 Translates to:
2476 masktmp(:) = maskexpr(:)
2478 maskindex = 0;
2479 count1 = 0;
2480 num = 0;
2481 for (i = start; i <= end; i += stride)
2482 num += SIZE (f<i>)
2483 count1 = 0;
2484 ALLOCATE (tmp(num))
2485 for (i = start; i <= end; i += stride)
2487 if (masktmp[maskindex++])
2488 tmp[count1++] = f<i>
2490 maskindex = 0;
2491 count1 = 0;
2492 for (i = start; i <= end; i += stride)
2494 if (masktmp[maskindex++])
2495 e<i> = tmp[count1++]
2497 DEALLOCATE (tmp)
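/* A typical case requiring this treatment (illustration only) is

       forall (i=1:n)  a(i) = a(n+1-i)

   where the rhs reads elements that other iterations assign, so every
   a(n+1-i) must be evaluated into the temporary before any element of A
   is modified.  */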
2499 static void
2500 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2501 tree wheremask, bool invert,
2502 forall_info * nested_forall_info,
2503 stmtblock_t * block)
2505 tree type;
2506 tree inner_size;
2507 gfc_ss *lss, *rss;
2508 tree count, count1;
2509 tree tmp, tmp1;
2510 tree ptemp1;
2511 stmtblock_t inner_size_body;
2513 /* Create vars. count1 is the current iterator number of the nested
2514 forall. */
2515 count1 = gfc_create_var (gfc_array_index_type, "count1");
2517 /* Count is the wheremask index. */
2518 if (wheremask)
2520 count = gfc_create_var (gfc_array_index_type, "count");
2521 gfc_add_modify (block, count, gfc_index_zero_node);
2523 else
2524 count = NULL;
2526 /* Initialize count1. */
2527 gfc_add_modify (block, count1, gfc_index_zero_node);
2529 /* Calculate the size of the temporary needed in the assignment. Return lss
2530 and rss, which are used in function generate_loop_for_rhs_to_temp(). */
2531 gfc_init_block (&inner_size_body);
2532 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2533 &lss, &rss);
2535 /* The type of the LHS. Used in function allocate_temp_for_forall_nest. */
2536 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2538 if (!expr1->ts.cl->backend_decl)
2540 gfc_se tse;
2541 gfc_init_se (&tse, NULL);
2542 gfc_conv_expr (&tse, expr1->ts.cl->length);
2543 expr1->ts.cl->backend_decl = tse.expr;
2545 type = gfc_get_character_type_len (gfc_default_character_kind,
2546 expr1->ts.cl->backend_decl);
2548 else
2549 type = gfc_typenode_for_spec (&expr1->ts);
2551 /* Allocate temporary for nested forall construct according to the
2552 information in nested_forall_info and inner_size. */
2553 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2554 &inner_size_body, block, &ptemp1);
2556 /* Generate code to copy the rhs to the temporary. */
2557 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2558 wheremask, invert);
2560 /* Generate body and loops according to the information in
2561 nested_forall_info. */
2562 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2563 gfc_add_expr_to_block (block, tmp);
2565 /* Reset count1. */
2566 gfc_add_modify (block, count1, gfc_index_zero_node);
2568 /* Reset count. */
2569 if (wheremask)
2570 gfc_add_modify (block, count, gfc_index_zero_node);
2572 /* Generate code to copy the temporary to the lhs. */
2573 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2574 wheremask, invert);
2576 /* Generate body and loops according to the information in
2577 nested_forall_info. */
2578 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2579 gfc_add_expr_to_block (block, tmp);
2581 if (ptemp1)
2583 /* Free the temporary. */
2584 tmp = gfc_call_free (ptemp1);
2585 gfc_add_expr_to_block (block, tmp);
2590 /* Translate a pointer assignment inside FORALL which needs a temporary. */
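/* For example (illustration only; the component name is arbitrary),

       forall (i=1:n)  p(i)%link => p(n+1-i)%link

   must capture all the targets (pointers or array descriptors) in a
   temporary before any pointer on the lhs is redefined.  */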
2592 static void
2593 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2594 forall_info * nested_forall_info,
2595 stmtblock_t * block)
2597 tree type;
2598 tree inner_size;
2599 gfc_ss *lss, *rss;
2600 gfc_se lse;
2601 gfc_se rse;
2602 gfc_ss_info *info;
2603 gfc_loopinfo loop;
2604 tree desc;
2605 tree parm;
2606 tree parmtype;
2607 stmtblock_t body;
2608 tree count;
2609 tree tmp, tmp1, ptemp1;
2611 count = gfc_create_var (gfc_array_index_type, "count");
2612 gfc_add_modify (block, count, gfc_index_zero_node);
2614 inner_size = integer_one_node;
2615 lss = gfc_walk_expr (expr1);
2616 rss = gfc_walk_expr (expr2);
2617 if (lss == gfc_ss_terminator)
2619 type = gfc_typenode_for_spec (&expr1->ts);
2620 type = build_pointer_type (type);
2622 /* Allocate temporary for nested forall construct according to the
2623 information in nested_forall_info and inner_size. */
2624 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2625 inner_size, NULL, block, &ptemp1);
2626 gfc_start_block (&body);
2627 gfc_init_se (&lse, NULL);
2628 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2629 gfc_init_se (&rse, NULL);
2630 rse.want_pointer = 1;
2631 gfc_conv_expr (&rse, expr2);
2632 gfc_add_block_to_block (&body, &rse.pre);
2633 gfc_add_modify (&body, lse.expr,
2634 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2635 gfc_add_block_to_block (&body, &rse.post);
2637 /* Increment count. */
2638 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2639 count, gfc_index_one_node);
2640 gfc_add_modify (&body, count, tmp);
2642 tmp = gfc_finish_block (&body);
2644 /* Generate body and loops according to the information in
2645 nested_forall_info. */
2646 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2647 gfc_add_expr_to_block (block, tmp);
2649 /* Reset count. */
2650 gfc_add_modify (block, count, gfc_index_zero_node);
2652 gfc_start_block (&body);
2653 gfc_init_se (&lse, NULL);
2654 gfc_init_se (&rse, NULL);
2655 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2656 lse.want_pointer = 1;
2657 gfc_conv_expr (&lse, expr1);
2658 gfc_add_block_to_block (&body, &lse.pre);
2659 gfc_add_modify (&body, lse.expr, rse.expr);
2660 gfc_add_block_to_block (&body, &lse.post);
2661 /* Increment count. */
2662 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2663 count, gfc_index_one_node);
2664 gfc_add_modify (&body, count, tmp);
2665 tmp = gfc_finish_block (&body);
2667 /* Generate body and loops according to the information in
2668 nested_forall_info. */
2669 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2670 gfc_add_expr_to_block (block, tmp);
2672 else
2674 gfc_init_loopinfo (&loop);
2676 /* Associate the SS with the loop. */
2677 gfc_add_ss_to_loop (&loop, rss);
2679 /* Setup the scalarizing loops and bounds. */
2680 gfc_conv_ss_startstride (&loop);
2682 gfc_conv_loop_setup (&loop, &expr2->where);
2684 info = &rss->data.info;
2685 desc = info->descriptor;
2687 /* Make a new descriptor. */
2688 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2689 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2690 loop.from, loop.to, 1,
2691 GFC_ARRAY_UNKNOWN);
2693 /* Allocate temporary for nested forall construct. */
2694 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2695 inner_size, NULL, block, &ptemp1);
2696 gfc_start_block (&body);
2697 gfc_init_se (&lse, NULL);
2698 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2699 lse.direct_byref = 1;
2700 rss = gfc_walk_expr (expr2);
2701 gfc_conv_expr_descriptor (&lse, expr2, rss);
2703 gfc_add_block_to_block (&body, &lse.pre);
2704 gfc_add_block_to_block (&body, &lse.post);
2706 /* Increment count. */
2707 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2708 count, gfc_index_one_node);
2709 gfc_add_modify (&body, count, tmp);
2711 tmp = gfc_finish_block (&body);
2713 /* Generate body and loops according to the information in
2714 nested_forall_info. */
2715 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2716 gfc_add_expr_to_block (block, tmp);
2718 /* Reset count. */
2719 gfc_add_modify (block, count, gfc_index_zero_node);
2721 parm = gfc_build_array_ref (tmp1, count, NULL);
2722 lss = gfc_walk_expr (expr1);
2723 gfc_init_se (&lse, NULL);
2724 gfc_conv_expr_descriptor (&lse, expr1, lss);
2725 gfc_add_modify (&lse.pre, lse.expr, parm);
2726 gfc_start_block (&body);
2727 gfc_add_block_to_block (&body, &lse.pre);
2728 gfc_add_block_to_block (&body, &lse.post);
2730 /* Increment count. */
2731 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2732 count, gfc_index_one_node);
2733 gfc_add_modify (&body, count, tmp);
2735 tmp = gfc_finish_block (&body);
2737 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2738 gfc_add_expr_to_block (block, tmp);
2740 /* Free the temporary. */
2741 if (ptemp1)
2743 tmp = gfc_call_free (ptemp1);
2744 gfc_add_expr_to_block (block, tmp);
2749 /* FORALL and WHERE statements are really nasty, especially when you nest
2750 them. The rhs of a forall assignment must be evaluated for all index values before the
2751 actual assignments are performed. Presumably this also applies to all the
2752 assignments in an inner where statement. */
2754 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2755 linear array, relying on the fact that we process in the same order in all
2756 loops.
2758 forall (i=start:end:stride; maskexpr)
2759 e<i> = f<i>
2760 g<i> = h<i>
2761 end forall
2762 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2763 Translates to:
2764 count = ((end + 1 - start) / stride)
2765 masktmp(:) = maskexpr(:)
2767 maskindex = 0;
2768 for (i = start; i <= end; i += stride)
2770 if (masktmp[maskindex++])
2771 e<i> = f<i>
2773 maskindex = 0;
2774 for (i = start; i <= end; i += stride)
2776 if (masktmp[maskindex++])
2777 g<i> = h<i>
2780 Note that this code only works when there are no dependencies.
2781 Forall loops with array assignments and data dependencies are a real pain,
2782 because the size of the temporary cannot always be determined before the
2783 loop is executed. This problem is compounded by the presence of nested
2784 FORALL constructs.
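/* For instance (illustration only),

       forall (i=1:n, j=1:m, a(i,j) /= 0.0)  b(i,j) = 1.0 / a(i,j)

   first evaluates the mask a(i,j) /= 0.0 into a packed logical temporary
   and then runs the assignment loops under control of that temporary,
   following the scheme above.  */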
2787 static tree
2788 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2790 stmtblock_t pre;
2791 stmtblock_t post;
2792 stmtblock_t block;
2793 stmtblock_t body;
2794 tree *var;
2795 tree *start;
2796 tree *end;
2797 tree *step;
2798 gfc_expr **varexpr;
2799 tree tmp;
2800 tree assign;
2801 tree size;
2802 tree maskindex;
2803 tree mask;
2804 tree pmask;
2805 int n;
2806 int nvar;
2807 int need_temp;
2808 gfc_forall_iterator *fa;
2809 gfc_se se;
2810 gfc_code *c;
2811 gfc_saved_var *saved_vars;
2812 iter_info *this_forall;
2813 forall_info *info;
2814 bool need_mask;
2816 /* Do nothing if the mask is false. */
2817 if (code->expr1
2818 && code->expr1->expr_type == EXPR_CONSTANT
2819 && !code->expr1->value.logical)
2820 return build_empty_stmt (input_location);
2822 n = 0;
2823 /* Count the number of FORALL indices. */
2824 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2825 n++;
2826 nvar = n;
2828 /* Allocate the space for var, start, end, step, varexpr. */
2829 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2830 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2831 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2832 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2833 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2834 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2836 /* Allocate the space for info. */
2837 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2839 gfc_start_block (&pre);
2840 gfc_init_block (&post);
2841 gfc_init_block (&block);
2843 n = 0;
2844 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2846 gfc_symbol *sym = fa->var->symtree->n.sym;
2848 /* Allocate space for this_forall. */
2849 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2851 /* Create a temporary variable for the FORALL index. */
2852 tmp = gfc_typenode_for_spec (&sym->ts);
2853 var[n] = gfc_create_var (tmp, sym->name);
2854 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2856 /* Record it in this_forall. */
2857 this_forall->var = var[n];
2859 /* Replace the index symbol's backend_decl with the temporary decl. */
2860 sym->backend_decl = var[n];
2862 /* Work out the start, end and stride for the loop. */
2863 gfc_init_se (&se, NULL);
2864 gfc_conv_expr_val (&se, fa->start);
2865 /* Record it in this_forall. */
2866 this_forall->start = se.expr;
2867 gfc_add_block_to_block (&block, &se.pre);
2868 start[n] = se.expr;
2870 gfc_init_se (&se, NULL);
2871 gfc_conv_expr_val (&se, fa->end);
2872 /* Record it in this_forall. */
2873 this_forall->end = se.expr;
2874 gfc_make_safe_expr (&se);
2875 gfc_add_block_to_block (&block, &se.pre);
2876 end[n] = se.expr;
2878 gfc_init_se (&se, NULL);
2879 gfc_conv_expr_val (&se, fa->stride);
2880 /* Record it in this_forall. */
2881 this_forall->step = se.expr;
2882 gfc_make_safe_expr (&se);
2883 gfc_add_block_to_block (&block, &se.pre);
2884 step[n] = se.expr;
2886 /* Set the NEXT field of this_forall to NULL. */
2887 this_forall->next = NULL;
2888 /* Link this_forall to the info construct. */
2889 if (info->this_loop)
2891 iter_info *iter_tmp = info->this_loop;
2892 while (iter_tmp->next != NULL)
2893 iter_tmp = iter_tmp->next;
2894 iter_tmp->next = this_forall;
2896 else
2897 info->this_loop = this_forall;
2899 n++;
2901 nvar = n;
2903 /* Calculate the size needed for the current forall level. */
2904 size = gfc_index_one_node;
2905 for (n = 0; n < nvar; n++)
2907 /* size = (end + step - start) / step. */
2908 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2909 step[n], start[n]);
2910 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2912 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2913 tmp = convert (gfc_array_index_type, tmp);
2915 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2918 /* Record the nvar and size of current forall level. */
2919 info->nvar = nvar;
2920 info->size = size;
2922 if (code->expr1)
2924 /* If the mask is .true., consider the FORALL unconditional. */
2925 if (code->expr1->expr_type == EXPR_CONSTANT
2926 && code->expr1->value.logical)
2927 need_mask = false;
2928 else
2929 need_mask = true;
2931 else
2932 need_mask = false;
2934 /* First we need to allocate the mask. */
2935 if (need_mask)
2937 /* As the mask array can be very big, prefer compact boolean types. */
2938 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2939 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2940 size, NULL, &block, &pmask);
2941 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2943 /* Record them in the info structure. */
2944 info->maskindex = maskindex;
2945 info->mask = mask;
2947 else
2949 /* No mask was specified. */
2950 maskindex = NULL_TREE;
2951 mask = pmask = NULL_TREE;
2954 /* Link the current forall level to nested_forall_info. */
2955 info->prev_nest = nested_forall_info;
2956 nested_forall_info = info;
2958 /* Copy the mask into a temporary variable if required.
2959 For now we assume a mask temporary is needed. */
2960 if (need_mask)
2962 /* As the mask array can be very big, prefer compact boolean types. */
2963 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2965 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2967 /* Start of mask assignment loop body. */
2968 gfc_start_block (&body);
2970 /* Evaluate the mask expression. */
2971 gfc_init_se (&se, NULL);
2972 gfc_conv_expr_val (&se, code->expr1);
2973 gfc_add_block_to_block (&body, &se.pre);
2975 /* Store the mask. */
2976 se.expr = convert (mask_type, se.expr);
2978 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2979 gfc_add_modify (&body, tmp, se.expr);
2981 /* Advance to the next mask element. */
2982 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2983 maskindex, gfc_index_one_node);
2984 gfc_add_modify (&body, maskindex, tmp);
2986 /* Generate the loops. */
2987 tmp = gfc_finish_block (&body);
2988 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2989 gfc_add_expr_to_block (&block, tmp);
2992 c = code->block->next;
2994 /* TODO: loop merging in FORALL statements. */
2995 /* Now that we've got a copy of the mask, generate the assignment loops. */
2996 while (c)
2998 switch (c->op)
3000 case EXEC_ASSIGN:
3001 /* A scalar or array assignment. Do the simple check for
3002 lhs to rhs dependencies. These make a temporary for the
3003 rhs and form a second forall block to copy it to the variable. */
3004 need_temp = check_forall_dependencies(c, &pre, &post);
3006 /* Temporaries due to array assignment data dependencies introduce
3007 no end of problems. */
3008 if (need_temp)
3009 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3010 nested_forall_info, &block);
3011 else
3013 /* Use the normal assignment copying routines. */
3014 assign = gfc_trans_assignment (c->expr1, c->expr2, false);
3016 /* Generate body and loops. */
3017 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3018 assign, 1);
3019 gfc_add_expr_to_block (&block, tmp);
3022 /* Clean up any temporary symtrees that have been made to deal
3023 with dependencies. */
3024 if (new_symtree)
3025 cleanup_forall_symtrees (c);
3027 break;
3029 case EXEC_WHERE:
3030 /* Translate WHERE or WHERE construct nested in FORALL. */
3031 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3032 break;
3034 /* Pointer assignment inside FORALL. */
3035 case EXEC_POINTER_ASSIGN:
3036 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3037 if (need_temp)
3038 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3039 nested_forall_info, &block);
3040 else
3042 /* Use the normal assignment copying routines. */
3043 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3045 /* Generate body and loops. */
3046 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3047 assign, 1);
3048 gfc_add_expr_to_block (&block, tmp);
3050 break;
3052 case EXEC_FORALL:
3053 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3054 gfc_add_expr_to_block (&block, tmp);
3055 break;
3057 /* Explicit subroutine calls are prevented by the frontend but interface
3058 assignments can legitimately produce them. */
3059 case EXEC_ASSIGN_CALL:
3060 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3061 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3062 gfc_add_expr_to_block (&block, tmp);
3063 break;
3065 default:
3066 gcc_unreachable ();
3069 c = c->next;
3072 /* Restore the original index variables. */
3073 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3074 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3076 /* Free the space for var, start, end, step, varexpr. */
3077 gfc_free (var);
3078 gfc_free (start);
3079 gfc_free (end);
3080 gfc_free (step);
3081 gfc_free (varexpr);
3082 gfc_free (saved_vars);
3084 /* Free the space for this forall_info. */
3085 gfc_free (info);
3087 if (pmask)
3089 /* Free the temporary for the mask. */
3090 tmp = gfc_call_free (pmask);
3091 gfc_add_expr_to_block (&block, tmp);
3093 if (maskindex)
3094 pushdecl (maskindex);
3096 gfc_add_block_to_block (&pre, &block);
3097 gfc_add_block_to_block (&pre, &post);
3099 return gfc_finish_block (&pre);
3103 /* Translate the FORALL statement or construct. */
3105 tree gfc_trans_forall (gfc_code * code)
3107 return gfc_trans_forall_1 (code, NULL);
3111 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3112 If the WHERE construct is nested in FORALL, compute the overall temporary
3113 needed by the WHERE mask expression multiplied by the iterator number of
3114 the nested forall.
3115 ME is the WHERE mask expression.
3116 MASK is the current execution mask upon input, whose sense may or may
3117 not be inverted as specified by the INVERT argument.
3118 CMASK is the updated execution mask on output, or NULL if not required.
3119 PMASK is the pending execution mask on output, or NULL if not required.
3120 BLOCK is the block in which to place the condition evaluation loops. */
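/* In effect, for each element the code below computes

       cond     = ME
       cmask[i] = mask[i] .and. cond            (when CMASK is wanted)
       pmask[i] = mask[i] .and. .not. cond      (when PMASK is wanted)

   where mask[i] is taken as true when no current mask exists and is
   negated first when INVERT is set.  (Summary sketch of the code below.)  */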
3122 static void
3123 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3124 tree mask, bool invert, tree cmask, tree pmask,
3125 tree mask_type, stmtblock_t * block)
3127 tree tmp, tmp1;
3128 gfc_ss *lss, *rss;
3129 gfc_loopinfo loop;
3130 stmtblock_t body, body1;
3131 tree count, cond, mtmp;
3132 gfc_se lse, rse;
3134 gfc_init_loopinfo (&loop);
3136 lss = gfc_walk_expr (me);
3137 rss = gfc_walk_expr (me);
3139 /* Variable to index the temporary. */
3140 count = gfc_create_var (gfc_array_index_type, "count");
3141 /* Initialize count. */
3142 gfc_add_modify (block, count, gfc_index_zero_node);
3144 gfc_start_block (&body);
3146 gfc_init_se (&rse, NULL);
3147 gfc_init_se (&lse, NULL);
3149 if (lss == gfc_ss_terminator)
3151 gfc_init_block (&body1);
3153 else
3155 /* Initialize the loop. */
3156 gfc_init_loopinfo (&loop);
3158 /* We may need LSS to determine the shape of the expression. */
3159 gfc_add_ss_to_loop (&loop, lss);
3160 gfc_add_ss_to_loop (&loop, rss);
3162 gfc_conv_ss_startstride (&loop);
3163 gfc_conv_loop_setup (&loop, &me->where);
3165 gfc_mark_ss_chain_used (rss, 1);
3166 /* Start the loop body. */
3167 gfc_start_scalarized_body (&loop, &body1);
3169 /* Translate the expression. */
3170 gfc_copy_loopinfo_to_se (&rse, &loop);
3171 rse.ss = rss;
3172 gfc_conv_expr (&rse, me);
3175 /* Variable to evaluate mask condition. */
3176 cond = gfc_create_var (mask_type, "cond");
3177 if (mask && (cmask || pmask))
3178 mtmp = gfc_create_var (mask_type, "mask");
3179 else mtmp = NULL_TREE;
3181 gfc_add_block_to_block (&body1, &lse.pre);
3182 gfc_add_block_to_block (&body1, &rse.pre);
3184 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3186 if (mask && (cmask || pmask))
3188 tmp = gfc_build_array_ref (mask, count, NULL);
3189 if (invert)
3190 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3191 gfc_add_modify (&body1, mtmp, tmp);
3194 if (cmask)
3196 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3197 tmp = cond;
3198 if (mask)
3199 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3200 gfc_add_modify (&body1, tmp1, tmp);
3203 if (pmask)
3205 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3206 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3207 if (mask)
3208 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3209 gfc_add_modify (&body1, tmp1, tmp);
3212 gfc_add_block_to_block (&body1, &lse.post);
3213 gfc_add_block_to_block (&body1, &rse.post);
3215 if (lss == gfc_ss_terminator)
3217 gfc_add_block_to_block (&body, &body1);
3219 else
3221 /* Increment count. */
3222 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3223 gfc_index_one_node);
3224 gfc_add_modify (&body1, count, tmp1);
3226 /* Generate the copying loops. */
3227 gfc_trans_scalarizing_loops (&loop, &body1);
3229 gfc_add_block_to_block (&body, &loop.pre);
3230 gfc_add_block_to_block (&body, &loop.post);
3232 gfc_cleanup_loop (&loop);
3233 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3234 as tree nodes in SS may not be valid in a different scope. */
3237 tmp1 = gfc_finish_block (&body);
3238 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3239 if (nested_forall_info != NULL)
3240 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3242 gfc_add_expr_to_block (block, tmp1);
3246 /* Translate an assignment statement in a WHERE statement or construct
3247 statement. The MASK expression is used to control which elements
3248 of EXPR1 shall be assigned. The sense of MASK is specified by
3249 INVERT. */
3251 static tree
3252 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3253 tree mask, bool invert,
3254 tree count1, tree count2,
3255 gfc_code *cnext)
3257 gfc_se lse;
3258 gfc_se rse;
3259 gfc_ss *lss;
3260 gfc_ss *lss_section;
3261 gfc_ss *rss;
3263 gfc_loopinfo loop;
3264 tree tmp;
3265 stmtblock_t block;
3266 stmtblock_t body;
3267 tree index, maskexpr;
3269 /* A defined assignment. */
3270 if (cnext && cnext->resolved_sym)
3271 return gfc_trans_call (cnext, true, mask, count1, invert);
3273 #if 0
3274 /* TODO: handle this special case.
3275 Special case a single function returning an array. */
3276 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3278 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3279 if (tmp)
3280 return tmp;
3282 #endif
3284 /* Assignment of the form lhs = rhs. */
3285 gfc_start_block (&block);
3287 gfc_init_se (&lse, NULL);
3288 gfc_init_se (&rse, NULL);
3290 /* Walk the lhs. */
3291 lss = gfc_walk_expr (expr1);
3292 rss = NULL;
3294 /* In each where-assign-stmt, the mask-expr and the variable being
3295 defined shall be arrays of the same shape. */
3296 gcc_assert (lss != gfc_ss_terminator);
3298 /* The assignment needs scalarization. */
3299 lss_section = lss;
3301 /* Find a non-scalar SS from the lhs. */
3302 while (lss_section != gfc_ss_terminator
3303 && lss_section->type != GFC_SS_SECTION)
3304 lss_section = lss_section->next;
3306 gcc_assert (lss_section != gfc_ss_terminator);
3308 /* Initialize the scalarizer. */
3309 gfc_init_loopinfo (&loop);
3311 /* Walk the rhs. */
3312 rss = gfc_walk_expr (expr2);
3313 if (rss == gfc_ss_terminator)
3315 /* The rhs is scalar. Add a ss for the expression. */
3316 rss = gfc_get_ss ();
3317 rss->where = 1;
3318 rss->next = gfc_ss_terminator;
3319 rss->type = GFC_SS_SCALAR;
3320 rss->expr = expr2;
3323 /* Associate the SS with the loop. */
3324 gfc_add_ss_to_loop (&loop, lss);
3325 gfc_add_ss_to_loop (&loop, rss);
3327 /* Calculate the bounds of the scalarization. */
3328 gfc_conv_ss_startstride (&loop);
3330 /* Resolve any data dependencies in the statement. */
3331 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3333 /* Setup the scalarizing loops. */
3334 gfc_conv_loop_setup (&loop, &expr2->where);
3336 /* Setup the gfc_se structures. */
3337 gfc_copy_loopinfo_to_se (&lse, &loop);
3338 gfc_copy_loopinfo_to_se (&rse, &loop);
3340 rse.ss = rss;
3341 gfc_mark_ss_chain_used (rss, 1);
3342 if (loop.temp_ss == NULL)
3344 lse.ss = lss;
3345 gfc_mark_ss_chain_used (lss, 1);
3347 else
3349 lse.ss = loop.temp_ss;
3350 gfc_mark_ss_chain_used (lss, 3);
3351 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3354 /* Start the scalarized loop body. */
3355 gfc_start_scalarized_body (&loop, &body);
3357 /* Translate the expression. */
3358 gfc_conv_expr (&rse, expr2);
3359 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3361 gfc_conv_tmp_array_ref (&lse);
3362 gfc_advance_se_ss_chain (&lse);
3364 else
3365 gfc_conv_expr (&lse, expr1);
3367 /* Form the mask expression according to the mask. */
3368 index = count1;
3369 maskexpr = gfc_build_array_ref (mask, index, NULL);
3370 if (invert)
3371 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3373 /* Use the scalar assignment as is. */
3374 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3375 loop.temp_ss != NULL, false);
3377 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3379 gfc_add_expr_to_block (&body, tmp);
3381 if (lss == gfc_ss_terminator)
3383 /* Increment count1. */
3384 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3385 count1, gfc_index_one_node);
3386 gfc_add_modify (&body, count1, tmp);
3388 /* Use the scalar assignment as is. */
3389 gfc_add_block_to_block (&block, &body);
3391 else
3393 gcc_assert (lse.ss == gfc_ss_terminator
3394 && rse.ss == gfc_ss_terminator);
3396 if (loop.temp_ss != NULL)
3398 /* Increment count1 before finishing the main body of a scalarized
3399 expression. */
3400 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3401 count1, gfc_index_one_node);
3402 gfc_add_modify (&body, count1, tmp);
3403 gfc_trans_scalarized_loop_boundary (&loop, &body);
3405 /* We need to copy the temporary to the actual lhs. */
3406 gfc_init_se (&lse, NULL);
3407 gfc_init_se (&rse, NULL);
3408 gfc_copy_loopinfo_to_se (&lse, &loop);
3409 gfc_copy_loopinfo_to_se (&rse, &loop);
3411 rse.ss = loop.temp_ss;
3412 lse.ss = lss;
3414 gfc_conv_tmp_array_ref (&rse);
3415 gfc_advance_se_ss_chain (&rse);
3416 gfc_conv_expr (&lse, expr1);
3418 gcc_assert (lse.ss == gfc_ss_terminator
3419 && rse.ss == gfc_ss_terminator);
3421 /* Form the mask expression according to the mask tree list. */
3422 index = count2;
3423 maskexpr = gfc_build_array_ref (mask, index, NULL);
3424 if (invert)
3425 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3426 maskexpr);
3428 /* Use the scalar assignment as is. */
3429 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3430 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3431 build_empty_stmt (input_location));
3432 gfc_add_expr_to_block (&body, tmp);
3434 /* Increment count2. */
3435 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3436 count2, gfc_index_one_node);
3437 gfc_add_modify (&body, count2, tmp);
3439 else
3441 /* Increment count1. */
3442 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3443 count1, gfc_index_one_node);
3444 gfc_add_modify (&body, count1, tmp);
3447 /* Generate the copying loops. */
3448 gfc_trans_scalarizing_loops (&loop, &body);
3450 /* Wrap the whole thing up. */
3451 gfc_add_block_to_block (&block, &loop.pre);
3452 gfc_add_block_to_block (&block, &loop.post);
3453 gfc_cleanup_loop (&loop);
3456 return gfc_finish_block (&block);
3460 /* Translate the WHERE construct or statement.
3461 This function can be called recursively to translate a nested WHERE
3462 construct or statement.
3463 MASK is the control mask. */
3465 static void
3466 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3467 forall_info * nested_forall_info, stmtblock_t * block)
3469 stmtblock_t inner_size_body;
3470 tree inner_size, size;
3471 gfc_ss *lss, *rss;
3472 tree mask_type;
3473 gfc_expr *expr1;
3474 gfc_expr *expr2;
3475 gfc_code *cblock;
3476 gfc_code *cnext;
3477 tree tmp;
3478 tree cond;
3479 tree count1, count2;
3480 bool need_cmask;
3481 bool need_pmask;
3482 int need_temp;
3483 tree pcmask = NULL_TREE;
3484 tree ppmask = NULL_TREE;
3485 tree cmask = NULL_TREE;
3486 tree pmask = NULL_TREE;
3487 gfc_actual_arglist *arg;
3489 /* The WHERE statement or the WHERE construct statement. */
3490 cblock = code->block;
3492 /* As the mask array can be very big, prefer compact boolean types. */
3493 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3495 /* Determine which temporary masks are needed. */
3496 if (!cblock->block)
3498 /* One clause: No ELSEWHEREs. */
3499 need_cmask = (cblock->next != 0);
3500 need_pmask = false;
3502 else if (cblock->block->block)
3504 /* Three or more clauses: Conditional ELSEWHEREs. */
3505 need_cmask = true;
3506 need_pmask = true;
3508 else if (cblock->next)
3510 /* Two clauses, the first non-empty. */
3511 need_cmask = true;
3512 need_pmask = (mask != NULL_TREE
3513 && cblock->block->next != 0);
3515 else if (!cblock->block->next)
3517 /* Two clauses, both empty. */
3518 need_cmask = false;
3519 need_pmask = false;
3521 /* Two clauses, the first empty, the second non-empty. */
3522 else if (mask)
3524 need_cmask = (cblock->block->expr1 != 0);
3525 need_pmask = true;
3527 else
3529 need_cmask = true;
3530 need_pmask = false;
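      /* For example (illustration only): a bare WHERE statement such as
         WHERE (m) a = b only needs CMASK, while a construct with three or
         more clauses, e.g. WHERE (m1) ... ELSEWHERE (m2) ... ELSEWHERE ...
         END WHERE, needs both CMASK and PMASK.  */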
3533 if (need_cmask || need_pmask)
3535 /* Calculate the size of temporary needed by the mask-expr. */
3536 gfc_init_block (&inner_size_body);
3537 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3538 &inner_size_body, &lss, &rss);
3540 /* Calculate the total size of temporary needed. */
3541 size = compute_overall_iter_number (nested_forall_info, inner_size,
3542 &inner_size_body, block);
3544 /* Check whether the size is negative; if so, use zero. */
3545 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3546 gfc_index_zero_node);
3547 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3548 gfc_index_zero_node, size);
3549 size = gfc_evaluate_now (size, block);
3551 /* Allocate temporary for WHERE mask if needed. */
3552 if (need_cmask)
3553 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3554 &pcmask);
3556 /* Allocate temporary for !mask if needed. */
3557 if (need_pmask)
3558 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3559 &ppmask);
3562 while (cblock)
3564 /* Each time around this loop, the where clause is conditional
3565 on the value of mask and invert, which are updated at the
3566 bottom of the loop. */
3568 /* Has mask-expr. */
3569 if (cblock->expr1)
3571 /* Ensure that the WHERE mask will be evaluated exactly once.
3572 If there are no statements in this WHERE/ELSEWHERE clause,
3573 then we don't need to update the control mask (cmask).
3574 If this is the last clause of the WHERE construct, then
3575 we don't need to update the pending control mask (pmask). */
3576 if (mask)
3577 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3578 mask, invert,
3579 cblock->next ? cmask : NULL_TREE,
3580 cblock->block ? pmask : NULL_TREE,
3581 mask_type, block);
3582 else
3583 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3584 NULL_TREE, false,
3585 (cblock->next || cblock->block)
3586 ? cmask : NULL_TREE,
3587 NULL_TREE, mask_type, block);
3589 invert = false;
3591 /* It's a final elsewhere-stmt. No mask-expr is present. */
3592 else
3593 cmask = mask;
3595 /* The body of this where clause is controlled by cmask with
3596 the sense specified by invert. */
3598 /* Get the assignment statement of a WHERE statement, or the first
3599 statement in the where-body-construct of a WHERE construct. */
3600 cnext = cblock->next;
3601 while (cnext)
3603 switch (cnext->op)
3605 /* WHERE assignment statement. */
3606 case EXEC_ASSIGN_CALL:
3608 arg = cnext->ext.actual;
3609 expr1 = expr2 = NULL;
3610 for (; arg; arg = arg->next)
3612 if (!arg->expr)
3613 continue;
3614 if (expr1 == NULL)
3615 expr1 = arg->expr;
3616 else
3617 expr2 = arg->expr;
3619 goto evaluate;
3621 case EXEC_ASSIGN:
3622 expr1 = cnext->expr1;
3623 expr2 = cnext->expr2;
3624 evaluate:
3625 if (nested_forall_info != NULL)
3627 need_temp = gfc_check_dependency (expr1, expr2, 0);
3628 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3629 gfc_trans_assign_need_temp (expr1, expr2,
3630 cmask, invert,
3631 nested_forall_info, block);
3632 else
3634 /* Variables to control maskexpr. */
3635 count1 = gfc_create_var (gfc_array_index_type, "count1");
3636 count2 = gfc_create_var (gfc_array_index_type, "count2");
3637 gfc_add_modify (block, count1, gfc_index_zero_node);
3638 gfc_add_modify (block, count2, gfc_index_zero_node);
3640 tmp = gfc_trans_where_assign (expr1, expr2,
3641 cmask, invert,
3642 count1, count2,
3643 cnext);
3645 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3646 tmp, 1);
3647 gfc_add_expr_to_block (block, tmp);
3650 else
3652 /* Variables to control maskexpr. */
3653 count1 = gfc_create_var (gfc_array_index_type, "count1");
3654 count2 = gfc_create_var (gfc_array_index_type, "count2");
3655 gfc_add_modify (block, count1, gfc_index_zero_node);
3656 gfc_add_modify (block, count2, gfc_index_zero_node);
3658 tmp = gfc_trans_where_assign (expr1, expr2,
3659 cmask, invert,
3660 count1, count2,
3661 cnext);
3662 gfc_add_expr_to_block (block, tmp);
3665 break;
3667 /* A WHERE statement or WHERE construct that is part of a where-body-construct. */
3668 case EXEC_WHERE:
3669 gfc_trans_where_2 (cnext, cmask, invert,
3670 nested_forall_info, block);
3671 break;
3673 default:
3674 gcc_unreachable ();
3677 /* The next statement within the same where-body-construct. */
3678 cnext = cnext->next;
3680 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3681 cblock = cblock->block;
3682 if (mask == NULL_TREE)
3684 /* If we're the initial WHERE, we can simply invert the sense
3685 of the current mask to obtain the "mask" for the remaining
3686 ELSEWHEREs. */
3687 invert = true;
3688 mask = cmask;
3690 else
3692 /* Otherwise, for nested WHEREs we need to use the pending mask. */
3693 invert = false;
3694 mask = pmask;
3698 /* If we allocated a pending mask array, deallocate it now. */
3699 if (ppmask)
3701 tmp = gfc_call_free (ppmask);
3702 gfc_add_expr_to_block (block, tmp);
3705 /* If we allocated a current mask array, deallocate it now. */
3706 if (pcmask)
3708 tmp = gfc_call_free (pcmask);
3709 gfc_add_expr_to_block (block, tmp);
3713 /* Translate a simple WHERE construct or statement without dependencies.
3714 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR1
3715 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3716 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
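/* For example (illustration only), a dependence-free construct

       where (c)
         a = b
       elsewhere
         a = d
       end where

   is translated here into a single scalarized loop whose body is roughly
   a(i) = c(i) ? b(i) : d(i), with no mask temporary at all.  */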
3718 static tree
3719 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3721 stmtblock_t block, body;
3722 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3723 tree tmp, cexpr, tstmt, estmt;
3724 gfc_ss *css, *tdss, *tsss;
3725 gfc_se cse, tdse, tsse, edse, esse;
3726 gfc_loopinfo loop;
3727 gfc_ss *edss = 0;
3728 gfc_ss *esss = 0;
3730 /* Allow the scalarizer to workshare simple where loops. */
3731 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3732 ompws_flags |= OMPWS_SCALARIZER_WS;
3734 cond = cblock->expr1;
3735 tdst = cblock->next->expr1;
3736 tsrc = cblock->next->expr2;
3737 edst = eblock ? eblock->next->expr1 : NULL;
3738 esrc = eblock ? eblock->next->expr2 : NULL;
3740 gfc_start_block (&block);
3741 gfc_init_loopinfo (&loop);
3743 /* Handle the condition. */
3744 gfc_init_se (&cse, NULL);
3745 css = gfc_walk_expr (cond);
3746 gfc_add_ss_to_loop (&loop, css);
3748 /* Handle the then-clause. */
3749 gfc_init_se (&tdse, NULL);
3750 gfc_init_se (&tsse, NULL);
3751 tdss = gfc_walk_expr (tdst);
3752 tsss = gfc_walk_expr (tsrc);
3753 if (tsss == gfc_ss_terminator)
3755 tsss = gfc_get_ss ();
3756 tsss->where = 1;
3757 tsss->next = gfc_ss_terminator;
3758 tsss->type = GFC_SS_SCALAR;
3759 tsss->expr = tsrc;
3761 gfc_add_ss_to_loop (&loop, tdss);
3762 gfc_add_ss_to_loop (&loop, tsss);
3764 if (eblock)
3766 /* Handle the else clause. */
3767 gfc_init_se (&edse, NULL);
3768 gfc_init_se (&esse, NULL);
3769 edss = gfc_walk_expr (edst);
3770 esss = gfc_walk_expr (esrc);
3771 if (esss == gfc_ss_terminator)
3773 esss = gfc_get_ss ();
3774 esss->where = 1;
3775 esss->next = gfc_ss_terminator;
3776 esss->type = GFC_SS_SCALAR;
3777 esss->expr = esrc;
3779 gfc_add_ss_to_loop (&loop, edss);
3780 gfc_add_ss_to_loop (&loop, esss);
3783 gfc_conv_ss_startstride (&loop);
3784 gfc_conv_loop_setup (&loop, &tdst->where);
3786 gfc_mark_ss_chain_used (css, 1);
3787 gfc_mark_ss_chain_used (tdss, 1);
3788 gfc_mark_ss_chain_used (tsss, 1);
3789 if (eblock)
3791 gfc_mark_ss_chain_used (edss, 1);
3792 gfc_mark_ss_chain_used (esss, 1);
3795 gfc_start_scalarized_body (&loop, &body);
3797 gfc_copy_loopinfo_to_se (&cse, &loop);
3798 gfc_copy_loopinfo_to_se (&tdse, &loop);
3799 gfc_copy_loopinfo_to_se (&tsse, &loop);
3800 cse.ss = css;
3801 tdse.ss = tdss;
3802 tsse.ss = tsss;
3803 if (eblock)
3805 gfc_copy_loopinfo_to_se (&edse, &loop);
3806 gfc_copy_loopinfo_to_se (&esse, &loop);
3807 edse.ss = edss;
3808 esse.ss = esss;
3811 gfc_conv_expr (&cse, cond);
3812 gfc_add_block_to_block (&body, &cse.pre);
3813 cexpr = cse.expr;
3815 gfc_conv_expr (&tsse, tsrc);
3816 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3818 gfc_conv_tmp_array_ref (&tdse);
3819 gfc_advance_se_ss_chain (&tdse);
3821 else
3822 gfc_conv_expr (&tdse, tdst);
3824 if (eblock)
3826 gfc_conv_expr (&esse, esrc);
3827 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3829 gfc_conv_tmp_array_ref (&edse);
3830 gfc_advance_se_ss_chain (&edse);
3832 else
3833 gfc_conv_expr (&edse, edst);
3836 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3837 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3838 : build_empty_stmt (input_location);
3839 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3840 gfc_add_expr_to_block (&body, tmp);
3841 gfc_add_block_to_block (&body, &cse.post);
3843 gfc_trans_scalarizing_loops (&loop, &body);
3844 gfc_add_block_to_block (&block, &loop.pre);
3845 gfc_add_block_to_block (&block, &loop.post);
3846 gfc_cleanup_loop (&loop);
3848 return gfc_finish_block (&block);
3851 /* As the WHERE or WHERE construct statement can be nested, we call
3852 gfc_trans_where_2 to do the translation, and pass the initial
3853 NULL values for both the control mask and the pending control mask. */
3855 tree
3856 gfc_trans_where (gfc_code * code)
3858 stmtblock_t block;
3859 gfc_code *cblock;
3860 gfc_code *eblock;
3862 cblock = code->block;
3863 if (cblock->next
3864 && cblock->next->op == EXEC_ASSIGN
3865 && !cblock->next->next)
3867 eblock = cblock->block;
3868 if (!eblock)
3870 /* A simple "WHERE (cond) x = y" statement or block is
3871 dependence free if cond is not dependent upon writing x,
3872 and the source y is unaffected by the destination x. */
3873 if (!gfc_check_dependency (cblock->next->expr1,
3874 cblock->expr1, 0)
3875 && !gfc_check_dependency (cblock->next->expr1,
3876 cblock->next->expr2, 0))
3877 return gfc_trans_where_3 (cblock, NULL);
3879 else if (!eblock->expr1
3880 && !eblock->block
3881 && eblock->next
3882 && eblock->next->op == EXEC_ASSIGN
3883 && !eblock->next->next)
3885 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3886 block is dependence free if cond is not dependent on writes
3887 to x1 and x2, y1 is not dependent on writes to x2, and y2
3888 is not dependent on writes to x1, and both y's are not
3889 dependent upon their own x's. In addition to this, the
3890 final two dependency checks below exclude all but the same
3891 array reference if the where and elsewhere destinations
3892 are the same. In short, this is VERY conservative and this
3893 is needed because the two loops required by the standard
3894 are coalesced in gfc_trans_where_3. */
3895 if (!gfc_check_dependency(cblock->next->expr1,
3896 cblock->expr1, 0)
3897 && !gfc_check_dependency(eblock->next->expr1,
3898 cblock->expr1, 0)
3899 && !gfc_check_dependency(cblock->next->expr1,
3900 eblock->next->expr2, 1)
3901 && !gfc_check_dependency(eblock->next->expr1,
3902 cblock->next->expr2, 1)
3903 && !gfc_check_dependency(cblock->next->expr1,
3904 cblock->next->expr2, 1)
3905 && !gfc_check_dependency(eblock->next->expr1,
3906 eblock->next->expr2, 1)
3907 && !gfc_check_dependency(cblock->next->expr1,
3908 eblock->next->expr1, 0)
3909 && !gfc_check_dependency(eblock->next->expr1,
3910 cblock->next->expr1, 0))
3911 return gfc_trans_where_3 (cblock, eblock);
3915 gfc_start_block (&block);
3917 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3919 return gfc_finish_block (&block);
3923 /* CYCLE a DO loop. The label decl has already been created by
3924 gfc_trans_do(); it's in TREE_PURPOSE (backend_decl) of the gfc_code
3925 node at the head of the loop. We must mark the label as used. */
3927 tree
3928 gfc_trans_cycle (gfc_code * code)
3930 tree cycle_label;
3932 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3933 TREE_USED (cycle_label) = 1;
3934 return build1_v (GOTO_EXPR, cycle_label);
3938 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3939 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3940 loop. */
3942 tree
3943 gfc_trans_exit (gfc_code * code)
3945 tree exit_label;
3947 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3948 TREE_USED (exit_label) = 1;
3949 return build1_v (GOTO_EXPR, exit_label);
3953 /* Translate the ALLOCATE statement. */
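/* When STAT= or ERRMSG= is present, each individual allocation stores its
   status in a local STAT variable and branches to a shared error label on
   failure; the user's STAT and ERRMSG variables are then filled in after
   the loop over the allocation list below.  */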
3955 tree
3956 gfc_trans_allocate (gfc_code * code)
3958 gfc_alloc *al;
3959 gfc_expr *expr;
3960 gfc_se se;
3961 tree tmp;
3962 tree parm;
3963 tree stat;
3964 tree pstat;
3965 tree error_label;
3966 stmtblock_t block;
3968 if (!code->ext.alloc_list)
3969 return NULL_TREE;
3971 pstat = stat = error_label = tmp = NULL_TREE;
3973 gfc_start_block (&block);
3975 /* STAT= and/or ERRMSG= is present. */
3976 if (code->expr1 || code->expr2)
3978 tree gfc_int4_type_node = gfc_get_int_type (4);
3980 stat = gfc_create_var (gfc_int4_type_node, "stat");
3981 pstat = gfc_build_addr_expr (NULL_TREE, stat);
3983 error_label = gfc_build_label_decl (NULL_TREE);
3984 TREE_USED (error_label) = 1;
3987 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3989 expr = al->expr;
3991 gfc_init_se (&se, NULL);
3992 gfc_start_block (&se.pre);
3994 se.want_pointer = 1;
3995 se.descriptor_only = 1;
3996 gfc_conv_expr (&se, expr);
3998 if (!gfc_array_allocate (&se, expr, pstat))
4000 /* A scalar or derived type. */
4001 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4003 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
4004 tmp = se.string_length;
4006 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
4007 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4008 fold_convert (TREE_TYPE (se.expr), tmp));
4009 gfc_add_expr_to_block (&se.pre, tmp);
4011 if (code->expr1 || code->expr2)
4013 tmp = build1_v (GOTO_EXPR, error_label);
4014 parm = fold_build2 (NE_EXPR, boolean_type_node,
4015 stat, build_int_cst (TREE_TYPE (stat), 0));
4016 tmp = fold_build3 (COND_EXPR, void_type_node,
4017 parm, tmp, build_empty_stmt (input_location));
4018 gfc_add_expr_to_block (&se.pre, tmp);
4021 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
4023 tmp = build_fold_indirect_ref (se.expr);
4024 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
4025 gfc_add_expr_to_block (&se.pre, tmp);
4030 tmp = gfc_finish_block (&se.pre);
4031 gfc_add_expr_to_block (&block, tmp);
4034 /* STAT block. */
4035 if (code->expr1)
4037 tmp = build1_v (LABEL_EXPR, error_label);
4038 gfc_add_expr_to_block (&block, tmp);
4040 gfc_init_se (&se, NULL);
4041 gfc_conv_expr_lhs (&se, code->expr1);
4042 tmp = convert (TREE_TYPE (se.expr), stat);
4043 gfc_add_modify (&block, se.expr, tmp);
4046 /* ERRMSG block. */
4047 if (code->expr2)
4049 /* A better error message may be possible, but not required. */
4050 const char *msg = "Attempt to allocate an allocated object";
4051 tree errmsg, slen, dlen;
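/* In effect this builds
       if (stat != 0)
         memcpy (errmsg_var, msg, min (strlen (msg), len (errmsg_var)));
   so the default message is truncated to the declared length of the
   ERRMSG= variable.  */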
4053 gfc_init_se (&se, NULL);
4054 gfc_conv_expr_lhs (&se, code->expr2);
4056 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4058 gfc_add_modify (&block, errmsg,
4059 gfc_build_addr_expr (pchar_type_node,
4060 gfc_build_localized_cstring_const (msg)));
4062 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4063 dlen = gfc_get_expr_charlen (code->expr2);
4064 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4066 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4067 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4069 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4070 build_int_cst (TREE_TYPE (stat), 0));
4072 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4074 gfc_add_expr_to_block (&block, tmp);
4077 return gfc_finish_block (&block);
4081 /* Translate a DEALLOCATE statement. */
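/* An illustrative sketch, not from the original source: for

       DEALLOCATE (a, b, STAT=istat, ERRMSG=emsg)

   the code below emits roughly

       astat = 0
       deallocate a, setting stat;  astat = astat + stat
       deallocate b, setting stat;  astat = astat + stat
       istat = astat
       if (astat != 0)
         copy "Attempt to deallocate an unallocated object" into emsg

   so STAT= receives a running total over the list rather than the
   status of any single deallocation.  */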
4083 tree
4084 gfc_trans_deallocate (gfc_code *code)
4086 gfc_se se;
4087 gfc_alloc *al;
4088 gfc_expr *expr;
4089 tree apstat, astat, pstat, stat, tmp;
4090 stmtblock_t block;
4092 pstat = apstat = stat = astat = tmp = NULL_TREE;
4094 gfc_start_block (&block);
4096 /* Count the number of failed deallocations. If deallocate() was
4097 called with STAT=, then set STAT to the count. If deallocate
4098 was called with ERRMSG=, then set ERRMSG to a string. */
4099 if (code->expr1 || code->expr2)
4101 tree gfc_int4_type_node = gfc_get_int_type (4);
4103 stat = gfc_create_var (gfc_int4_type_node, "stat");
4104 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4106 /* Running total of possible deallocation failures. */
4107 astat = gfc_create_var (gfc_int4_type_node, "astat");
4108 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4110 /* Initialize astat to 0. */
4111 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4114 for (al = code->ext.alloc_list; al != NULL; al = al->next)
4116 expr = al->expr;
4117 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4119 gfc_init_se (&se, NULL);
4120 gfc_start_block (&se.pre);
4122 se.want_pointer = 1;
4123 se.descriptor_only = 1;
4124 gfc_conv_expr (&se, expr);
4126 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
4128 gfc_ref *ref;
4129 gfc_ref *last = NULL;
4130 for (ref = expr->ref; ref; ref = ref->next)
4131 if (ref->type == REF_COMPONENT)
4132 last = ref;
4134 /* Do not deallocate the components of a derived type
4135 ultimate pointer component. */
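/* Illustrative restatement: the test below skips the component
   deallocation when the object is itself a pointer, e.g. a bare
   DEALLOCATE (p) with TYPE(t), POINTER :: p, or when it is reached
   through a final pointer component such as x%p.  */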
4136 if (!(last && last->u.c.component->attr.pointer)
4137 && !(!last && expr->symtree->n.sym->attr.pointer))
4139 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
4140 expr->rank);
4141 gfc_add_expr_to_block (&se.pre, tmp);
4145 if (expr->rank)
4146 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4147 else
4149 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4150 gfc_add_expr_to_block (&se.pre, tmp);
4152 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4153 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4156 gfc_add_expr_to_block (&se.pre, tmp);
4158 /* Keep track of the number of failed deallocations by adding stat
4159 of the last deallocation to the running total. */
4160 if (code->expr1 || code->expr2)
4162 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4163 gfc_add_modify (&se.pre, astat, apstat);
4166 tmp = gfc_finish_block (&se.pre);
4167 gfc_add_expr_to_block (&block, tmp);
4171 /* Set STAT. */
4172 if (code->expr1)
4174 gfc_init_se (&se, NULL);
4175 gfc_conv_expr_lhs (&se, code->expr1);
4176 tmp = convert (TREE_TYPE (se.expr), astat);
4177 gfc_add_modify (&block, se.expr, tmp);
4180 /* Set ERRMSG. */
4181 if (code->expr2)
4183 /* A better error message may be possible, but not required. */
4184 const char *msg = "Attempt to deallocate an unallocated object";
4185 tree errmsg, slen, dlen;
4187 gfc_init_se (&se, NULL);
4188 gfc_conv_expr_lhs (&se, code->expr2);
4190 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4192 gfc_add_modify (&block, errmsg,
4193 gfc_build_addr_expr (pchar_type_node,
4194 gfc_build_localized_cstring_const (msg)));
4196 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4197 dlen = gfc_get_expr_charlen (code->expr2);
4198 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4200 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4201 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4203 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4204 build_int_cst (TREE_TYPE (astat), 0));
4206 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4208 gfc_add_expr_to_block (&block, tmp);
4211 return gfc_finish_block (&block);