gcc/fortran/trans-stmt.c
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
3 Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "tree.h"
29 #include "tree-gimple.h"
30 #include "ggc.h"
31 #include "toplev.h"
32 #include "real.h"
33 #include "gfortran.h"
34 #include "flags.h"
35 #include "trans.h"
36 #include "trans-stmt.h"
37 #include "trans-types.h"
38 #include "trans-array.h"
39 #include "trans-const.h"
40 #include "arith.h"
41 #include "dependency.h"
43 typedef struct iter_info
45 tree var;
46 tree start;
47 tree end;
48 tree step;
49 struct iter_info *next;
51 iter_info;
53 typedef struct forall_info
55 iter_info *this_loop;
56 tree mask;
57 tree pmask;
58 tree maskindex;
59 int nvar;
60 tree size;
61 struct forall_info *outer;
62 struct forall_info *next_nest;
64 forall_info;
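/* Illustrative sketch (an assumption, not part of the original source): for a
   nested construct such as

       FORALL (i = 1:n)
         FORALL (j = 1:m)
           a(i, j) = b(j, i)
         END FORALL
       END FORALL

   one forall_info record is built per FORALL level; the records are linked
   through 'outer' (towards the enclosing level) and 'next_nest' (towards the
   inner level), each carrying its own iter_info chain plus the optional mask
   and maskindex.  */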
66 static void gfc_trans_where_2 (gfc_code *, tree, bool,
67 forall_info *, stmtblock_t *);
69 /* Translate a F95 label number to a LABEL_EXPR. */
71 tree
72 gfc_trans_label_here (gfc_code * code)
74 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
78 /* Given a variable expression which has been ASSIGNed to, find the decl
79 containing the auxiliary variables. For variables in common blocks this
80 is a field_decl. */
82 void
83 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
85 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
86 gfc_conv_expr (se, expr);
87 /* Deals with variable in common block. Get the field declaration. */
88 if (TREE_CODE (se->expr) == COMPONENT_REF)
89 se->expr = TREE_OPERAND (se->expr, 1);
90 /* Deals with dummy argument. Get the parameter declaration. */
91 else if (TREE_CODE (se->expr) == INDIRECT_REF)
92 se->expr = TREE_OPERAND (se->expr, 0);
95 /* Translate a label assignment statement. */
97 tree
98 gfc_trans_label_assign (gfc_code * code)
100 tree label_tree;
101 gfc_se se;
102 tree len;
103 tree addr;
104 tree len_tree;
105 char *label_str;
106 int label_len;
108 /* Start a new block. */
109 gfc_init_se (&se, NULL);
110 gfc_start_block (&se.pre);
111 gfc_conv_label_variable (&se, code->expr);
113 len = GFC_DECL_STRING_LEN (se.expr);
114 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
116 label_tree = gfc_get_label_decl (code->label);
118 if (code->label->defined == ST_LABEL_TARGET)
120 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
121 len_tree = integer_minus_one_node;
123 else
125 label_str = code->label->format->value.character.string;
126 label_len = code->label->format->value.character.length;
127 len_tree = build_int_cst (NULL_TREE, label_len);
128 label_tree = gfc_build_string_const (label_len + 1, label_str);
129 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
132 gfc_add_modify_expr (&se.pre, len, len_tree);
133 gfc_add_modify_expr (&se.pre, addr, label_tree);
135 return gfc_finish_block (&se.pre);
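/* For example (illustrative): for  ASSIGN 100 TO ilabel  the code above stores
   -1 in ilabel's auxiliary length variable and the address of label 100 in its
   address variable when 100 is a branch target; when 100 labels a FORMAT, the
   format string and its length are stored instead, so a later assigned GOTO or
   I/O statement can tell the two cases apart.  */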
138 /* Translate a GOTO statement. */
140 tree
141 gfc_trans_goto (gfc_code * code)
143 locus loc = code->loc;
144 tree assigned_goto;
145 tree target;
146 tree tmp;
147 gfc_se se;
149 if (code->label != NULL)
150 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
152 /* ASSIGNED GOTO. */
153 gfc_init_se (&se, NULL);
154 gfc_start_block (&se.pre);
155 gfc_conv_label_variable (&se, code->expr);
156 tmp = GFC_DECL_STRING_LEN (se.expr);
157 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
158 build_int_cst (TREE_TYPE (tmp), -1));
159 gfc_trans_runtime_check (tmp, "Assigned label is not a target label",
160 &se.pre, &loc);
162 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
164 code = code->block;
165 if (code == NULL)
167 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
172 /* Check the label list. */
175 target = gfc_get_label_decl (code->label);
176 tmp = gfc_build_addr_expr (pvoid_type_node, target);
177 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
178 tmp = build3_v (COND_EXPR, tmp,
179 build1 (GOTO_EXPR, void_type_node, target),
180 build_empty_stmt ());
181 gfc_add_expr_to_block (&se.pre, tmp);
182 code = code->block;
184 while (code != NULL);
185 gfc_trans_runtime_check (boolean_true_node,
186 "Assigned label is not in the list", &se.pre, &loc);
188 return gfc_finish_block (&se.pre);
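/* For example (illustrative): for  GOTO ilabel (10, 20, 30)  the loop above
   emits a runtime check that ilabel currently holds a target label, then one
   address comparison and conditional jump per label in the list, and finally a
   runtime error if the stored address matched none of them.  */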
192 /* Translate an ENTRY statement. Just adds a label for this entry point. */
193 tree
194 gfc_trans_entry (gfc_code * code)
196 return build1_v (LABEL_EXPR, code->ext.entry->label);
200 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
201 elemental subroutines. Make temporaries for output arguments if any such
202 dependencies are found. Output arguments are chosen because internal_unpack
203 can be used, as is, to copy the result back to the variable. */
204 static void
205 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
206 gfc_symbol * sym, gfc_actual_arglist * arg)
208 gfc_actual_arglist *arg0;
209 gfc_expr *e;
210 gfc_formal_arglist *formal;
211 gfc_loopinfo tmp_loop;
212 gfc_se parmse;
213 gfc_ss *ss;
214 gfc_ss_info *info;
215 gfc_symbol *fsym;
216 int n;
217 stmtblock_t block;
218 tree data;
219 tree offset;
220 tree size;
221 tree tmp;
223 if (loopse->ss == NULL)
224 return;
226 ss = loopse->ss;
227 arg0 = arg;
228 formal = sym->formal;
230 /* Loop over all the arguments testing for dependencies. */
231 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
233 e = arg->expr;
234 if (e == NULL)
235 continue;
237 /* Obtain the info structure for the current argument. */
238 info = NULL;
239 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
241 if (ss->expr != e)
242 continue;
243 info = &ss->data.info;
244 break;
247 /* If there is a dependency, create a temporary and use it
248 instead of the variable. */
249 fsym = formal ? formal->sym : NULL;
250 if (e->expr_type == EXPR_VARIABLE
251 && e->rank && fsym
252 && fsym->attr.intent == INTENT_OUT
253 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
255 /* Make a local loopinfo for the temporary creation, so that
256 none of the other ss->info's have to be renormalized. */
257 gfc_init_loopinfo (&tmp_loop);
258 for (n = 0; n < info->dimen; n++)
260 tmp_loop.to[n] = loopse->loop->to[n];
261 tmp_loop.from[n] = loopse->loop->from[n];
262 tmp_loop.order[n] = loopse->loop->order[n];
265 /* Generate the temporary. Merge the block so that the
266 declarations are put at the right binding level. */
267 size = gfc_create_var (gfc_array_index_type, NULL);
268 data = gfc_create_var (pvoid_type_node, NULL);
269 gfc_start_block (&block);
270 tmp = gfc_typenode_for_spec (&e->ts);
271 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
272 &tmp_loop, info, tmp,
273 false, true, false, false);
274 gfc_add_modify_expr (&se->pre, size, tmp);
275 tmp = fold_convert (pvoid_type_node, info->data);
276 gfc_add_modify_expr (&se->pre, data, tmp);
277 gfc_merge_block_scope (&block);
279 /* Obtain the argument descriptor for unpacking. */
280 gfc_init_se (&parmse, NULL);
281 parmse.want_pointer = 1;
282 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
283 gfc_add_block_to_block (&se->pre, &parmse.pre);
285 /* Calculate the offset for the temporary. */
286 offset = gfc_index_zero_node;
287 for (n = 0; n < info->dimen; n++)
289 tmp = gfc_conv_descriptor_stride (info->descriptor,
290 gfc_rank_cst[n]);
291 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
292 loopse->loop->from[n], tmp);
293 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
294 offset, tmp);
296 info->offset = gfc_create_var (gfc_array_index_type, NULL);
297 gfc_add_modify_expr (&se->pre, info->offset, offset);
299 /* Copy the result back using unpack. */
300 tmp = gfc_chainon_list (NULL_TREE, parmse.expr);
301 tmp = gfc_chainon_list (tmp, data);
302 tmp = build_function_call_expr (gfor_fndecl_in_unpack, tmp);
303 gfc_add_expr_to_block (&se->post, tmp);
305 gfc_add_block_to_block (&se->post, &parmse.post);
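/* Illustrative example (an assumption, not from the original source): given an
   elemental subroutine with formal arguments  out  (INTENT(OUT)) and  in
   (INTENT(IN)), the call

       CALL esub (a(2:4), a(1:3))

   overlaps its INTENT(OUT) and INTENT(IN) actuals, so the code above computes
   the result into a freshly allocated temporary and unpacks it back into
   a(2:4) only after the scalarized loop has finished.  */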
311 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
313 tree
314 gfc_trans_call (gfc_code * code, bool dependency_check)
316 gfc_se se;
317 gfc_ss * ss;
318 int has_alternate_specifier;
320 /* A CALL starts a new block because the actual arguments may have to
321 be evaluated first. */
322 gfc_init_se (&se, NULL);
323 gfc_start_block (&se.pre);
325 gcc_assert (code->resolved_sym);
327 ss = gfc_ss_terminator;
328 if (code->resolved_sym->attr.elemental)
329 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
331   /* This is not an elemental subroutine call with array valued arguments.  */
332 if (ss == gfc_ss_terminator)
335 /* Translate the call. */
336 has_alternate_specifier
337 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
338 NULL_TREE);
340 /* A subroutine without side-effect, by definition, does nothing! */
341 TREE_SIDE_EFFECTS (se.expr) = 1;
343 /* Chain the pieces together and return the block. */
344 if (has_alternate_specifier)
346 gfc_code *select_code;
347 gfc_symbol *sym;
348 select_code = code->next;
349 gcc_assert(select_code->op == EXEC_SELECT);
350 sym = select_code->expr->symtree->n.sym;
351 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
352 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
354 else
355 gfc_add_expr_to_block (&se.pre, se.expr);
357 gfc_add_block_to_block (&se.pre, &se.post);
360 else
362 /* An elemental subroutine call with array valued arguments has
363 to be scalarized. */
364 gfc_loopinfo loop;
365 stmtblock_t body;
366 stmtblock_t block;
367 gfc_se loopse;
369 /* gfc_walk_elemental_function_args renders the ss chain in the
370 reverse order to the actual argument order. */
371 ss = gfc_reverse_ss (ss);
373 /* Initialize the loop. */
374 gfc_init_se (&loopse, NULL);
375 gfc_init_loopinfo (&loop);
376 gfc_add_ss_to_loop (&loop, ss);
378 gfc_conv_ss_startstride (&loop);
379 gfc_conv_loop_setup (&loop);
380 gfc_mark_ss_chain_used (ss, 1);
382 /* Convert the arguments, checking for dependencies. */
383 gfc_copy_loopinfo_to_se (&loopse, &loop);
384 loopse.ss = ss;
386 /* For operator assignment, we need to do dependency checking.
387 We also check the intent of the parameters. */
388 if (dependency_check)
390 gfc_symbol *sym;
391 sym = code->resolved_sym;
392 gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
393 gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
394 gfc_conv_elemental_dependencies (&se, &loopse, sym,
395 code->ext.actual);
398 /* Generate the loop body. */
399 gfc_start_scalarized_body (&loop, &body);
400 gfc_init_block (&block);
402 /* Add the subroutine call to the block. */
403 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
404 NULL_TREE);
405 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
407 gfc_add_block_to_block (&block, &loopse.pre);
408 gfc_add_block_to_block (&block, &loopse.post);
410 /* Finish up the loop block and the loop. */
411 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
412 gfc_trans_scalarizing_loops (&loop, &body);
413 gfc_add_block_to_block (&se.pre, &loop.pre);
414 gfc_add_block_to_block (&se.pre, &loop.post);
415 gfc_add_block_to_block (&se.pre, &se.post);
416 gfc_cleanup_loop (&loop);
419 return gfc_finish_block (&se.pre);
423 /* Translate the RETURN statement. */
425 tree
426 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
428 if (code->expr)
430 gfc_se se;
431 tree tmp;
432 tree result;
434 /* If code->expr is not NULL, this return statement must appear
435 in a subroutine and current_fake_result_decl has already
436 been generated. */
438 result = gfc_get_fake_result_decl (NULL, 0);
439 if (!result)
441 gfc_warning ("An alternate return at %L without a * dummy argument",
442 &code->expr->where);
443 return build1_v (GOTO_EXPR, gfc_get_return_label ());
446 /* Start a new block for this statement. */
447 gfc_init_se (&se, NULL);
448 gfc_start_block (&se.pre);
450 gfc_conv_expr (&se, code->expr);
452 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result, se.expr);
453 gfc_add_expr_to_block (&se.pre, tmp);
455 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
456 gfc_add_expr_to_block (&se.pre, tmp);
457 gfc_add_block_to_block (&se.pre, &se.post);
458 return gfc_finish_block (&se.pre);
460 else
461 return build1_v (GOTO_EXPR, gfc_get_return_label ());
465 /* Translate the PAUSE statement. We have to translate this statement
466 to a runtime library call. */
468 tree
469 gfc_trans_pause (gfc_code * code)
471 tree gfc_int4_type_node = gfc_get_int_type (4);
472 gfc_se se;
473 tree args;
474 tree tmp;
475 tree fndecl;
477 /* Start a new block for this statement. */
478 gfc_init_se (&se, NULL);
479 gfc_start_block (&se.pre);
482 if (code->expr == NULL)
484 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
485 args = gfc_chainon_list (NULL_TREE, tmp);
486 fndecl = gfor_fndecl_pause_numeric;
488 else
490 gfc_conv_expr_reference (&se, code->expr);
491 args = gfc_chainon_list (NULL_TREE, se.expr);
492 args = gfc_chainon_list (args, se.string_length);
493 fndecl = gfor_fndecl_pause_string;
496 tmp = build_function_call_expr (fndecl, args);
497 gfc_add_expr_to_block (&se.pre, tmp);
499 gfc_add_block_to_block (&se.pre, &se.post);
501 return gfc_finish_block (&se.pre);
505 /* Translate the STOP statement. We have to translate this statement
506 to a runtime library call. */
508 tree
509 gfc_trans_stop (gfc_code * code)
511 tree gfc_int4_type_node = gfc_get_int_type (4);
512 gfc_se se;
513 tree args;
514 tree tmp;
515 tree fndecl;
517 /* Start a new block for this statement. */
518 gfc_init_se (&se, NULL);
519 gfc_start_block (&se.pre);
522 if (code->expr == NULL)
524 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
525 args = gfc_chainon_list (NULL_TREE, tmp);
526 fndecl = gfor_fndecl_stop_numeric;
528 else
530 gfc_conv_expr_reference (&se, code->expr);
531 args = gfc_chainon_list (NULL_TREE, se.expr);
532 args = gfc_chainon_list (args, se.string_length);
533 fndecl = gfor_fndecl_stop_string;
536 tmp = build_function_call_expr (fndecl, args);
537 gfc_add_expr_to_block (&se.pre, tmp);
539 gfc_add_block_to_block (&se.pre, &se.post);
541 return gfc_finish_block (&se.pre);
545 /* Generate GENERIC for the IF construct. This function also deals with
546 the simple IF statement, because the front end translates the IF
547 statement into an IF construct.
549 We translate:
551 IF (cond) THEN
552 then_clause
553 ELSEIF (cond2)
554 elseif_clause
555 ELSE
556 else_clause
557 ENDIF
559 into:
561 pre_cond_s;
562 if (cond_s)
564 then_clause;
566 else
568 pre_cond_s
569 if (cond_s)
571 elseif_clause
573 else
575 else_clause;
579 where COND_S is the simplified version of the predicate. PRE_COND_S
580 are the pre side-effects produced by the translation of the
581 conditional.
582 We need to build the chain recursively otherwise we run into
583 problems with folding incomplete statements. */
585 static tree
586 gfc_trans_if_1 (gfc_code * code)
588 gfc_se if_se;
589 tree stmt, elsestmt;
591 /* Check for an unconditional ELSE clause. */
592 if (!code->expr)
593 return gfc_trans_code (code->next);
595 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
596 gfc_init_se (&if_se, NULL);
597 gfc_start_block (&if_se.pre);
599 /* Calculate the IF condition expression. */
600 gfc_conv_expr_val (&if_se, code->expr);
602 /* Translate the THEN clause. */
603 stmt = gfc_trans_code (code->next);
605 /* Translate the ELSE clause. */
606 if (code->block)
607 elsestmt = gfc_trans_if_1 (code->block);
608 else
609 elsestmt = build_empty_stmt ();
611 /* Build the condition expression and add it to the condition block. */
612 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
614 gfc_add_expr_to_block (&if_se.pre, stmt);
616 /* Finish off this statement. */
617 return gfc_finish_block (&if_se.pre);
620 tree
621 gfc_trans_if (gfc_code * code)
623 /* Ignore the top EXEC_IF, it only announces an IF construct. The
624 actual code we must translate is in code->block. */
626 return gfc_trans_if_1 (code->block);
630 /* Translate an arithmetic IF expression.
632 IF (cond) label1, label2, label3 translates to
634 if (cond <= 0)
636 if (cond < 0)
637 goto label1;
638 else // cond == 0
639 goto label2;
641 else // cond > 0
642 goto label3;
644 An optimized version can be generated in case of equal labels.
645 E.g., if label1 is equal to label2, we can translate it to
647 if (cond <= 0)
648 goto label1;
649 else
650 goto label3;  */
653 tree
654 gfc_trans_arithmetic_if (gfc_code * code)
656 gfc_se se;
657 tree tmp;
658 tree branch1;
659 tree branch2;
660 tree zero;
662 /* Start a new block. */
663 gfc_init_se (&se, NULL);
664 gfc_start_block (&se.pre);
666 /* Pre-evaluate COND. */
667 gfc_conv_expr_val (&se, code->expr);
668 se.expr = gfc_evaluate_now (se.expr, &se.pre);
670 /* Build something to compare with. */
671 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
673 if (code->label->value != code->label2->value)
675 /* If (cond < 0) take branch1 else take branch2.
676 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
677 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
678 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
680 if (code->label->value != code->label3->value)
681 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
682 else
683 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
685 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
687 else
688 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
690 if (code->label->value != code->label3->value
691 && code->label2->value != code->label3->value)
693 /* if (cond <= 0) take branch1 else take branch2. */
694 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
695 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
696 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
699 /* Append the COND_EXPR to the evaluation of COND, and return. */
700 gfc_add_expr_to_block (&se.pre, branch1);
701 return gfc_finish_block (&se.pre);
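/* Worked example (illustrative): for  IF (x) 10, 20, 10  label1 equals label3,
   so the code above emits the NE_EXPR form

       if (x != 0) goto 10; else goto 20;

   while  IF (x) 10, 10, 30  takes the label1 == label2 branch and becomes
   "if (x <= 0) goto 10; else goto 30;".  */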
705 /* Translate the simple DO construct. This is where the loop variable has
706 integer type and step +-1. We can't use this in the general case
707 because integer overflow and floating point errors could give incorrect
708 results.
709 We translate a do loop from:
711 DO dovar = from, to, step
712 body
713 END DO
717 [Evaluate loop bounds and step]
718 dovar = from;
719 if ((step > 0) ? (dovar <= to) : (dovar >= to))
721 for (;;)
723 body;
724 cycle_label:
725 cond = (dovar == to);
726 dovar += step;
727 if (cond) goto end_label;
730 end_label:
732 This helps the optimizers by avoiding the extra induction variable
733 used in the general case. */
735 static tree
736 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
737 tree from, tree to, tree step)
739 stmtblock_t body;
740 tree type;
741 tree cond;
742 tree tmp;
743 tree cycle_label;
744 tree exit_label;
746 type = TREE_TYPE (dovar);
748 /* Initialize the DO variable: dovar = from. */
749 gfc_add_modify_expr (pblock, dovar, from);
751 /* Cycle and exit statements are implemented with gotos. */
752 cycle_label = gfc_build_label_decl (NULL_TREE);
753 exit_label = gfc_build_label_decl (NULL_TREE);
755 /* Put the labels where they can be found later. See gfc_trans_do(). */
756 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
758 /* Loop body. */
759 gfc_start_block (&body);
761 /* Main loop body. */
762 tmp = gfc_trans_code (code->block->next);
763 gfc_add_expr_to_block (&body, tmp);
765 /* Label for cycle statements (if needed). */
766 if (TREE_USED (cycle_label))
768 tmp = build1_v (LABEL_EXPR, cycle_label);
769 gfc_add_expr_to_block (&body, tmp);
772 /* Evaluate the loop condition. */
773 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
774 cond = gfc_evaluate_now (cond, &body);
776 /* Increment the loop variable. */
777 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
778 gfc_add_modify_expr (&body, dovar, tmp);
780 /* The loop exit. */
781 tmp = build1_v (GOTO_EXPR, exit_label);
782 TREE_USED (exit_label) = 1;
783 tmp = fold_build3 (COND_EXPR, void_type_node,
784 cond, tmp, build_empty_stmt ());
785 gfc_add_expr_to_block (&body, tmp);
787 /* Finish the loop body. */
788 tmp = gfc_finish_block (&body);
789 tmp = build1_v (LOOP_EXPR, tmp);
791 /* Only execute the loop if the number of iterations is positive. */
792 if (tree_int_cst_sgn (step) > 0)
793 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
794 else
795 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
796 tmp = fold_build3 (COND_EXPR, void_type_node,
797 cond, tmp, build_empty_stmt ());
798 gfc_add_expr_to_block (pblock, tmp);
800 /* Add the exit label. */
801 tmp = build1_v (LABEL_EXPR, exit_label);
802 gfc_add_expr_to_block (pblock, tmp);
804 return gfc_finish_block (pblock);
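/* Worked example (illustrative): with the scheme above,  DO i = 1, 3  becomes
   roughly

       i = 1;
       if (i <= 3)
         for (;;) { body; cond = (i == 3); i = i + 1; if (cond) goto end; }
       end:;

   so the body runs for i = 1, 2, 3, the loop variable is left at 4, and no
   separate iteration counter is needed.  */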
807 /* Translate the DO construct. This obviously is one of the most
808 important ones to get right with any compiler, but especially
809 so for Fortran.
811 We special case some loop forms as described in gfc_trans_simple_do.
812 For other cases we implement them with a separate loop count,
813 as described in the standard.
815 We translate a do loop from:
817 DO dovar = from, to, step
818 body
819 END DO
823 [evaluate loop bounds and step]
824 count = (to + step - from) / step;
825 dovar = from;
826 for (;;)
828 body;
829 cycle_label:
830 dovar += step
831 count--;
832 if (count <=0) goto exit_label;
834 exit_label:
836 TODO: Large loop counts
837 The code above assumes the loop count fits into a signed integer kind;
838 i.e. it does not work for loop counts > 2^31 for integer(kind=4) variables.
839 We must support the full range. */
841 tree
842 gfc_trans_do (gfc_code * code)
844 gfc_se se;
845 tree dovar;
846 tree from;
847 tree to;
848 tree step;
849 tree count;
850 tree count_one;
851 tree type;
852 tree cond;
853 tree cycle_label;
854 tree exit_label;
855 tree tmp;
856 stmtblock_t block;
857 stmtblock_t body;
859 gfc_start_block (&block);
861 /* Evaluate all the expressions in the iterator. */
862 gfc_init_se (&se, NULL);
863 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
864 gfc_add_block_to_block (&block, &se.pre);
865 dovar = se.expr;
866 type = TREE_TYPE (dovar);
868 gfc_init_se (&se, NULL);
869 gfc_conv_expr_val (&se, code->ext.iterator->start);
870 gfc_add_block_to_block (&block, &se.pre);
871 from = gfc_evaluate_now (se.expr, &block);
873 gfc_init_se (&se, NULL);
874 gfc_conv_expr_val (&se, code->ext.iterator->end);
875 gfc_add_block_to_block (&block, &se.pre);
876 to = gfc_evaluate_now (se.expr, &block);
878 gfc_init_se (&se, NULL);
879 gfc_conv_expr_val (&se, code->ext.iterator->step);
880 gfc_add_block_to_block (&block, &se.pre);
881 step = gfc_evaluate_now (se.expr, &block);
883 /* Special case simple loops. */
884 if (TREE_CODE (type) == INTEGER_TYPE
885 && (integer_onep (step)
886 || tree_int_cst_equal (step, integer_minus_one_node)))
887 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
889 /* Initialize loop count. This code is executed before we enter the
890 loop body. We generate: count = (to + step - from) / step. */
892 tmp = fold_build2 (MINUS_EXPR, type, step, from);
893 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
894 if (TREE_CODE (type) == INTEGER_TYPE)
896 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
897 count = gfc_create_var (type, "count");
899 else
901 /* TODO: We could use the same width as the real type.
902 This would probably cause more problems than it solves
903 when we implement "long double" types. */
904 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
905 tmp = fold_build1 (FIX_TRUNC_EXPR, gfc_array_index_type, tmp);
906 count = gfc_create_var (gfc_array_index_type, "count");
908 gfc_add_modify_expr (&block, count, tmp);
910 count_one = build_int_cst (TREE_TYPE (count), 1);
912 /* Initialize the DO variable: dovar = from. */
913 gfc_add_modify_expr (&block, dovar, from);
915 /* Loop body. */
916 gfc_start_block (&body);
918 /* Cycle and exit statements are implemented with gotos. */
919 cycle_label = gfc_build_label_decl (NULL_TREE);
920 exit_label = gfc_build_label_decl (NULL_TREE);
922 /* Start with the loop condition. Loop until count <= 0. */
923 cond = fold_build2 (LE_EXPR, boolean_type_node, count,
924 build_int_cst (TREE_TYPE (count), 0));
925 tmp = build1_v (GOTO_EXPR, exit_label);
926 TREE_USED (exit_label) = 1;
927 tmp = fold_build3 (COND_EXPR, void_type_node,
928 cond, tmp, build_empty_stmt ());
929 gfc_add_expr_to_block (&body, tmp);
931 /* Put these labels where they can be found later. We put the
932 labels in a TREE_LIST node (because TREE_CHAIN is already
933 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
934 label in TREE_VALUE (backend_decl). */
936 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
938 /* Main loop body. */
939 tmp = gfc_trans_code (code->block->next);
940 gfc_add_expr_to_block (&body, tmp);
942 /* Label for cycle statements (if needed). */
943 if (TREE_USED (cycle_label))
945 tmp = build1_v (LABEL_EXPR, cycle_label);
946 gfc_add_expr_to_block (&body, tmp);
949 /* Increment the loop variable. */
950 tmp = build2 (PLUS_EXPR, type, dovar, step);
951 gfc_add_modify_expr (&body, dovar, tmp);
953 /* Decrement the loop count. */
954 tmp = build2 (MINUS_EXPR, TREE_TYPE (count), count, count_one);
955 gfc_add_modify_expr (&body, count, tmp);
957 /* End of loop body. */
958 tmp = gfc_finish_block (&body);
960 /* The for loop itself. */
961 tmp = build1_v (LOOP_EXPR, tmp);
962 gfc_add_expr_to_block (&block, tmp);
964 /* Add the exit label. */
965 tmp = build1_v (LABEL_EXPR, exit_label);
966 gfc_add_expr_to_block (&block, tmp);
968 return gfc_finish_block (&block);
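/* Worked example (illustrative): for  DO i = 1, 10, 3  the general scheme
   above computes count = (10 + 3 - 1) / 3 = 4, so the body executes for
   i = 1, 4, 7, 10 and the loop exits once count has been decremented to 0.  */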
972 /* Translate the DO WHILE construct.
974 We translate
976 DO WHILE (cond)
977 body
978 END DO
982 for ( ; ; )
984 pre_cond;
985 if (! cond) goto exit_label;
986 body;
987 cycle_label:
989 exit_label:
991 Because the evaluation of the exit condition `cond' may have side
992 effects, we can't do much for empty loop bodies. The backend optimizers
993 should be smart enough to eliminate any dead loops. */
995 tree
996 gfc_trans_do_while (gfc_code * code)
998 gfc_se cond;
999 tree tmp;
1000 tree cycle_label;
1001 tree exit_label;
1002 stmtblock_t block;
1004 /* Everything we build here is part of the loop body. */
1005 gfc_start_block (&block);
1007 /* Cycle and exit statements are implemented with gotos. */
1008 cycle_label = gfc_build_label_decl (NULL_TREE);
1009 exit_label = gfc_build_label_decl (NULL_TREE);
1011 /* Put the labels where they can be found later. See gfc_trans_do(). */
1012 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1014 /* Create a GIMPLE version of the exit condition. */
1015 gfc_init_se (&cond, NULL);
1016 gfc_conv_expr_val (&cond, code->expr);
1017 gfc_add_block_to_block (&block, &cond.pre);
1018 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1020 /* Build "IF (! cond) GOTO exit_label". */
1021 tmp = build1_v (GOTO_EXPR, exit_label);
1022 TREE_USED (exit_label) = 1;
1023 tmp = fold_build3 (COND_EXPR, void_type_node,
1024 cond.expr, tmp, build_empty_stmt ());
1025 gfc_add_expr_to_block (&block, tmp);
1027 /* The main body of the loop. */
1028 tmp = gfc_trans_code (code->block->next);
1029 gfc_add_expr_to_block (&block, tmp);
1031 /* Label for cycle statements (if needed). */
1032 if (TREE_USED (cycle_label))
1034 tmp = build1_v (LABEL_EXPR, cycle_label);
1035 gfc_add_expr_to_block (&block, tmp);
1038 /* End of loop body. */
1039 tmp = gfc_finish_block (&block);
1041 gfc_init_block (&block);
1042 /* Build the loop. */
1043 tmp = build1_v (LOOP_EXPR, tmp);
1044 gfc_add_expr_to_block (&block, tmp);
1046 /* Add the exit label. */
1047 tmp = build1_v (LABEL_EXPR, exit_label);
1048 gfc_add_expr_to_block (&block, tmp);
1050 return gfc_finish_block (&block);
1054 /* Translate the SELECT CASE construct for INTEGER case expressions,
1055 without killing all potential optimizations. The problem is that
1056 Fortran allows unbounded cases, but the back-end does not, so we
1057 need to intercept those before we enter the equivalent SWITCH_EXPR
1058 we can build.
1060 For example, we translate this,
1062 SELECT CASE (expr)
1063 CASE (:100,101,105:115)
1064 block_1
1065 CASE (190:199,200:)
1066 block_2
1067 CASE (300)
1068 block_3
1069 CASE DEFAULT
1070 block_4
1071 END SELECT
1073 to the GENERIC equivalent,
1075 switch (expr)
1077 case (minimum value for typeof(expr)) ... 100:
1078 case 101:
1079 case 105 ... 115:
1080 block_1;
1081 goto end_label;
1083 case 200 ... (maximum value for typeof(expr)):
1084 case 190 ... 199:
1085 block_2;
1086 goto end_label;
1088 case 300:
1089 block_3;
1090 goto end_label;
1092 default:
1093 block_4;
1094 goto end_label;
1097 end_label: */
1099 static tree
1100 gfc_trans_integer_select (gfc_code * code)
1102 gfc_code *c;
1103 gfc_case *cp;
1104 tree end_label;
1105 tree tmp;
1106 gfc_se se;
1107 stmtblock_t block;
1108 stmtblock_t body;
1110 gfc_start_block (&block);
1112 /* Calculate the switch expression. */
1113 gfc_init_se (&se, NULL);
1114 gfc_conv_expr_val (&se, code->expr);
1115 gfc_add_block_to_block (&block, &se.pre);
1117 end_label = gfc_build_label_decl (NULL_TREE);
1119 gfc_init_block (&body);
1121 for (c = code->block; c; c = c->block)
1123 for (cp = c->ext.case_list; cp; cp = cp->next)
1125 tree low, high;
1126 tree label;
1128 /* Assume it's the default case. */
1129 low = high = NULL_TREE;
1131 if (cp->low)
1133 low = gfc_conv_constant_to_tree (cp->low);
1135 /* If there's only a lower bound, set the high bound to the
1136 maximum value of the case expression. */
1137 if (!cp->high)
1138 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1141 if (cp->high)
1143 /* Three cases are possible here:
1145 1) There is no lower bound, e.g. CASE (:N).
1146 2) There is a lower bound .NE. high bound, that is
1147 a case range, e.g. CASE (N:M) where M>N (we make
1148 sure that M>N during type resolution).
1149 3) There is a lower bound, and it has the same value
1150 as the high bound, e.g. CASE (N:N). This is our
1151 internal representation of CASE(N).
1153 In the first and second case, we need to set a value for
1154 high. In the third case, we don't because the GCC middle
1155 end represents a single case value by just letting high be
1156 a NULL_TREE. We can't do that because we need to be able
1157 to represent unbounded cases. */
1159 if (!cp->low
1160 || (cp->low
1161 && mpz_cmp (cp->low->value.integer,
1162 cp->high->value.integer) != 0))
1163 high = gfc_conv_constant_to_tree (cp->high);
1165 /* Unbounded case. */
1166 if (!cp->low)
1167 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1170 /* Build a label. */
1171 label = gfc_build_label_decl (NULL_TREE);
1173 /* Add this case label.
1174 The 'label' operand is what makes it match the GCC back end's expectations. */
1175 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1176 gfc_add_expr_to_block (&body, tmp);
1179 /* Add the statements for this case. */
1180 tmp = gfc_trans_code (c->next);
1181 gfc_add_expr_to_block (&body, tmp);
1183 /* Break to the end of the construct. */
1184 tmp = build1_v (GOTO_EXPR, end_label);
1185 gfc_add_expr_to_block (&body, tmp);
1188 tmp = gfc_finish_block (&body);
1189 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1190 gfc_add_expr_to_block (&block, tmp);
1192 tmp = build1_v (LABEL_EXPR, end_label);
1193 gfc_add_expr_to_block (&block, tmp);
1195 return gfc_finish_block (&block);
1199 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1201 There are only two cases possible here, even though the standard
1202 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1203 .FALSE., and DEFAULT.
1205 We never generate more than two blocks here. Instead, we always
1206 try to eliminate the DEFAULT case. This way, we can translate this
1207 kind of SELECT construct to a simple
1209 if {} else {};
1211 expression in GENERIC. */
1213 static tree
1214 gfc_trans_logical_select (gfc_code * code)
1216 gfc_code *c;
1217 gfc_code *t, *f, *d;
1218 gfc_case *cp;
1219 gfc_se se;
1220 stmtblock_t block;
1222 /* Assume we don't have any cases at all. */
1223 t = f = d = NULL;
1225 /* Now see which ones we actually do have. We can have at most two
1226 cases in a single case list: one for .TRUE. and one for .FALSE.
1227 The default case is always separate. If the cases for .TRUE. and
1228 .FALSE. are in the same case list, the block for that case list
1229 is always executed, and we don't generate a COND_EXPR.  */
1230 for (c = code->block; c; c = c->block)
1232 for (cp = c->ext.case_list; cp; cp = cp->next)
1234 if (cp->low)
1236 if (cp->low->value.logical == 0) /* .FALSE. */
1237 f = c;
1238 else /* if (cp->low->value.logical != 0), thus .TRUE. */
1239 t = c;
1241 else
1242 d = c;
1246 /* Start a new block. */
1247 gfc_start_block (&block);
1249 /* Calculate the switch expression. We always need to do this
1250 because it may have side effects. */
1251 gfc_init_se (&se, NULL);
1252 gfc_conv_expr_val (&se, code->expr);
1253 gfc_add_block_to_block (&block, &se.pre);
1255 if (t == f && t != NULL)
1257 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1258 translate the code for these cases, append it to the current
1259 block. */
1260 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1262 else
1264 tree true_tree, false_tree, stmt;
1266 true_tree = build_empty_stmt ();
1267 false_tree = build_empty_stmt ();
1269 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1270 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1271 make the missing case the default case. */
1272 if (t != NULL && f != NULL)
1273 d = NULL;
1274 else if (d != NULL)
1276 if (t == NULL)
1277 t = d;
1278 else
1279 f = d;
1282 /* Translate the code for each of these blocks, and append it to
1283 the current block. */
1284 if (t != NULL)
1285 true_tree = gfc_trans_code (t->next);
1287 if (f != NULL)
1288 false_tree = gfc_trans_code (f->next);
1290 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1291 true_tree, false_tree);
1292 gfc_add_expr_to_block (&block, stmt);
1295 return gfc_finish_block (&block);
1299 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1300 Instead of generating compares and jumps, it is far simpler to
1301 generate a data structure describing the cases in order and call a
1302 library subroutine that locates the right case.
1303 This is particularly true because this is the only case where we
1304 might have to dispose of a temporary.
1305 The library subroutine returns a pointer to jump to or NULL if no
1306 branches are to be taken. */
1308 static tree
1309 gfc_trans_character_select (gfc_code *code)
1311 tree init, node, end_label, tmp, type, args, *labels;
1312 tree case_label;
1313 stmtblock_t block, body;
1314 gfc_case *cp, *d;
1315 gfc_code *c;
1316 gfc_se se;
1317 int i, n;
1319 static tree select_struct;
1320 static tree ss_string1, ss_string1_len;
1321 static tree ss_string2, ss_string2_len;
1322 static tree ss_target;
1324 if (select_struct == NULL)
1326 tree gfc_int4_type_node = gfc_get_int_type (4);
1328 select_struct = make_node (RECORD_TYPE);
1329 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1331 #undef ADD_FIELD
1332 #define ADD_FIELD(NAME, TYPE) \
1333 ss_##NAME = gfc_add_field_to_struct \
1334 (&(TYPE_FIELDS (select_struct)), select_struct, \
1335 get_identifier (stringize(NAME)), TYPE)
1337 ADD_FIELD (string1, pchar_type_node);
1338 ADD_FIELD (string1_len, gfc_int4_type_node);
1340 ADD_FIELD (string2, pchar_type_node);
1341 ADD_FIELD (string2_len, gfc_int4_type_node);
1343 ADD_FIELD (target, pvoid_type_node);
1344 #undef ADD_FIELD
1346 gfc_finish_type (select_struct);
1349 cp = code->block->ext.case_list;
1350 while (cp->left != NULL)
1351 cp = cp->left;
1353 n = 0;
1354 for (d = cp; d; d = d->right)
1355 d->n = n++;
1357 if (n != 0)
1358 labels = gfc_getmem (n * sizeof (tree));
1359 else
1360 labels = NULL;
1362 for(i = 0; i < n; i++)
1364 labels[i] = gfc_build_label_decl (NULL_TREE);
1365 TREE_USED (labels[i]) = 1;
1366 /* TODO: The gimplifier should do this for us, but it has
1367 inadequacies when dealing with static initializers. */
1368 FORCED_LABEL (labels[i]) = 1;
1371 end_label = gfc_build_label_decl (NULL_TREE);
1373 /* Generate the body */
1374 gfc_start_block (&block);
1375 gfc_init_block (&body);
1377 for (c = code->block; c; c = c->block)
1379 for (d = c->ext.case_list; d; d = d->next)
1381 tmp = build1_v (LABEL_EXPR, labels[d->n]);
1382 gfc_add_expr_to_block (&body, tmp);
1385 tmp = gfc_trans_code (c->next);
1386 gfc_add_expr_to_block (&body, tmp);
1388 tmp = build1_v (GOTO_EXPR, end_label);
1389 gfc_add_expr_to_block (&body, tmp);
1392 /* Generate the structure describing the branches */
1393 init = NULL_TREE;
1394 i = 0;
1396 for(d = cp; d; d = d->right, i++)
1398 node = NULL_TREE;
1400 gfc_init_se (&se, NULL);
1402 if (d->low == NULL)
1404 node = tree_cons (ss_string1, null_pointer_node, node);
1405 node = tree_cons (ss_string1_len, integer_zero_node, node);
1407 else
1409 gfc_conv_expr_reference (&se, d->low);
1411 node = tree_cons (ss_string1, se.expr, node);
1412 node = tree_cons (ss_string1_len, se.string_length, node);
1415 if (d->high == NULL)
1417 node = tree_cons (ss_string2, null_pointer_node, node);
1418 node = tree_cons (ss_string2_len, integer_zero_node, node);
1420 else
1422 gfc_init_se (&se, NULL);
1423 gfc_conv_expr_reference (&se, d->high);
1425 node = tree_cons (ss_string2, se.expr, node);
1426 node = tree_cons (ss_string2_len, se.string_length, node);
1429 tmp = gfc_build_addr_expr (pvoid_type_node, labels[i]);
1430 node = tree_cons (ss_target, tmp, node);
1432 tmp = build_constructor_from_list (select_struct, nreverse (node));
1433 init = tree_cons (NULL_TREE, tmp, init);
1436 type = build_array_type (select_struct, build_index_type
1437 (build_int_cst (NULL_TREE, n - 1)));
1439 init = build_constructor_from_list (type, nreverse(init));
1440 TREE_CONSTANT (init) = 1;
1441 TREE_INVARIANT (init) = 1;
1442 TREE_STATIC (init) = 1;
1443 /* Create a static variable to hold the jump table. */
1444 tmp = gfc_create_var (type, "jumptable");
1445 TREE_CONSTANT (tmp) = 1;
1446 TREE_INVARIANT (tmp) = 1;
1447 TREE_STATIC (tmp) = 1;
1448 TREE_READONLY (tmp) = 1;
1449 DECL_INITIAL (tmp) = init;
1450 init = tmp;
1452 /* Build an argument list for the library call */
1453 init = gfc_build_addr_expr (pvoid_type_node, init);
1454 args = gfc_chainon_list (NULL_TREE, init);
1456 tmp = build_int_cst (NULL_TREE, n);
1457 args = gfc_chainon_list (args, tmp);
1459 tmp = gfc_build_addr_expr (pvoid_type_node, end_label);
1460 args = gfc_chainon_list (args, tmp);
1462 gfc_init_se (&se, NULL);
1463 gfc_conv_expr_reference (&se, code->expr);
1465 args = gfc_chainon_list (args, se.expr);
1466 args = gfc_chainon_list (args, se.string_length);
1468 gfc_add_block_to_block (&block, &se.pre);
1470 tmp = build_function_call_expr (gfor_fndecl_select_string, args);
1471 case_label = gfc_create_var (TREE_TYPE (tmp), "case_label");
1472 gfc_add_modify_expr (&block, case_label, tmp);
1474 gfc_add_block_to_block (&block, &se.post);
1476 tmp = build1 (GOTO_EXPR, void_type_node, case_label);
1477 gfc_add_expr_to_block (&block, tmp);
1479 tmp = gfc_finish_block (&body);
1480 gfc_add_expr_to_block (&block, tmp);
1481 tmp = build1_v (LABEL_EXPR, end_label);
1482 gfc_add_expr_to_block (&block, tmp);
1484 if (n != 0)
1485 gfc_free (labels);
1487 return gfc_finish_block (&block);
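/* Illustrative sketch (an assumption, not from the original source): for

       SELECT CASE (s)
       CASE ('a' : 'f')
         block_1
       CASE DEFAULT
         block_2
       END SELECT

   the static jump table built above gets one {string1, string1_len, string2,
   string2_len, target} entry per case -- the bounds "a"/"f" with their lengths
   and the label address for the range, and NULL strings for the default
   entry -- and the generated code simply jumps to whatever label address the
   library lookup returns, reaching end_label once the selected block
   finishes.  */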
1491 /* Translate the three variants of the SELECT CASE construct.
1493 SELECT CASEs with INTEGER case expressions can be translated to an
1494 equivalent GENERIC switch statement, and for LOGICAL case
1495 expressions we build one or two if-else compares.
1497 SELECT CASEs with CHARACTER case expressions are a whole different
1498 story, because they don't exist in GENERIC. So we sort them and
1499 do a binary search at runtime.
1501 Fortran has no BREAK statement, and it does not allow jumps from
1502 one case block to another. That makes things a lot easier for
1503 the optimizers. */
1505 tree
1506 gfc_trans_select (gfc_code * code)
1508 gcc_assert (code && code->expr);
1510 /* Empty SELECT constructs are legal. */
1511 if (code->block == NULL)
1512 return build_empty_stmt ();
1514 /* Select the correct translation function. */
1515 switch (code->expr->ts.type)
1517 case BT_LOGICAL: return gfc_trans_logical_select (code);
1518 case BT_INTEGER: return gfc_trans_integer_select (code);
1519 case BT_CHARACTER: return gfc_trans_character_select (code);
1520 default:
1521 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1522 /* Not reached */
1527 /* Generate the loops for a FORALL block. The normal loop format:
1528 count = (end - start + step) / step
1529 loopvar = start
1530 while (1)
1532 if (count <=0 )
1533 goto end_of_loop
1534 <body>
1535 loopvar += step
1536 count --
1538 end_of_loop: */
1540 static tree
1541 gfc_trans_forall_loop (forall_info *forall_tmp, int nvar, tree body, int mask_flag)
1543 int n;
1544 tree tmp;
1545 tree cond;
1546 stmtblock_t block;
1547 tree exit_label;
1548 tree count;
1549 tree var, start, end, step;
1550 iter_info *iter;
1552 iter = forall_tmp->this_loop;
1553 for (n = 0; n < nvar; n++)
1555 var = iter->var;
1556 start = iter->start;
1557 end = iter->end;
1558 step = iter->step;
1560 exit_label = gfc_build_label_decl (NULL_TREE);
1561 TREE_USED (exit_label) = 1;
1563 /* The loop counter. */
1564 count = gfc_create_var (TREE_TYPE (var), "count");
1566 /* The body of the loop. */
1567 gfc_init_block (&block);
1569 /* The exit condition. */
1570 cond = fold_build2 (LE_EXPR, boolean_type_node,
1571 count, build_int_cst (TREE_TYPE (count), 0));
1572 tmp = build1_v (GOTO_EXPR, exit_label);
1573 tmp = fold_build3 (COND_EXPR, void_type_node,
1574 cond, tmp, build_empty_stmt ());
1575 gfc_add_expr_to_block (&block, tmp);
1577 /* The main loop body. */
1578 gfc_add_expr_to_block (&block, body);
1580 /* Increment the loop variable. */
1581 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1582 gfc_add_modify_expr (&block, var, tmp);
1584 /* Advance to the next mask element. Only do this for the
1585 innermost loop. */
1586 if (n == 0 && mask_flag && forall_tmp->mask)
1588 tree maskindex = forall_tmp->maskindex;
1589 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1590 maskindex, gfc_index_one_node);
1591 gfc_add_modify_expr (&block, maskindex, tmp);
1594 /* Decrement the loop counter. */
1595 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count, gfc_index_one_node);
1596 gfc_add_modify_expr (&block, count, tmp);
1598 body = gfc_finish_block (&block);
1600 /* Loop var initialization. */
1601 gfc_init_block (&block);
1602 gfc_add_modify_expr (&block, var, start);
1604 /* Initialize maskindex counter. Only do this before the
1605 outermost loop. */
1606 if (n == nvar - 1 && mask_flag && forall_tmp->mask)
1607 gfc_add_modify_expr (&block, forall_tmp->maskindex,
1608 gfc_index_zero_node);
1610 /* Initialize the loop counter. */
1611 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1612 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1613 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1614 gfc_add_modify_expr (&block, count, tmp);
1616 /* The loop expression. */
1617 tmp = build1_v (LOOP_EXPR, body);
1618 gfc_add_expr_to_block (&block, tmp);
1620 /* The exit label. */
1621 tmp = build1_v (LABEL_EXPR, exit_label);
1622 gfc_add_expr_to_block (&block, tmp);
1624 body = gfc_finish_block (&block);
1625 iter = iter->next;
1627 return body;
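/* Worked example (illustrative): for  FORALL (i = 1:9:2)  the loop built above
   uses count = (9 - 1 + 2) / 2 = 5, so the body executes for
   i = 1, 3, 5, 7, 9, and when a mask is present maskindex is advanced once per
   innermost iteration to stay in step with the packed mask array.  */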
1631 /* Generate the body and loops according to MASK_FLAG and NEST_FLAG.
1632 If MASK_FLAG is nonzero, the body is controlled by the masks in the forall
1633 nest; otherwise, the body is not controlled by masks.
1634 If NEST_FLAG is nonzero, generate loops for the nested forall; otherwise,
1635 only generate loops for the current forall level. */
1637 static tree
1638 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1639 int mask_flag, int nest_flag)
1641 tree tmp;
1642 int nvar;
1643 forall_info *forall_tmp;
1644 tree pmask, mask, maskindex;
1646 forall_tmp = nested_forall_info;
1647 /* Generate loops for nested forall. */
1648 if (nest_flag)
1650 while (forall_tmp->next_nest != NULL)
1651 forall_tmp = forall_tmp->next_nest;
1652 while (forall_tmp != NULL)
1654 /* Generate body with masks' control. */
1655 if (mask_flag)
1657 pmask = forall_tmp->pmask;
1658 mask = forall_tmp->mask;
1659 maskindex = forall_tmp->maskindex;
1661 if (mask)
1663 /* If a mask was specified make the assignment conditional. */
1664 if (pmask)
1665 tmp = build_fold_indirect_ref (mask);
1666 else
1667 tmp = mask;
1668 tmp = gfc_build_array_ref (tmp, maskindex);
1670 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1673 nvar = forall_tmp->nvar;
1674 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1675 forall_tmp = forall_tmp->outer;
1678 else
1680 nvar = forall_tmp->nvar;
1681 body = gfc_trans_forall_loop (forall_tmp, nvar, body, mask_flag);
1684 return body;
1688 /* Allocate data for holding a temporary array. Returns either a local
1689 temporary array or a pointer variable. */
1691 static tree
1692 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1693 tree elem_type)
1695 tree tmpvar;
1696 tree type;
1697 tree tmp;
1698 tree args;
1700 if (INTEGER_CST_P (size))
1702 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1703 gfc_index_one_node);
1705 else
1706 tmp = NULL_TREE;
1708 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1709 type = build_array_type (elem_type, type);
1710 if (gfc_can_put_var_on_stack (bytesize))
1712 gcc_assert (INTEGER_CST_P (size));
1713 tmpvar = gfc_create_var (type, "temp");
1714 *pdata = NULL_TREE;
1716 else
1718 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1719 *pdata = convert (pvoid_type_node, tmpvar);
1721 args = gfc_chainon_list (NULL_TREE, bytesize);
1722 if (gfc_index_integer_kind == 4)
1723 tmp = gfor_fndecl_internal_malloc;
1724 else if (gfc_index_integer_kind == 8)
1725 tmp = gfor_fndecl_internal_malloc64;
1726 else
1727 gcc_unreachable ();
1728 tmp = build_function_call_expr (tmp, args);
1729 tmp = convert (TREE_TYPE (tmpvar), tmp);
1730 gfc_add_modify_expr (pblock, tmpvar, tmp);
1732 return tmpvar;
1736 /* Generate code to copy the temporary back to the actual lhs.  */
1738 static tree
1739 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1740 tree count1, tree wheremask, bool invert)
1742 gfc_ss *lss;
1743 gfc_se lse, rse;
1744 stmtblock_t block, body;
1745 gfc_loopinfo loop1;
1746 tree tmp;
1747 tree wheremaskexpr;
1749 /* Walk the lhs. */
1750 lss = gfc_walk_expr (expr);
1752 if (lss == gfc_ss_terminator)
1754 gfc_start_block (&block);
1756 gfc_init_se (&lse, NULL);
1758 /* Translate the expression. */
1759 gfc_conv_expr (&lse, expr);
1761 /* Form the expression for the temporary. */
1762 tmp = gfc_build_array_ref (tmp1, count1);
1764 /* Use the scalar assignment as is. */
1765 gfc_add_block_to_block (&block, &lse.pre);
1766 gfc_add_modify_expr (&block, lse.expr, tmp);
1767 gfc_add_block_to_block (&block, &lse.post);
1769 /* Increment the count1. */
1770 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1771 gfc_index_one_node);
1772 gfc_add_modify_expr (&block, count1, tmp);
1774 tmp = gfc_finish_block (&block);
1776 else
1778 gfc_start_block (&block);
1780 gfc_init_loopinfo (&loop1);
1781 gfc_init_se (&rse, NULL);
1782 gfc_init_se (&lse, NULL);
1784 /* Associate the lss with the loop. */
1785 gfc_add_ss_to_loop (&loop1, lss);
1787 /* Calculate the bounds of the scalarization. */
1788 gfc_conv_ss_startstride (&loop1);
1789 /* Setup the scalarizing loops. */
1790 gfc_conv_loop_setup (&loop1);
1792 gfc_mark_ss_chain_used (lss, 1);
1794 /* Start the scalarized loop body. */
1795 gfc_start_scalarized_body (&loop1, &body);
1797 /* Setup the gfc_se structures. */
1798 gfc_copy_loopinfo_to_se (&lse, &loop1);
1799 lse.ss = lss;
1801 /* Form the expression of the temporary. */
1802 if (lss != gfc_ss_terminator)
1803 rse.expr = gfc_build_array_ref (tmp1, count1);
1804 /* Translate expr. */
1805 gfc_conv_expr (&lse, expr);
1807 /* Use the scalar assignment. */
1808 rse.string_length = lse.string_length;
1809 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1811 /* Form the mask expression according to the mask tree list. */
1812 if (wheremask)
1814 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1815 if (invert)
1816 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1817 TREE_TYPE (wheremaskexpr),
1818 wheremaskexpr);
1819 tmp = fold_build3 (COND_EXPR, void_type_node,
1820 wheremaskexpr, tmp, build_empty_stmt ());
1823 gfc_add_expr_to_block (&body, tmp);
1825 /* Increment count1. */
1826 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1827 count1, gfc_index_one_node);
1828 gfc_add_modify_expr (&body, count1, tmp);
1830 /* Increment count3. */
1831 if (count3)
1833 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1834 count3, gfc_index_one_node);
1835 gfc_add_modify_expr (&body, count3, tmp);
1838 /* Generate the copying loops. */
1839 gfc_trans_scalarizing_loops (&loop1, &body);
1840 gfc_add_block_to_block (&block, &loop1.pre);
1841 gfc_add_block_to_block (&block, &loop1.post);
1842 gfc_cleanup_loop (&loop1);
1844 tmp = gfc_finish_block (&block);
1846 return tmp;
1850 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
1851 the temporary; LSS and RSS are formed in compute_inner_temp_size ()
1852 and should not be freed. WHEREMASK is the conditional execution mask
1853 whose sense may be inverted by INVERT. */
1855 static tree
1856 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1857 tree count1, gfc_ss *lss, gfc_ss *rss,
1858 tree wheremask, bool invert)
1860 stmtblock_t block, body1;
1861 gfc_loopinfo loop;
1862 gfc_se lse;
1863 gfc_se rse;
1864 tree tmp;
1865 tree wheremaskexpr;
1867 gfc_start_block (&block);
1869 gfc_init_se (&rse, NULL);
1870 gfc_init_se (&lse, NULL);
1872 if (lss == gfc_ss_terminator)
1874 gfc_init_block (&body1);
1875 gfc_conv_expr (&rse, expr2);
1876 lse.expr = gfc_build_array_ref (tmp1, count1);
1878 else
1880 /* Initialize the loop. */
1881 gfc_init_loopinfo (&loop);
1883 /* We may need LSS to determine the shape of the expression. */
1884 gfc_add_ss_to_loop (&loop, lss);
1885 gfc_add_ss_to_loop (&loop, rss);
1887 gfc_conv_ss_startstride (&loop);
1888 gfc_conv_loop_setup (&loop);
1890 gfc_mark_ss_chain_used (rss, 1);
1891 /* Start the loop body. */
1892 gfc_start_scalarized_body (&loop, &body1);
1894 /* Translate the expression. */
1895 gfc_copy_loopinfo_to_se (&rse, &loop);
1896 rse.ss = rss;
1897 gfc_conv_expr (&rse, expr2);
1899 /* Form the expression of the temporary. */
1900 lse.expr = gfc_build_array_ref (tmp1, count1);
1903 /* Use the scalar assignment. */
1904 lse.string_length = rse.string_length;
1905 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1906 expr2->expr_type == EXPR_VARIABLE);
1908 /* Form the mask expression according to the mask tree list. */
1909 if (wheremask)
1911 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1912 if (invert)
1913 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1914 TREE_TYPE (wheremaskexpr),
1915 wheremaskexpr);
1916 tmp = fold_build3 (COND_EXPR, void_type_node,
1917 wheremaskexpr, tmp, build_empty_stmt ());
1920 gfc_add_expr_to_block (&body1, tmp);
1922 if (lss == gfc_ss_terminator)
1924 gfc_add_block_to_block (&block, &body1);
1926 /* Increment count1. */
1927 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1928 gfc_index_one_node);
1929 gfc_add_modify_expr (&block, count1, tmp);
1931 else
1933 /* Increment count1. */
1934 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1935 count1, gfc_index_one_node);
1936 gfc_add_modify_expr (&body1, count1, tmp);
1938 /* Increment count3. */
1939 if (count3)
1941 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1942 count3, gfc_index_one_node);
1943 gfc_add_modify_expr (&body1, count3, tmp);
1946 /* Generate the copying loops. */
1947 gfc_trans_scalarizing_loops (&loop, &body1);
1949 gfc_add_block_to_block (&block, &loop.pre);
1950 gfc_add_block_to_block (&block, &loop.post);
1952 gfc_cleanup_loop (&loop);
1953 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1954 as tree nodes in SS may not be valid in different scope. */
1957 tmp = gfc_finish_block (&block);
1958 return tmp;
1962 /* Calculate the size of temporary needed in the assignment inside forall.
1963 LSS and RSS are filled in this function. */
1965 static tree
1966 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1967 stmtblock_t * pblock,
1968 gfc_ss **lss, gfc_ss **rss)
1970 gfc_loopinfo loop;
1971 tree size;
1972 int i;
1973 int save_flag;
1974 tree tmp;
1976 *lss = gfc_walk_expr (expr1);
1977 *rss = NULL;
1979 size = gfc_index_one_node;
1980 if (*lss != gfc_ss_terminator)
1982 gfc_init_loopinfo (&loop);
1984 /* Walk the RHS of the expression. */
1985 *rss = gfc_walk_expr (expr2);
1986 if (*rss == gfc_ss_terminator)
1988 /* The rhs is scalar. Add a ss for the expression. */
1989 *rss = gfc_get_ss ();
1990 (*rss)->next = gfc_ss_terminator;
1991 (*rss)->type = GFC_SS_SCALAR;
1992 (*rss)->expr = expr2;
1995 /* Associate the SS with the loop. */
1996 gfc_add_ss_to_loop (&loop, *lss);
1997 /* We don't actually need to add the rhs at this point, but it might
1998 make guessing the loop bounds a bit easier. */
1999 gfc_add_ss_to_loop (&loop, *rss);
2001 /* We only want the shape of the expression, not the rest of the junk
2002 generated by the scalarizer. */
2003 loop.array_parameter = 1;
2005 /* Calculate the bounds of the scalarization. */
2006 save_flag = flag_bounds_check;
2007 flag_bounds_check = 0;
2008 gfc_conv_ss_startstride (&loop);
2009 flag_bounds_check = save_flag;
2010 gfc_conv_loop_setup (&loop);
2012 /* Figure out how many elements we need. */
2013 for (i = 0; i < loop.dimen; i++)
2015 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2016 gfc_index_one_node, loop.from[i]);
2017 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2018 tmp, loop.to[i]);
2019 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2021 gfc_add_block_to_block (pblock, &loop.pre);
2022 size = gfc_evaluate_now (size, pblock);
2023 gfc_add_block_to_block (pblock, &loop.post);
2025 /* TODO: write a function that cleans up a loopinfo without freeing
2026 the SS chains. Currently a NOP. */
2029 return size;
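/* Worked example (illustrative): if the lhs of the assignment is a section
   such as  a(1:3, 2:5) , the loop bounds above give an element count of
   (3 - 1 + 1) * (5 - 2 + 1) = 3 * 4 = 12, which is the per-iteration size
   returned to the caller.  */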
2033 /* Calculate the overall iterator number of the nested forall construct. */
2035 static tree
2036 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2037 stmtblock_t *inner_size_body, stmtblock_t *block)
2039 tree tmp, number;
2040 stmtblock_t body;
2042 /* TODO: optimize this computation.  */
2043 number = gfc_create_var (gfc_array_index_type, "num");
2044 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2046 gfc_start_block (&body);
2047 if (inner_size_body)
2048 gfc_add_block_to_block (&body, inner_size_body);
2049 if (nested_forall_info)
2050 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2051 inner_size);
2052 else
2053 tmp = inner_size;
2054 gfc_add_modify_expr (&body, number, tmp);
2055 tmp = gfc_finish_block (&body);
2057 /* Generate loops. */
2058 if (nested_forall_info != NULL)
2059 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 0, 1);
2061 gfc_add_expr_to_block (block, tmp);
2063 return number;
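/* Worked example (illustrative): inside  FORALL (i = 1:3)  an assignment whose
   inner temporary needs 4 elements makes the loops generated above accumulate
   num = 4 + 4 + 4 = 12, the total number of elements the forall nest has to
   allocate.  */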
2067 /* Allocate the temporary for a forall construct. SIZE is the size of the
2068 temporary needed. PTEMP1 is returned so the space can be freed later. */
2070 static tree
2071 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2072 tree * ptemp1)
2074 tree unit;
2075 tree temp1;
2076 tree tmp;
2077 tree bytesize;
2079 unit = TYPE_SIZE_UNIT (type);
2080 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2082 *ptemp1 = NULL;
2083 temp1 = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2085 if (*ptemp1)
2086 tmp = build_fold_indirect_ref (temp1);
2087 else
2088 tmp = temp1;
2090 return tmp;
 2094 /* Allocate a temporary for the FORALL construct according to the information
 2095 in nested_forall_info. INNER_SIZE is the size of the temporary needed by the
 2096 assignment inside the FORALL. PTEMP1 is returned so the space can be freed later. */
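/* Illustrative sketch: if the overall iteration count works out to 12 and
   INNER_SIZE is 5 elements (purely hypothetical numbers), the helpers below
   request one linear allocation of 60 * TYPE_SIZE_UNIT (type) bytes.  */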
2098 static tree
2099 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2100 tree inner_size, stmtblock_t * inner_size_body,
2101 stmtblock_t * block, tree * ptemp1)
2103 tree size;
2105 /* Calculate the total size of temporary needed in forall construct. */
2106 size = compute_overall_iter_number (nested_forall_info, inner_size,
2107 inner_size_body, block);
2109 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
 2113 /* Handle assignments inside a FORALL which need a temporary.
2115 forall (i=start:end:stride; maskexpr)
2116 e<i> = f<i>
2117 end forall
2118 (where e,f<i> are arbitrary expressions possibly involving i
2119 and there is a dependency between e<i> and f<i>)
2120 Translates to:
2121 masktmp(:) = maskexpr(:)
2123 maskindex = 0;
2124 count1 = 0;
2125 num = 0;
2126 for (i = start; i <= end; i += stride)
2127 num += SIZE (f<i>)
2128 count1 = 0;
2129 ALLOCATE (tmp(num))
2130 for (i = start; i <= end; i += stride)
2132 if (masktmp[maskindex++])
2133 tmp[count1++] = f<i>
2135 maskindex = 0;
2136 count1 = 0;
2137 for (i = start; i <= end; i += stride)
2139 if (masktmp[maskindex++])
2140 e<i> = tmp[count1++]
2142 DEALLOCATE (tmp)
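/* Illustrative example (hypothetical names): the assignment

     forall (i=1:n)
       a(i) = a(n+1-i)
     end forall

   has a dependency between lhs and rhs, so the rhs values for every
   iteration are gathered into tmp by the first loop nest above and only
   then copied back to the lhs by the second.  */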
2144 static void
2145 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2146 tree wheremask, bool invert,
2147 forall_info * nested_forall_info,
2148 stmtblock_t * block)
2150 tree type;
2151 tree inner_size;
2152 gfc_ss *lss, *rss;
2153 tree count, count1;
2154 tree tmp, tmp1;
2155 tree ptemp1;
2156 stmtblock_t inner_size_body;
 2158 /* Create vars. count1 is the current iteration number of the nested
 2159 FORALL. */
2160 count1 = gfc_create_var (gfc_array_index_type, "count1");
2162 /* Count is the wheremask index. */
2163 if (wheremask)
2165 count = gfc_create_var (gfc_array_index_type, "count");
2166 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2168 else
2169 count = NULL;
2171 /* Initialize count1. */
2172 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
 2174 /* Calculate the size of the temporary needed for the assignment. Fill in
 2175 LSS and RSS, which are used by generate_loop_for_rhs_to_temp(). */
2176 gfc_init_block (&inner_size_body);
2177 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2178 &lss, &rss);
 2180 /* The type of the LHS. Used by allocate_temp_for_forall_nest. */
2181 type = gfc_typenode_for_spec (&expr1->ts);
2183 /* Allocate temporary for nested forall construct according to the
2184 information in nested_forall_info and inner_size. */
2185 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2186 &inner_size_body, block, &ptemp1);
 2188 /* Generate code to copy the rhs to the temporary. */
2189 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2190 wheremask, invert);
2192 /* Generate body and loops according to the information in
2193 nested_forall_info. */
2194 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2195 gfc_add_expr_to_block (block, tmp);
2197 /* Reset count1. */
2198 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2200 /* Reset count. */
2201 if (wheremask)
2202 gfc_add_modify_expr (block, count, gfc_index_zero_node);
 2204 /* Generate code to copy the temporary to the lhs. */
2205 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2206 wheremask, invert);
2208 /* Generate body and loops according to the information in
2209 nested_forall_info. */
2210 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2211 gfc_add_expr_to_block (block, tmp);
2213 if (ptemp1)
2215 /* Free the temporary. */
2216 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2217 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2218 gfc_add_expr_to_block (block, tmp);
 2223 /* Translate a pointer assignment inside FORALL which needs a temporary. */
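/* Illustrative example (hypothetical names): a dependent pointer assignment
   such as

     forall (i=1:n)
       p(i)%ptr => p(perm(i))%ptr
     end forall

   first records every rhs pointer (or, for array targets, the rhs
   descriptor) in a linear temporary over all FORALL iterations, and only
   then performs the actual pointer assignments from that temporary.  */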
2225 static void
2226 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2227 forall_info * nested_forall_info,
2228 stmtblock_t * block)
2230 tree type;
2231 tree inner_size;
2232 gfc_ss *lss, *rss;
2233 gfc_se lse;
2234 gfc_se rse;
2235 gfc_ss_info *info;
2236 gfc_loopinfo loop;
2237 tree desc;
2238 tree parm;
2239 tree parmtype;
2240 stmtblock_t body;
2241 tree count;
2242 tree tmp, tmp1, ptemp1;
2244 count = gfc_create_var (gfc_array_index_type, "count");
2245 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2247 inner_size = integer_one_node;
2248 lss = gfc_walk_expr (expr1);
2249 rss = gfc_walk_expr (expr2);
2250 if (lss == gfc_ss_terminator)
2252 type = gfc_typenode_for_spec (&expr1->ts);
2253 type = build_pointer_type (type);
2255 /* Allocate temporary for nested forall construct according to the
2256 information in nested_forall_info and inner_size. */
2257 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2258 inner_size, NULL, block, &ptemp1);
2259 gfc_start_block (&body);
2260 gfc_init_se (&lse, NULL);
2261 lse.expr = gfc_build_array_ref (tmp1, count);
2262 gfc_init_se (&rse, NULL);
2263 rse.want_pointer = 1;
2264 gfc_conv_expr (&rse, expr2);
2265 gfc_add_block_to_block (&body, &rse.pre);
2266 gfc_add_modify_expr (&body, lse.expr,
2267 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2268 gfc_add_block_to_block (&body, &rse.post);
2270 /* Increment count. */
2271 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2272 count, gfc_index_one_node);
2273 gfc_add_modify_expr (&body, count, tmp);
2275 tmp = gfc_finish_block (&body);
2277 /* Generate body and loops according to the information in
2278 nested_forall_info. */
2279 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2280 gfc_add_expr_to_block (block, tmp);
2282 /* Reset count. */
2283 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2285 gfc_start_block (&body);
2286 gfc_init_se (&lse, NULL);
2287 gfc_init_se (&rse, NULL);
2288 rse.expr = gfc_build_array_ref (tmp1, count);
2289 lse.want_pointer = 1;
2290 gfc_conv_expr (&lse, expr1);
2291 gfc_add_block_to_block (&body, &lse.pre);
2292 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2293 gfc_add_block_to_block (&body, &lse.post);
2294 /* Increment count. */
2295 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2296 count, gfc_index_one_node);
2297 gfc_add_modify_expr (&body, count, tmp);
2298 tmp = gfc_finish_block (&body);
2300 /* Generate body and loops according to the information in
2301 nested_forall_info. */
2302 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2303 gfc_add_expr_to_block (block, tmp);
2305 else
2307 gfc_init_loopinfo (&loop);
2309 /* Associate the SS with the loop. */
2310 gfc_add_ss_to_loop (&loop, rss);
2312 /* Setup the scalarizing loops and bounds. */
2313 gfc_conv_ss_startstride (&loop);
2315 gfc_conv_loop_setup (&loop);
2317 info = &rss->data.info;
2318 desc = info->descriptor;
2320 /* Make a new descriptor. */
2321 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2322 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2323 loop.from, loop.to, 1);
2325 /* Allocate temporary for nested forall construct. */
2326 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2327 inner_size, NULL, block, &ptemp1);
2328 gfc_start_block (&body);
2329 gfc_init_se (&lse, NULL);
2330 lse.expr = gfc_build_array_ref (tmp1, count);
2331 lse.direct_byref = 1;
2332 rss = gfc_walk_expr (expr2);
2333 gfc_conv_expr_descriptor (&lse, expr2, rss);
2335 gfc_add_block_to_block (&body, &lse.pre);
2336 gfc_add_block_to_block (&body, &lse.post);
2338 /* Increment count. */
2339 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2340 count, gfc_index_one_node);
2341 gfc_add_modify_expr (&body, count, tmp);
2343 tmp = gfc_finish_block (&body);
2345 /* Generate body and loops according to the information in
2346 nested_forall_info. */
2347 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2348 gfc_add_expr_to_block (block, tmp);
2350 /* Reset count. */
2351 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2353 parm = gfc_build_array_ref (tmp1, count);
2354 lss = gfc_walk_expr (expr1);
2355 gfc_init_se (&lse, NULL);
2356 gfc_conv_expr_descriptor (&lse, expr1, lss);
2357 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2358 gfc_start_block (&body);
2359 gfc_add_block_to_block (&body, &lse.pre);
2360 gfc_add_block_to_block (&body, &lse.post);
2362 /* Increment count. */
2363 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2364 count, gfc_index_one_node);
2365 gfc_add_modify_expr (&body, count, tmp);
2367 tmp = gfc_finish_block (&body);
2369 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1, 1);
2370 gfc_add_expr_to_block (block, tmp);
2372 /* Free the temporary. */
2373 if (ptemp1)
2375 tmp = gfc_chainon_list (NULL_TREE, ptemp1);
2376 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2377 gfc_add_expr_to_block (block, tmp);
2382 /* FORALL and WHERE statements are really nasty, especially when you nest
 2383 them. All the rhs expressions of a FORALL assignment must be evaluated
 2384 before the actual assignments are performed. Presumably this also applies
 2385 to all the assignments in an inner WHERE statement. */
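/* For instance (illustrative), in

     forall (i=2:n)
       a(i) = a(i-1)
     end forall

   every a(i-1) read on the rhs must be the value a held before the FORALL
   began, never a value stored by another iteration of the same FORALL.  */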
2387 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2388 linear array, relying on the fact that we process in the same order in all
2389 loops.
2391 forall (i=start:end:stride; maskexpr)
2392 e<i> = f<i>
2393 g<i> = h<i>
2394 end forall
2395 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2396 Translates to:
2397 count = ((end + 1 - start) / stride)
2398 masktmp(:) = maskexpr(:)
2400 maskindex = 0;
2401 for (i = start; i <= end; i += stride)
2403 if (masktmp[maskindex++])
2404 e<i> = f<i>
2406 maskindex = 0;
2407 for (i = start; i <= end; i += stride)
2409 if (masktmp[maskindex++])
2410 g<i> = h<i>
2413 Note that this code only works when there are no dependencies.
 2414 FORALL loops with array assignments and data dependencies are a real pain,
2415 because the size of the temporary cannot always be determined before the
2416 loop is executed. This problem is compounded by the presence of nested
2417 FORALL constructs.
2420 static tree
2421 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2423 stmtblock_t block;
2424 stmtblock_t body;
2425 tree *var;
2426 tree *start;
2427 tree *end;
2428 tree *step;
2429 gfc_expr **varexpr;
2430 tree tmp;
2431 tree assign;
2432 tree size;
2433 tree bytesize;
2434 tree tmpvar;
2435 tree sizevar;
2436 tree lenvar;
2437 tree maskindex;
2438 tree mask;
2439 tree pmask;
2440 int n;
2441 int nvar;
2442 int need_temp;
2443 gfc_forall_iterator *fa;
2444 gfc_se se;
2445 gfc_code *c;
2446 gfc_saved_var *saved_vars;
2447 iter_info *this_forall, *iter_tmp;
2448 forall_info *info, *forall_tmp;
2450 gfc_start_block (&block);
2452 n = 0;
 2453 /* Count the number of FORALL indices. */
2454 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2455 n++;
2456 nvar = n;
2458 /* Allocate the space for var, start, end, step, varexpr. */
2459 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2460 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2461 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2462 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2463 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2464 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2466 /* Allocate the space for info. */
2467 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2468 n = 0;
2469 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2471 gfc_symbol *sym = fa->var->symtree->n.sym;
 2473 /* Allocate space for this_forall. */
2474 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2476 /* Create a temporary variable for the FORALL index. */
2477 tmp = gfc_typenode_for_spec (&sym->ts);
2478 var[n] = gfc_create_var (tmp, sym->name);
2479 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2481 /* Record it in this_forall. */
2482 this_forall->var = var[n];
2484 /* Replace the index symbol's backend_decl with the temporary decl. */
2485 sym->backend_decl = var[n];
2487 /* Work out the start, end and stride for the loop. */
2488 gfc_init_se (&se, NULL);
2489 gfc_conv_expr_val (&se, fa->start);
2490 /* Record it in this_forall. */
2491 this_forall->start = se.expr;
2492 gfc_add_block_to_block (&block, &se.pre);
2493 start[n] = se.expr;
2495 gfc_init_se (&se, NULL);
2496 gfc_conv_expr_val (&se, fa->end);
2497 /* Record it in this_forall. */
2498 this_forall->end = se.expr;
2499 gfc_make_safe_expr (&se);
2500 gfc_add_block_to_block (&block, &se.pre);
2501 end[n] = se.expr;
2503 gfc_init_se (&se, NULL);
2504 gfc_conv_expr_val (&se, fa->stride);
2505 /* Record it in this_forall. */
2506 this_forall->step = se.expr;
2507 gfc_make_safe_expr (&se);
2508 gfc_add_block_to_block (&block, &se.pre);
2509 step[n] = se.expr;
2511 /* Set the NEXT field of this_forall to NULL. */
2512 this_forall->next = NULL;
2513 /* Link this_forall to the info construct. */
2514 if (info->this_loop == NULL)
2515 info->this_loop = this_forall;
2516 else
2518 iter_tmp = info->this_loop;
2519 while (iter_tmp->next != NULL)
2520 iter_tmp = iter_tmp->next;
2521 iter_tmp->next = this_forall;
2524 n++;
2526 nvar = n;
2528 /* Work out the number of elements in the mask array. */
2529 tmpvar = NULL_TREE;
2530 lenvar = NULL_TREE;
2531 size = gfc_index_one_node;
2532 sizevar = NULL_TREE;
2534 for (n = 0; n < nvar; n++)
2536 if (lenvar && TREE_TYPE (lenvar) != TREE_TYPE (start[n]))
2537 lenvar = NULL_TREE;
2539 /* size = (end + step - start) / step. */
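/* E.g. (illustrative) i = 1, 10, 3 gives (10 + 3 - 1) / 3 = 4 iterations. */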
2540 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2541 step[n], start[n]);
2542 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2544 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2545 tmp = convert (gfc_array_index_type, tmp);
2547 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2550 /* Record the nvar and size of current forall level. */
2551 info->nvar = nvar;
2552 info->size = size;
2554 /* Link the current forall level to nested_forall_info. */
2555 forall_tmp = nested_forall_info;
2556 if (forall_tmp == NULL)
2557 nested_forall_info = info;
2558 else
2560 while (forall_tmp->next_nest != NULL)
2561 forall_tmp = forall_tmp->next_nest;
2562 info->outer = forall_tmp;
2563 forall_tmp->next_nest = info;
2566 /* Copy the mask into a temporary variable if required.
2567 For now we assume a mask temporary is needed. */
2568 if (code->expr)
2570 /* As the mask array can be very big, prefer compact
2571 boolean types. */
2572 tree smallest_boolean_type_node
2573 = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2575 /* Allocate the mask temporary. */
2576 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
2577 TYPE_SIZE_UNIT (smallest_boolean_type_node));
2579 mask = gfc_do_allocate (bytesize, size, &pmask, &block,
2580 smallest_boolean_type_node);
2582 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2583 /* Record them in the info structure. */
2584 info->pmask = pmask;
2585 info->mask = mask;
2586 info->maskindex = maskindex;
2588 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2590 /* Start of mask assignment loop body. */
2591 gfc_start_block (&body);
2593 /* Evaluate the mask expression. */
2594 gfc_init_se (&se, NULL);
2595 gfc_conv_expr_val (&se, code->expr);
2596 gfc_add_block_to_block (&body, &se.pre);
2598 /* Store the mask. */
2599 se.expr = convert (smallest_boolean_type_node, se.expr);
2601 if (pmask)
2602 tmp = build_fold_indirect_ref (mask);
2603 else
2604 tmp = mask;
2605 tmp = gfc_build_array_ref (tmp, maskindex);
2606 gfc_add_modify_expr (&body, tmp, se.expr);
2608 /* Advance to the next mask element. */
2609 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2610 maskindex, gfc_index_one_node);
2611 gfc_add_modify_expr (&body, maskindex, tmp);
2613 /* Generate the loops. */
2614 tmp = gfc_finish_block (&body);
2615 tmp = gfc_trans_nested_forall_loop (info, tmp, 0, 0);
2616 gfc_add_expr_to_block (&block, tmp);
2618 else
2620 /* No mask was specified. */
2621 maskindex = NULL_TREE;
2622 mask = pmask = NULL_TREE;
2625 c = code->block->next;
2627 /* TODO: loop merging in FORALL statements. */
2628 /* Now that we've got a copy of the mask, generate the assignment loops. */
2629 while (c)
2631 switch (c->op)
2633 case EXEC_ASSIGN:
2634 /* A scalar or array assignment. */
2635 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2636 /* Temporaries due to array assignment data dependencies introduce
2637 no end of problems. */
2638 if (need_temp)
2639 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2640 nested_forall_info, &block);
2641 else
2643 /* Use the normal assignment copying routines. */
2644 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2646 /* Generate body and loops. */
2647 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2648 gfc_add_expr_to_block (&block, tmp);
2651 break;
2653 case EXEC_WHERE:
2654 /* Translate WHERE or WHERE construct nested in FORALL. */
2655 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2656 break;
2658 /* Pointer assignment inside FORALL. */
2659 case EXEC_POINTER_ASSIGN:
2660 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2661 if (need_temp)
2662 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2663 nested_forall_info, &block);
2664 else
2666 /* Use the normal assignment copying routines. */
2667 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2669 /* Generate body and loops. */
2670 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign,
2671 1, 1);
2672 gfc_add_expr_to_block (&block, tmp);
2674 break;
2676 case EXEC_FORALL:
2677 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2678 gfc_add_expr_to_block (&block, tmp);
2679 break;
2681 /* Explicit subroutine calls are prevented by the frontend but interface
2682 assignments can legitimately produce them. */
2683 case EXEC_ASSIGN_CALL:
2684 assign = gfc_trans_call (c, true);
2685 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1, 1);
2686 gfc_add_expr_to_block (&block, tmp);
2687 break;
2689 default:
2690 gcc_unreachable ();
2693 c = c->next;
2696 /* Restore the original index variables. */
2697 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2698 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2700 /* Free the space for var, start, end, step, varexpr. */
2701 gfc_free (var);
2702 gfc_free (start);
2703 gfc_free (end);
2704 gfc_free (step);
2705 gfc_free (varexpr);
2706 gfc_free (saved_vars);
2708 if (pmask)
2710 /* Free the temporary for the mask. */
2711 tmp = gfc_chainon_list (NULL_TREE, pmask);
2712 tmp = build_function_call_expr (gfor_fndecl_internal_free, tmp);
2713 gfc_add_expr_to_block (&block, tmp);
2715 if (maskindex)
2716 pushdecl (maskindex);
2718 return gfc_finish_block (&block);
2722 /* Translate the FORALL statement or construct. */
2724 tree gfc_trans_forall (gfc_code * code)
2726 return gfc_trans_forall_1 (code, NULL);
 2730 /* Evaluate the WHERE mask expression and copy its value to a temporary.
 2731 If the WHERE construct is nested inside a FORALL, the overall temporary
 2732 needed is the size required by the WHERE mask expression multiplied by
 2733 the iteration count of the nested FORALL.
2734 ME is the WHERE mask expression.
2735 MASK is the current execution mask upon input, whose sense may or may
2736 not be inverted as specified by the INVERT argument.
2737 CMASK is the updated execution mask on output, or NULL if not required.
2738 PMASK is the pending execution mask on output, or NULL if not required.
2739 BLOCK is the block in which to place the condition evaluation loops. */
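/* Schematically (illustrative), for each element this evaluates ME once and
   stores

     cmask[i] = mask[i] .AND. me[i]
     pmask[i] = mask[i] .AND. .NOT. me[i]

   where mask[i] is taken as .TRUE. when there is no incoming mask, and is
   negated first when INVERT is set.  */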
2741 static void
2742 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2743 tree mask, bool invert, tree cmask, tree pmask,
2744 tree mask_type, stmtblock_t * block)
2746 tree tmp, tmp1;
2747 gfc_ss *lss, *rss;
2748 gfc_loopinfo loop;
2749 stmtblock_t body, body1;
2750 tree count, cond, mtmp;
2751 gfc_se lse, rse;
2753 gfc_init_loopinfo (&loop);
2755 lss = gfc_walk_expr (me);
2756 rss = gfc_walk_expr (me);
2758 /* Variable to index the temporary. */
2759 count = gfc_create_var (gfc_array_index_type, "count");
2760 /* Initialize count. */
2761 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2763 gfc_start_block (&body);
2765 gfc_init_se (&rse, NULL);
2766 gfc_init_se (&lse, NULL);
2768 if (lss == gfc_ss_terminator)
2770 gfc_init_block (&body1);
2772 else
2774 /* Initialize the loop. */
2775 gfc_init_loopinfo (&loop);
2777 /* We may need LSS to determine the shape of the expression. */
2778 gfc_add_ss_to_loop (&loop, lss);
2779 gfc_add_ss_to_loop (&loop, rss);
2781 gfc_conv_ss_startstride (&loop);
2782 gfc_conv_loop_setup (&loop);
2784 gfc_mark_ss_chain_used (rss, 1);
2785 /* Start the loop body. */
2786 gfc_start_scalarized_body (&loop, &body1);
2788 /* Translate the expression. */
2789 gfc_copy_loopinfo_to_se (&rse, &loop);
2790 rse.ss = rss;
2791 gfc_conv_expr (&rse, me);
2794 /* Variable to evaluate mask condition. */
2795 cond = gfc_create_var (mask_type, "cond");
2796 if (mask && (cmask || pmask))
2797 mtmp = gfc_create_var (mask_type, "mask");
2798 else mtmp = NULL_TREE;
2800 gfc_add_block_to_block (&body1, &lse.pre);
2801 gfc_add_block_to_block (&body1, &rse.pre);
2803 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2805 if (mask && (cmask || pmask))
2807 tmp = gfc_build_array_ref (mask, count);
2808 if (invert)
2809 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2810 gfc_add_modify_expr (&body1, mtmp, tmp);
2813 if (cmask)
2815 tmp1 = gfc_build_array_ref (cmask, count);
2816 tmp = cond;
2817 if (mask)
2818 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2819 gfc_add_modify_expr (&body1, tmp1, tmp);
2822 if (pmask)
2824 tmp1 = gfc_build_array_ref (pmask, count);
2825 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2826 if (mask)
2827 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2828 gfc_add_modify_expr (&body1, tmp1, tmp);
2831 gfc_add_block_to_block (&body1, &lse.post);
2832 gfc_add_block_to_block (&body1, &rse.post);
2834 if (lss == gfc_ss_terminator)
2836 gfc_add_block_to_block (&body, &body1);
2838 else
2840 /* Increment count. */
2841 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2842 gfc_index_one_node);
2843 gfc_add_modify_expr (&body1, count, tmp1);
2845 /* Generate the copying loops. */
2846 gfc_trans_scalarizing_loops (&loop, &body1);
2848 gfc_add_block_to_block (&body, &loop.pre);
2849 gfc_add_block_to_block (&body, &loop.post);
2851 gfc_cleanup_loop (&loop);
2852 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
 2853 as tree nodes in SS may not be valid in a different scope. */
2856 tmp1 = gfc_finish_block (&body);
2857 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2858 if (nested_forall_info != NULL)
2859 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1, 1);
2861 gfc_add_expr_to_block (block, tmp1);
 2865 /* Translate an assignment statement in a WHERE statement or WHERE
 2866 construct. The MASK expression is used to control which elements
2867 of EXPR1 shall be assigned. The sense of MASK is specified by
2868 INVERT. */
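/* Roughly (illustrative), each scalarized iteration generated below is

     if (mask[count1]) expr1<i> = expr2<i>
     count1 = count1 + 1

   with the mask value negated first when INVERT is true, and count2 used
   instead of count1 when the copy goes through a temporary.  */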
2870 static tree
2871 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2872 tree mask, bool invert,
2873 tree count1, tree count2)
2875 gfc_se lse;
2876 gfc_se rse;
2877 gfc_ss *lss;
2878 gfc_ss *lss_section;
2879 gfc_ss *rss;
2881 gfc_loopinfo loop;
2882 tree tmp;
2883 stmtblock_t block;
2884 stmtblock_t body;
2885 tree index, maskexpr;
2887 #if 0
2888 /* TODO: handle this special case.
2889 Special case a single function returning an array. */
2890 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2892 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2893 if (tmp)
2894 return tmp;
2896 #endif
2898 /* Assignment of the form lhs = rhs. */
2899 gfc_start_block (&block);
2901 gfc_init_se (&lse, NULL);
2902 gfc_init_se (&rse, NULL);
2904 /* Walk the lhs. */
2905 lss = gfc_walk_expr (expr1);
2906 rss = NULL;
2908 /* In each where-assign-stmt, the mask-expr and the variable being
2909 defined shall be arrays of the same shape. */
2910 gcc_assert (lss != gfc_ss_terminator);
2912 /* The assignment needs scalarization. */
2913 lss_section = lss;
2915 /* Find a non-scalar SS from the lhs. */
2916 while (lss_section != gfc_ss_terminator
2917 && lss_section->type != GFC_SS_SECTION)
2918 lss_section = lss_section->next;
2920 gcc_assert (lss_section != gfc_ss_terminator);
2922 /* Initialize the scalarizer. */
2923 gfc_init_loopinfo (&loop);
2925 /* Walk the rhs. */
2926 rss = gfc_walk_expr (expr2);
2927 if (rss == gfc_ss_terminator)
2929 /* The rhs is scalar. Add a ss for the expression. */
2930 rss = gfc_get_ss ();
2931 rss->next = gfc_ss_terminator;
2932 rss->type = GFC_SS_SCALAR;
2933 rss->expr = expr2;
2936 /* Associate the SS with the loop. */
2937 gfc_add_ss_to_loop (&loop, lss);
2938 gfc_add_ss_to_loop (&loop, rss);
2940 /* Calculate the bounds of the scalarization. */
2941 gfc_conv_ss_startstride (&loop);
2943 /* Resolve any data dependencies in the statement. */
2944 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2946 /* Setup the scalarizing loops. */
2947 gfc_conv_loop_setup (&loop);
2949 /* Setup the gfc_se structures. */
2950 gfc_copy_loopinfo_to_se (&lse, &loop);
2951 gfc_copy_loopinfo_to_se (&rse, &loop);
2953 rse.ss = rss;
2954 gfc_mark_ss_chain_used (rss, 1);
2955 if (loop.temp_ss == NULL)
2957 lse.ss = lss;
2958 gfc_mark_ss_chain_used (lss, 1);
2960 else
2962 lse.ss = loop.temp_ss;
2963 gfc_mark_ss_chain_used (lss, 3);
2964 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2967 /* Start the scalarized loop body. */
2968 gfc_start_scalarized_body (&loop, &body);
2970 /* Translate the expression. */
2971 gfc_conv_expr (&rse, expr2);
2972 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2974 gfc_conv_tmp_array_ref (&lse);
2975 gfc_advance_se_ss_chain (&lse);
2977 else
2978 gfc_conv_expr (&lse, expr1);
2980 /* Form the mask expression according to the mask. */
2981 index = count1;
2982 maskexpr = gfc_build_array_ref (mask, index);
2983 if (invert)
2984 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2986 /* Use the scalar assignment as is. */
2987 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
2988 loop.temp_ss != NULL, false);
2989 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2991 gfc_add_expr_to_block (&body, tmp);
2993 if (lss == gfc_ss_terminator)
2995 /* Increment count1. */
2996 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2997 count1, gfc_index_one_node);
2998 gfc_add_modify_expr (&body, count1, tmp);
3000 /* Use the scalar assignment as is. */
3001 gfc_add_block_to_block (&block, &body);
3003 else
3005 gcc_assert (lse.ss == gfc_ss_terminator
3006 && rse.ss == gfc_ss_terminator);
3008 if (loop.temp_ss != NULL)
 3010 /* Increment count1 before finishing the main body of a scalarized
3011 expression. */
3012 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3013 count1, gfc_index_one_node);
3014 gfc_add_modify_expr (&body, count1, tmp);
3015 gfc_trans_scalarized_loop_boundary (&loop, &body);
3017 /* We need to copy the temporary to the actual lhs. */
3018 gfc_init_se (&lse, NULL);
3019 gfc_init_se (&rse, NULL);
3020 gfc_copy_loopinfo_to_se (&lse, &loop);
3021 gfc_copy_loopinfo_to_se (&rse, &loop);
3023 rse.ss = loop.temp_ss;
3024 lse.ss = lss;
3026 gfc_conv_tmp_array_ref (&rse);
3027 gfc_advance_se_ss_chain (&rse);
3028 gfc_conv_expr (&lse, expr1);
3030 gcc_assert (lse.ss == gfc_ss_terminator
3031 && rse.ss == gfc_ss_terminator);
3033 /* Form the mask expression according to the mask tree list. */
3034 index = count2;
3035 maskexpr = gfc_build_array_ref (mask, index);
3036 if (invert)
3037 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3038 maskexpr);
3040 /* Use the scalar assignment as is. */
3041 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3042 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3043 gfc_add_expr_to_block (&body, tmp);
3045 /* Increment count2. */
3046 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3047 count2, gfc_index_one_node);
3048 gfc_add_modify_expr (&body, count2, tmp);
3050 else
3052 /* Increment count1. */
3053 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3054 count1, gfc_index_one_node);
3055 gfc_add_modify_expr (&body, count1, tmp);
3058 /* Generate the copying loops. */
3059 gfc_trans_scalarizing_loops (&loop, &body);
3061 /* Wrap the whole thing up. */
3062 gfc_add_block_to_block (&block, &loop.pre);
3063 gfc_add_block_to_block (&block, &loop.post);
3064 gfc_cleanup_loop (&loop);
3067 return gfc_finish_block (&block);
3071 /* Translate the WHERE construct or statement.
 3072 This function can be called recursively to translate nested WHERE
 3073 constructs or statements.
3074 MASK is the control mask. */
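/* For example (illustrative), a simple construct such as

     where (a > 0)
       b = 1
     elsewhere
       b = 2
     end where

   is handled by evaluating the control mask (a > 0) once into cmask,
   assigning b = 1 under cmask, and then assigning b = 2 under the inverted
   sense of the same cmask.  */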
3076 static void
3077 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3078 forall_info * nested_forall_info, stmtblock_t * block)
3080 stmtblock_t inner_size_body;
3081 tree inner_size, size;
3082 gfc_ss *lss, *rss;
3083 tree mask_type;
3084 gfc_expr *expr1;
3085 gfc_expr *expr2;
3086 gfc_code *cblock;
3087 gfc_code *cnext;
3088 tree tmp;
3089 tree count1, count2;
3090 bool need_cmask;
3091 bool need_pmask;
3092 int need_temp;
3093 tree pcmask = NULL_TREE;
3094 tree ppmask = NULL_TREE;
3095 tree cmask = NULL_TREE;
3096 tree pmask = NULL_TREE;
 3098 /* The WHERE statement or the WHERE construct statement. */
3099 cblock = code->block;
3101 /* As the mask array can be very big, prefer compact boolean types. */
3102 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3104 /* Determine which temporary masks are needed. */
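/* Illustrative summary of the cases below: a single-clause WHERE needs at
   most the control mask; constructs with ELSEWHERE clauses generally need
   the control mask as well, and the pending mask is only needed when later
   clauses must still see which elements remain unassigned (conditional
   ELSEWHEREs, or an ELSEWHERE nested inside an outer WHERE).  */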
3105 if (!cblock->block)
3107 /* One clause: No ELSEWHEREs. */
3108 need_cmask = (cblock->next != 0);
3109 need_pmask = false;
3111 else if (cblock->block->block)
3113 /* Three or more clauses: Conditional ELSEWHEREs. */
3114 need_cmask = true;
3115 need_pmask = true;
3117 else if (cblock->next)
3119 /* Two clauses, the first non-empty. */
3120 need_cmask = true;
3121 need_pmask = (mask != NULL_TREE
3122 && cblock->block->next != 0);
3124 else if (!cblock->block->next)
3126 /* Two clauses, both empty. */
3127 need_cmask = false;
3128 need_pmask = false;
3130 /* Two clauses, the first empty, the second non-empty. */
3131 else if (mask)
3133 need_cmask = (cblock->block->expr != 0);
3134 need_pmask = true;
3136 else
3138 need_cmask = true;
3139 need_pmask = false;
3142 if (need_cmask || need_pmask)
3144 /* Calculate the size of temporary needed by the mask-expr. */
3145 gfc_init_block (&inner_size_body);
3146 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3147 &inner_size_body, &lss, &rss);
3149 /* Calculate the total size of temporary needed. */
3150 size = compute_overall_iter_number (nested_forall_info, inner_size,
3151 &inner_size_body, block);
3153 /* Allocate temporary for WHERE mask if needed. */
3154 if (need_cmask)
3155 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3156 &pcmask);
3158 /* Allocate temporary for !mask if needed. */
3159 if (need_pmask)
3160 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3161 &ppmask);
3164 while (cblock)
3166 /* Each time around this loop, the where clause is conditional
3167 on the value of mask and invert, which are updated at the
3168 bottom of the loop. */
3170 /* Has mask-expr. */
3171 if (cblock->expr)
3173 /* Ensure that the WHERE mask will be evaluated exactly once.
3174 If there are no statements in this WHERE/ELSEWHERE clause,
3175 then we don't need to update the control mask (cmask).
3176 If this is the last clause of the WHERE construct, then
3177 we don't need to update the pending control mask (pmask). */
3178 if (mask)
3179 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3180 mask, invert,
3181 cblock->next ? cmask : NULL_TREE,
3182 cblock->block ? pmask : NULL_TREE,
3183 mask_type, block);
3184 else
3185 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3186 NULL_TREE, false,
3187 (cblock->next || cblock->block)
3188 ? cmask : NULL_TREE,
3189 NULL_TREE, mask_type, block);
3191 invert = false;
3193 /* It's a final elsewhere-stmt. No mask-expr is present. */
3194 else
3195 cmask = mask;
 3197 /* The body of this WHERE clause is controlled by cmask, with its
 3198 sense specified by invert. */
3200 /* Get the assignment statement of a WHERE statement, or the first
3201 statement in where-body-construct of a WHERE construct. */
3202 cnext = cblock->next;
3203 while (cnext)
3205 switch (cnext->op)
3207 /* WHERE assignment statement. */
3208 case EXEC_ASSIGN:
3209 expr1 = cnext->expr;
3210 expr2 = cnext->expr2;
3211 if (nested_forall_info != NULL)
3213 need_temp = gfc_check_dependency (expr1, expr2, 0);
3214 if (need_temp)
3215 gfc_trans_assign_need_temp (expr1, expr2,
3216 cmask, invert,
3217 nested_forall_info, block);
3218 else
3220 /* Variables to control maskexpr. */
3221 count1 = gfc_create_var (gfc_array_index_type, "count1");
3222 count2 = gfc_create_var (gfc_array_index_type, "count2");
3223 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3224 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3226 tmp = gfc_trans_where_assign (expr1, expr2,
3227 cmask, invert,
3228 count1, count2);
3230 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3231 tmp, 1, 1);
3232 gfc_add_expr_to_block (block, tmp);
3235 else
3237 /* Variables to control maskexpr. */
3238 count1 = gfc_create_var (gfc_array_index_type, "count1");
3239 count2 = gfc_create_var (gfc_array_index_type, "count2");
3240 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3241 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3243 tmp = gfc_trans_where_assign (expr1, expr2,
3244 cmask, invert,
3245 count1, count2);
3246 gfc_add_expr_to_block (block, tmp);
3249 break;
3251 /* WHERE or WHERE construct is part of a where-body-construct. */
3252 case EXEC_WHERE:
3253 gfc_trans_where_2 (cnext, cmask, invert,
3254 nested_forall_info, block);
3255 break;
3257 default:
3258 gcc_unreachable ();
3261 /* The next statement within the same where-body-construct. */
3262 cnext = cnext->next;
3264 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3265 cblock = cblock->block;
3266 if (mask == NULL_TREE)
3268 /* If we're the initial WHERE, we can simply invert the sense
3269 of the current mask to obtain the "mask" for the remaining
3270 ELSEWHEREs. */
3271 invert = true;
3272 mask = cmask;
3274 else
3276 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3277 invert = false;
3278 mask = pmask;
3282 /* If we allocated a pending mask array, deallocate it now. */
3283 if (ppmask)
3285 tree args = gfc_chainon_list (NULL_TREE, ppmask);
3286 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3287 gfc_add_expr_to_block (block, tmp);
3290 /* If we allocated a current mask array, deallocate it now. */
3291 if (pcmask)
3293 tree args = gfc_chainon_list (NULL_TREE, pcmask);
3294 tmp = build_function_call_expr (gfor_fndecl_internal_free, args);
3295 gfc_add_expr_to_block (block, tmp);
3299 /* Translate a simple WHERE construct or statement without dependencies.
3300 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3301 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3302 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
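/* For instance (illustrative), "WHERE (c) x = y ELSEWHERE x = z END WHERE"
   with no dependencies becomes a single scalarized loop whose body is
   essentially x(i) = c(i) ? y(i) : z(i).  */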
3304 static tree
3305 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3307 stmtblock_t block, body;
3308 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3309 tree tmp, cexpr, tstmt, estmt;
3310 gfc_ss *css, *tdss, *tsss;
3311 gfc_se cse, tdse, tsse, edse, esse;
3312 gfc_loopinfo loop;
3313 gfc_ss *edss = 0;
3314 gfc_ss *esss = 0;
3316 cond = cblock->expr;
3317 tdst = cblock->next->expr;
3318 tsrc = cblock->next->expr2;
3319 edst = eblock ? eblock->next->expr : NULL;
3320 esrc = eblock ? eblock->next->expr2 : NULL;
3322 gfc_start_block (&block);
3323 gfc_init_loopinfo (&loop);
3325 /* Handle the condition. */
3326 gfc_init_se (&cse, NULL);
3327 css = gfc_walk_expr (cond);
3328 gfc_add_ss_to_loop (&loop, css);
3330 /* Handle the then-clause. */
3331 gfc_init_se (&tdse, NULL);
3332 gfc_init_se (&tsse, NULL);
3333 tdss = gfc_walk_expr (tdst);
3334 tsss = gfc_walk_expr (tsrc);
3335 if (tsss == gfc_ss_terminator)
3337 tsss = gfc_get_ss ();
3338 tsss->next = gfc_ss_terminator;
3339 tsss->type = GFC_SS_SCALAR;
3340 tsss->expr = tsrc;
3342 gfc_add_ss_to_loop (&loop, tdss);
3343 gfc_add_ss_to_loop (&loop, tsss);
3345 if (eblock)
3347 /* Handle the else clause. */
3348 gfc_init_se (&edse, NULL);
3349 gfc_init_se (&esse, NULL);
3350 edss = gfc_walk_expr (edst);
3351 esss = gfc_walk_expr (esrc);
3352 if (esss == gfc_ss_terminator)
3354 esss = gfc_get_ss ();
3355 esss->next = gfc_ss_terminator;
3356 esss->type = GFC_SS_SCALAR;
3357 esss->expr = esrc;
3359 gfc_add_ss_to_loop (&loop, edss);
3360 gfc_add_ss_to_loop (&loop, esss);
3363 gfc_conv_ss_startstride (&loop);
3364 gfc_conv_loop_setup (&loop);
3366 gfc_mark_ss_chain_used (css, 1);
3367 gfc_mark_ss_chain_used (tdss, 1);
3368 gfc_mark_ss_chain_used (tsss, 1);
3369 if (eblock)
3371 gfc_mark_ss_chain_used (edss, 1);
3372 gfc_mark_ss_chain_used (esss, 1);
3375 gfc_start_scalarized_body (&loop, &body);
3377 gfc_copy_loopinfo_to_se (&cse, &loop);
3378 gfc_copy_loopinfo_to_se (&tdse, &loop);
3379 gfc_copy_loopinfo_to_se (&tsse, &loop);
3380 cse.ss = css;
3381 tdse.ss = tdss;
3382 tsse.ss = tsss;
3383 if (eblock)
3385 gfc_copy_loopinfo_to_se (&edse, &loop);
3386 gfc_copy_loopinfo_to_se (&esse, &loop);
3387 edse.ss = edss;
3388 esse.ss = esss;
3391 gfc_conv_expr (&cse, cond);
3392 gfc_add_block_to_block (&body, &cse.pre);
3393 cexpr = cse.expr;
3395 gfc_conv_expr (&tsse, tsrc);
3396 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3398 gfc_conv_tmp_array_ref (&tdse);
3399 gfc_advance_se_ss_chain (&tdse);
3401 else
3402 gfc_conv_expr (&tdse, tdst);
3404 if (eblock)
3406 gfc_conv_expr (&esse, esrc);
3407 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3409 gfc_conv_tmp_array_ref (&edse);
3410 gfc_advance_se_ss_chain (&edse);
3412 else
3413 gfc_conv_expr (&edse, edst);
3416 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3417 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3418 : build_empty_stmt ();
3419 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3420 gfc_add_expr_to_block (&body, tmp);
3421 gfc_add_block_to_block (&body, &cse.post);
3423 gfc_trans_scalarizing_loops (&loop, &body);
3424 gfc_add_block_to_block (&block, &loop.pre);
3425 gfc_add_block_to_block (&block, &loop.post);
3426 gfc_cleanup_loop (&loop);
3428 return gfc_finish_block (&block);
3431 /* As the WHERE or WHERE construct statement can be nested, we call
3432 gfc_trans_where_2 to do the translation, and pass the initial
3433 NULL values for both the control mask and the pending control mask. */
3435 tree
3436 gfc_trans_where (gfc_code * code)
3438 stmtblock_t block;
3439 gfc_code *cblock;
3440 gfc_code *eblock;
3442 cblock = code->block;
3443 if (cblock->next
3444 && cblock->next->op == EXEC_ASSIGN
3445 && !cblock->next->next)
3447 eblock = cblock->block;
3448 if (!eblock)
3450 /* A simple "WHERE (cond) x = y" statement or block is
3451 dependence free if cond is not dependent upon writing x,
3452 and the source y is unaffected by the destination x. */
3453 if (!gfc_check_dependency (cblock->next->expr,
3454 cblock->expr, 0)
3455 && !gfc_check_dependency (cblock->next->expr,
3456 cblock->next->expr2, 0))
3457 return gfc_trans_where_3 (cblock, NULL);
3459 else if (!eblock->expr
3460 && !eblock->block
3461 && eblock->next
3462 && eblock->next->op == EXEC_ASSIGN
3463 && !eblock->next->next)
3465 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3466 block is dependence free if cond is not dependent on writes
3467 to x1 and x2, y1 is not dependent on writes to x2, and y2
 3468 is not dependent on writes to x1, and neither y is
 3469 dependent upon its own x. */
3470 if (!gfc_check_dependency(cblock->next->expr,
3471 cblock->expr, 0)
3472 && !gfc_check_dependency(eblock->next->expr,
3473 cblock->expr, 0)
3474 && !gfc_check_dependency(cblock->next->expr,
3475 eblock->next->expr2, 0)
3476 && !gfc_check_dependency(eblock->next->expr,
3477 cblock->next->expr2, 0)
3478 && !gfc_check_dependency(cblock->next->expr,
3479 cblock->next->expr2, 0)
3480 && !gfc_check_dependency(eblock->next->expr,
3481 eblock->next->expr2, 0))
3482 return gfc_trans_where_3 (cblock, eblock);
3486 gfc_start_block (&block);
3488 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3490 return gfc_finish_block (&block);
3494 /* CYCLE a DO loop. The label decl has already been created by
 3495 gfc_trans_do(); it's in TREE_PURPOSE (backend_decl) of the gfc_code
3496 node at the head of the loop. We must mark the label as used. */
3498 tree
3499 gfc_trans_cycle (gfc_code * code)
3501 tree cycle_label;
3503 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3504 TREE_USED (cycle_label) = 1;
3505 return build1_v (GOTO_EXPR, cycle_label);
3509 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3510 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3511 loop. */
3513 tree
3514 gfc_trans_exit (gfc_code * code)
3516 tree exit_label;
3518 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3519 TREE_USED (exit_label) = 1;
3520 return build1_v (GOTO_EXPR, exit_label);
3524 /* Translate the ALLOCATE statement. */
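/* Illustrative sketch of the non-array case handled below (array objects go
   through gfc_array_allocate instead):

     object = gfor_fndecl_allocate (object_size, pstat)
     if (stat != 0) goto error_label      <-- only when STAT= is present

   and allocatable components of derived types are nullified afterwards.  */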
3526 tree
3527 gfc_trans_allocate (gfc_code * code)
3529 gfc_alloc *al;
3530 gfc_expr *expr;
3531 gfc_se se;
3532 tree tmp;
3533 tree parm;
3534 tree stat;
3535 tree pstat;
3536 tree error_label;
3537 stmtblock_t block;
3539 if (!code->ext.alloc_list)
3540 return NULL_TREE;
3542 gfc_start_block (&block);
3544 if (code->expr)
3546 tree gfc_int4_type_node = gfc_get_int_type (4);
3548 stat = gfc_create_var (gfc_int4_type_node, "stat");
3549 pstat = build_fold_addr_expr (stat);
3551 error_label = gfc_build_label_decl (NULL_TREE);
3552 TREE_USED (error_label) = 1;
3554 else
3556 pstat = integer_zero_node;
3557 stat = error_label = NULL_TREE;
3561 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3563 expr = al->expr;
3565 gfc_init_se (&se, NULL);
3566 gfc_start_block (&se.pre);
3568 se.want_pointer = 1;
3569 se.descriptor_only = 1;
3570 gfc_conv_expr (&se, expr);
3572 if (!gfc_array_allocate (&se, expr, pstat))
3574 /* A scalar or derived type. */
3575 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3577 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3578 tmp = se.string_length;
3580 parm = gfc_chainon_list (NULL_TREE, tmp);
3581 parm = gfc_chainon_list (parm, pstat);
3582 tmp = build_function_call_expr (gfor_fndecl_allocate, parm);
3583 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr, tmp);
3584 gfc_add_expr_to_block (&se.pre, tmp);
3586 if (code->expr)
3588 tmp = build1_v (GOTO_EXPR, error_label);
3589 parm = fold_build2 (NE_EXPR, boolean_type_node,
3590 stat, build_int_cst (TREE_TYPE (stat), 0));
3591 tmp = fold_build3 (COND_EXPR, void_type_node,
3592 parm, tmp, build_empty_stmt ());
3593 gfc_add_expr_to_block (&se.pre, tmp);
3596 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3598 tmp = build_fold_indirect_ref (se.expr);
3599 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3600 gfc_add_expr_to_block (&se.pre, tmp);
3605 tmp = gfc_finish_block (&se.pre);
3606 gfc_add_expr_to_block (&block, tmp);
3609 /* Assign the value to the status variable. */
3610 if (code->expr)
3612 tmp = build1_v (LABEL_EXPR, error_label);
3613 gfc_add_expr_to_block (&block, tmp);
3615 gfc_init_se (&se, NULL);
3616 gfc_conv_expr_lhs (&se, code->expr);
3617 tmp = convert (TREE_TYPE (se.expr), stat);
3618 gfc_add_modify_expr (&block, se.expr, tmp);
3621 return gfc_finish_block (&block);
3625 /* Translate a DEALLOCATE statement.
3626 There are two cases within the for loop:
3627 (1) deallocate(a1, a2, a3) is translated into the following sequence
3628 _gfortran_deallocate(a1, 0B)
3629 _gfortran_deallocate(a2, 0B)
3630 _gfortran_deallocate(a3, 0B)
3631 where the STAT= variable is passed a NULL pointer.
3632 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3633 astat = 0
3634 _gfortran_deallocate(a1, &stat)
3635 astat = astat + stat
3636 _gfortran_deallocate(a2, &stat)
3637 astat = astat + stat
3638 _gfortran_deallocate(a3, &stat)
3639 astat = astat + stat
3640 In case (1), we simply return at the end of the for loop. In case (2)
 3641 we set the STAT= variable to astat. */
3642 tree
3643 gfc_trans_deallocate (gfc_code * code)
3645 gfc_se se;
3646 gfc_alloc *al;
3647 gfc_expr *expr;
3648 tree apstat, astat, parm, pstat, stat, tmp;
3649 stmtblock_t block;
3651 gfc_start_block (&block);
 3653 /* Set up the optional STAT= variable. */
3654 if (code->expr)
3656 tree gfc_int4_type_node = gfc_get_int_type (4);
3658 /* Variable used with the library call. */
3659 stat = gfc_create_var (gfc_int4_type_node, "stat");
3660 pstat = build_fold_addr_expr (stat);
3662 /* Running total of possible deallocation failures. */
3663 astat = gfc_create_var (gfc_int4_type_node, "astat");
3664 apstat = build_fold_addr_expr (astat);
3666 /* Initialize astat to 0. */
3667 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3669 else
3671 pstat = apstat = null_pointer_node;
3672 stat = astat = NULL_TREE;
3675 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3677 expr = al->expr;
3678 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3680 gfc_init_se (&se, NULL);
3681 gfc_start_block (&se.pre);
3683 se.want_pointer = 1;
3684 se.descriptor_only = 1;
3685 gfc_conv_expr (&se, expr);
3687 if (expr->ts.type == BT_DERIVED
3688 && expr->ts.derived->attr.alloc_comp)
3690 gfc_ref *ref;
3691 gfc_ref *last = NULL;
3692 for (ref = expr->ref; ref; ref = ref->next)
3693 if (ref->type == REF_COMPONENT)
3694 last = ref;
3696 /* Do not deallocate the components of a derived type
3697 ultimate pointer component. */
3698 if (!(last && last->u.c.component->pointer)
3699 && !(!last && expr->symtree->n.sym->attr.pointer))
3701 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3702 expr->rank);
3703 gfc_add_expr_to_block (&se.pre, tmp);
3707 if (expr->rank)
3708 tmp = gfc_array_deallocate (se.expr, pstat);
3709 else
3711 parm = gfc_chainon_list (NULL_TREE, se.expr);
3712 parm = gfc_chainon_list (parm, pstat);
3713 tmp = build_function_call_expr (gfor_fndecl_deallocate, parm);
3714 gfc_add_expr_to_block (&se.pre, tmp);
3716 tmp = build2 (MODIFY_EXPR, void_type_node,
3717 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3720 gfc_add_expr_to_block (&se.pre, tmp);
3722 /* Keep track of the number of failed deallocations by adding stat
3723 of the last deallocation to the running total. */
3724 if (code->expr)
3726 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3727 gfc_add_modify_expr (&se.pre, astat, apstat);
3730 tmp = gfc_finish_block (&se.pre);
3731 gfc_add_expr_to_block (&block, tmp);
3735 /* Assign the value to the status variable. */
3736 if (code->expr)
3738 gfc_init_se (&se, NULL);
3739 gfc_conv_expr_lhs (&se, code->expr);
3740 tmp = convert (TREE_TYPE (se.expr), astat);
3741 gfc_add_modify_expr (&block, se.expr, tmp);
3744 return gfc_finish_block (&block);