Remove outermost loop parameter.
[official-gcc/graphite-test-results.git] / gcc / fortran / trans-stmt.c
blob37b577f2cc4472c77dc955bc52437609c366cb3d
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "gfortran.h"
29 #include "flags.h"
30 #include "trans.h"
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
35 #include "arith.h"
36 #include "dependency.h"
/* One node in a linked list of FORALL iterators: the loop variable and
   its start/end/step expressions as GENERIC trees.  NOTE(review): the
   brace-only lines of this typedef were lost in extraction.  */
38 typedef struct iter_info
40 tree var;
41 tree start;
42 tree end;
43 tree step;
44 struct iter_info *next;
46 iter_info;
/* Per-nesting-level state for a FORALL construct: the iterator chain for
   this level, the (optional) mask array plus its index variable, the
   number of iteration variables, the mask size, and a link to the
   enclosing FORALL level.  NOTE(review): brace lines lost in extraction.  */
48 typedef struct forall_info
50 iter_info *this_loop;
51 tree mask;
52 tree maskindex;
53 int nvar;
54 tree size;
55 struct forall_info *prev_nest;
57 forall_info;
59 static void gfc_trans_where_2 (gfc_code *, tree, bool,
60 forall_info *, stmtblock_t *);
62 /* Translate a F95 label number to a LABEL_EXPR. */
64 tree
65 gfc_trans_label_here (gfc_code * code)
/* Emit the LABEL_EXPR for the statement label attached to CODE->here.  */
67 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
71 /* Given a variable expression which has been ASSIGNed to, find the decl
72 containing the auxiliary variables. For variables in common blocks this
73 is a field_decl. */
75 void
76 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
/* EXPR must be a variable with the ASSIGN attribute (asserted below);
   on return SE->expr is the decl that carries the auxiliary
   string-length/address fields used by assigned GOTO/FORMAT.  */
78 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
79 gfc_conv_expr (se, expr);
80 /* Deals with variable in common block. Get the field declaration. */
81 if (TREE_CODE (se->expr) == COMPONENT_REF)
82 se->expr = TREE_OPERAND (se->expr, 1);
83 /* Deals with dummy argument. Get the parameter declaration. */
84 else if (TREE_CODE (se->expr) == INDIRECT_REF)
85 se->expr = TREE_OPERAND (se->expr, 0);
88 /* Translate a label assignment statement. */
90 tree
91 gfc_trans_label_assign (gfc_code * code)
/* Stores either a branch-target address (with length -1 as a marker) or
   a FORMAT string (with its real length) into the auxiliary variables
   of the ASSIGN variable CODE->expr1.  Returns the generated block.  */
93 tree label_tree;
94 gfc_se se;
95 tree len;
96 tree addr;
97 tree len_tree;
98 int label_len;
100 /* Start a new block. */
101 gfc_init_se (&se, NULL);
102 gfc_start_block (&se.pre);
103 gfc_conv_label_variable (&se, code->expr1);
105 len = GFC_DECL_STRING_LEN (se.expr);
106 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
108 label_tree = gfc_get_label_decl (code->label1);
110 if (code->label1->defined == ST_LABEL_TARGET)
/* Branch target: length -1 distinguishes it from a FORMAT label.  */
112 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
113 len_tree = integer_minus_one_node;
115 else
/* FORMAT label: store the format string's address and length.  */
117 gfc_expr *format = code->label1->format;
119 label_len = format->value.character.length;
120 len_tree = build_int_cst (NULL_TREE, label_len);
121 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
122 format->value.character.string);
123 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
126 gfc_add_modify (&se.pre, len, len_tree);
127 gfc_add_modify (&se.pre, addr, label_tree);
129 return gfc_finish_block (&se.pre);
132 /* Translate a GOTO statement. */
134 tree
135 gfc_trans_goto (gfc_code * code)
/* Plain GOTO (code->label1 set) becomes a direct GOTO_EXPR; an assigned
   GOTO first runtime-checks that the stored length is -1 (i.e. the
   variable holds a target label, not a FORMAT) and then jumps through
   the stored address.  */
137 locus loc = code->loc;
138 tree assigned_goto;
139 tree target;
140 tree tmp;
141 gfc_se se;
143 if (code->label1 != NULL)
144 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
146 /* ASSIGNED GOTO. */
147 gfc_init_se (&se, NULL);
148 gfc_start_block (&se.pre);
149 gfc_conv_label_variable (&se, code->expr1);
150 tmp = GFC_DECL_STRING_LEN (se.expr);
151 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
152 build_int_cst (TREE_TYPE (tmp), -1));
153 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
154 "Assigned label is not a target label");
156 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
158 /* We're going to ignore a label list. It does not really change the
159 statement's semantics (because it is just a further restriction on
160 what's legal code); before, we were comparing label addresses here, but
161 that's a very fragile business and may break with optimization. So
162 just ignore it. */
164 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
165 gfc_add_expr_to_block (&se.pre, target);
166 return gfc_finish_block (&se.pre);
170 /* Translate an ENTRY statement. Just adds a label for this entry point. */
171 tree
172 gfc_trans_entry (gfc_code * code)
/* The label decl was created earlier; here we only emit the LABEL_EXPR.  */
174 return build1_v (LABEL_EXPR, code->ext.entry->label);
178 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
179 elemental subroutines. Make temporaries for output arguments if any such
180 dependencies are found. Output arguments are chosen because internal_unpack
181 can be used, as is, to copy the result back to the variable. */
182 static void
183 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
184 gfc_symbol * sym, gfc_actual_arglist * arg,
185 gfc_dep_check check_variable)
/* SE collects the setup (pre) and copy-back (post) code; LOOPSE carries
   the scalarization chain for the call; SYM is the called elemental
   subroutine; ARG its actual arguments; CHECK_VARIABLE selects the
   dependency-checking mode.  */
187 gfc_actual_arglist *arg0;
188 gfc_expr *e;
189 gfc_formal_arglist *formal;
190 gfc_loopinfo tmp_loop;
191 gfc_se parmse;
192 gfc_ss *ss;
193 gfc_ss_info *info;
194 gfc_symbol *fsym;
195 gfc_ref *ref;
196 int n;
197 tree data;
198 tree offset;
199 tree size;
200 tree tmp;
202 if (loopse->ss == NULL)
203 return;
205 ss = loopse->ss;
206 arg0 = arg;
207 formal = sym->formal;
209 /* Loop over all the arguments testing for dependencies. */
210 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
212 e = arg->expr;
213 if (e == NULL)
214 continue;
216 /* Obtain the info structure for the current argument. */
217 info = NULL;
218 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
220 if (ss->expr != e)
221 continue;
222 info = &ss->data.info;
223 break;
226 /* If there is a dependency, create a temporary and use it
227 instead of the variable. */
228 fsym = formal ? formal->sym : NULL;
229 if (e->expr_type == EXPR_VARIABLE
230 && e->rank && fsym
231 && fsym->attr.intent != INTENT_IN
232 && gfc_check_fncall_dependency (e, fsym->attr.intent,
233 sym, arg0, check_variable))
235 tree initial, temptype;
236 stmtblock_t temp_post;
238 /* Make a local loopinfo for the temporary creation, so that
239 none of the other ss->info's have to be renormalized. */
240 gfc_init_loopinfo (&tmp_loop);
241 for (n = 0; n < info->dimen; n++)
243 tmp_loop.to[n] = loopse->loop->to[n];
244 tmp_loop.from[n] = loopse->loop->from[n];
245 tmp_loop.order[n] = loopse->loop->order[n];
248 /* Obtain the argument descriptor for unpacking. */
249 gfc_init_se (&parmse, NULL);
250 parmse.want_pointer = 1;
252 /* The scalarizer introduces some specific peculiarities when
253 handling elemental subroutines; the stride can be needed up to
254 the dim_array - 1, rather than dim_loop - 1 to calculate
255 offsets outside the loop. For this reason, we make sure that
256 the descriptor has the dimensionality of the array by converting
257 trailing elements into ranges with end = start. */
258 for (ref = e->ref; ref; ref = ref->next)
259 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
260 break;
262 if (ref)
264 bool seen_range = false;
265 for (n = 0; n < ref->u.ar.dimen; n++)
267 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
268 seen_range = true;
270 if (!seen_range
271 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
272 continue;
/* Promote a trailing scalar subscript into a degenerate range
   (end = start) so the descriptor keeps full dimensionality.  */
274 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
275 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
279 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
280 gfc_add_block_to_block (&se->pre, &parmse.pre);
282 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
283 initialize the array temporary with a copy of the values. */
284 if (fsym->attr.intent == INTENT_INOUT
285 || (fsym->ts.type ==BT_DERIVED
286 && fsym->attr.intent == INTENT_OUT))
287 initial = parmse.expr;
288 else
289 initial = NULL_TREE;
291 /* Find the type of the temporary to create; we don't use the type
292 of e itself as this breaks for subcomponent-references in e (where
293 the type of e is that of the final reference, but parmse.expr's
294 type corresponds to the full derived-type). */
295 /* TODO: Fix this somehow so we don't need a temporary of the whole
296 array but instead only the components referenced. */
297 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
298 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
299 temptype = TREE_TYPE (temptype);
300 temptype = gfc_get_element_type (temptype);
302 /* Generate the temporary. Cleaning up the temporary should be the
303 very last thing done, so we add the code to a new block and add it
304 to se->post as last instructions. */
305 size = gfc_create_var (gfc_array_index_type, NULL);
306 data = gfc_create_var (pvoid_type_node, NULL);
307 gfc_init_block (&temp_post);
308 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
309 &tmp_loop, info, temptype,
310 initial,
311 false, true, false,
312 &arg->expr->where);
313 gfc_add_modify (&se->pre, size, tmp);
314 tmp = fold_convert (pvoid_type_node, info->data);
315 gfc_add_modify (&se->pre, data, tmp);
317 /* Calculate the offset for the temporary. */
318 offset = gfc_index_zero_node;
319 for (n = 0; n < info->dimen; n++)
/* offset -= from[n] * stride[n], accumulated over all dimensions.  */
321 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
322 gfc_rank_cst[n]);
323 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
324 loopse->loop->from[n], tmp);
325 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
326 offset, tmp);
328 info->offset = gfc_create_var (gfc_array_index_type, NULL);
329 gfc_add_modify (&se->pre, info->offset, offset);
331 /* Copy the result back using unpack. */
332 tmp = build_call_expr_loc (input_location,
333 gfor_fndecl_in_unpack, 2, parmse.expr, data);
334 gfc_add_expr_to_block (&se->post, tmp);
336 /* parmse.pre is already added above. */
337 gfc_add_block_to_block (&se->post, &parmse.post);
338 gfc_add_block_to_block (&se->post, &temp_post);
344 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
346 tree
347 gfc_trans_call (gfc_code * code, bool dependency_check,
348 tree mask, tree count1, bool invert)
/* DEPENDENCY_CHECK enables operator-assignment dependency analysis.
   MASK/COUNT1/INVERT are used when this call is generated inside a
   masked WHERE context: MASK is the mask array, COUNT1 its running
   index, INVERT negates the mask test.  Non-elemental calls (or
   elemental calls with only scalar arguments) are emitted directly;
   elemental calls with array arguments are scalarized.  */
350 gfc_se se;
351 gfc_ss * ss;
352 int has_alternate_specifier;
353 gfc_dep_check check_variable;
354 tree index = NULL_TREE;
355 tree maskexpr = NULL_TREE;
356 tree tmp;
358 /* A CALL starts a new block because the actual arguments may have to
359 be evaluated first. */
360 gfc_init_se (&se, NULL);
361 gfc_start_block (&se.pre);
363 gcc_assert (code->resolved_sym);
365 ss = gfc_ss_terminator;
366 if (code->resolved_sym->attr.elemental)
367 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
369 /* Is not an elemental subroutine call with array valued arguments. */
370 if (ss == gfc_ss_terminator)
373 /* Translate the call. */
374 has_alternate_specifier
375 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
376 code->expr1, NULL_TREE);
378 /* A subroutine without side-effect, by definition, does nothing! */
379 TREE_SIDE_EFFECTS (se.expr) = 1;
381 /* Chain the pieces together and return the block. */
382 if (has_alternate_specifier)
/* Alternate returns: store the specifier into the variable the
   following SELECT statement switches on.  */
384 gfc_code *select_code;
385 gfc_symbol *sym;
386 select_code = code->next;
387 gcc_assert(select_code->op == EXEC_SELECT);
388 sym = select_code->expr1->symtree->n.sym;
389 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
390 if (sym->backend_decl == NULL)
391 sym->backend_decl = gfc_get_symbol_decl (sym);
392 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
394 else
395 gfc_add_expr_to_block (&se.pre, se.expr);
397 gfc_add_block_to_block (&se.pre, &se.post);
400 else
402 /* An elemental subroutine call with array valued arguments has
403 to be scalarized. */
404 gfc_loopinfo loop;
405 stmtblock_t body;
406 stmtblock_t block;
407 gfc_se loopse;
408 gfc_se depse;
410 /* gfc_walk_elemental_function_args renders the ss chain in the
411 reverse order to the actual argument order. */
412 ss = gfc_reverse_ss (ss);
414 /* Initialize the loop. */
415 gfc_init_se (&loopse, NULL);
416 gfc_init_loopinfo (&loop);
417 gfc_add_ss_to_loop (&loop, ss);
419 gfc_conv_ss_startstride (&loop);
420 /* TODO: gfc_conv_loop_setup generates a temporary for vector
421 subscripts. This could be prevented in the elemental case
422 as temporaries are handled separatedly
423 (below in gfc_conv_elemental_dependencies). */
424 gfc_conv_loop_setup (&loop, &code->expr1->where);
425 gfc_mark_ss_chain_used (ss, 1);
427 /* Convert the arguments, checking for dependencies. */
428 gfc_copy_loopinfo_to_se (&loopse, &loop);
429 loopse.ss = ss;
431 /* For operator assignment, do dependency checking. */
432 if (dependency_check)
433 check_variable = ELEM_CHECK_VARIABLE;
434 else
435 check_variable = ELEM_DONT_CHECK_VARIABLE;
437 gfc_init_se (&depse, NULL);
438 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
439 code->ext.actual, check_variable);
441 gfc_add_block_to_block (&loop.pre, &depse.pre);
442 gfc_add_block_to_block (&loop.post, &depse.post);
444 /* Generate the loop body. */
445 gfc_start_scalarized_body (&loop, &body);
446 gfc_init_block (&block);
448 if (mask && count1)
450 /* Form the mask expression according to the mask. */
451 index = count1;
452 maskexpr = gfc_build_array_ref (mask, index, NULL);
453 if (invert)
454 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
455 maskexpr);
458 /* Add the subroutine call to the block. */
459 gfc_conv_procedure_call (&loopse, code->resolved_sym,
460 code->ext.actual, code->expr1,
461 NULL_TREE);
463 if (mask && count1)
/* Guard the call with the mask and advance the mask index.  */
465 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
466 build_empty_stmt (input_location));
467 gfc_add_expr_to_block (&loopse.pre, tmp);
468 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
469 count1, gfc_index_one_node);
470 gfc_add_modify (&loopse.pre, count1, tmp);
472 else
473 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
475 gfc_add_block_to_block (&block, &loopse.pre);
476 gfc_add_block_to_block (&block, &loopse.post);
478 /* Finish up the loop block and the loop. */
479 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
480 gfc_trans_scalarizing_loops (&loop, &body);
481 gfc_add_block_to_block (&se.pre, &loop.pre);
482 gfc_add_block_to_block (&se.pre, &loop.post);
483 gfc_add_block_to_block (&se.pre, &se.post);
484 gfc_cleanup_loop (&loop);
487 return gfc_finish_block (&se.pre);
491 /* Translate the RETURN statement. */
493 tree
494 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
/* NOTE(review): CODE is marked ATTRIBUTE_UNUSED although it is clearly
   used below; the attribute is harmless but misleading.
   With an expression (alternate return), assign the selector to the
   fake result variable before jumping to the return label; otherwise
   just jump to the return label.  */
496 if (code->expr1)
498 gfc_se se;
499 tree tmp;
500 tree result;
502 /* If code->expr is not NULL, this return statement must appear
503 in a subroutine and current_fake_result_decl has already
504 been generated. */
506 result = gfc_get_fake_result_decl (NULL, 0);
507 if (!result)
509 gfc_warning ("An alternate return at %L without a * dummy argument",
510 &code->expr1->where);
511 return build1_v (GOTO_EXPR, gfc_get_return_label ());
514 /* Start a new block for this statement. */
515 gfc_init_se (&se, NULL);
516 gfc_start_block (&se.pre);
518 gfc_conv_expr (&se, code->expr1);
520 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
521 fold_convert (TREE_TYPE (result), se.expr));
522 gfc_add_expr_to_block (&se.pre, tmp);
524 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
525 gfc_add_expr_to_block (&se.pre, tmp);
526 gfc_add_block_to_block (&se.pre, &se.post);
527 return gfc_finish_block (&se.pre);
529 else
530 return build1_v (GOTO_EXPR, gfc_get_return_label ());
534 /* Translate the PAUSE statement. We have to translate this statement
535 to a runtime library call. */
537 tree
538 gfc_trans_pause (gfc_code * code)
/* Three forms: bare PAUSE (null string + code 0), PAUSE <integer>
   (numeric entry point), and PAUSE <character> (string entry point).  */
540 tree gfc_int4_type_node = gfc_get_int_type (4);
541 gfc_se se;
542 tree tmp;
544 /* Start a new block for this statement. */
545 gfc_init_se (&se, NULL);
546 gfc_start_block (&se.pre);
549 if (code->expr1 == NULL)
551 tmp = build_int_cst (gfc_int4_type_node, 0);
552 tmp = build_call_expr_loc (input_location,
553 gfor_fndecl_pause_string, 2,
554 build_int_cst (pchar_type_node, 0), tmp);
556 else if (code->expr1->ts.type == BT_INTEGER)
558 gfc_conv_expr (&se, code->expr1);
559 tmp = build_call_expr_loc (input_location,
560 gfor_fndecl_pause_numeric, 1,
561 fold_convert (gfc_int4_type_node, se.expr));
563 else
565 gfc_conv_expr_reference (&se, code->expr1);
566 tmp = build_call_expr_loc (input_location,
567 gfor_fndecl_pause_string, 2,
568 se.expr, se.string_length);
571 gfc_add_expr_to_block (&se.pre, tmp);
573 gfc_add_block_to_block (&se.pre, &se.post);
575 return gfc_finish_block (&se.pre);
579 /* Translate the STOP statement. We have to translate this statement
580 to a runtime library call. */
582 tree
583 gfc_trans_stop (gfc_code *code, bool error_stop)
/* Mirrors gfc_trans_pause; ERROR_STOP selects the ERROR STOP runtime
   entry points instead of the plain STOP ones.  */
585 tree gfc_int4_type_node = gfc_get_int_type (4);
586 gfc_se se;
587 tree tmp;
589 /* Start a new block for this statement. */
590 gfc_init_se (&se, NULL);
591 gfc_start_block (&se.pre);
593 if (code->expr1 == NULL)
595 tmp = build_int_cst (gfc_int4_type_node, 0);
596 tmp = build_call_expr_loc (input_location,
597 error_stop ? gfor_fndecl_error_stop_string
598 : gfor_fndecl_stop_string,
599 2, build_int_cst (pchar_type_node, 0), tmp);
601 else if (code->expr1->ts.type == BT_INTEGER)
603 gfc_conv_expr (&se, code->expr1);
604 tmp = build_call_expr_loc (input_location,
605 error_stop ? gfor_fndecl_error_stop_numeric
606 : gfor_fndecl_stop_numeric, 1,
607 fold_convert (gfc_int4_type_node, se.expr));
609 else
611 gfc_conv_expr_reference (&se, code->expr1);
612 tmp = build_call_expr_loc (input_location,
613 error_stop ? gfor_fndecl_error_stop_string
614 : gfor_fndecl_stop_string,
615 2, se.expr, se.string_length);
618 gfc_add_expr_to_block (&se.pre, tmp);
620 gfc_add_block_to_block (&se.pre, &se.post);
622 return gfc_finish_block (&se.pre);
/* Translate SYNC ALL / SYNC IMAGES / SYNC MEMORY.  Only generates code
   when an image-index bounds check or a STAT= variable is present;
   otherwise returns NULL_TREE (no code needed).  NOTE(review): the
   guard condition is evaluated twice (entry and exit) — intentional,
   since the block is only initialized when it holds.  */
626 tree
627 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
629 gfc_se se;
631 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
633 gfc_init_se (&se, NULL);
634 gfc_start_block (&se.pre);
637 /* Check SYNC IMAGES(imageset) for valid image index.
638 FIXME: Add a check for image-set arrays. */
639 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
640 && code->expr1->rank == 0)
642 tree cond;
643 gfc_conv_expr (&se, code->expr1);
644 cond = fold_build2 (NE_EXPR, boolean_type_node, se.expr,
645 build_int_cst (TREE_TYPE (se.expr), 1));
646 gfc_trans_runtime_check (true, false, cond, &se.pre,
647 &code->expr1->where, "Invalid image number "
648 "%d in SYNC IMAGES",
649 fold_convert (integer_type_node, se.expr));
652 /* If STAT is present, set it to zero. */
653 if (code->expr2)
655 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
656 gfc_conv_expr (&se, code->expr2);
657 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
660 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
661 return gfc_finish_block (&se.pre);
663 return NULL_TREE;
667 /* Generate GENERIC for the IF construct. This function also deals with
668 the simple IF statement, because the front end translates the IF
669 statement into an IF construct.
671 We translate:
673 IF (cond) THEN
674 then_clause
675 ELSEIF (cond2)
676 elseif_clause
677 ELSE
678 else_clause
679 ENDIF
681 into:
683 pre_cond_s;
684 if (cond_s)
686 then_clause;
688 else
690 pre_cond_s
691 if (cond_s)
693 elseif_clause
695 else
697 else_clause;
701 where COND_S is the simplified version of the predicate. PRE_COND_S
702 are the pre side-effects produced by the translation of the
703 conditional.
704 We need to build the chain recursively otherwise we run into
705 problems with folding incomplete statements. */
707 static tree
708 gfc_trans_if_1 (gfc_code * code)
/* Recursive worker: CODE is one clause of the IF chain; code->block
   links to the next ELSEIF/ELSE clause.  */
710 gfc_se if_se;
711 tree stmt, elsestmt;
713 /* Check for an unconditional ELSE clause. */
714 if (!code->expr1)
715 return gfc_trans_code (code->next);
717 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
718 gfc_init_se (&if_se, NULL);
719 gfc_start_block (&if_se.pre);
721 /* Calculate the IF condition expression. */
722 gfc_conv_expr_val (&if_se, code->expr1);
724 /* Translate the THEN clause. */
725 stmt = gfc_trans_code (code->next);
727 /* Translate the ELSE clause. */
728 if (code->block)
729 elsestmt = gfc_trans_if_1 (code->block);
730 else
731 elsestmt = build_empty_stmt (input_location);
733 /* Build the condition expression and add it to the condition block. */
734 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
736 gfc_add_expr_to_block (&if_se.pre, stmt);
738 /* Finish off this statement. */
739 return gfc_finish_block (&if_se.pre);
/* Public entry point for translating an IF construct.  */
742 tree
743 gfc_trans_if (gfc_code * code)
745 /* Ignore the top EXEC_IF, it only announces an IF construct. The
746 actual code we must translate is in code->block. */
748 return gfc_trans_if_1 (code->block);
752 /* Translate an arithmetic IF expression.
754 IF (cond) label1, label2, label3 translates to
756 if (cond <= 0)
758 if (cond < 0)
759 goto label1;
760 else // cond == 0
761 goto label2;
763 else // cond > 0
764 goto label3;
766 An optimized version can be generated in case of equal labels.
767 E.g., if label1 is equal to label2, we can translate it to
769 if (cond <= 0)
770 goto label1;
771 else
772 goto label3;
775 tree
776 gfc_trans_arithmetic_if (gfc_code * code)
/* COND is evaluated exactly once (gfc_evaluate_now) since it is
   compared up to twice below.  */
778 gfc_se se;
779 tree tmp;
780 tree branch1;
781 tree branch2;
782 tree zero;
784 /* Start a new block. */
785 gfc_init_se (&se, NULL);
786 gfc_start_block (&se.pre);
788 /* Pre-evaluate COND. */
789 gfc_conv_expr_val (&se, code->expr1);
790 se.expr = gfc_evaluate_now (se.expr, &se.pre);
792 /* Build something to compare with. */
793 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
795 if (code->label1->value != code->label2->value)
797 /* If (cond < 0) take branch1 else take branch2.
798 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
799 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
800 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
802 if (code->label1->value != code->label3->value)
803 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
804 else
805 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
807 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
809 else
810 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
812 if (code->label1->value != code->label3->value
813 && code->label2->value != code->label3->value)
815 /* if (cond <= 0) take branch1 else take branch2. */
816 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
817 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
818 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
821 /* Append the COND_EXPR to the evaluation of COND, and return. */
822 gfc_add_expr_to_block (&se.pre, branch1);
823 return gfc_finish_block (&se.pre);
827 /* Translate a CRITICAL block. */
828 tree
829 gfc_trans_critical (gfc_code *code)
/* Currently only translates the body; no locking is emitted here.
   NOTE(review): presumably single-image semantics at this point in
   coarray support — confirm against the coarray runtime.  */
831 stmtblock_t block;
832 tree tmp;
834 gfc_start_block (&block);
835 tmp = gfc_trans_code (code->block->next);
836 gfc_add_expr_to_block (&block, tmp);
838 return gfc_finish_block (&block);
842 /* Translate a BLOCK construct. This is basically what we would do for a
843 procedure body. */
845 tree
846 gfc_trans_block_construct (gfc_code* code)
/* The BLOCK's namespace acts like a procedure scope: its locals are
   processed and deferred-variable handling is applied around the body.
   sym->tlink is used as a "being translated" marker (asserted clear).  */
848 gfc_namespace* ns;
849 gfc_symbol* sym;
850 stmtblock_t body;
851 tree tmp;
853 ns = code->ext.ns;
854 gcc_assert (ns);
855 sym = ns->proc_name;
856 gcc_assert (sym);
858 gcc_assert (!sym->tlink);
859 sym->tlink = sym;
861 gfc_start_block (&body);
862 gfc_process_block_locals (ns);
864 tmp = gfc_trans_code (ns->code);
865 tmp = gfc_trans_deferred_vars (sym, tmp);
867 gfc_add_expr_to_block (&body, tmp);
868 return gfc_finish_block (&body);
872 /* Translate the simple DO construct. This is where the loop variable has
873 integer type and step +-1. We can't use this in the general case
874 because integer overflow and floating point errors could give incorrect
875 results.
876 We translate a do loop from:
878 DO dovar = from, to, step
879 body
880 END DO
884 [Evaluate loop bounds and step]
885 dovar = from;
886 if ((step > 0) ? (dovar <= to) : (dovar => to))
888 for (;;)
890 body;
891 cycle_label:
892 cond = (dovar == to);
893 dovar += step;
894 if (cond) goto end_label;
897 end_label:
899 This helps the optimizers by avoiding the extra induction variable
900 used in the general case. */
902 static tree
903 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
904 tree from, tree to, tree step, tree exit_cond)
/* PBLOCK already holds the bound/step evaluation from gfc_trans_do;
   DOVAR/FROM/TO/STEP are pre-evaluated trees.  EXIT_COND, when non-null,
   is an extra condition (e.g. I/O error) that exits the loop.  Testing
   dovar == to BEFORE the increment avoids overflow at the last step.  */
906 stmtblock_t body;
907 tree type;
908 tree cond;
909 tree tmp;
910 tree saved_dovar = NULL;
911 tree cycle_label;
912 tree exit_label;
914 type = TREE_TYPE (dovar);
916 /* Initialize the DO variable: dovar = from. */
917 gfc_add_modify (pblock, dovar, from);
919 /* Save value for do-tinkering checking. */
920 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
922 saved_dovar = gfc_create_var (type, ".saved_dovar");
923 gfc_add_modify (pblock, saved_dovar, dovar);
926 /* Cycle and exit statements are implemented with gotos. */
927 cycle_label = gfc_build_label_decl (NULL_TREE);
928 exit_label = gfc_build_label_decl (NULL_TREE);
930 /* Put the labels where they can be found later. See gfc_trans_do(). */
931 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
933 /* Loop body. */
934 gfc_start_block (&body);
936 /* Main loop body. */
937 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
938 gfc_add_expr_to_block (&body, tmp);
940 /* Label for cycle statements (if needed). */
941 if (TREE_USED (cycle_label))
943 tmp = build1_v (LABEL_EXPR, cycle_label);
944 gfc_add_expr_to_block (&body, tmp);
947 /* Check whether someone has modified the loop variable. */
948 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
950 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
951 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
952 "Loop variable has been modified");
955 /* Exit the loop if there is an I/O result condition or error. */
956 if (exit_cond)
958 tmp = build1_v (GOTO_EXPR, exit_label);
959 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
960 build_empty_stmt (input_location));
961 gfc_add_expr_to_block (&body, tmp);
964 /* Evaluate the loop condition. */
965 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
966 cond = gfc_evaluate_now (cond, &body);
968 /* Increment the loop variable. */
969 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
970 gfc_add_modify (&body, dovar, tmp);
972 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
973 gfc_add_modify (&body, saved_dovar, dovar);
975 /* The loop exit. */
976 tmp = build1_v (GOTO_EXPR, exit_label);
977 TREE_USED (exit_label) = 1;
978 tmp = fold_build3 (COND_EXPR, void_type_node,
979 cond, tmp, build_empty_stmt (input_location));
980 gfc_add_expr_to_block (&body, tmp);
982 /* Finish the loop body. */
983 tmp = gfc_finish_block (&body);
984 tmp = build1_v (LOOP_EXPR, tmp);
986 /* Only execute the loop if the number of iterations is positive. */
987 if (tree_int_cst_sgn (step) > 0)
988 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
989 else
990 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
991 tmp = fold_build3 (COND_EXPR, void_type_node,
992 cond, tmp, build_empty_stmt (input_location));
993 gfc_add_expr_to_block (pblock, tmp);
995 /* Add the exit label. */
996 tmp = build1_v (LABEL_EXPR, exit_label);
997 gfc_add_expr_to_block (pblock, tmp);
999 return gfc_finish_block (pblock);
1002 /* Translate the DO construct. This obviously is one of the most
1003 important ones to get right with any compiler, but especially
1004 so for Fortran.
1006 We special case some loop forms as described in gfc_trans_simple_do.
1007 For other cases we implement them with a separate loop count,
1008 as described in the standard.
1010 We translate a do loop from:
1012 DO dovar = from, to, step
1013 body
1014 END DO
1018 [evaluate loop bounds and step]
1019 empty = (step > 0 ? to < from : to > from);
1020 countm1 = (to - from) / step;
1021 dovar = from;
1022 if (empty) goto exit_label;
1023 for (;;)
1025 body;
1026 cycle_label:
1027 dovar += step
1028 if (countm1 ==0) goto exit_label;
1029 countm1--;
1031 exit_label:
1033 countm1 is an unsigned integer. It is equal to the loop count minus one,
1034 because the loop count itself can overflow. */
1036 tree
1037 gfc_trans_do (gfc_code * code, tree exit_cond)
1039 gfc_se se;
1040 tree dovar;
1041 tree saved_dovar = NULL;
1042 tree from;
1043 tree to;
1044 tree step;
1045 tree countm1;
1046 tree type;
1047 tree utype;
1048 tree cond;
1049 tree cycle_label;
1050 tree exit_label;
1051 tree tmp;
1052 tree pos_step;
1053 stmtblock_t block;
1054 stmtblock_t body;
1056 gfc_start_block (&block);
1058 /* Evaluate all the expressions in the iterator. */
1059 gfc_init_se (&se, NULL);
1060 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1061 gfc_add_block_to_block (&block, &se.pre);
1062 dovar = se.expr;
1063 type = TREE_TYPE (dovar);
1065 gfc_init_se (&se, NULL);
1066 gfc_conv_expr_val (&se, code->ext.iterator->start);
1067 gfc_add_block_to_block (&block, &se.pre);
1068 from = gfc_evaluate_now (se.expr, &block);
1070 gfc_init_se (&se, NULL);
1071 gfc_conv_expr_val (&se, code->ext.iterator->end);
1072 gfc_add_block_to_block (&block, &se.pre);
1073 to = gfc_evaluate_now (se.expr, &block);
1075 gfc_init_se (&se, NULL);
1076 gfc_conv_expr_val (&se, code->ext.iterator->step);
1077 gfc_add_block_to_block (&block, &se.pre);
1078 step = gfc_evaluate_now (se.expr, &block);
1080 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1082 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
1083 fold_convert (type, integer_zero_node));
1084 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1085 "DO step value is zero");
1088 /* Special case simple loops. */
1089 if (TREE_CODE (type) == INTEGER_TYPE
1090 && (integer_onep (step)
1091 || tree_int_cst_equal (step, integer_minus_one_node)))
1092 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1094 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
1095 fold_convert (type, integer_zero_node));
1097 if (TREE_CODE (type) == INTEGER_TYPE)
1098 utype = unsigned_type_for (type);
1099 else
1100 utype = unsigned_type_for (gfc_array_index_type);
1101 countm1 = gfc_create_var (utype, "countm1");
1103 /* Cycle and exit statements are implemented with gotos. */
1104 cycle_label = gfc_build_label_decl (NULL_TREE);
1105 exit_label = gfc_build_label_decl (NULL_TREE);
1106 TREE_USED (exit_label) = 1;
1108 /* Initialize the DO variable: dovar = from. */
1109 gfc_add_modify (&block, dovar, from);
1111 /* Save value for do-tinkering checking. */
1112 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1114 saved_dovar = gfc_create_var (type, ".saved_dovar");
1115 gfc_add_modify (&block, saved_dovar, dovar);
1118 /* Initialize loop count and jump to exit label if the loop is empty.
1119 This code is executed before we enter the loop body. We generate:
1120 step_sign = sign(1,step);
1121 if (step > 0)
1123 if (to < from)
1124 goto exit_label;
1126 else
1128 if (to > from)
1129 goto exit_label;
1131 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1135 if (TREE_CODE (type) == INTEGER_TYPE)
1137 tree pos, neg, step_sign, to2, from2, step2;
1139 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1141 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1142 build_int_cst (TREE_TYPE (step), 0));
1143 step_sign = fold_build3 (COND_EXPR, type, tmp,
1144 build_int_cst (type, -1),
1145 build_int_cst (type, 1));
1147 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1148 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1149 build1_v (GOTO_EXPR, exit_label),
1150 build_empty_stmt (input_location));
1152 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1153 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1154 build1_v (GOTO_EXPR, exit_label),
1155 build_empty_stmt (input_location));
1156 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1158 gfc_add_expr_to_block (&block, tmp);
1160 /* Calculate the loop count. to-from can overflow, so
1161 we cast to unsigned. */
1163 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1164 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1165 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1166 step2 = fold_convert (utype, step2);
1167 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1168 tmp = fold_convert (utype, tmp);
1169 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1170 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1171 gfc_add_expr_to_block (&block, tmp);
1173 else
1175 /* TODO: We could use the same width as the real type.
1176 This would probably cause more problems that it solves
1177 when we implement "long double" types. */
1179 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1180 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1181 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1182 gfc_add_modify (&block, countm1, tmp);
1184 /* We need a special check for empty loops:
1185 empty = (step > 0 ? to < from : to > from); */
1186 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1187 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1188 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1189 /* If the loop is empty, go directly to the exit label. */
1190 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1191 build1_v (GOTO_EXPR, exit_label),
1192 build_empty_stmt (input_location));
1193 gfc_add_expr_to_block (&block, tmp);
1196 /* Loop body. */
1197 gfc_start_block (&body);
1199 /* Put these labels where they can be found later. We put the
1200 labels in a TREE_LIST node (because TREE_CHAIN is already
1201 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1202 label in TREE_VALUE (backend_decl). */
1204 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1206 /* Main loop body. */
1207 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1208 gfc_add_expr_to_block (&body, tmp);
1210 /* Label for cycle statements (if needed). */
1211 if (TREE_USED (cycle_label))
1213 tmp = build1_v (LABEL_EXPR, cycle_label);
1214 gfc_add_expr_to_block (&body, tmp);
1217 /* Check whether someone has modified the loop variable. */
1218 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1220 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1221 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1222 "Loop variable has been modified");
1225 /* Exit the loop if there is an I/O result condition or error. */
1226 if (exit_cond)
1228 tmp = build1_v (GOTO_EXPR, exit_label);
1229 tmp = fold_build3 (COND_EXPR, void_type_node, exit_cond, tmp,
1230 build_empty_stmt (input_location));
1231 gfc_add_expr_to_block (&body, tmp);
1234 /* Increment the loop variable. */
1235 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1236 gfc_add_modify (&body, dovar, tmp);
1238 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1239 gfc_add_modify (&body, saved_dovar, dovar);
1241 /* End with the loop condition. Loop until countm1 == 0. */
1242 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1243 build_int_cst (utype, 0));
1244 tmp = build1_v (GOTO_EXPR, exit_label);
1245 tmp = fold_build3 (COND_EXPR, void_type_node,
1246 cond, tmp, build_empty_stmt (input_location));
1247 gfc_add_expr_to_block (&body, tmp);
1249 /* Decrement the loop count. */
1250 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1251 gfc_add_modify (&body, countm1, tmp);
1253 /* End of loop body. */
1254 tmp = gfc_finish_block (&body);
1256 /* The for loop itself. */
1257 tmp = build1_v (LOOP_EXPR, tmp);
1258 gfc_add_expr_to_block (&block, tmp);
1260 /* Add the exit label. */
1261 tmp = build1_v (LABEL_EXPR, exit_label);
1262 gfc_add_expr_to_block (&block, tmp);
1264 return gfc_finish_block (&block);
1268 /* Translate the DO WHILE construct.
1270 We translate
1272 DO WHILE (cond)
1273 body
1274 END DO
1278 for ( ; ; )
1280 pre_cond;
1281 if (! cond) goto exit_label;
1282 body;
1283 cycle_label:
1285 exit_label:
1287 Because the evaluation of the exit condition `cond' may have side
1288 effects, we can't do much for empty loop bodies. The backend optimizers
1289 should be smart enough to eliminate any dead loops. */
tree
gfc_trans_do_while (gfc_code * code)
{
  gfc_se cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;
  stmtblock_t block;

  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Create a GENERIC version of the exit condition.  Its pre-code is
     emitted inside the loop body so the condition is re-evaluated on
     every iteration, including any side effects.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr1);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
		     cond.expr, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  CYCLE jumps here, which
     falls through to the next evaluation of the condition.  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the (infinite) loop; termination is solely via the goto above.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
1350 /* Translate the SELECT CASE construct for INTEGER case expressions,
1351 without killing all potential optimizations. The problem is that
1352 Fortran allows unbounded cases, but the back-end does not, so we
1353 need to intercept those before we enter the equivalent SWITCH_EXPR
1354 we can build.
1356 For example, we translate this,
1358 SELECT CASE (expr)
1359 CASE (:100,101,105:115)
1360 block_1
1361 CASE (190:199,200:)
1362 block_2
1363 CASE (300)
1364 block_3
1365 CASE DEFAULT
1366 block_4
1367 END SELECT
1369 to the GENERIC equivalent,
1371 switch (expr)
       case (minimum value for typeof(expr)) ... 100:
       case 101:
       case 105 ... 115:
	 block_1;
	 goto end_label;

       case 200 ... (maximum value for typeof(expr)):
       case 190 ... 199:
	 block_2;
	 goto end_label;
1384 case 300:
1385 block_3;
1386 goto end_label;
1388 default:
1389 block_4;
1390 goto end_label;
1393 end_label: */
1395 static tree
1396 gfc_trans_integer_select (gfc_code * code)
1398 gfc_code *c;
1399 gfc_case *cp;
1400 tree end_label;
1401 tree tmp;
1402 gfc_se se;
1403 stmtblock_t block;
1404 stmtblock_t body;
1406 gfc_start_block (&block);
1408 /* Calculate the switch expression. */
1409 gfc_init_se (&se, NULL);
1410 gfc_conv_expr_val (&se, code->expr1);
1411 gfc_add_block_to_block (&block, &se.pre);
1413 end_label = gfc_build_label_decl (NULL_TREE);
1415 gfc_init_block (&body);
1417 for (c = code->block; c; c = c->block)
1419 for (cp = c->ext.case_list; cp; cp = cp->next)
1421 tree low, high;
1422 tree label;
1424 /* Assume it's the default case. */
1425 low = high = NULL_TREE;
1427 if (cp->low)
1429 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1430 cp->low->ts.kind);
1432 /* If there's only a lower bound, set the high bound to the
1433 maximum value of the case expression. */
1434 if (!cp->high)
1435 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1438 if (cp->high)
1440 /* Three cases are possible here:
1442 1) There is no lower bound, e.g. CASE (:N).
1443 2) There is a lower bound .NE. high bound, that is
1444 a case range, e.g. CASE (N:M) where M>N (we make
1445 sure that M>N during type resolution).
1446 3) There is a lower bound, and it has the same value
1447 as the high bound, e.g. CASE (N:N). This is our
1448 internal representation of CASE(N).
1450 In the first and second case, we need to set a value for
1451 high. In the third case, we don't because the GCC middle
1452 end represents a single case value by just letting high be
1453 a NULL_TREE. We can't do that because we need to be able
1454 to represent unbounded cases. */
1456 if (!cp->low
1457 || (cp->low
1458 && mpz_cmp (cp->low->value.integer,
1459 cp->high->value.integer) != 0))
1460 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1461 cp->high->ts.kind);
1463 /* Unbounded case. */
1464 if (!cp->low)
1465 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1468 /* Build a label. */
1469 label = gfc_build_label_decl (NULL_TREE);
1471 /* Add this case label.
1472 Add parameter 'label', make it match GCC backend. */
1473 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1474 low, high, label);
1475 gfc_add_expr_to_block (&body, tmp);
1478 /* Add the statements for this case. */
1479 tmp = gfc_trans_code (c->next);
1480 gfc_add_expr_to_block (&body, tmp);
1482 /* Break to the end of the construct. */
1483 tmp = build1_v (GOTO_EXPR, end_label);
1484 gfc_add_expr_to_block (&body, tmp);
1487 tmp = gfc_finish_block (&body);
1488 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1489 gfc_add_expr_to_block (&block, tmp);
1491 tmp = build1_v (LABEL_EXPR, end_label);
1492 gfc_add_expr_to_block (&block, tmp);
1494 return gfc_finish_block (&block);
1498 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1500 There are only two cases possible here, even though the standard
1501 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1502 .FALSE., and DEFAULT.
1504 We never generate more than two blocks here. Instead, we always
1505 try to eliminate the DEFAULT case. This way, we can translate this
1506 kind of SELECT construct to a simple
1508 if {} else {};
1510 expression in GENERIC. */
1512 static tree
1513 gfc_trans_logical_select (gfc_code * code)
1515 gfc_code *c;
1516 gfc_code *t, *f, *d;
1517 gfc_case *cp;
1518 gfc_se se;
1519 stmtblock_t block;
1521 /* Assume we don't have any cases at all. */
1522 t = f = d = NULL;
1524 /* Now see which ones we actually do have. We can have at most two
1525 cases in a single case list: one for .TRUE. and one for .FALSE.
1526 The default case is always separate. If the cases for .TRUE. and
1527 .FALSE. are in the same case list, the block for that case list
1528 always executed, and we don't generate code a COND_EXPR. */
1529 for (c = code->block; c; c = c->block)
1531 for (cp = c->ext.case_list; cp; cp = cp->next)
1533 if (cp->low)
1535 if (cp->low->value.logical == 0) /* .FALSE. */
1536 f = c;
1537 else /* if (cp->value.logical != 0), thus .TRUE. */
1538 t = c;
1540 else
1541 d = c;
1545 /* Start a new block. */
1546 gfc_start_block (&block);
1548 /* Calculate the switch expression. We always need to do this
1549 because it may have side effects. */
1550 gfc_init_se (&se, NULL);
1551 gfc_conv_expr_val (&se, code->expr1);
1552 gfc_add_block_to_block (&block, &se.pre);
1554 if (t == f && t != NULL)
1556 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1557 translate the code for these cases, append it to the current
1558 block. */
1559 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1561 else
1563 tree true_tree, false_tree, stmt;
1565 true_tree = build_empty_stmt (input_location);
1566 false_tree = build_empty_stmt (input_location);
1568 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1569 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1570 make the missing case the default case. */
1571 if (t != NULL && f != NULL)
1572 d = NULL;
1573 else if (d != NULL)
1575 if (t == NULL)
1576 t = d;
1577 else
1578 f = d;
1581 /* Translate the code for each of these blocks, and append it to
1582 the current block. */
1583 if (t != NULL)
1584 true_tree = gfc_trans_code (t->next);
1586 if (f != NULL)
1587 false_tree = gfc_trans_code (f->next);
1589 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1590 true_tree, false_tree);
1591 gfc_add_expr_to_block (&block, stmt);
1594 return gfc_finish_block (&block);
1598 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1599 Instead of generating compares and jumps, it is far simpler to
1600 generate a data structure describing the cases in order and call a
1601 library subroutine that locates the right case.
1602 This is particularly true because this is the only case where we
1603 might have to dispose of a temporary.
1604 The library subroutine returns a pointer to jump to or NULL if no
1605 branches are to be taken. */
static tree
gfc_trans_character_select (gfc_code *code)
{
  tree init, end_label, tmp, type, case_num, label, fndecl;
  stmtblock_t block, body;
  gfc_case *cp, *d;
  gfc_code *c;
  gfc_se se;
  int n, k;
  VEC(constructor_elt,gc) *inits = NULL;

  /* The jump table types are stored in static variables to avoid
     constructing them from scratch every single time.  Index 0 is for
     kind=1 (default character), index 1 for kind=4.  */
  static tree select_struct[2];
  static tree ss_string1[2], ss_string1_len[2];
  static tree ss_string2[2], ss_string2_len[2];
  static tree ss_target[2];

  tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);

  if (code->expr1->ts.kind == 1)
    k = 0;
  else if (code->expr1->ts.kind == 4)
    k = 1;
  else
    gcc_unreachable ();

  /* Lazily build the RECORD_TYPE describing one jump-table entry:
     { string1, string1_len, string2, string2_len, target }.  This must
     match the layout expected by the library select routines.  */
  if (select_struct[k] == NULL)
    {
      select_struct[k] = make_node (RECORD_TYPE);

      if (code->expr1->ts.kind == 1)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
      else if (code->expr1->ts.kind == 4)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
      else
	gcc_unreachable ();

#undef ADD_FIELD
#define ADD_FIELD(NAME, TYPE)					\
  ss_##NAME[k] = gfc_add_field_to_struct			\
     (&(TYPE_FIELDS (select_struct[k])), select_struct[k],	\
      get_identifier (stringize(NAME)), TYPE)

      ADD_FIELD (string1, pchartype);
      ADD_FIELD (string1_len, gfc_charlen_type_node);

      ADD_FIELD (string2, pchartype);
      ADD_FIELD (string2_len, gfc_charlen_type_node);

      ADD_FIELD (target, integer_type_node);
#undef ADD_FIELD

      gfc_finish_type (select_struct[k]);
    }

  /* Walk to the leftmost node of the sorted case tree, then number all
     cases 0..n-1 in left-to-right (sorted) order; d->n is the value the
     library call returns and the SWITCH_EXPR dispatches on.  */
  cp = code->block->ext.case_list;
  while (cp->left != NULL)
    cp = cp->left;

  n = 0;
  for (d = cp; d; d = d->right)
    d->n = n++;

  end_label = gfc_build_label_decl (NULL_TREE);

  /* Generate the body */
  gfc_start_block (&block);
  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (d = c->ext.case_list; d; d = d->next)
	{
	  label = gfc_build_label_decl (NULL_TREE);
	  tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
			     build_int_cst (NULL_TREE, d->n),
			     build_int_cst (NULL_TREE, d->n), label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Generate the structure describing the branches.  A NULL low/high
     bound (unbounded case) is encoded as a null pointer with length 0.  */
  for(d = cp; d; d = d->right)
    {
      VEC(constructor_elt,gc) *node = NULL;

      gfc_init_se (&se, NULL);

      if (d->low == NULL)
	{
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
	}
      else
	{
	  gfc_conv_expr_reference (&se, d->low);

	  CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
	}

      if (d->high == NULL)
	{
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
	}
      else
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_reference (&se, d->high);

	  CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
	}

      CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
			      build_int_cst (integer_type_node, d->n));

      tmp = build_constructor (select_struct[k], node);
      CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
    }

  type = build_array_type (select_struct[k],
			   build_index_type (build_int_cst (NULL_TREE, n-1)));

  init = build_constructor (type, inits);
  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;
  /* Create a static variable to hold the jump table.  */
  tmp = gfc_create_var (type, "jumptable");
  TREE_CONSTANT (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;
  init = tmp;

  /* Build the library call */
  init = gfc_build_addr_expr (pvoid_type_node, init);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_reference (&se, code->expr1);

  gfc_add_block_to_block (&block, &se.pre);

  if (code->expr1->ts.kind == 1)
    fndecl = gfor_fndecl_select_string;
  else if (code->expr1->ts.kind == 4)
    fndecl = gfor_fndecl_select_string_char4;
  else
    gcc_unreachable ();

  /* case_num receives the index of the matching case (or the default),
     computed at run time by the library lookup.  */
  tmp = build_call_expr_loc (input_location,
			 fndecl, 4, init, build_int_cst (NULL_TREE, n),
			 se.expr, se.string_length);
  case_num = gfc_create_var (integer_type_node, "case_num");
  gfc_add_modify (&block, case_num, tmp);

  gfc_add_block_to_block (&block, &se.post);

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
1784 /* Translate the three variants of the SELECT CASE construct.
1786 SELECT CASEs with INTEGER case expressions can be translated to an
1787 equivalent GENERIC switch statement, and for LOGICAL case
1788 expressions we build one or two if-else compares.
1790 SELECT CASEs with CHARACTER case expressions are a whole different
1791 story, because they don't exist in GENERIC. So we sort them and
1792 do a binary search at runtime.
1794 Fortran has no BREAK statement, and it does not allow jumps from
1795 one case block to another. That makes things a lot easier for
1796 the optimizers. */
1798 tree
1799 gfc_trans_select (gfc_code * code)
1801 gcc_assert (code && code->expr1);
1803 /* Empty SELECT constructs are legal. */
1804 if (code->block == NULL)
1805 return build_empty_stmt (input_location);
1807 /* Select the correct translation function. */
1808 switch (code->expr1->ts.type)
1810 case BT_LOGICAL: return gfc_trans_logical_select (code);
1811 case BT_INTEGER: return gfc_trans_integer_select (code);
1812 case BT_CHARACTER: return gfc_trans_character_select (code);
1813 default:
1814 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1815 /* Not reached */
1820 /* Traversal function to substitute a replacement symtree if the symbol
1821 in the expression is the same as that passed. f == 2 signals that
1822 that variable itself is not to be checked - only the references.
1823 This group of functions is used when the variable expression in a
1824 FORALL assignment has internal references. For example:
1825 FORALL (i = 1:4) p(p(i)) = i
1826 The only recourse here is to store a copy of 'p' for the index
1827 expression. */
1829 static gfc_symtree *new_symtree;
1830 static gfc_symtree *old_symtree;
1832 static bool
1833 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1835 if (expr->expr_type != EXPR_VARIABLE)
1836 return false;
1838 if (*f == 2)
1839 *f = 1;
1840 else if (expr->symtree->n.sym == sym)
1841 expr->symtree = new_symtree;
1843 return false;
/* Walk expression E, redirecting references to SYM to the replacement
   symtree.  F == 2 exempts the variable itself (see forall_replace).  */

static void
forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
{
  gfc_traverse_expr (e, sym, forall_replace, f);
}
1852 static bool
1853 forall_restore (gfc_expr *expr,
1854 gfc_symbol *sym ATTRIBUTE_UNUSED,
1855 int *f ATTRIBUTE_UNUSED)
1857 if (expr->expr_type != EXPR_VARIABLE)
1858 return false;
1860 if (expr->symtree == new_symtree)
1861 expr->symtree = old_symtree;
1863 return false;
/* Walk expression E restoring the original symtree wherever the
   temporary one was substituted.  */

static void
forall_restore_symtree (gfc_expr *e)
{
  gfc_traverse_expr (e, NULL, forall_restore, 0);
}
static void
forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_se tse;
  gfc_se rse;
  gfc_expr *e;
  gfc_symbol *new_sym;
  gfc_symbol *old_sym;
  gfc_symtree *root;
  tree tmp;

  /* Build a copy of the lvalue.  The old symtree is stashed in the
     file-level static so cleanup_forall_symtrees can restore it.  */
  old_symtree = c->expr1->symtree;
  old_sym = old_symtree->n.sym;
  e = gfc_lval_expr_from_sym (old_sym);
  if (old_sym->attr.dimension)
    {
      /* Array case: make a full temporary copy of the array.  */
      gfc_init_se (&tse, NULL);
      gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
      gfc_add_block_to_block (pre, &tse.pre);
      gfc_add_block_to_block (post, &tse.post);
      tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);

      if (e->ts.type != BT_CHARACTER)
	{
	  /* Use the variable offset for the temporary.  */
	  tmp = gfc_conv_array_offset (old_sym->backend_decl);
	  gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
	}
    }
  else
    {
      /* Scalar case: create a scalar temporary and assign into it.  */
      gfc_init_se (&tse, NULL);
      gfc_init_se (&rse, NULL);
      gfc_conv_expr (&rse, e);
      if (e->ts.type == BT_CHARACTER)
	{
	  tse.string_length = rse.string_length;
	  tmp = gfc_get_character_type_len (gfc_default_character_kind,
					    tse.string_length);
	  tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
					  rse.string_length);
	  gfc_add_block_to_block (pre, &tse.pre);
	  gfc_add_block_to_block (post, &tse.post);
	}
      else
	{
	  tmp = gfc_typenode_for_spec (&e->ts);
	  tse.expr = gfc_create_var (tmp, "temp");
	}

      tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
				     e->expr_type == EXPR_VARIABLE, true);
      gfc_add_expr_to_block (pre, tmp);
    }
  gfc_free_expr (e);

  /* Create a new symbol to represent the lvalue.  */
  new_sym = gfc_new_symbol (old_sym->name, NULL);
  new_sym->ts = old_sym->ts;
  new_sym->attr.referenced = 1;
  new_sym->attr.temporary = 1;
  new_sym->attr.dimension = old_sym->attr.dimension;
  new_sym->attr.flavor = old_sym->attr.flavor;

  /* Use the temporary as the backend_decl.  */
  new_sym->backend_decl = tse.expr;

  /* Create a fake symtree for it.  */
  root = NULL;
  new_symtree = gfc_new_symtree (&root, old_sym->name);
  new_symtree->n.sym = new_sym;
  gcc_assert (new_symtree == root);

  /* Go through the expression reference replacing the old_symtree
     with the new.  Flag 2: skip the assigned variable itself.  */
  forall_replace_symtree (c->expr1, old_sym, 2);

  /* Now we have made this temporary, we might as well use it for
     the right hand side.  Flag 1: replace all references.  */
  forall_replace_symtree (c->expr2, old_sym, 1);
}
1956 /* Handles dependencies in forall assignments. */
static int
check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_ref *lref;
  gfc_ref *rref;
  int need_temp;
  gfc_symbol *lsym;

  lsym = c->expr1->symtree->n.sym;
  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);

  /* Now check for dependencies within the 'variable'
     expression itself.  These are treated by making a complete
     copy of variable and changing all the references to it
     point to the copy instead.  Note that the shallow copy of
     the variable will not suffice for derived types with
     pointer components.  We therefore leave these to their
     own devices.  */
  if (lsym->ts.type == BT_DERIVED
	&& lsym->ts.u.derived->attr.pointer_comp)
    return need_temp;

  /* E.g. FORALL (i=1:4) p(p(i)) = i: the index expression references
     the assigned variable, so a copy of it is made and substituted.  */
  new_symtree = NULL;
  if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
    {
      forall_make_variable_temp (c, pre, post);
      need_temp = 0;
    }

  /* Substrings with dependencies are treated in the same
     way.  */
  if (c->expr1->ts.type == BT_CHARACTER
	&& c->expr1->ref
	&& c->expr2->expr_type == EXPR_VARIABLE
	&& lsym == c->expr2->symtree->n.sym)
    {
      for (lref = c->expr1->ref; lref; lref = lref->next)
	if (lref->type == REF_SUBSTRING)
	  break;
      for (rref = c->expr2->ref; rref; rref = rref->next)
	if (rref->type == REF_SUBSTRING)
	  break;

      /* An overlap needs the temporary only when the rhs substring
	 starts before the lhs one.  */
      if (rref && lref
	  && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
	{
	  forall_make_variable_temp (c, pre, post);
	  need_temp = 0;
	}
    }
  return need_temp;
}
/* Restore the original symtrees in both sides of the assignment and
   release the fake symtree/symbol created by forall_make_variable_temp.  */

static void
cleanup_forall_symtrees (gfc_code *c)
{
  forall_restore_symtree (c->expr1);
  forall_restore_symtree (c->expr2);
  gfc_free (new_symtree->n.sym);
  gfc_free (new_symtree);
}
2021 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2022 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2023 indicates whether we should generate code to test the FORALLs mask
2024 array. OUTER is the loop header to be used for initializing mask
2025 indices.
2027 The generated loop format is:
2028 count = (end - start + step) / step
2029 loopvar = start
2030 while (1)
2032 if (count <=0 )
2033 goto end_of_loop
2034 <body>
2035 loopvar += step
2036 count --
2038 end_of_loop: */
static tree
gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
		       int mask_flag, stmtblock_t *outer)
{
  int n, nvar;
  tree tmp;
  tree cond;
  stmtblock_t block;
  tree exit_label;
  tree count;
  tree var, start, end, step;
  iter_info *iter;

  /* Initialize the mask index outside the FORALL nest.  */
  if (mask_flag && forall_tmp->mask)
    gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);

  /* Wrap BODY in one loop per FORALL index, innermost first (the
     iter list is ordered innermost to outermost).  */
  iter = forall_tmp->this_loop;
  nvar = forall_tmp->nvar;
  for (n = 0; n < nvar; n++)
    {
      var = iter->var;
      start = iter->start;
      end = iter->end;
      step = iter->step;

      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* The loop counter.  */
      count = gfc_create_var (TREE_TYPE (var), "count");

      /* The body of the loop.  */
      gfc_init_block (&block);

      /* The exit condition: leave once count reaches zero.  */
      cond = fold_build2 (LE_EXPR, boolean_type_node,
			  count, build_int_cst (TREE_TYPE (count), 0));
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3 (COND_EXPR, void_type_node,
			 cond, tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);

      /* The main loop body.  */
      gfc_add_expr_to_block (&block, body);

      /* Increment the loop variable.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
      gfc_add_modify (&block, var, tmp);

      /* Advance to the next mask element.  Only do this for the
	 innermost loop.  */
      if (n == 0 && mask_flag && forall_tmp->mask)
	{
	  tree maskindex = forall_tmp->maskindex;
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     maskindex, gfc_index_one_node);
	  gfc_add_modify (&block, maskindex, tmp);
	}

      /* Decrement the loop counter.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
			 build_int_cst (TREE_TYPE (var), 1));
      gfc_add_modify (&block, count, tmp);

      body = gfc_finish_block (&block);

      /* Loop var initialization.  */
      gfc_init_block (&block);
      gfc_add_modify (&block, var, start);


      /* Initialize the loop counter: count = (end - start + step) / step.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
      gfc_add_modify (&block, count, tmp);

      /* The loop expression.  */
      tmp = build1_v (LOOP_EXPR, body);
      gfc_add_expr_to_block (&block, tmp);

      /* The exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&block, tmp);

      /* The finished loop becomes the body of the next (outer) one.  */
      body = gfc_finish_block (&block);
      iter = iter->next;
    }
  return body;
}
2133 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2134 is nonzero, the body is controlled by all masks in the forall nest.
   Otherwise, the innermost loop is not controlled by its mask.  This
2136 is used for initializing that mask. */
static tree
gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
			      int mask_flag)
{
  tree tmp;
  stmtblock_t header;
  forall_info *forall_tmp;
  tree mask, maskindex;

  gfc_start_block (&header);

  /* Walk from the innermost FORALL level outwards, wrapping BODY in
     each level's loops (and mask test, when requested).  */
  forall_tmp = nested_forall_info;
  while (forall_tmp != NULL)
    {
      /* Generate body with masks' control.  */
      if (mask_flag)
	{
	  mask = forall_tmp->mask;
	  maskindex = forall_tmp->maskindex;

	  /* If a mask was specified make the assignment conditional.  */
	  if (mask)
	    {
	      tmp = gfc_build_array_ref (mask, maskindex, NULL);
	      body = build3_v (COND_EXPR, tmp, body,
			       build_empty_stmt (input_location));
	    }
	}
      body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
      forall_tmp = forall_tmp->prev_nest;
      /* Only the innermost level may skip its mask; all outer levels
	 are always mask-controlled.  */
      mask_flag = 1;
    }

  gfc_add_expr_to_block (&header, body);
  return gfc_finish_block (&header);
}
2176 /* Allocate data for holding a temporary array. Returns either a local
2177 temporary array or a pointer variable. */
static tree
gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
		 tree elem_type)
{
  tree tmpvar;
  tree type;
  tree tmp;

  /* For a constant SIZE build a [0 .. size-1] index range; otherwise
     leave the upper bound open (NULL_TREE).  */
  if (INTEGER_CST_P (size))
    {
      tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
			 gfc_index_one_node);
    }
  else
    tmp = NULL_TREE;

  type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
  type = build_array_type (elem_type, type);
  if (gfc_can_put_var_on_stack (bytesize))
    {
      /* Small constant-sized temporary: a plain local array, no
	 dealloction needed (*pdata stays NULL).  */
      gcc_assert (INTEGER_CST_P (size));
      tmpvar = gfc_create_var (type, "temp");
      *pdata = NULL_TREE;
    }
  else
    {
      /* Heap-allocate and hand the raw pointer back through *pdata so
	 the caller can free it.  */
      tmpvar = gfc_create_var (build_pointer_type (type), "temp");
      *pdata = convert (pvoid_type_node, tmpvar);

      tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
      gfc_add_modify (pblock, tmpvar, tmp);
    }
  return tmpvar;
}
2215 /* Generate codes to copy the temporary to the actual lhs. */
static tree
generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
			       tree count1, tree wheremask, bool invert)
{
  gfc_ss *lss;
  gfc_se lse, rse;
  stmtblock_t block, body;
  gfc_loopinfo loop1;
  tree tmp;
  tree wheremaskexpr;

  /* Walk the lhs.  */
  lss = gfc_walk_expr (expr);

  if (lss == gfc_ss_terminator)
    {
      /* Scalar lhs: a single assignment from temp[count1], no
	 scalarization loop required.  */
      gfc_start_block (&block);

      gfc_init_se (&lse, NULL);

      /* Translate the expression.  */
      gfc_conv_expr (&lse, expr);

      /* Form the expression for the temporary.  */
      tmp = gfc_build_array_ref (tmp1, count1, NULL);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &lse.pre);
      gfc_add_modify (&block, lse.expr, tmp);
      gfc_add_block_to_block (&block, &lse.post);

      /* Increment the count1.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
			 gfc_index_one_node);
      gfc_add_modify (&block, count1, tmp);

      tmp = gfc_finish_block (&block);
    }
  else
    {
      /* Array lhs: build a scalarized loop copying one temp element
	 per iteration.  */
      gfc_start_block (&block);

      gfc_init_loopinfo (&loop1);
      gfc_init_se (&rse, NULL);
      gfc_init_se (&lse, NULL);

      /* Associate the lss with the loop.  */
      gfc_add_ss_to_loop (&loop1, lss);

      /* Calculate the bounds of the scalarization.  */
      gfc_conv_ss_startstride (&loop1);
      /* Setup the scalarizing loops.  */
      gfc_conv_loop_setup (&loop1, &expr->where);

      gfc_mark_ss_chain_used (lss, 1);

      /* Start the scalarized loop body.  */
      gfc_start_scalarized_body (&loop1, &body);

      /* Setup the gfc_se structures.  */
      gfc_copy_loopinfo_to_se (&lse, &loop1);
      lse.ss = lss;

      /* Form the expression of the temporary.  */
      if (lss != gfc_ss_terminator)
	rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
      /* Translate expr.  */
      gfc_conv_expr (&lse, expr);

      /* Use the scalar assignment.  */
      rse.string_length = lse.string_length;
      tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);

      /* Form the mask expression according to the mask tree list.  The
	 assignment is guarded by wheremask[count3], optionally negated
	 (INVERT) for the ELSEWHERE branch.  */
      if (wheremask)
	{
	  wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
	  if (invert)
	    wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
					 TREE_TYPE (wheremaskexpr),
					 wheremaskexpr);
	  tmp = fold_build3 (COND_EXPR, void_type_node,
			     wheremaskexpr, tmp,
			     build_empty_stmt (input_location));
	}

      gfc_add_expr_to_block (&body, tmp);

      /* Increment count1.  */
      tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			 count1, gfc_index_one_node);
      gfc_add_modify (&body, count1, tmp);

      /* Increment count3.  */
      if (count3)
	{
	  tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
			     count3, gfc_index_one_node);
	  gfc_add_modify (&body, count3, tmp);
	}

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop1, &body);
      gfc_add_block_to_block (&block, &loop1.pre);
      gfc_add_block_to_block (&block, &loop1.post);
      gfc_cleanup_loop (&loop1);

      tmp = gfc_finish_block (&block);
    }
  return tmp;
}
2330 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2331 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2332 and should not be freed. WHEREMASK is the conditional execution mask
2333 whose sense may be inverted by INVERT. */
2335 static tree
2336 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2337 tree count1, gfc_ss *lss, gfc_ss *rss,
2338 tree wheremask, bool invert)
/* EXPR2 is the rhs expression; TMP1 the linear temporary to fill,
   indexed by COUNT1.  LSS/RSS come from compute_inner_temp_size() and
   are reused here (see header comment); they must not be freed.
   WHEREMASK, indexed by COUNT3, optionally guards each element store;
   INVERT flips its sense.  Returns the statement tree.  */
2340 stmtblock_t block, body1;
2341 gfc_loopinfo loop;
2342 gfc_se lse;
2343 gfc_se rse;
2344 tree tmp;
2345 tree wheremaskexpr;
2347 gfc_start_block (&block);
2349 gfc_init_se (&rse, NULL);
2350 gfc_init_se (&lse, NULL);
/* Scalar lhs (lss terminated): the rhs is evaluated once and stored
   at TMP1[COUNT1]; no scalarization loop is set up.  */
2352 if (lss == gfc_ss_terminator)
2354 gfc_init_block (&body1);
2355 gfc_conv_expr (&rse, expr2);
2356 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2358 else
2360 /* Initialize the loop. */
2361 gfc_init_loopinfo (&loop);
2363 /* We may need LSS to determine the shape of the expression. */
2364 gfc_add_ss_to_loop (&loop, lss);
2365 gfc_add_ss_to_loop (&loop, rss);
2367 gfc_conv_ss_startstride (&loop);
2368 gfc_conv_loop_setup (&loop, &expr2->where);
2370 gfc_mark_ss_chain_used (rss, 1);
2371 /* Start the loop body. */
2372 gfc_start_scalarized_body (&loop, &body1);
2374 /* Translate the expression. */
2375 gfc_copy_loopinfo_to_se (&rse, &loop);
2376 rse.ss = rss;
2377 gfc_conv_expr (&rse, expr2);
2379 /* Form the expression of the temporary. */
2380 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2383 /* Use the scalar assignment. */
2384 lse.string_length = rse.string_length;
2385 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2386 expr2->expr_type == EXPR_VARIABLE, true);
2388 /* Form the mask expression according to the mask tree list. */
2389 if (wheremask)
2391 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2392 if (invert)
2393 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2394 TREE_TYPE (wheremaskexpr),
2395 wheremaskexpr);
2396 tmp = fold_build3 (COND_EXPR, void_type_node,
2397 wheremaskexpr, tmp, build_empty_stmt (input_location));
2400 gfc_add_expr_to_block (&body1, tmp);
/* Counter increments: in the scalar case they go in the outer block
   (no inner loop exists); in the array case they go inside the
   scalarized loop body, followed by loop generation and cleanup.  */
2402 if (lss == gfc_ss_terminator)
2404 gfc_add_block_to_block (&block, &body1);
2406 /* Increment count1. */
2407 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2408 gfc_index_one_node);
2409 gfc_add_modify (&block, count1, tmp);
2411 else
2413 /* Increment count1. */
2414 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2415 count1, gfc_index_one_node);
2416 gfc_add_modify (&body1, count1, tmp);
2418 /* Increment count3. */
2419 if (count3)
2421 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2422 count3, gfc_index_one_node);
2423 gfc_add_modify (&body1, count3, tmp);
2426 /* Generate the copying loops. */
2427 gfc_trans_scalarizing_loops (&loop, &body1);
2429 gfc_add_block_to_block (&block, &loop.pre);
2430 gfc_add_block_to_block (&block, &loop.post);
2432 gfc_cleanup_loop (&loop);
2433 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2434 as tree nodes in SS may not be valid in different scope. */
2437 tmp = gfc_finish_block (&block);
2438 return tmp;
2442 /* Calculate the size of temporary needed in the assignment inside forall.
2443 LSS and RSS are filled in this function. */
2445 static tree
2446 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2447 stmtblock_t * pblock,
2448 gfc_ss **lss, gfc_ss **rss)
2450 gfc_loopinfo loop;
2451 tree size;
2452 int i;
2453 int save_flag;
2454 tree tmp;
2456 *lss = gfc_walk_expr (expr1);
2457 *rss = NULL;
2459 size = gfc_index_one_node;
2460 if (*lss != gfc_ss_terminator)
2462 gfc_init_loopinfo (&loop);
2464 /* Walk the RHS of the expression. */
2465 *rss = gfc_walk_expr (expr2);
2466 if (*rss == gfc_ss_terminator)
2468 /* The rhs is scalar. Add a ss for the expression. */
2469 *rss = gfc_get_ss ();
2470 (*rss)->next = gfc_ss_terminator;
2471 (*rss)->type = GFC_SS_SCALAR;
2472 (*rss)->expr = expr2;
2475 /* Associate the SS with the loop. */
2476 gfc_add_ss_to_loop (&loop, *lss);
2477 /* We don't actually need to add the rhs at this point, but it might
2478 make guessing the loop bounds a bit easier. */
2479 gfc_add_ss_to_loop (&loop, *rss);
2481 /* We only want the shape of the expression, not rest of the junk
2482 generated by the scalarizer. */
2483 loop.array_parameter = 1;
2485 /* Calculate the bounds of the scalarization. */
2486 save_flag = gfc_option.rtcheck;
2487 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2488 gfc_conv_ss_startstride (&loop);
2489 gfc_option.rtcheck = save_flag;
2490 gfc_conv_loop_setup (&loop, &expr2->where);
2492 /* Figure out how many elements we need. */
2493 for (i = 0; i < loop.dimen; i++)
2495 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2496 gfc_index_one_node, loop.from[i]);
2497 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2498 tmp, loop.to[i]);
2499 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2501 gfc_add_block_to_block (pblock, &loop.pre);
2502 size = gfc_evaluate_now (size, pblock);
2503 gfc_add_block_to_block (pblock, &loop.post);
2505 /* TODO: write a function that cleans up a loopinfo without freeing
2506 the SS chains. Currently a NOP. */
2509 return size;
2513 /* Calculate the overall iterator number of the nested forall construct.
2514 This routine actually calculates the number of times the body of the
2515 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2516 that by the expression INNER_SIZE. The BLOCK argument specifies the
2517 block in which to calculate the result, and the optional INNER_SIZE_BODY
2518 argument contains any statements that need to executed (inside the loop)
2519 to initialize or calculate INNER_SIZE. */
2521 static tree
2522 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2523 stmtblock_t *inner_size_body, stmtblock_t *block)
2525 forall_info *forall_tmp = nested_forall_info;
2526 tree tmp, number;
2527 stmtblock_t body;
2529 /* We can eliminate the innermost unconditional loops with constant
2530 array bounds. */
2531 if (INTEGER_CST_P (inner_size))
/* Fold constant, unmasked inner levels directly into INNER_SIZE;
   forall_tmp is left pointing at the outermost level that could not
   be folded (or NULL if all levels folded).  */
2533 while (forall_tmp
2534 && !forall_tmp->mask
2535 && INTEGER_CST_P (forall_tmp->size))
2537 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2538 inner_size, forall_tmp->size);
2539 forall_tmp = forall_tmp->prev_nest;
2542 /* If there are no loops left, we have our constant result. */
2543 if (!forall_tmp)
2544 return inner_size;
2547 /* Otherwise, create a temporary variable to compute the result. */
2548 number = gfc_create_var (gfc_array_index_type, "num");
2549 gfc_add_modify (block, number, gfc_index_zero_node);
2551 gfc_start_block (&body);
2552 if (inner_size_body)
2553 gfc_add_block_to_block (&body, inner_size_body);
/* forall_tmp can still be NULL here when INNER_SIZE was not a
   constant and there is no enclosing FORALL: then NUMBER is just
   assigned INNER_SIZE once, with no loops generated below.  */
2554 if (forall_tmp)
2555 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2556 number, inner_size);
2557 else
2558 tmp = inner_size;
2559 gfc_add_modify (&body, number, tmp);
2560 tmp = gfc_finish_block (&body);
2562 /* Generate loops. */
2563 if (forall_tmp != NULL)
2564 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2566 gfc_add_expr_to_block (block, tmp);
2568 return number;
2572 /* Allocate temporary for forall construct. SIZE is the size of temporary
2573 needed. PTEMP1 is returned for space free. */
2575 static tree
2576 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2577 tree * ptemp1)
2579 tree bytesize;
2580 tree unit;
2581 tree tmp;
2583 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2584 if (!integer_onep (unit))
2585 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2586 else
2587 bytesize = size;
2589 *ptemp1 = NULL;
2590 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2592 if (*ptemp1)
2593 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2594 return tmp;
2598 /* Allocate temporary for forall construct according to the information in
2599 nested_forall_info. INNER_SIZE is the size of temporary needed in the
2600 assignment inside forall. PTEMP1 is returned for space free. */
2602 static tree
2603 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2604 tree inner_size, stmtblock_t * inner_size_body,
2605 stmtblock_t * block, tree * ptemp1)
2607 tree size;
2609 /* Calculate the total size of temporary needed in forall construct. */
2610 size = compute_overall_iter_number (nested_forall_info, inner_size,
2611 inner_size_body, block);
2613 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2617 /* Handle assignments inside forall which need temporary.
2619 forall (i=start:end:stride; maskexpr)
2620 e<i> = f<i>
2621 end forall
2622 (where e,f<i> are arbitrary expressions possibly involving i
2623 and there is a dependency between e<i> and f<i>)
2624 Translates to:
2625 masktmp(:) = maskexpr(:)
2627 maskindex = 0;
2628 count1 = 0;
2629 num = 0;
2630 for (i = start; i <= end; i += stride)
2631 num += SIZE (f<i>)
2632 count1 = 0;
2633 ALLOCATE (tmp(num))
2634 for (i = start; i <= end; i += stride)
2636 if (masktmp[maskindex++])
2637 tmp[count1++] = f<i>
2639 maskindex = 0;
2640 count1 = 0;
2641 for (i = start; i <= end; i += stride)
2643 if (masktmp[maskindex++])
2644 e<i> = tmp[count1++]
2646 DEALLOCATE (tmp)
2648 static void
2649 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2650 tree wheremask, bool invert,
2651 forall_info * nested_forall_info,
2652 stmtblock_t * block)
/* Translate EXPR1 = EXPR2 inside a FORALL when a temporary is needed
   (see the scheme in the header comment above).  WHEREMASK/INVERT
   optionally guard each element; code is emitted into BLOCK.  */
2654 tree type;
2655 tree inner_size;
2656 gfc_ss *lss, *rss;
2657 tree count, count1;
2658 tree tmp, tmp1;
2659 tree ptemp1;
2660 stmtblock_t inner_size_body;
2662 /* Create vars. count1 is the current iterator number of the nested
2663 forall. */
2664 count1 = gfc_create_var (gfc_array_index_type, "count1");
2666 /* Count is the wheremask index. */
2667 if (wheremask)
2669 count = gfc_create_var (gfc_array_index_type, "count");
2670 gfc_add_modify (block, count, gfc_index_zero_node);
2672 else
2673 count = NULL;
2675 /* Initialize count1. */
2676 gfc_add_modify (block, count1, gfc_index_zero_node);
2678 /* Calculate the size of temporary needed in the assignment. Return loop, lss
2679 and rss which are used in function generate_loop_for_rhs_to_temp(). */
2680 gfc_init_block (&inner_size_body);
2681 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2682 &lss, &rss);
2684 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
2685 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
/* Side effect: caches the translated character length in
   expr1->ts.u.cl->backend_decl for later users.  */
2687 if (!expr1->ts.u.cl->backend_decl)
2689 gfc_se tse;
2690 gfc_init_se (&tse, NULL);
2691 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2692 expr1->ts.u.cl->backend_decl = tse.expr;
2694 type = gfc_get_character_type_len (gfc_default_character_kind,
2695 expr1->ts.u.cl->backend_decl);
2697 else
2698 type = gfc_typenode_for_spec (&expr1->ts);
2700 /* Allocate temporary for nested forall construct according to the
2701 information in nested_forall_info and inner_size. */
2702 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2703 &inner_size_body, block, &ptemp1);
2705 /* Generate codes to copy rhs to the temporary . */
2706 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2707 wheremask, invert);
2709 /* Generate body and loops according to the information in
2710 nested_forall_info. */
2711 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2712 gfc_add_expr_to_block (block, tmp);
2714 /* Reset count1. */
/* The counters are rewound so the second pass (temp -> lhs) walks the
   temporary and wheremask in the same order as the first pass.  */
2715 gfc_add_modify (block, count1, gfc_index_zero_node);
2717 /* Reset count. */
2718 if (wheremask)
2719 gfc_add_modify (block, count, gfc_index_zero_node);
2721 /* Generate codes to copy the temporary to lhs. */
2722 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2723 wheremask, invert);
2725 /* Generate body and loops according to the information in
2726 nested_forall_info. */
2727 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2728 gfc_add_expr_to_block (block, tmp);
2730 if (ptemp1)
2732 /* Free the temporary. */
2733 tmp = gfc_call_free (ptemp1);
2734 gfc_add_expr_to_block (block, tmp);
2739 /* Translate pointer assignment inside FORALL which need temporary. */
2741 static void
2742 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2743 forall_info * nested_forall_info,
2744 stmtblock_t * block)
/* Translate EXPR1 => EXPR2 inside a FORALL when the dependency check
   demands a temporary: first pass stores all rhs pointers/descriptors
   into a linear temporary, second pass assigns them to the lhs.  Code
   is emitted into BLOCK.  */
2746 tree type;
2747 tree inner_size;
2748 gfc_ss *lss, *rss;
2749 gfc_se lse;
2750 gfc_se rse;
2751 gfc_ss_info *info;
2752 gfc_loopinfo loop;
2753 tree desc;
2754 tree parm;
2755 tree parmtype;
2756 stmtblock_t body;
2757 tree count;
2758 tree tmp, tmp1, ptemp1;
2760 count = gfc_create_var (gfc_array_index_type, "count");
2761 gfc_add_modify (block, count, gfc_index_zero_node);
/* NOTE(review): later gfortran uses gfc_index_one_node here;
   integer_one_node has integer_type_node type while the size
   arithmetic in compute_overall_iter_number is done in
   gfc_array_index_type -- confirm whether the mismatch matters.  */
2763 inner_size = integer_one_node;
2764 lss = gfc_walk_expr (expr1);
2765 rss = gfc_walk_expr (expr2);
/* Scalar pointer assignment: the temporary holds bare pointers.  */
2766 if (lss == gfc_ss_terminator)
2768 type = gfc_typenode_for_spec (&expr1->ts);
2769 type = build_pointer_type (type);
2771 /* Allocate temporary for nested forall construct according to the
2772 information in nested_forall_info and inner_size. */
2773 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2774 inner_size, NULL, block, &ptemp1);
2775 gfc_start_block (&body);
2776 gfc_init_se (&lse, NULL);
2777 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2778 gfc_init_se (&rse, NULL);
2779 rse.want_pointer = 1;
2780 gfc_conv_expr (&rse, expr2);
2781 gfc_add_block_to_block (&body, &rse.pre);
2782 gfc_add_modify (&body, lse.expr,
2783 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2784 gfc_add_block_to_block (&body, &rse.post);
2786 /* Increment count. */
2787 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2788 count, gfc_index_one_node);
2789 gfc_add_modify (&body, count, tmp);
2791 tmp = gfc_finish_block (&body);
2793 /* Generate body and loops according to the information in
2794 nested_forall_info. */
2795 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2796 gfc_add_expr_to_block (block, tmp);
2798 /* Reset count. */
2799 gfc_add_modify (block, count, gfc_index_zero_node);
/* Second pass: read the saved pointers back and assign to the lhs.  */
2801 gfc_start_block (&body);
2802 gfc_init_se (&lse, NULL);
2803 gfc_init_se (&rse, NULL);
2804 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2805 lse.want_pointer = 1;
2806 gfc_conv_expr (&lse, expr1);
2807 gfc_add_block_to_block (&body, &lse.pre);
2808 gfc_add_modify (&body, lse.expr, rse.expr);
2809 gfc_add_block_to_block (&body, &lse.post);
2810 /* Increment count. */
2811 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2812 count, gfc_index_one_node);
2813 gfc_add_modify (&body, count, tmp);
2814 tmp = gfc_finish_block (&body);
2816 /* Generate body and loops according to the information in
2817 nested_forall_info. */
2818 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2819 gfc_add_expr_to_block (block, tmp);
/* Array pointer assignment: the temporary holds full array
   descriptors built from the rhs sections.  */
2821 else
2823 gfc_init_loopinfo (&loop);
2825 /* Associate the SS with the loop. */
2826 gfc_add_ss_to_loop (&loop, rss);
2828 /* Setup the scalarizing loops and bounds. */
2829 gfc_conv_ss_startstride (&loop);
2831 gfc_conv_loop_setup (&loop, &expr2->where);
2833 info = &rss->data.info;
2834 desc = info->descriptor;
2836 /* Make a new descriptor. */
2837 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2838 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
2839 loop.from, loop.to, 1,
2840 GFC_ARRAY_UNKNOWN, true);
2842 /* Allocate temporary for nested forall construct. */
2843 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2844 inner_size, NULL, block, &ptemp1);
2845 gfc_start_block (&body);
2846 gfc_init_se (&lse, NULL);
2847 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2848 lse.direct_byref = 1;
/* The rhs is re-walked: the chain used for loop setup above is not
   reusable for gfc_conv_expr_descriptor.  */
2849 rss = gfc_walk_expr (expr2);
2850 gfc_conv_expr_descriptor (&lse, expr2, rss);
2852 gfc_add_block_to_block (&body, &lse.pre);
2853 gfc_add_block_to_block (&body, &lse.post);
2855 /* Increment count. */
2856 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2857 count, gfc_index_one_node);
2858 gfc_add_modify (&body, count, tmp);
2860 tmp = gfc_finish_block (&body);
2862 /* Generate body and loops according to the information in
2863 nested_forall_info. */
2864 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2865 gfc_add_expr_to_block (block, tmp);
2867 /* Reset count. */
2868 gfc_add_modify (block, count, gfc_index_zero_node);
2870 parm = gfc_build_array_ref (tmp1, count, NULL);
2871 lss = gfc_walk_expr (expr1);
2872 gfc_init_se (&lse, NULL);
2873 gfc_conv_expr_descriptor (&lse, expr1, lss);
2874 gfc_add_modify (&lse.pre, lse.expr, parm);
2875 gfc_start_block (&body);
2876 gfc_add_block_to_block (&body, &lse.pre);
2877 gfc_add_block_to_block (&body, &lse.post);
2879 /* Increment count. */
2880 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2881 count, gfc_index_one_node);
2882 gfc_add_modify (&body, count, tmp);
2884 tmp = gfc_finish_block (&body);
2886 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2887 gfc_add_expr_to_block (block, tmp);
2889 /* Free the temporary. */
2890 if (ptemp1)
2892 tmp = gfc_call_free (ptemp1);
2893 gfc_add_expr_to_block (block, tmp);
2898 /* FORALL and WHERE statements are really nasty, especially when you nest
2899 them. All the rhs of a forall assignment must be evaluated before the
2900 actual assignments are performed. Presumably this also applies to all the
2901 assignments in an inner where statement. */
2903 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2904 linear array, relying on the fact that we process in the same order in all
2905 loops.
2907 forall (i=start:end:stride; maskexpr)
2908 e<i> = f<i>
2909 g<i> = h<i>
2910 end forall
2911 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2912 Translates to:
2913 count = ((end + 1 - start) / stride)
2914 masktmp(:) = maskexpr(:)
2916 maskindex = 0;
2917 for (i = start; i <= end; i += stride)
2919 if (masktmp[maskindex++])
2920 e<i> = f<i>
2922 maskindex = 0;
2923 for (i = start; i <= end; i += stride)
2925 if (masktmp[maskindex++])
2926 g<i> = h<i>
2929 Note that this code only works when there are no dependencies.
2930 Forall loop with array assignments and data dependencies are a real pain,
2931 because the size of the temporary cannot always be determined before the
2932 loop is executed. This problem is compounded by the presence of nested
2933 FORALL constructs.
2936 static tree
2937 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
/* Translate one FORALL level (see the scheme in the comment above).
   NESTED_FORALL_INFO describes any enclosing FORALL levels (NULL at
   the top level); this level's info is linked onto it and passed to
   the body translators.  Returns the complete statement tree.  */
2939 stmtblock_t pre;
2940 stmtblock_t post;
2941 stmtblock_t block;
2942 stmtblock_t body;
2943 tree *var;
2944 tree *start;
2945 tree *end;
2946 tree *step;
2947 gfc_expr **varexpr;
2948 tree tmp;
2949 tree assign;
2950 tree size;
2951 tree maskindex;
2952 tree mask;
2953 tree pmask;
2954 int n;
2955 int nvar;
2956 int need_temp;
2957 gfc_forall_iterator *fa;
2958 gfc_se se;
2959 gfc_code *c;
2960 gfc_saved_var *saved_vars;
2961 iter_info *this_forall;
2962 forall_info *info;
2963 bool need_mask;
2965 /* Do nothing if the mask is false. */
2966 if (code->expr1
2967 && code->expr1->expr_type == EXPR_CONSTANT
2968 && !code->expr1->value.logical)
2969 return build_empty_stmt (input_location);
2971 n = 0;
2972 /* Count the FORALL index number. */
2973 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2974 n++;
2975 nvar = n;
2977 /* Allocate the space for var, start, end, step, varexpr. */
2978 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2979 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2980 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2981 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2982 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2983 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2985 /* Allocate the space for info. */
2986 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2988 gfc_start_block (&pre);
2989 gfc_init_block (&post);
2990 gfc_init_block (&block);
2992 n = 0;
/* For each FORALL index: shadow the symbol with a fresh temporary,
   evaluate start/end/stride once, and chain an iter_info entry.  */
2993 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2995 gfc_symbol *sym = fa->var->symtree->n.sym;
2997 /* Allocate space for this_forall. */
2998 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
3000 /* Create a temporary variable for the FORALL index. */
3001 tmp = gfc_typenode_for_spec (&sym->ts);
3002 var[n] = gfc_create_var (tmp, sym->name);
3003 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3005 /* Record it in this_forall. */
3006 this_forall->var = var[n];
3008 /* Replace the index symbol's backend_decl with the temporary decl. */
3009 sym->backend_decl = var[n];
3011 /* Work out the start, end and stride for the loop. */
3012 gfc_init_se (&se, NULL);
3013 gfc_conv_expr_val (&se, fa->start);
3014 /* Record it in this_forall. */
3015 this_forall->start = se.expr;
3016 gfc_add_block_to_block (&block, &se.pre);
3017 start[n] = se.expr;
3019 gfc_init_se (&se, NULL);
3020 gfc_conv_expr_val (&se, fa->end);
3021 /* Record it in this_forall. */
3022 this_forall->end = se.expr;
3023 gfc_make_safe_expr (&se);
3024 gfc_add_block_to_block (&block, &se.pre);
3025 end[n] = se.expr;
3027 gfc_init_se (&se, NULL);
3028 gfc_conv_expr_val (&se, fa->stride);
3029 /* Record it in this_forall. */
3030 this_forall->step = se.expr;
3031 gfc_make_safe_expr (&se);
3032 gfc_add_block_to_block (&block, &se.pre);
3033 step[n] = se.expr;
3035 /* Set the NEXT field of this_forall to NULL. */
3036 this_forall->next = NULL;
3037 /* Link this_forall to the info construct. */
3038 if (info->this_loop)
3040 iter_info *iter_tmp = info->this_loop;
3041 while (iter_tmp->next != NULL)
3042 iter_tmp = iter_tmp->next;
3043 iter_tmp->next = this_forall;
3045 else
3046 info->this_loop = this_forall;
3048 n++;
3050 nvar = n;
3052 /* Calculate the size needed for the current forall level. */
3053 size = gfc_index_one_node;
3054 for (n = 0; n < nvar; n++)
3056 /* size = (end + step - start) / step. */
3057 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
3058 step[n], start[n]);
3059 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
3061 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
3062 tmp = convert (gfc_array_index_type, tmp);
3064 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
3067 /* Record the nvar and size of current forall level. */
3068 info->nvar = nvar;
3069 info->size = size;
3071 if (code->expr1)
3073 /* If the mask is .true., consider the FORALL unconditional. */
3074 if (code->expr1->expr_type == EXPR_CONSTANT
3075 && code->expr1->value.logical)
3076 need_mask = false;
3077 else
3078 need_mask = true;
3080 else
3081 need_mask = false;
3083 /* First we need to allocate the mask. */
3084 if (need_mask)
3086 /* As the mask array can be very big, prefer compact boolean types. */
3087 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3088 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3089 size, NULL, &block, &pmask);
3090 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3092 /* Record them in the info structure. */
3093 info->maskindex = maskindex;
3094 info->mask = mask;
3096 else
3098 /* No mask was specified. */
3099 maskindex = NULL_TREE;
3100 mask = pmask = NULL_TREE;
3103 /* Link the current forall level to nested_forall_info. */
3104 info->prev_nest = nested_forall_info;
3105 nested_forall_info = info;
3107 /* Copy the mask into a temporary variable if required.
3108 For now we assume a mask temporary is needed. */
3109 if (need_mask)
3111 /* As the mask array can be very big, prefer compact boolean types. */
3112 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3114 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3116 /* Start of mask assignment loop body. */
3117 gfc_start_block (&body);
3119 /* Evaluate the mask expression. */
3120 gfc_init_se (&se, NULL);
3121 gfc_conv_expr_val (&se, code->expr1);
3122 gfc_add_block_to_block (&body, &se.pre);
3124 /* Store the mask. */
3125 se.expr = convert (mask_type, se.expr);
3127 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3128 gfc_add_modify (&body, tmp, se.expr);
3130 /* Advance to the next mask element. */
3131 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3132 maskindex, gfc_index_one_node);
3133 gfc_add_modify (&body, maskindex, tmp);
3135 /* Generate the loops. */
3136 tmp = gfc_finish_block (&body);
/* Last argument 0: the mask-filling loop itself must NOT be guarded
   by this level's mask.  */
3137 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3138 gfc_add_expr_to_block (&block, tmp);
3141 c = code->block->next;
3143 /* TODO: loop merging in FORALL statements. */
3144 /* Now that we've got a copy of the mask, generate the assignment loops. */
3145 while (c)
3147 switch (c->op)
3149 case EXEC_ASSIGN:
3150 /* A scalar or array assignment. DO the simple check for
3151 lhs to rhs dependencies. These make a temporary for the
3152 rhs and form a second forall block to copy to variable. */
3153 need_temp = check_forall_dependencies(c, &pre, &post);
3155 /* Temporaries due to array assignment data dependencies introduce
3156 no end of problems. */
3157 if (need_temp)
3158 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3159 nested_forall_info, &block);
3160 else
3162 /* Use the normal assignment copying routines. */
3163 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3165 /* Generate body and loops. */
3166 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3167 assign, 1);
3168 gfc_add_expr_to_block (&block, tmp);
3171 /* Cleanup any temporary symtrees that have been made to deal
3172 with dependencies. */
/* NOTE(review): new_symtree appears to be file-scope state set by
   check_forall_dependencies() -- not visible in this chunk; confirm
   against the rest of the file.  */
3173 if (new_symtree)
3174 cleanup_forall_symtrees (c);
3176 break;
3178 case EXEC_WHERE:
3179 /* Translate WHERE or WHERE construct nested in FORALL. */
3180 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3181 break;
3183 /* Pointer assignment inside FORALL. */
3184 case EXEC_POINTER_ASSIGN:
3185 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3186 if (need_temp)
3187 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3188 nested_forall_info, &block);
3189 else
3191 /* Use the normal assignment copying routines. */
3192 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3194 /* Generate body and loops. */
3195 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3196 assign, 1);
3197 gfc_add_expr_to_block (&block, tmp);
3199 break;
3201 case EXEC_FORALL:
/* Nested FORALL: recurse with this level linked into the chain.  */
3202 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3203 gfc_add_expr_to_block (&block, tmp);
3204 break;
3206 /* Explicit subroutine calls are prevented by the frontend but interface
3207 assignments can legitimately produce them. */
3208 case EXEC_ASSIGN_CALL:
3209 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3210 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3211 gfc_add_expr_to_block (&block, tmp);
3212 break;
3214 default:
3215 gcc_unreachable ();
3218 c = c->next;
3221 /* Restore the original index variables. */
3222 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3223 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3225 /* Free the space for var, start, end, step, varexpr. */
3226 gfc_free (var);
3227 gfc_free (start);
3228 gfc_free (end);
3229 gfc_free (step);
3230 gfc_free (varexpr);
3231 gfc_free (saved_vars);
3233 /* Free the space for this forall_info. */
3234 gfc_free (info);
3236 if (pmask)
3238 /* Free the temporary for the mask. */
3239 tmp = gfc_call_free (pmask);
3240 gfc_add_expr_to_block (&block, tmp);
3242 if (maskindex)
3243 pushdecl (maskindex);
3245 gfc_add_block_to_block (&pre, &block);
3246 gfc_add_block_to_block (&pre, &post);
3248 return gfc_finish_block (&pre);
3252 /* Translate the FORALL statement or construct. */
3254 tree gfc_trans_forall (gfc_code * code)
3256 return gfc_trans_forall_1 (code, NULL);
3260 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3261 If the WHERE construct is nested in FORALL, compute the overall temporary
3262 needed by the WHERE mask expression multiplied by the iterator number of
3263 the nested forall.
3264 ME is the WHERE mask expression.
3265 MASK is the current execution mask upon input, whose sense may or may
3266 not be inverted as specified by the INVERT argument.
3267 CMASK is the updated execution mask on output, or NULL if not required.
3268 PMASK is the pending execution mask on output, or NULL if not required.
3269 BLOCK is the block in which to place the condition evaluation loops. */
3271 static void
3272 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3273 tree mask, bool invert, tree cmask, tree pmask,
3274 tree mask_type, stmtblock_t * block)
/* See the header comment above for the roles of ME, MASK/INVERT,
   CMASK and PMASK.  MASK_TYPE is the element type of the mask arrays;
   generated code is appended to BLOCK.  */
3276 tree tmp, tmp1;
3277 gfc_ss *lss, *rss;
3278 gfc_loopinfo loop;
3279 stmtblock_t body, body1;
3280 tree count, cond, mtmp;
3281 gfc_se lse, rse;
3283 gfc_init_loopinfo (&loop);
3285 lss = gfc_walk_expr (me);
3286 rss = gfc_walk_expr (me);
3288 /* Variable to index the temporary. */
3289 count = gfc_create_var (gfc_array_index_type, "count");
3290 /* Initialize count. */
3291 gfc_add_modify (block, count, gfc_index_zero_node);
3293 gfc_start_block (&body);
3295 gfc_init_se (&rse, NULL);
3296 gfc_init_se (&lse, NULL);
/* Scalar mask expression: no scalarization loop is set up.  */
3298 if (lss == gfc_ss_terminator)
3300 gfc_init_block (&body1);
3302 else
3304 /* Initialize the loop. */
3305 gfc_init_loopinfo (&loop);
3307 /* We may need LSS to determine the shape of the expression. */
3308 gfc_add_ss_to_loop (&loop, lss);
3309 gfc_add_ss_to_loop (&loop, rss);
3311 gfc_conv_ss_startstride (&loop);
3312 gfc_conv_loop_setup (&loop, &me->where);
3314 gfc_mark_ss_chain_used (rss, 1);
3315 /* Start the loop body. */
3316 gfc_start_scalarized_body (&loop, &body1);
3318 /* Translate the expression. */
3319 gfc_copy_loopinfo_to_se (&rse, &loop);
3320 rse.ss = rss;
3321 gfc_conv_expr (&rse, me);
3324 /* Variable to evaluate mask condition. */
3325 cond = gfc_create_var (mask_type, "cond");
/* MTMP caches the current (possibly inverted) execution-mask element
   so it is read once and reused for both CMASK and PMASK stores.  */
3326 if (mask && (cmask || pmask))
3327 mtmp = gfc_create_var (mask_type, "mask");
3328 else mtmp = NULL_TREE;
3330 gfc_add_block_to_block (&body1, &lse.pre);
3331 gfc_add_block_to_block (&body1, &rse.pre);
3333 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3335 if (mask && (cmask || pmask))
3337 tmp = gfc_build_array_ref (mask, count, NULL);
3338 if (invert)
3339 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3340 gfc_add_modify (&body1, mtmp, tmp);
/* cmask[i] = current-mask AND cond; pmask[i] = current-mask AND
   NOT cond -- the "then" and pending "elsewhere" masks.  */
3343 if (cmask)
3345 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3346 tmp = cond;
3347 if (mask)
3348 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3349 gfc_add_modify (&body1, tmp1, tmp);
3352 if (pmask)
3354 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3355 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3356 if (mask)
3357 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3358 gfc_add_modify (&body1, tmp1, tmp);
3361 gfc_add_block_to_block (&body1, &lse.post);
3362 gfc_add_block_to_block (&body1, &rse.post);
3364 if (lss == gfc_ss_terminator)
3366 gfc_add_block_to_block (&body, &body1)
3368 else
3370 /* Increment count. */
3371 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3372 gfc_index_one_node);
3373 gfc_add_modify (&body1, count, tmp1);
3375 /* Generate the copying loops. */
3376 gfc_trans_scalarizing_loops (&loop, &body1);
3378 gfc_add_block_to_block (&body, &loop.pre);
3379 gfc_add_block_to_block (&body, &loop.post);
3381 gfc_cleanup_loop (&loop);
3382 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3383 as tree nodes in SS may not be valid in different scope. */
3386 tmp1 = gfc_finish_block (&body);
3387 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3388 if (nested_forall_info != NULL)
3389 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3391 gfc_add_expr_to_block (block, tmp1);
3395 /* Translate an assignment statement in a WHERE statement or construct
3396 statement. The MASK expression is used to control which elements
3397 of EXPR1 shall be assigned. The sense of MASK is specified by
3398 INVERT. */
 3400 static tree
 3401 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
 3402                         tree mask, bool invert,
 3403                         tree count1, tree count2,
 3404                         gfc_code *cnext)
       /* MASK is a linear temporary holding control-mask values; COUNT1
          indexes it in the main scalarized loop and COUNT2 in the
          temporary-to-lhs copy-out loop emitted when a dependency forces a
          temporary.  */
 3406   gfc_se lse;
 3407   gfc_se rse;
 3408   gfc_ss *lss;
 3409   gfc_ss *lss_section;
 3410   gfc_ss *rss;
 3412   gfc_loopinfo loop;
 3413   tree tmp;
 3414   stmtblock_t block;
 3415   stmtblock_t body;
 3416   tree index, maskexpr;
 3418   /* A defined assignment. */
 3419   if (cnext && cnext->resolved_sym)
 3420     return gfc_trans_call (cnext, true, mask, count1, invert);
 3422 #if 0
 3423 /* TODO: handle this special case.
 3424    Special case a single function returning an array. */
 3425   if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
 3427       tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
 3428       if (tmp)
 3429         return tmp;
 3431 #endif
 3433  /* Assignment of the form lhs = rhs. */
 3434   gfc_start_block (&block);
 3436   gfc_init_se (&lse, NULL);
 3437   gfc_init_se (&rse, NULL);
 3439   /* Walk the lhs. */
 3440   lss = gfc_walk_expr (expr1);
 3441   rss = NULL;
 3443   /* In each where-assign-stmt, the mask-expr and the variable being
 3444      defined shall be arrays of the same shape. */
 3445   gcc_assert (lss != gfc_ss_terminator);
 3447   /* The assignment needs scalarization. */
 3448   lss_section = lss;
 3450   /* Find a non-scalar SS from the lhs. */
 3451   while (lss_section != gfc_ss_terminator
 3452          && lss_section->type != GFC_SS_SECTION)
 3453     lss_section = lss_section->next;
 3455   gcc_assert (lss_section != gfc_ss_terminator);
 3457   /* Initialize the scalarizer. */
 3458   gfc_init_loopinfo (&loop);
 3460   /* Walk the rhs. */
 3461   rss = gfc_walk_expr (expr2);
 3462   if (rss == gfc_ss_terminator)
 3464      /* The rhs is scalar.  Add a ss for the expression. */
 3465       rss = gfc_get_ss ();
 3466       rss->where = 1;
 3467       rss->next = gfc_ss_terminator;
 3468       rss->type = GFC_SS_SCALAR;
 3469       rss->expr = expr2;
 3472   /* Associate the SS with the loop. */
 3473   gfc_add_ss_to_loop (&loop, lss);
 3474   gfc_add_ss_to_loop (&loop, rss);
 3476   /* Calculate the bounds of the scalarization. */
 3477   gfc_conv_ss_startstride (&loop);
 3479   /* Resolve any data dependencies in the statement. */
 3480   gfc_conv_resolve_dependencies (&loop, lss_section, rss);
 3482   /* Setup the scalarizing loops. */
 3483   gfc_conv_loop_setup (&loop, &expr2->where);
 3485   /* Setup the gfc_se structures. */
 3486   gfc_copy_loopinfo_to_se (&lse, &loop);
 3487   gfc_copy_loopinfo_to_se (&rse, &loop);
 3489   rse.ss = rss;
 3490   gfc_mark_ss_chain_used (rss, 1);
 3491   if (loop.temp_ss == NULL)
 3493       lse.ss = lss;
 3494       gfc_mark_ss_chain_used (lss, 1);
 3496   else
 3498       lse.ss = loop.temp_ss;
 3499       gfc_mark_ss_chain_used (lss, 3);
 3500       gfc_mark_ss_chain_used (loop.temp_ss, 3);
 3503   /* Start the scalarized loop body. */
 3504   gfc_start_scalarized_body (&loop, &body);
 3506   /* Translate the expression. */
 3507   gfc_conv_expr (&rse, expr2);
 3508   if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
 3510       gfc_conv_tmp_array_ref (&lse);
 3511       gfc_advance_se_ss_chain (&lse);
 3513   else
 3514     gfc_conv_expr (&lse, expr1);
 3516   /* Form the mask expression according to the mask. */
 3517   index = count1;
 3518   maskexpr = gfc_build_array_ref (mask, index, NULL);
 3519   if (invert)
 3520     maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
 3522   /* Use the scalar assignment as is. */
 3523   tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
 3524                                  loop.temp_ss != NULL, false, true);
 3526   tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
 3528   gfc_add_expr_to_block (&body, tmp);
 3530   if (lss == gfc_ss_terminator)
 3532       /* Increment count1. */
 3533       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
 3534                          count1, gfc_index_one_node);
 3535       gfc_add_modify (&body, count1, tmp);
 3537       /* Use the scalar assignment as is. */
 3538       gfc_add_block_to_block (&block, &body);
 3540   else
 3542       gcc_assert (lse.ss == gfc_ss_terminator
 3543                   && rse.ss == gfc_ss_terminator);
       /* A dependency forced a temporary: the inner loop filled it, so
          after the loop boundary copy the temporary to the real LHS under
          the same mask, this time indexed by COUNT2.  */
 3545       if (loop.temp_ss != NULL)
 3547           /* Increment count1 before finish the main body of a scalarized
 3548              expression. */
 3549           tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
 3550                              count1, gfc_index_one_node);
 3551           gfc_add_modify (&body, count1, tmp);
 3552           gfc_trans_scalarized_loop_boundary (&loop, &body);
 3554           /* We need to copy the temporary to the actual lhs. */
 3555           gfc_init_se (&lse, NULL);
 3556           gfc_init_se (&rse, NULL);
 3557           gfc_copy_loopinfo_to_se (&lse, &loop);
 3558           gfc_copy_loopinfo_to_se (&rse, &loop);
 3560           rse.ss = loop.temp_ss;
 3561           lse.ss = lss;
 3563           gfc_conv_tmp_array_ref (&rse);
 3564           gfc_advance_se_ss_chain (&rse);
 3565           gfc_conv_expr (&lse, expr1);
 3567           gcc_assert (lse.ss == gfc_ss_terminator
 3568                       && rse.ss == gfc_ss_terminator);
 3570           /* Form the mask expression according to the mask tree list. */
 3571           index = count2;
 3572           maskexpr = gfc_build_array_ref (mask, index, NULL);
 3573           if (invert)
 3574             maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
 3575                                     maskexpr);
 3577           /* Use the scalar assignment as is. */
 3578           tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
 3579                                          true);
 3580           tmp = build3_v (COND_EXPR, maskexpr, tmp,
 3581                           build_empty_stmt (input_location));
 3582           gfc_add_expr_to_block (&body, tmp);
 3584           /* Increment count2. */
 3585           tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
 3586                              count2, gfc_index_one_node);
 3587           gfc_add_modify (&body, count2, tmp);
 3589       else
 3591           /* Increment count1. */
 3592           tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
 3593                              count1, gfc_index_one_node);
 3594           gfc_add_modify (&body, count1, tmp);
 3597       /* Generate the copying loops. */
 3598       gfc_trans_scalarizing_loops (&loop, &body);
 3600       /* Wrap the whole thing up. */
 3601       gfc_add_block_to_block (&block, &loop.pre);
 3602       gfc_add_block_to_block (&block, &loop.post);
 3603       gfc_cleanup_loop (&loop);
 3606   return gfc_finish_block (&block);
3610 /* Translate the WHERE construct or statement.
3611 This function can be called iteratively to translate the nested WHERE
3612 construct or statement.
3613 MASK is the control mask. */
 3615 static void
 3616 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
 3617                    forall_info * nested_forall_info, stmtblock_t * block)
 3619   stmtblock_t inner_size_body;
 3620   tree inner_size, size;
 3621   gfc_ss *lss, *rss;
 3622   tree mask_type;
 3623   gfc_expr *expr1;
 3624   gfc_expr *expr2;
 3625   gfc_code *cblock;
 3626   gfc_code *cnext;
 3627   tree tmp;
 3628   tree cond;
 3629   tree count1, count2;
 3630   bool need_cmask;
 3631   bool need_pmask;
 3632   int need_temp;
 3633   tree pcmask = NULL_TREE;
 3634   tree ppmask = NULL_TREE;
 3635   tree cmask = NULL_TREE;
 3636   tree pmask = NULL_TREE;
 3637   gfc_actual_arglist *arg;
       /* CMASK controls the current clause's body and PMASK is the pending
          mask for the remaining ELSEWHERE clauses; PCMASK/PPMASK hold the
          raw allocations so they can be freed at the end.  */
 3639 /* the WHERE statement or the WHERE construct statement. */
 3640   cblock = code->block;
 3642   /* As the mask array can be very big, prefer compact boolean types. */
 3643   mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
 3645   /* Determine which temporary masks are needed. */
 3646   if (!cblock->block)
 3648       /* One clause: No ELSEWHEREs. */
 3649       need_cmask = (cblock->next != 0);
 3650       need_pmask = false;
 3652   else if (cblock->block->block)
 3654       /* Three or more clauses: Conditional ELSEWHEREs. */
 3655       need_cmask = true;
 3656       need_pmask = true;
 3658   else if (cblock->next)
 3660       /* Two clauses, the first non-empty. */
 3661       need_cmask = true;
 3662       need_pmask = (mask != NULL_TREE
 3663                     && cblock->block->next != 0);
 3665   else if (!cblock->block->next)
 3667       /* Two clauses, both empty. */
 3668       need_cmask = false;
 3669       need_pmask = false;
 3671   /* Two clauses, the first empty, the second non-empty. */
 3672   else if (mask)
 3674       need_cmask = (cblock->block->expr1 != 0);
 3675       need_pmask = true;
 3677   else
 3679       need_cmask = true;
 3680       need_pmask = false;
 3683   if (need_cmask || need_pmask)
 3685       /* Calculate the size of temporary needed by the mask-expr. */
 3686       gfc_init_block (&inner_size_body);
 3687       inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
 3688                                             &inner_size_body, &lss, &rss);
 3690       /* Calculate the total size of temporary needed. */
 3691       size = compute_overall_iter_number (nested_forall_info, inner_size,
 3692                                           &inner_size_body, block);
 3694       /* Check whether the size is negative. */
 3695       cond = fold_build2 (LE_EXPR, boolean_type_node, size,
 3696                           gfc_index_zero_node);
 3697       size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
 3698                           gfc_index_zero_node, size);
 3699       size = gfc_evaluate_now (size, block);
 3701       /* Allocate temporary for WHERE mask if needed. */
 3702       if (need_cmask)
 3703         cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
 3704                                                  &pcmask);
 3706       /* Allocate temporary for !mask if needed. */
 3707       if (need_pmask)
 3708         pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
 3709                                                  &ppmask);
 3712   while (cblock)
 3714       /* Each time around this loop, the where clause is conditional
 3715          on the value of mask and invert, which are updated at the
 3716          bottom of the loop. */
 3718       /* Has mask-expr. */
 3719       if (cblock->expr1)
 3721           /* Ensure that the WHERE mask will be evaluated exactly once.
 3722              If there are no statements in this WHERE/ELSEWHERE clause,
 3723              then we don't need to update the control mask (cmask).
 3724              If this is the last clause of the WHERE construct, then
 3725              we don't need to update the pending control mask (pmask). */
 3726           if (mask)
 3727             gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
 3728                                      mask, invert,
 3729                                      cblock->next ? cmask : NULL_TREE,
 3730                                      cblock->block ? pmask : NULL_TREE,
 3731                                      mask_type, block);
 3732           else
 3733             gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
 3734                                      NULL_TREE, false,
 3735                                      (cblock->next || cblock->block)
 3736                                      ? cmask : NULL_TREE,
 3737                                      NULL_TREE, mask_type, block);
 3739           invert = false;
 3741       /* It's a final elsewhere-stmt. No mask-expr is present. */
 3742       else
 3743         cmask = mask;
 3745       /* The body of this where clause are controlled by cmask with
 3746          sense specified by invert. */
 3748       /* Get the assignment statement of a WHERE statement, or the first
 3749          statement in where-body-construct of a WHERE construct. */
 3750       cnext = cblock->next;
 3751       while (cnext)
 3753           switch (cnext->op)
 3755             /* WHERE assignment statement. */
 3756             case EXEC_ASSIGN_CALL:
               /* Defined assignment: the first two non-NULL actual
                  arguments are the lhs and rhs of the assignment.  */
 3758               arg = cnext->ext.actual;
 3759               expr1 = expr2 = NULL;
 3760               for (; arg; arg = arg->next)
 3762                   if (!arg->expr)
 3763                     continue;
 3764                   if (expr1 == NULL)
 3765                     expr1 = arg->expr;
 3766                   else
 3767                     expr2 = arg->expr;
 3769               goto evaluate;
 3771             case EXEC_ASSIGN:
 3772               expr1 = cnext->expr1;
 3773               expr2 = cnext->expr2;
 3774             evaluate:
 3775               if (nested_forall_info != NULL)
 3777                   need_temp = gfc_check_dependency (expr1, expr2, 0);
 3778                   if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
 3779                     gfc_trans_assign_need_temp (expr1, expr2,
 3780                                                 cmask, invert,
 3781                                                 nested_forall_info, block);
 3782                   else
 3784                       /* Variables to control maskexpr. */
 3785                       count1 = gfc_create_var (gfc_array_index_type, "count1");
 3786                       count2 = gfc_create_var (gfc_array_index_type, "count2");
 3787                       gfc_add_modify (block, count1, gfc_index_zero_node);
 3788                       gfc_add_modify (block, count2, gfc_index_zero_node);
 3790                       tmp = gfc_trans_where_assign (expr1, expr2,
 3791                                                     cmask, invert,
 3792                                                     count1, count2,
 3793                                                     cnext);
 3795                       tmp = gfc_trans_nested_forall_loop (nested_forall_info,
 3796                                                           tmp, 1);
 3797                       gfc_add_expr_to_block (block, tmp);
 3800               else
 3802                   /* Variables to control maskexpr. */
 3803                   count1 = gfc_create_var (gfc_array_index_type, "count1");
 3804                   count2 = gfc_create_var (gfc_array_index_type, "count2");
 3805                   gfc_add_modify (block, count1, gfc_index_zero_node);
 3806                   gfc_add_modify (block, count2, gfc_index_zero_node);
 3808                   tmp = gfc_trans_where_assign (expr1, expr2,
 3809                                                 cmask, invert,
 3810                                                 count1, count2,
 3811                                                 cnext);
 3812                   gfc_add_expr_to_block (block, tmp);
 3815               break;
 3817             /* WHERE or WHERE construct is part of a where-body-construct. */
 3818             case EXEC_WHERE:
 3819               gfc_trans_where_2 (cnext, cmask, invert,
 3820                                  nested_forall_info, block);
 3821               break;
 3823             default:
 3824               gcc_unreachable ();
 3827           /* The next statement within the same where-body-construct. */
 3828           cnext = cnext->next;
 3830       /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
 3831       cblock = cblock->block;
 3832       if (mask == NULL_TREE)
 3834           /* If we're the initial WHERE, we can simply invert the sense
 3835              of the current mask to obtain the "mask" for the remaining
 3836              ELSEWHEREs. */
 3837           invert = true;
 3838           mask = cmask;
 3840       else
 3842           /* Otherwise, for nested WHERE's we need to use the pending mask. */
 3843           invert = false;
 3844           mask = pmask;
 3848   /* If we allocated a pending mask array, deallocate it now. */
 3849   if (ppmask)
 3851       tmp = gfc_call_free (ppmask);
 3852       gfc_add_expr_to_block (block, tmp);
 3855   /* If we allocated a current mask array, deallocate it now. */
 3856   if (pcmask)
 3858       tmp = gfc_call_free (pcmask);
 3859       gfc_add_expr_to_block (block, tmp);
3863 /* Translate a simple WHERE construct or statement without dependencies.
3864 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3865 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3866 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
 3868 static tree
 3869 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
 3871   stmtblock_t block, body;
 3872   gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
 3873   tree tmp, cexpr, tstmt, estmt;
 3874   gfc_ss *css, *tdss, *tsss;
 3875   gfc_se cse, tdse, tsse, edse, esse;
 3876   gfc_loopinfo loop;
 3877   gfc_ss *edss = 0;
 3878   gfc_ss *esss = 0;
       /* Naming scheme: c* = condition, td*/ts* = then-dest/then-source,
          ed*/es* = else-dest/else-source.  */
 3880   /* Allow the scalarizer to workshare simple where loops. */
 3881   if (ompws_flags & OMPWS_WORKSHARE_FLAG)
 3882     ompws_flags |= OMPWS_SCALARIZER_WS;
 3884   cond = cblock->expr1;
 3885   tdst = cblock->next->expr1;
 3886   tsrc = cblock->next->expr2;
 3887   edst = eblock ? eblock->next->expr1 : NULL;
 3888   esrc = eblock ? eblock->next->expr2 : NULL;
 3890   gfc_start_block (&block);
 3891   gfc_init_loopinfo (&loop);
 3893   /* Handle the condition. */
 3894   gfc_init_se (&cse, NULL);
 3895   css = gfc_walk_expr (cond);
 3896   gfc_add_ss_to_loop (&loop, css);
 3898   /* Handle the then-clause. */
 3899   gfc_init_se (&tdse, NULL);
 3900   gfc_init_se (&tsse, NULL);
 3901   tdss = gfc_walk_expr (tdst);
 3902   tsss = gfc_walk_expr (tsrc);
 3903   if (tsss == gfc_ss_terminator)
       /* Scalar then-source: wrap it in a GFC_SS_SCALAR entry so the
          scalarizer can drive it alongside the array terms.  */
 3905       tsss = gfc_get_ss ();
 3906       tsss->where = 1;
 3907       tsss->next = gfc_ss_terminator;
 3908       tsss->type = GFC_SS_SCALAR;
 3909       tsss->expr = tsrc;
 3911   gfc_add_ss_to_loop (&loop, tdss);
 3912   gfc_add_ss_to_loop (&loop, tsss);
 3914   if (eblock)
 3916       /* Handle the else clause. */
 3917       gfc_init_se (&edse, NULL);
 3918       gfc_init_se (&esse, NULL);
 3919       edss = gfc_walk_expr (edst);
 3920       esss = gfc_walk_expr (esrc);
 3921       if (esss == gfc_ss_terminator)
 3923           esss = gfc_get_ss ();
 3924           esss->where = 1;
 3925           esss->next = gfc_ss_terminator;
 3926           esss->type = GFC_SS_SCALAR;
 3927           esss->expr = esrc;
 3929       gfc_add_ss_to_loop (&loop, edss);
 3930       gfc_add_ss_to_loop (&loop, esss);
 3933   gfc_conv_ss_startstride (&loop);
 3934   gfc_conv_loop_setup (&loop, &tdst->where);
 3936   gfc_mark_ss_chain_used (css, 1);
 3937   gfc_mark_ss_chain_used (tdss, 1);
 3938   gfc_mark_ss_chain_used (tsss, 1);
 3939   if (eblock)
 3941       gfc_mark_ss_chain_used (edss, 1);
 3942       gfc_mark_ss_chain_used (esss, 1);
 3945   gfc_start_scalarized_body (&loop, &body);
 3947   gfc_copy_loopinfo_to_se (&cse, &loop);
 3948   gfc_copy_loopinfo_to_se (&tdse, &loop);
 3949   gfc_copy_loopinfo_to_se (&tsse, &loop);
 3950   cse.ss = css;
 3951   tdse.ss = tdss;
 3952   tsse.ss = tsss;
 3953   if (eblock)
 3955       gfc_copy_loopinfo_to_se (&edse, &loop);
 3956       gfc_copy_loopinfo_to_se (&esse, &loop);
 3957       edse.ss = edss;
 3958       esse.ss = esss;
 3961   gfc_conv_expr (&cse, cond);
 3962   gfc_add_block_to_block (&body, &cse.pre);
 3963   cexpr = cse.expr;
 3965   gfc_conv_expr (&tsse, tsrc);
 3966   if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
 3968       gfc_conv_tmp_array_ref (&tdse);
 3969       gfc_advance_se_ss_chain (&tdse);
 3971   else
 3972     gfc_conv_expr (&tdse, tdst);
 3974   if (eblock)
 3976       gfc_conv_expr (&esse, esrc);
 3977       if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
 3979           gfc_conv_tmp_array_ref (&edse);
 3980           gfc_advance_se_ss_chain (&edse);
 3982       else
 3983         gfc_conv_expr (&edse, edst);
       /* Emit "if (cond) then-assign else else-assign" in a single
          scalarized loop: the caller has already proved the assignments
          independent, so the two loops required by the standard can be
          coalesced here.  */
 3986   tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
 3987   estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
 3988                                             false, true)
 3989                  : build_empty_stmt (input_location);
 3990   tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
 3991   gfc_add_expr_to_block (&body, tmp);
 3992   gfc_add_block_to_block (&body, &cse.post);
 3994   gfc_trans_scalarizing_loops (&loop, &body);
 3995   gfc_add_block_to_block (&block, &loop.pre);
 3996   gfc_add_block_to_block (&block, &loop.post);
 3997   gfc_cleanup_loop (&loop);
 3999   return gfc_finish_block (&block);
4002 /* As the WHERE or WHERE construct statement can be nested, we call
4003 gfc_trans_where_2 to do the translation, and pass the initial
4004 NULL values for both the control mask and the pending control mask. */
4006 tree
4007 gfc_trans_where (gfc_code * code)
4009 stmtblock_t block;
4010 gfc_code *cblock;
4011 gfc_code *eblock;
4013 cblock = code->block;
4014 if (cblock->next
4015 && cblock->next->op == EXEC_ASSIGN
4016 && !cblock->next->next)
4018 eblock = cblock->block;
4019 if (!eblock)
4021 /* A simple "WHERE (cond) x = y" statement or block is
4022 dependence free if cond is not dependent upon writing x,
4023 and the source y is unaffected by the destination x. */
4024 if (!gfc_check_dependency (cblock->next->expr1,
4025 cblock->expr1, 0)
4026 && !gfc_check_dependency (cblock->next->expr1,
4027 cblock->next->expr2, 0))
4028 return gfc_trans_where_3 (cblock, NULL);
4030 else if (!eblock->expr1
4031 && !eblock->block
4032 && eblock->next
4033 && eblock->next->op == EXEC_ASSIGN
4034 && !eblock->next->next)
4036 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4037 block is dependence free if cond is not dependent on writes
4038 to x1 and x2, y1 is not dependent on writes to x2, and y2
4039 is not dependent on writes to x1, and both y's are not
4040 dependent upon their own x's. In addition to this, the
4041 final two dependency checks below exclude all but the same
4042 array reference if the where and elswhere destinations
4043 are the same. In short, this is VERY conservative and this
4044 is needed because the two loops, required by the standard
4045 are coalesced in gfc_trans_where_3. */
4046 if (!gfc_check_dependency(cblock->next->expr1,
4047 cblock->expr1, 0)
4048 && !gfc_check_dependency(eblock->next->expr1,
4049 cblock->expr1, 0)
4050 && !gfc_check_dependency(cblock->next->expr1,
4051 eblock->next->expr2, 1)
4052 && !gfc_check_dependency(eblock->next->expr1,
4053 cblock->next->expr2, 1)
4054 && !gfc_check_dependency(cblock->next->expr1,
4055 cblock->next->expr2, 1)
4056 && !gfc_check_dependency(eblock->next->expr1,
4057 eblock->next->expr2, 1)
4058 && !gfc_check_dependency(cblock->next->expr1,
4059 eblock->next->expr1, 0)
4060 && !gfc_check_dependency(eblock->next->expr1,
4061 cblock->next->expr1, 0))
4062 return gfc_trans_where_3 (cblock, eblock);
4066 gfc_start_block (&block);
4068 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4070 return gfc_finish_block (&block);
4074 /* CYCLE a DO loop. The label decl has already been created by
4075 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4076 node at the head of the loop. We must mark the label as used. */
4078 tree
4079 gfc_trans_cycle (gfc_code * code)
4081 tree cycle_label;
4083 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
4084 TREE_USED (cycle_label) = 1;
4085 return build1_v (GOTO_EXPR, cycle_label);
4089 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4090 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4091 loop. */
4093 tree
4094 gfc_trans_exit (gfc_code * code)
4096 tree exit_label;
4098 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
4099 TREE_USED (exit_label) = 1;
4100 return build1_v (GOTO_EXPR, exit_label);
4104 /* Translate the ALLOCATE statement. */
 4106 tree
 4107 gfc_trans_allocate (gfc_code * code)
 4109   gfc_alloc *al;
 4110   gfc_expr *expr;
 4111   gfc_se se;
 4112   tree tmp;
 4113   tree parm;
 4114   tree stat;
 4115   tree pstat;
 4116   tree error_label;
 4117   tree memsz;
 4118   stmtblock_t block;
       /* For each object in the allocation list: emit the allocation,
          optional SOURCE= initialization and CLASS vptr setup; the
          STAT=/ERRMSG= handling is emitted once at the end and reached
          via ERROR_LABEL on failure.  */
 4120   if (!code->ext.alloc.list)
 4121     return NULL_TREE;
 4123   pstat = stat = error_label = tmp = memsz = NULL_TREE;
 4125   gfc_start_block (&block);
 4127   /* Either STAT= and/or ERRMSG is present. */
 4128   if (code->expr1 || code->expr2)
 4130       tree gfc_int4_type_node = gfc_get_int_type (4);
 4132       stat = gfc_create_var (gfc_int4_type_node, "stat");
 4133       pstat = gfc_build_addr_expr (NULL_TREE, stat);
 4135       error_label = gfc_build_label_decl (NULL_TREE);
 4136       TREE_USED (error_label) = 1;
 4139   for (al = code->ext.alloc.list; al != NULL; al = al->next)
 4141       expr = gfc_copy_expr (al->expr);
       /* For CLASS objects the allocation acts on the underlying $data
          component.  */
 4143       if (expr->ts.type == BT_CLASS)
 4144         gfc_add_component_ref (expr, "$data");
 4146       gfc_init_se (&se, NULL);
 4147       gfc_start_block (&se.pre);
 4149       se.want_pointer = 1;
 4150       se.descriptor_only = 1;
 4151       gfc_conv_expr (&se, expr);
 4153       if (!gfc_array_allocate (&se, expr, pstat))
 4155           /* A scalar or derived type. */
 4157           /* Determine allocate size. */
 4158           if (code->expr3 && code->expr3->ts.type == BT_CLASS)
 4160               gfc_expr *sz;
 4161               gfc_se se_sz;
 4162               sz = gfc_copy_expr (code->expr3);
 4163               gfc_add_component_ref (sz, "$vptr");
 4164               gfc_add_component_ref (sz, "$size");
 4165               gfc_init_se (&se_sz, NULL);
 4166               gfc_conv_expr (&se_sz, sz);
 4167               gfc_free_expr (sz);
 4168               memsz = se_sz.expr;
 4170           else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
 4171             memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
 4172           else if (code->ext.alloc.ts.type != BT_UNKNOWN)
 4173             memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
 4174           else
 4175             memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
 4177           if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
 4178             memsz = se.string_length;
 4180           /* Allocate - for non-pointers with re-alloc checking. */
 4182             gfc_ref *ref;
 4183             bool allocatable;
 4185             ref = expr->ref;
 4187             /* Find the last reference in the chain. */
 4188             while (ref && ref->next != NULL)
 4190                 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
 4191                 ref = ref->next;
 4194             if (!ref)
 4195               allocatable = expr->symtree->n.sym->attr.allocatable;
 4196             else
 4197               allocatable = ref->u.c.component->attr.allocatable;
 4199             if (allocatable)
 4200               tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
 4201                                                     pstat, expr);
 4202             else
 4203               tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
 4206           tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
 4207                              fold_convert (TREE_TYPE (se.expr), tmp));
 4208           gfc_add_expr_to_block (&se.pre, tmp);
       /* With STAT= or ERRMSG=, branch to the error-handling label when
          the allocation reported a nonzero status.  */
 4210           if (code->expr1 || code->expr2)
 4212               tmp = build1_v (GOTO_EXPR, error_label);
 4213               parm = fold_build2 (NE_EXPR, boolean_type_node,
 4214                                   stat, build_int_cst (TREE_TYPE (stat), 0));
 4215               tmp = fold_build3 (COND_EXPR, void_type_node,
 4216                                  parm, tmp, build_empty_stmt (input_location));
 4217               gfc_add_expr_to_block (&se.pre, tmp);
       /* Nullify the allocatable components of a freshly allocated
          derived-type object.  */
 4220           if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
 4222               tmp = build_fold_indirect_ref_loc (input_location, se.expr);
 4223               tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
 4224               gfc_add_expr_to_block (&se.pre, tmp);
 4229       tmp = gfc_finish_block (&se.pre);
 4230       gfc_add_expr_to_block (&block, tmp);
 4232       /* Initialization via SOURCE block. */
 4233       if (code->expr3)
 4235           gfc_expr *rhs = gfc_copy_expr (code->expr3);
 4236           if (al->expr->ts.type == BT_CLASS)
 4238               gfc_se dst,src;
 4239               if (rhs->ts.type == BT_CLASS)
 4240                 gfc_add_component_ref (rhs, "$data");
 4241               gfc_init_se (&dst, NULL);
 4242               gfc_init_se (&src, NULL);
 4243               gfc_conv_expr (&dst, expr);
 4244               gfc_conv_expr (&src, rhs);
 4245               gfc_add_block_to_block (&block, &src.pre);
 4246               tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
 4248           else
 4249             tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
 4250                                         rhs, false, false);
 4251           gfc_free_expr (rhs);
 4252           gfc_add_expr_to_block (&block, tmp);
 4255       /* Allocation of CLASS entities. */
 4256       gfc_free_expr (expr);
 4257       expr = al->expr;
 4258       if (expr->ts.type == BT_CLASS)
 4260           gfc_expr *lhs,*rhs;
 4261           gfc_se lse;
 4263           /* Initialize VPTR for CLASS objects. */
 4264           lhs = gfc_expr_to_initialize (expr);
 4265           gfc_add_component_ref (lhs, "$vptr");
 4266           rhs = NULL;
 4267           if (code->expr3 && code->expr3->ts.type == BT_CLASS)
 4269               /* VPTR must be determined at run time. */
 4270               rhs = gfc_copy_expr (code->expr3);
 4271               gfc_add_component_ref (rhs, "$vptr");
 4272               tmp = gfc_trans_pointer_assignment (lhs, rhs);
 4273               gfc_add_expr_to_block (&block, tmp);
 4274               gfc_free_expr (rhs);
 4276           else
 4278               /* VPTR is fixed at compile time. */
 4279               gfc_symbol *vtab;
 4280               gfc_typespec *ts;
 4281               if (code->expr3)
 4282                 ts = &code->expr3->ts;
 4283               else if (expr->ts.type == BT_DERIVED)
 4284                 ts = &expr->ts;
 4285               else if (code->ext.alloc.ts.type == BT_DERIVED)
 4286                 ts = &code->ext.alloc.ts;
 4287               else if (expr->ts.type == BT_CLASS)
 4288                 ts = &CLASS_DATA (expr)->ts;
 4289               else
 4290                 ts = &expr->ts;
 4292               if (ts->type == BT_DERIVED)
 4294                   vtab = gfc_find_derived_vtab (ts->u.derived, true);
 4295                   gcc_assert (vtab);
 4296                   gfc_trans_assign_vtab_procs (&block, ts->u.derived, vtab);
 4297                   gfc_init_se (&lse, NULL);
 4298                   lse.want_pointer = 1;
 4299                   gfc_conv_expr (&lse, lhs);
 4300                   tmp = gfc_build_addr_expr (NULL_TREE,
 4301                                              gfc_get_symbol_decl (vtab));
 4302                   gfc_add_modify (&block, lse.expr,
 4303                                   fold_convert (TREE_TYPE (lse.expr), tmp));
 4310   /* STAT block. */
 4311   if (code->expr1)
 4313       tmp = build1_v (LABEL_EXPR, error_label);
 4314       gfc_add_expr_to_block (&block, tmp);
 4316       gfc_init_se (&se, NULL);
 4317       gfc_conv_expr_lhs (&se, code->expr1);
 4318       tmp = convert (TREE_TYPE (se.expr), stat);
 4319       gfc_add_modify (&block, se.expr, tmp);
 4322   /* ERRMSG block. */
 4323   if (code->expr2)
 4325       /* A better error message may be possible, but not required. */
 4326       const char *msg = "Attempt to allocate an allocated object";
 4327       tree errmsg, slen, dlen;
 4329       gfc_init_se (&se, NULL);
 4330       gfc_conv_expr_lhs (&se, code->expr2);
 4332       errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
 4334       gfc_add_modify (&block, errmsg,
 4335                       gfc_build_addr_expr (pchar_type_node,
 4336                         gfc_build_localized_cstring_const (msg)));
       /* Copy min(strlen(msg), len(errmsg-var)) bytes, but only when
          stat is nonzero.  */
 4338       slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
 4339       dlen = gfc_get_expr_charlen (code->expr2);
 4340       slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
 4342       dlen = build_call_expr_loc (input_location,
 4343                               built_in_decls[BUILT_IN_MEMCPY], 3,
 4344                 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
 4346       tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
 4347                          build_int_cst (TREE_TYPE (stat), 0));
 4349       tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
 4351       gfc_add_expr_to_block (&block, tmp);
 4354   return gfc_finish_block (&block);
4358 /* Translate a DEALLOCATE statement. */
 4360 tree
 4361 gfc_trans_deallocate (gfc_code *code)
 4363   gfc_se se;
 4364   gfc_alloc *al;
 4365   gfc_expr *expr;
 4366   tree apstat, astat, pstat, stat, tmp;
 4367   stmtblock_t block;
       /* STAT receives each individual deallocation's status; ASTAT
          accumulates them across the whole list so that STAT=/ERRMSG=
          reflect any failure.  */
 4369   pstat = apstat = stat = astat = tmp = NULL_TREE;
 4371   gfc_start_block (&block);
 4373   /* Count the number of failed deallocations.  If deallocate() was
 4374      called with STAT= , then set STAT to the count.  If deallocate
 4375      was called with ERRMSG, then set ERRMSG to a string. */
 4376   if (code->expr1 || code->expr2)
 4378       tree gfc_int4_type_node = gfc_get_int_type (4);
 4380       stat = gfc_create_var (gfc_int4_type_node, "stat");
 4381       pstat = gfc_build_addr_expr (NULL_TREE, stat);
 4383       /* Running total of possible deallocation failures. */
 4384       astat = gfc_create_var (gfc_int4_type_node, "astat");
 4385       apstat = gfc_build_addr_expr (NULL_TREE, astat);
 4387       /* Initialize astat to 0. */
 4388       gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
 4391   for (al = code->ext.alloc.list; al != NULL; al = al->next)
 4393       expr = al->expr;
 4394       gcc_assert (expr->expr_type == EXPR_VARIABLE);
 4396       gfc_init_se (&se, NULL);
 4397       gfc_start_block (&se.pre);
 4399       se.want_pointer = 1;
 4400       se.descriptor_only = 1;
 4401       gfc_conv_expr (&se, expr);
       /* Derived types with allocatable components: free the components
          first, unless the object is (or is reached through) a pointer.  */
 4403       if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
 4405           gfc_ref *ref;
 4406           gfc_ref *last = NULL;
 4407           for (ref = expr->ref; ref; ref = ref->next)
 4408             if (ref->type == REF_COMPONENT)
 4409               last = ref;
 4411           /* Do not deallocate the components of a derived type
 4412              ultimate pointer component. */
 4413           if (!(last && last->u.c.component->attr.pointer)
 4414               && !(!last && expr->symtree->n.sym->attr.pointer))
 4416               tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
 4417                                                expr->rank);
 4418               gfc_add_expr_to_block (&se.pre, tmp);
       /* Arrays go through the array deallocator; scalars are freed and
          their pointer zeroed afterwards.  */
 4422       if (expr->rank)
 4423         tmp = gfc_array_deallocate (se.expr, pstat, expr);
 4424       else
 4426           tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
 4427           gfc_add_expr_to_block (&se.pre, tmp);
 4429           tmp = fold_build2 (MODIFY_EXPR, void_type_node,
 4430                              se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
 4433       gfc_add_expr_to_block (&se.pre, tmp);
 4435       /* Keep track of the number of failed deallocations by adding stat
 4436          of the last deallocation to the running total. */
 4437       if (code->expr1 || code->expr2)
 4439           apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
 4440           gfc_add_modify (&se.pre, astat, apstat);
 4443       tmp = gfc_finish_block (&se.pre);
 4444       gfc_add_expr_to_block (&block, tmp);
 4448   /* Set STAT. */
 4449   if (code->expr1)
 4451       gfc_init_se (&se, NULL);
 4452       gfc_conv_expr_lhs (&se, code->expr1);
 4453       tmp = convert (TREE_TYPE (se.expr), astat);
 4454       gfc_add_modify (&block, se.expr, tmp);
 4457   /* Set ERRMSG. */
 4458   if (code->expr2)
 4460       /* A better error message may be possible, but not required. */
 4461       const char *msg = "Attempt to deallocate an unallocated object";
 4462       tree errmsg, slen, dlen;
 4464       gfc_init_se (&se, NULL);
 4465       gfc_conv_expr_lhs (&se, code->expr2);
 4467       errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
 4469       gfc_add_modify (&block, errmsg,
 4470                       gfc_build_addr_expr (pchar_type_node,
 4471                         gfc_build_localized_cstring_const (msg)));
       /* Copy min(strlen(msg), len(errmsg-var)) bytes, but only when the
          accumulated status is nonzero.  */
 4473       slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
 4474       dlen = gfc_get_expr_charlen (code->expr2);
 4475       slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
 4477       dlen = build_call_expr_loc (input_location,
 4478                               built_in_decls[BUILT_IN_MEMCPY], 3,
 4479                 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
 4481       tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
 4482                          build_int_cst (TREE_TYPE (astat), 0));
 4484       tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
 4486       gfc_add_expr_to_block (&block, tmp);
 4489   return gfc_finish_block (&block);