2010-01-21  Paul Thomas  <pault@gcc.gnu.org>
[official-gcc.git] / gcc / fortran / trans-stmt.c
blob 010d86f2acb8c159856ef59527be0fa3653e0027
1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
42 typedef struct iter_info
44 tree var;
45 tree start;
46 tree end;
47 tree step;
48 struct iter_info *next;
50 iter_info;
52 typedef struct forall_info
54 iter_info *this_loop;
55 tree mask;
56 tree maskindex;
57 int nvar;
58 tree size;
59 struct forall_info *prev_nest;
61 forall_info;
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
66 /* Translate an F95 label number to a LABEL_EXPR. */
68 tree
69 gfc_trans_label_here (gfc_code * code)
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
77 is a field_decl. */
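/* Editor's note (illustrative, not from the original source): this supports
   the obsolescent ASSIGN feature, e.g.

       ASSIGN 100 TO ilabel
       GO TO ilabel          ! or use ilabel as an assigned FORMAT

   so the auxiliary variables hold either a branch target address or the
   address of a format string, plus its length.  */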
79 void
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
92 /* Translate a label assignment statement. */
94 tree
95 gfc_trans_label_assign (gfc_code * code)
97 tree label_tree;
98 gfc_se se;
99 tree len;
100 tree addr;
101 tree len_tree;
102 int label_len;
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr1);
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112 label_tree = gfc_get_label_decl (code->label1);
114 if (code->label1->defined == ST_LABEL_TARGET)
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
119 else
121 gfc_expr *format = code->label1->format;
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
133 return gfc_finish_block (&se.pre);
136 /* Translate a GOTO statement. */
138 tree
139 gfc_trans_goto (gfc_code * code)
141 locus loc = code->loc;
142 tree assigned_goto;
143 tree target;
144 tree tmp;
145 gfc_se se;
147 if (code->label1 != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
150 /* ASSIGNED GOTO. */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr1);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
162 /* We're going to ignore a label list. It does not really change the
163 statement's semantics (because it is just a further restriction on
164 what's legal code); before, we were comparing label addresses here, but
165 that's a very fragile business and may break with optimization. So
166 just ignore it. */
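/* Editor's note (illustrative): the ignored list is the optional part of
       GO TO ilabel, (10, 20, 30)
   which only restricts the labels that may legally have been ASSIGNed.  */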
168 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
169 gfc_add_expr_to_block (&se.pre, target);
170 return gfc_finish_block (&se.pre);
174 /* Translate an ENTRY statement. Just adds a label for this entry point. */
175 tree
176 gfc_trans_entry (gfc_code * code)
178 return build1_v (LABEL_EXPR, code->ext.entry->label);
182 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
183 elemental subroutines. Make temporaries for output arguments if any such
184 dependencies are found. Output arguments are chosen because internal_unpack
185 can be used, as is, to copy the result back to the variable. */
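/* Editor's sketch (hypothetical example, not taken from this file): in a call
   such as

       CALL elem_swap (a(1:n), a(n:1:-1))

   where the second argument is INTENT(OUT), the output section overlaps the
   input one, so the code below builds a temporary for it and copies the
   result back with internal_unpack.  */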
186 static void
187 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
188 gfc_symbol * sym, gfc_actual_arglist * arg,
189 gfc_dep_check check_variable)
191 gfc_actual_arglist *arg0;
192 gfc_expr *e;
193 gfc_formal_arglist *formal;
194 gfc_loopinfo tmp_loop;
195 gfc_se parmse;
196 gfc_ss *ss;
197 gfc_ss_info *info;
198 gfc_symbol *fsym;
199 gfc_ref *ref;
200 int n;
201 tree data;
202 tree offset;
203 tree size;
204 tree tmp;
206 if (loopse->ss == NULL)
207 return;
209 ss = loopse->ss;
210 arg0 = arg;
211 formal = sym->formal;
213 /* Loop over all the arguments testing for dependencies. */
214 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
216 e = arg->expr;
217 if (e == NULL)
218 continue;
220 /* Obtain the info structure for the current argument. */
221 info = NULL;
222 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
224 if (ss->expr != e)
225 continue;
226 info = &ss->data.info;
227 break;
230 /* If there is a dependency, create a temporary and use it
231 instead of the variable. */
232 fsym = formal ? formal->sym : NULL;
233 if (e->expr_type == EXPR_VARIABLE
234 && e->rank && fsym
235 && fsym->attr.intent != INTENT_IN
236 && gfc_check_fncall_dependency (e, fsym->attr.intent,
237 sym, arg0, check_variable))
239 tree initial, temptype;
240 stmtblock_t temp_post;
242 /* Make a local loopinfo for the temporary creation, so that
243 none of the other ss->info's have to be renormalized. */
244 gfc_init_loopinfo (&tmp_loop);
245 for (n = 0; n < info->dimen; n++)
247 tmp_loop.to[n] = loopse->loop->to[n];
248 tmp_loop.from[n] = loopse->loop->from[n];
249 tmp_loop.order[n] = loopse->loop->order[n];
252 /* Obtain the argument descriptor for unpacking. */
253 gfc_init_se (&parmse, NULL);
254 parmse.want_pointer = 1;
256 /* The scalarizer introduces some specific peculiarities when
257 handling elemental subroutines; the stride can be needed up to
258 the dim_array - 1, rather than dim_loop - 1 to calculate
259 offsets outside the loop. For this reason, we make sure that
260 the descriptor has the dimensionality of the array by converting
261 trailing elements into ranges with end = start. */
262 for (ref = e->ref; ref; ref = ref->next)
263 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
264 break;
266 if (ref)
268 bool seen_range = false;
269 for (n = 0; n < ref->u.ar.dimen; n++)
271 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
272 seen_range = true;
274 if (!seen_range
275 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
276 continue;
278 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
279 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
283 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
284 gfc_add_block_to_block (&se->pre, &parmse.pre);
286 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
287 initialize the array temporary with a copy of the values. */
288 if (fsym->attr.intent == INTENT_INOUT
289 || (fsym->ts.type == BT_DERIVED
290 && fsym->attr.intent == INTENT_OUT))
291 initial = parmse.expr;
292 else
293 initial = NULL_TREE;
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e (where
297 the type of e is that of the final reference, but parmse.expr's
298 type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
303 temptype = TREE_TYPE (temptype);
304 temptype = gfc_get_element_type (temptype);
306 /* Generate the temporary. Cleaning up the temporary should be the
307 very last thing done, so we add the code to a new block and add it
308 to se->post as last instructions. */
309 size = gfc_create_var (gfc_array_index_type, NULL);
310 data = gfc_create_var (pvoid_type_node, NULL);
311 gfc_init_block (&temp_post);
312 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
313 &tmp_loop, info, temptype,
314 initial,
315 false, true, false,
316 &arg->expr->where);
317 gfc_add_modify (&se->pre, size, tmp);
318 tmp = fold_convert (pvoid_type_node, info->data);
319 gfc_add_modify (&se->pre, data, tmp);
321 /* Calculate the offset for the temporary. */
322 offset = gfc_index_zero_node;
323 for (n = 0; n < info->dimen; n++)
325 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
326 gfc_rank_cst[n]);
327 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
328 loopse->loop->from[n], tmp);
329 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
330 offset, tmp);
332 info->offset = gfc_create_var (gfc_array_index_type, NULL);
333 gfc_add_modify (&se->pre, info->offset, offset);
335 /* Copy the result back using unpack. */
336 tmp = build_call_expr_loc (input_location,
337 gfor_fndecl_in_unpack, 2, parmse.expr, data);
338 gfc_add_expr_to_block (&se->post, tmp);
340 /* parmse.pre is already added above. */
341 gfc_add_block_to_block (&se->post, &parmse.post);
342 gfc_add_block_to_block (&se->post, &temp_post);
348 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
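/* Editor's note (illustrative): a scalar call is handed straight to
   gfc_conv_procedure_call below, while an elemental call with array actual
   arguments, e.g.

       CALL elem_sub (a(1:n), b(1:n))     ! elem_sub is ELEMENTAL

   is wrapped in a scalarization loop, optionally guarded by a WHERE/FORALL
   mask (the MASK/COUNT1 parameters).  */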
350 tree
351 gfc_trans_call (gfc_code * code, bool dependency_check,
352 tree mask, tree count1, bool invert)
354 gfc_se se;
355 gfc_ss * ss;
356 int has_alternate_specifier;
357 gfc_dep_check check_variable;
358 tree index = NULL_TREE;
359 tree maskexpr = NULL_TREE;
360 tree tmp;
362 /* A CALL starts a new block because the actual arguments may have to
363 be evaluated first. */
364 gfc_init_se (&se, NULL);
365 gfc_start_block (&se.pre);
367 gcc_assert (code->resolved_sym);
369 ss = gfc_ss_terminator;
370 if (code->resolved_sym->attr.elemental)
371 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
373 /* This is not an elemental subroutine call with array-valued arguments. */
374 if (ss == gfc_ss_terminator)
377 /* Translate the call. */
378 has_alternate_specifier
379 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
380 code->expr1, NULL_TREE);
382 /* A subroutine without side-effect, by definition, does nothing! */
383 TREE_SIDE_EFFECTS (se.expr) = 1;
385 /* Chain the pieces together and return the block. */
386 if (has_alternate_specifier)
388 gfc_code *select_code;
389 gfc_symbol *sym;
390 select_code = code->next;
391 gcc_assert(select_code->op == EXEC_SELECT);
392 sym = select_code->expr1->symtree->n.sym;
393 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
394 if (sym->backend_decl == NULL)
395 sym->backend_decl = gfc_get_symbol_decl (sym);
396 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
398 else
399 gfc_add_expr_to_block (&se.pre, se.expr);
401 gfc_add_block_to_block (&se.pre, &se.post);
404 else
406 /* An elemental subroutine call with array valued arguments has
407 to be scalarized. */
408 gfc_loopinfo loop;
409 stmtblock_t body;
410 stmtblock_t block;
411 gfc_se loopse;
412 gfc_se depse;
414 /* gfc_walk_elemental_function_args renders the ss chain in the
415 reverse order to the actual argument order. */
416 ss = gfc_reverse_ss (ss);
418 /* Initialize the loop. */
419 gfc_init_se (&loopse, NULL);
420 gfc_init_loopinfo (&loop);
421 gfc_add_ss_to_loop (&loop, ss);
423 gfc_conv_ss_startstride (&loop);
424 /* TODO: gfc_conv_loop_setup generates a temporary for vector
425 subscripts. This could be prevented in the elemental case
426 as temporaries are handled separately
427 (below in gfc_conv_elemental_dependencies). */
428 gfc_conv_loop_setup (&loop, &code->expr1->where);
429 gfc_mark_ss_chain_used (ss, 1);
431 /* Convert the arguments, checking for dependencies. */
432 gfc_copy_loopinfo_to_se (&loopse, &loop);
433 loopse.ss = ss;
435 /* For operator assignment, do dependency checking. */
436 if (dependency_check)
437 check_variable = ELEM_CHECK_VARIABLE;
438 else
439 check_variable = ELEM_DONT_CHECK_VARIABLE;
441 gfc_init_se (&depse, NULL);
442 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
443 code->ext.actual, check_variable);
445 gfc_add_block_to_block (&loop.pre, &depse.pre);
446 gfc_add_block_to_block (&loop.post, &depse.post);
448 /* Generate the loop body. */
449 gfc_start_scalarized_body (&loop, &body);
450 gfc_init_block (&block);
452 if (mask && count1)
454 /* Form the mask expression according to the mask. */
455 index = count1;
456 maskexpr = gfc_build_array_ref (mask, index, NULL);
457 if (invert)
458 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
459 maskexpr);
462 /* Add the subroutine call to the block. */
463 gfc_conv_procedure_call (&loopse, code->resolved_sym,
464 code->ext.actual, code->expr1,
465 NULL_TREE);
467 if (mask && count1)
469 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
470 build_empty_stmt (input_location));
471 gfc_add_expr_to_block (&loopse.pre, tmp);
472 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
473 count1, gfc_index_one_node);
474 gfc_add_modify (&loopse.pre, count1, tmp);
476 else
477 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
479 gfc_add_block_to_block (&block, &loopse.pre);
480 gfc_add_block_to_block (&block, &loopse.post);
482 /* Finish up the loop block and the loop. */
483 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
484 gfc_trans_scalarizing_loops (&loop, &body);
485 gfc_add_block_to_block (&se.pre, &loop.pre);
486 gfc_add_block_to_block (&se.pre, &loop.post);
487 gfc_add_block_to_block (&se.pre, &se.post);
488 gfc_cleanup_loop (&loop);
491 return gfc_finish_block (&se.pre);
495 /* Translate the RETURN statement. */
497 tree
498 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
500 if (code->expr1)
502 gfc_se se;
503 tree tmp;
504 tree result;
506 /* If code->expr1 is not NULL, this return statement must appear
507 in a subroutine and current_fake_result_decl has already
508 been generated. */
510 result = gfc_get_fake_result_decl (NULL, 0);
511 if (!result)
513 gfc_warning ("An alternate return at %L without a * dummy argument",
514 &code->expr1->where);
515 return build1_v (GOTO_EXPR, gfc_get_return_label ());
518 /* Start a new block for this statement. */
519 gfc_init_se (&se, NULL);
520 gfc_start_block (&se.pre);
522 gfc_conv_expr (&se, code->expr1);
524 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
525 fold_convert (TREE_TYPE (result), se.expr));
526 gfc_add_expr_to_block (&se.pre, tmp);
528 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
529 gfc_add_expr_to_block (&se.pre, tmp);
530 gfc_add_block_to_block (&se.pre, &se.post);
531 return gfc_finish_block (&se.pre);
533 else
534 return build1_v (GOTO_EXPR, gfc_get_return_label ());
538 /* Translate the PAUSE statement. We have to translate this statement
539 to a runtime library call. */
541 tree
542 gfc_trans_pause (gfc_code * code)
544 tree gfc_int4_type_node = gfc_get_int_type (4);
545 gfc_se se;
546 tree tmp;
548 /* Start a new block for this statement. */
549 gfc_init_se (&se, NULL);
550 gfc_start_block (&se.pre);
553 if (code->expr1 == NULL)
555 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
556 tmp = build_call_expr_loc (input_location,
557 gfor_fndecl_pause_numeric, 1, tmp);
559 else
561 gfc_conv_expr_reference (&se, code->expr1);
562 tmp = build_call_expr_loc (input_location,
563 gfor_fndecl_pause_string, 2,
564 se.expr, se.string_length);
567 gfc_add_expr_to_block (&se.pre, tmp);
569 gfc_add_block_to_block (&se.pre, &se.post);
571 return gfc_finish_block (&se.pre);
575 /* Translate the STOP statement. We have to translate this statement
576 to a runtime library call. */
578 tree
579 gfc_trans_stop (gfc_code * code)
581 tree gfc_int4_type_node = gfc_get_int_type (4);
582 gfc_se se;
583 tree tmp;
585 /* Start a new block for this statement. */
586 gfc_init_se (&se, NULL);
587 gfc_start_block (&se.pre);
590 if (code->expr1 == NULL)
592 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
593 tmp = build_call_expr_loc (input_location,
594 gfor_fndecl_stop_numeric, 1, tmp);
596 else
598 gfc_conv_expr_reference (&se, code->expr1);
599 tmp = build_call_expr_loc (input_location,
600 gfor_fndecl_stop_string, 2,
601 se.expr, se.string_length);
604 gfc_add_expr_to_block (&se.pre, tmp);
606 gfc_add_block_to_block (&se.pre, &se.post);
608 return gfc_finish_block (&se.pre);
612 /* Generate GENERIC for the IF construct. This function also deals with
613 the simple IF statement, because the front end translates the IF
614 statement into an IF construct.
616 We translate:
618 IF (cond) THEN
619 then_clause
620 ELSEIF (cond2)
621 elseif_clause
622 ELSE
623 else_clause
624 ENDIF
626 into:
628 pre_cond_s;
629 if (cond_s)
631 then_clause;
633 else
635 pre_cond_s
636 if (cond_s)
638 elseif_clause
640 else
642 else_clause;
646 where COND_S is the simplified version of the predicate. PRE_COND_S
647 are the pre side-effects produced by the translation of the
648 conditional.
649 We need to build the chain recursively otherwise we run into
650 problems with folding incomplete statements. */
652 static tree
653 gfc_trans_if_1 (gfc_code * code)
655 gfc_se if_se;
656 tree stmt, elsestmt;
658 /* Check for an unconditional ELSE clause. */
659 if (!code->expr1)
660 return gfc_trans_code (code->next);
662 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
663 gfc_init_se (&if_se, NULL);
664 gfc_start_block (&if_se.pre);
666 /* Calculate the IF condition expression. */
667 gfc_conv_expr_val (&if_se, code->expr1);
669 /* Translate the THEN clause. */
670 stmt = gfc_trans_code (code->next);
672 /* Translate the ELSE clause. */
673 if (code->block)
674 elsestmt = gfc_trans_if_1 (code->block);
675 else
676 elsestmt = build_empty_stmt (input_location);
678 /* Build the condition expression and add it to the condition block. */
679 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
681 gfc_add_expr_to_block (&if_se.pre, stmt);
683 /* Finish off this statement. */
684 return gfc_finish_block (&if_se.pre);
687 tree
688 gfc_trans_if (gfc_code * code)
690 /* Ignore the top EXEC_IF, it only announces an IF construct. The
691 actual code we must translate is in code->block. */
693 return gfc_trans_if_1 (code->block);
697 /* Translate an arithmetic IF expression.
699 IF (cond) label1, label2, label3 translates to
701 if (cond <= 0)
703 if (cond < 0)
704 goto label1;
705 else // cond == 0
706 goto label2;
708 else // cond > 0
709 goto label3;
711 An optimized version can be generated in case of equal labels.
712 E.g., if label1 is equal to label2, we can translate it to
714 if (cond <= 0)
715 goto label1;
716 else
717 goto label3;
720 tree
721 gfc_trans_arithmetic_if (gfc_code * code)
723 gfc_se se;
724 tree tmp;
725 tree branch1;
726 tree branch2;
727 tree zero;
729 /* Start a new block. */
730 gfc_init_se (&se, NULL);
731 gfc_start_block (&se.pre);
733 /* Pre-evaluate COND. */
734 gfc_conv_expr_val (&se, code->expr1);
735 se.expr = gfc_evaluate_now (se.expr, &se.pre);
737 /* Build something to compare with. */
738 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
740 if (code->label1->value != code->label2->value)
742 /* If (cond < 0) take branch1 else take branch2.
743 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
744 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
745 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
747 if (code->label1->value != code->label3->value)
748 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
749 else
750 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
752 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
754 else
755 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
757 if (code->label1->value != code->label3->value
758 && code->label2->value != code->label3->value)
760 /* if (cond <= 0) take branch1 else take branch2. */
761 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
762 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
763 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
766 /* Append the COND_EXPR to the evaluation of COND, and return. */
767 gfc_add_expr_to_block (&se.pre, branch1);
768 return gfc_finish_block (&se.pre);
772 /* Translate a BLOCK construct. This is basically what we would do for a
773 procedure body. */
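/* Editor's note (illustrative): a Fortran 2008 construct such as

       BLOCK
         INTEGER :: tmp
         tmp = i; i = j; j = tmp
       END BLOCK

   carries its own namespace (code->ext.ns); its locals and deferred
   variables are set up below much as for a contained procedure.  */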
775 tree
776 gfc_trans_block_construct (gfc_code* code)
778 gfc_namespace* ns;
779 gfc_symbol* sym;
780 stmtblock_t body;
781 tree tmp;
783 ns = code->ext.ns;
784 gcc_assert (ns);
785 sym = ns->proc_name;
786 gcc_assert (sym);
788 gcc_assert (!sym->tlink);
789 sym->tlink = sym;
791 gfc_start_block (&body);
792 gfc_process_block_locals (ns);
794 tmp = gfc_trans_code (ns->code);
795 tmp = gfc_trans_deferred_vars (sym, tmp);
797 gfc_add_expr_to_block (&body, tmp);
798 return gfc_finish_block (&body);
802 /* Translate the simple DO construct. This is where the loop variable has
803 integer type and step +-1. We can't use this in the general case
804 because integer overflow and floating point errors could give incorrect
805 results.
806 We translate a do loop from:
808 DO dovar = from, to, step
809 body
810 END DO
814 [Evaluate loop bounds and step]
815 dovar = from;
816 if ((step > 0) ? (dovar <= to) : (dovar >= to))
818 for (;;)
820 body;
821 cycle_label:
822 cond = (dovar == to);
823 dovar += step;
824 if (cond) goto end_label;
827 end_label:
829 This helps the optimizers by avoiding the extra induction variable
830 used in the general case. */
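/* Editor's worked example (illustrative): for

       DO i = 1, 3
         body
       END DO

   the template above yields  i = 1; if (i <= 3) loop { body;
   cond = (i == 3); i = i + 1; if (cond) goto end_label; }  so the body runs
   for i = 1, 2, 3 and i holds 4 after the loop, as the standard's iteration
   rules require.  */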
832 static tree
833 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
834 tree from, tree to, tree step)
836 stmtblock_t body;
837 tree type;
838 tree cond;
839 tree tmp;
840 tree saved_dovar = NULL;
841 tree cycle_label;
842 tree exit_label;
844 type = TREE_TYPE (dovar);
846 /* Initialize the DO variable: dovar = from. */
847 gfc_add_modify (pblock, dovar, from);
849 /* Save value for do-tinkering checking. */
850 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
852 saved_dovar = gfc_create_var (type, ".saved_dovar");
853 gfc_add_modify (pblock, saved_dovar, dovar);
856 /* Cycle and exit statements are implemented with gotos. */
857 cycle_label = gfc_build_label_decl (NULL_TREE);
858 exit_label = gfc_build_label_decl (NULL_TREE);
860 /* Put the labels where they can be found later. See gfc_trans_do(). */
861 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
863 /* Loop body. */
864 gfc_start_block (&body);
866 /* Main loop body. */
867 tmp = gfc_trans_code (code->block->next);
868 gfc_add_expr_to_block (&body, tmp);
870 /* Label for cycle statements (if needed). */
871 if (TREE_USED (cycle_label))
873 tmp = build1_v (LABEL_EXPR, cycle_label);
874 gfc_add_expr_to_block (&body, tmp);
877 /* Check whether someone has modified the loop variable. */
878 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
880 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
881 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
882 "Loop variable has been modified");
885 /* Evaluate the loop condition. */
886 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
887 cond = gfc_evaluate_now (cond, &body);
889 /* Increment the loop variable. */
890 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
891 gfc_add_modify (&body, dovar, tmp);
893 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
894 gfc_add_modify (&body, saved_dovar, dovar);
896 /* The loop exit. */
897 tmp = build1_v (GOTO_EXPR, exit_label);
898 TREE_USED (exit_label) = 1;
899 tmp = fold_build3 (COND_EXPR, void_type_node,
900 cond, tmp, build_empty_stmt (input_location));
901 gfc_add_expr_to_block (&body, tmp);
903 /* Finish the loop body. */
904 tmp = gfc_finish_block (&body);
905 tmp = build1_v (LOOP_EXPR, tmp);
907 /* Only execute the loop if the number of iterations is positive. */
908 if (tree_int_cst_sgn (step) > 0)
909 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
910 else
911 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
912 tmp = fold_build3 (COND_EXPR, void_type_node,
913 cond, tmp, build_empty_stmt (input_location));
914 gfc_add_expr_to_block (pblock, tmp);
916 /* Add the exit label. */
917 tmp = build1_v (LABEL_EXPR, exit_label);
918 gfc_add_expr_to_block (pblock, tmp);
920 return gfc_finish_block (pblock);
923 /* Translate the DO construct. This obviously is one of the most
924 important ones to get right with any compiler, but especially
925 so for Fortran.
927 We special case some loop forms as described in gfc_trans_simple_do.
928 For other cases we implement them with a separate loop count,
929 as described in the standard.
931 We translate a do loop from:
933 DO dovar = from, to, step
934 body
935 END DO
939 [evaluate loop bounds and step]
940 empty = (step > 0 ? to < from : to > from);
941 countm1 = (to - from) / step;
942 dovar = from;
943 if (empty) goto exit_label;
944 for (;;)
946 body;
947 cycle_label:
948 dovar += step
949 if (countm1 == 0) goto exit_label;
950 countm1--;
952 exit_label:
954 countm1 is an unsigned integer. It is equal to the loop count minus one,
955 because the loop count itself can overflow. */
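/* Editor's worked example (illustrative): for  DO i = 10, 1, -3  we get
   empty = (1 > 10) = false and, after multiplying through by
   step_sign = -1 as done below, countm1 = ((-1) - (-10)) / 3 = 3 in the
   unsigned type, so the body executes countm1 + 1 = 4 times,
   for i = 10, 7, 4, 1.  */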
957 tree
958 gfc_trans_do (gfc_code * code)
960 gfc_se se;
961 tree dovar;
962 tree saved_dovar = NULL;
963 tree from;
964 tree to;
965 tree step;
966 tree countm1;
967 tree type;
968 tree utype;
969 tree cond;
970 tree cycle_label;
971 tree exit_label;
972 tree tmp;
973 tree pos_step;
974 stmtblock_t block;
975 stmtblock_t body;
977 gfc_start_block (&block);
979 /* Evaluate all the expressions in the iterator. */
980 gfc_init_se (&se, NULL);
981 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
982 gfc_add_block_to_block (&block, &se.pre);
983 dovar = se.expr;
984 type = TREE_TYPE (dovar);
986 gfc_init_se (&se, NULL);
987 gfc_conv_expr_val (&se, code->ext.iterator->start);
988 gfc_add_block_to_block (&block, &se.pre);
989 from = gfc_evaluate_now (se.expr, &block);
991 gfc_init_se (&se, NULL);
992 gfc_conv_expr_val (&se, code->ext.iterator->end);
993 gfc_add_block_to_block (&block, &se.pre);
994 to = gfc_evaluate_now (se.expr, &block);
996 gfc_init_se (&se, NULL);
997 gfc_conv_expr_val (&se, code->ext.iterator->step);
998 gfc_add_block_to_block (&block, &se.pre);
999 step = gfc_evaluate_now (se.expr, &block);
1001 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1003 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
1004 fold_convert (type, integer_zero_node));
1005 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1006 "DO step value is zero");
1009 /* Special case simple loops. */
1010 if (TREE_CODE (type) == INTEGER_TYPE
1011 && (integer_onep (step)
1012 || tree_int_cst_equal (step, integer_minus_one_node)))
1013 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
1015 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
1016 fold_convert (type, integer_zero_node));
1018 if (TREE_CODE (type) == INTEGER_TYPE)
1019 utype = unsigned_type_for (type);
1020 else
1021 utype = unsigned_type_for (gfc_array_index_type);
1022 countm1 = gfc_create_var (utype, "countm1");
1024 /* Cycle and exit statements are implemented with gotos. */
1025 cycle_label = gfc_build_label_decl (NULL_TREE);
1026 exit_label = gfc_build_label_decl (NULL_TREE);
1027 TREE_USED (exit_label) = 1;
1029 /* Initialize the DO variable: dovar = from. */
1030 gfc_add_modify (&block, dovar, from);
1032 /* Save value for do-tinkering checking. */
1033 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1035 saved_dovar = gfc_create_var (type, ".saved_dovar");
1036 gfc_add_modify (&block, saved_dovar, dovar);
1039 /* Initialize loop count and jump to exit label if the loop is empty.
1040 This code is executed before we enter the loop body. We generate:
1041 step_sign = sign(1,step);
1042 if (step > 0)
1044 if (to < from)
1045 goto exit_label;
1047 else
1049 if (to > from)
1050 goto exit_label;
1052 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1056 if (TREE_CODE (type) == INTEGER_TYPE)
1058 tree pos, neg, step_sign, to2, from2, step2;
1060 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1062 tmp = fold_build2 (LT_EXPR, boolean_type_node, step,
1063 build_int_cst (TREE_TYPE (step), 0));
1064 step_sign = fold_build3 (COND_EXPR, type, tmp,
1065 build_int_cst (type, -1),
1066 build_int_cst (type, 1));
1068 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
1069 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
1070 build1_v (GOTO_EXPR, exit_label),
1071 build_empty_stmt (input_location));
1073 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
1074 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
1075 build1_v (GOTO_EXPR, exit_label),
1076 build_empty_stmt (input_location));
1077 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1079 gfc_add_expr_to_block (&block, tmp);
1081 /* Calculate the loop count. to-from can overflow, so
1082 we cast to unsigned. */
1084 to2 = fold_build2 (MULT_EXPR, type, step_sign, to);
1085 from2 = fold_build2 (MULT_EXPR, type, step_sign, from);
1086 step2 = fold_build2 (MULT_EXPR, type, step_sign, step);
1087 step2 = fold_convert (utype, step2);
1088 tmp = fold_build2 (MINUS_EXPR, type, to2, from2);
1089 tmp = fold_convert (utype, tmp);
1090 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp, step2);
1091 tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1092 gfc_add_expr_to_block (&block, tmp);
1094 else
1096 /* TODO: We could use the same width as the real type.
1097 This would probably cause more problems than it solves
1098 when we implement "long double" types. */
1100 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1101 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1102 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1103 gfc_add_modify (&block, countm1, tmp);
1105 /* We need a special check for empty loops:
1106 empty = (step > 0 ? to < from : to > from); */
1107 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1108 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1109 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1110 /* If the loop is empty, go directly to the exit label. */
1111 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1112 build1_v (GOTO_EXPR, exit_label),
1113 build_empty_stmt (input_location));
1114 gfc_add_expr_to_block (&block, tmp);
1117 /* Loop body. */
1118 gfc_start_block (&body);
1120 /* Put these labels where they can be found later. We put the
1121 labels in a TREE_LIST node (because TREE_CHAIN is already
1122 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1123 label in TREE_VALUE (backend_decl). */
1125 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1127 /* Main loop body. */
1128 tmp = gfc_trans_code (code->block->next);
1129 gfc_add_expr_to_block (&body, tmp);
1131 /* Label for cycle statements (if needed). */
1132 if (TREE_USED (cycle_label))
1134 tmp = build1_v (LABEL_EXPR, cycle_label);
1135 gfc_add_expr_to_block (&body, tmp);
1138 /* Check whether someone has modified the loop variable. */
1139 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1141 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1142 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1143 "Loop variable has been modified");
1146 /* Increment the loop variable. */
1147 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1148 gfc_add_modify (&body, dovar, tmp);
1150 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1151 gfc_add_modify (&body, saved_dovar, dovar);
1153 /* End with the loop condition. Loop until countm1 == 0. */
1154 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1155 build_int_cst (utype, 0));
1156 tmp = build1_v (GOTO_EXPR, exit_label);
1157 tmp = fold_build3 (COND_EXPR, void_type_node,
1158 cond, tmp, build_empty_stmt (input_location));
1159 gfc_add_expr_to_block (&body, tmp);
1161 /* Decrement the loop count. */
1162 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1163 gfc_add_modify (&body, countm1, tmp);
1165 /* End of loop body. */
1166 tmp = gfc_finish_block (&body);
1168 /* The for loop itself. */
1169 tmp = build1_v (LOOP_EXPR, tmp);
1170 gfc_add_expr_to_block (&block, tmp);
1172 /* Add the exit label. */
1173 tmp = build1_v (LABEL_EXPR, exit_label);
1174 gfc_add_expr_to_block (&block, tmp);
1176 return gfc_finish_block (&block);
1180 /* Translate the DO WHILE construct.
1182 We translate
1184 DO WHILE (cond)
1185 body
1186 END DO
1190 for ( ; ; )
1192 pre_cond;
1193 if (! cond) goto exit_label;
1194 body;
1195 cycle_label:
1197 exit_label:
1199 Because the evaluation of the exit condition `cond' may have side
1200 effects, we can't do much for empty loop bodies. The backend optimizers
1201 should be smart enough to eliminate any dead loops. */
1203 tree
1204 gfc_trans_do_while (gfc_code * code)
1206 gfc_se cond;
1207 tree tmp;
1208 tree cycle_label;
1209 tree exit_label;
1210 stmtblock_t block;
1212 /* Everything we build here is part of the loop body. */
1213 gfc_start_block (&block);
1215 /* Cycle and exit statements are implemented with gotos. */
1216 cycle_label = gfc_build_label_decl (NULL_TREE);
1217 exit_label = gfc_build_label_decl (NULL_TREE);
1219 /* Put the labels where they can be found later. See gfc_trans_do(). */
1220 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1222 /* Create a GIMPLE version of the exit condition. */
1223 gfc_init_se (&cond, NULL);
1224 gfc_conv_expr_val (&cond, code->expr1);
1225 gfc_add_block_to_block (&block, &cond.pre);
1226 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1228 /* Build "IF (! cond) GOTO exit_label". */
1229 tmp = build1_v (GOTO_EXPR, exit_label);
1230 TREE_USED (exit_label) = 1;
1231 tmp = fold_build3 (COND_EXPR, void_type_node,
1232 cond.expr, tmp, build_empty_stmt (input_location));
1233 gfc_add_expr_to_block (&block, tmp);
1235 /* The main body of the loop. */
1236 tmp = gfc_trans_code (code->block->next);
1237 gfc_add_expr_to_block (&block, tmp);
1239 /* Label for cycle statements (if needed). */
1240 if (TREE_USED (cycle_label))
1242 tmp = build1_v (LABEL_EXPR, cycle_label);
1243 gfc_add_expr_to_block (&block, tmp);
1246 /* End of loop body. */
1247 tmp = gfc_finish_block (&block);
1249 gfc_init_block (&block);
1250 /* Build the loop. */
1251 tmp = build1_v (LOOP_EXPR, tmp);
1252 gfc_add_expr_to_block (&block, tmp);
1254 /* Add the exit label. */
1255 tmp = build1_v (LABEL_EXPR, exit_label);
1256 gfc_add_expr_to_block (&block, tmp);
1258 return gfc_finish_block (&block);
1262 /* Translate the SELECT CASE construct for INTEGER case expressions,
1263 without killing all potential optimizations. The problem is that
1264 Fortran allows unbounded cases, but the back-end does not, so we
1265 need to intercept those before we enter the equivalent SWITCH_EXPR
1266 we can build.
1268 For example, we translate this,
1270 SELECT CASE (expr)
1271 CASE (:100,101,105:115)
1272 block_1
1273 CASE (190:199,200:)
1274 block_2
1275 CASE (300)
1276 block_3
1277 CASE DEFAULT
1278 block_4
1279 END SELECT
1281 to the GENERIC equivalent,
1283 switch (expr)
1285 case (minimum value for typeof(expr)) ... 100:
1286 case 101:
1287 case 105 ... 115:
1288 block_1;
1289 goto end_label;
1291 case 200 ... (maximum value for typeof(expr)):
1292 case 190 ... 199:
1293 block_2;
1294 goto end_label;
1296 case 300:
1297 block_3;
1298 goto end_label;
1300 default:
1301 block_4;
1302 goto end_label;
1305 end_label: */
1307 static tree
1308 gfc_trans_integer_select (gfc_code * code)
1310 gfc_code *c;
1311 gfc_case *cp;
1312 tree end_label;
1313 tree tmp;
1314 gfc_se se;
1315 stmtblock_t block;
1316 stmtblock_t body;
1318 gfc_start_block (&block);
1320 /* Calculate the switch expression. */
1321 gfc_init_se (&se, NULL);
1322 gfc_conv_expr_val (&se, code->expr1);
1323 gfc_add_block_to_block (&block, &se.pre);
1325 end_label = gfc_build_label_decl (NULL_TREE);
1327 gfc_init_block (&body);
1329 for (c = code->block; c; c = c->block)
1331 for (cp = c->ext.case_list; cp; cp = cp->next)
1333 tree low, high;
1334 tree label;
1336 /* Assume it's the default case. */
1337 low = high = NULL_TREE;
1339 if (cp->low)
1341 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1342 cp->low->ts.kind);
1344 /* If there's only a lower bound, set the high bound to the
1345 maximum value of the case expression. */
1346 if (!cp->high)
1347 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1350 if (cp->high)
1352 /* Three cases are possible here:
1354 1) There is no lower bound, e.g. CASE (:N).
1355 2) There is a lower bound .NE. high bound, that is
1356 a case range, e.g. CASE (N:M) where M>N (we make
1357 sure that M>N during type resolution).
1358 3) There is a lower bound, and it has the same value
1359 as the high bound, e.g. CASE (N:N). This is our
1360 internal representation of CASE(N).
1362 In the first and second case, we need to set a value for
1363 high. In the third case, we don't because the GCC middle
1364 end represents a single case value by just letting high be
1365 a NULL_TREE. We can't do that because we need to be able
1366 to represent unbounded cases. */
1368 if (!cp->low
1369 || (cp->low
1370 && mpz_cmp (cp->low->value.integer,
1371 cp->high->value.integer) != 0))
1372 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1373 cp->high->ts.kind);
1375 /* Unbounded case. */
1376 if (!cp->low)
1377 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1380 /* Build a label. */
1381 label = gfc_build_label_decl (NULL_TREE);
1383 /* Add this case label.
1384 Add parameter 'label', make it match GCC backend. */
1385 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1386 low, high, label);
1387 gfc_add_expr_to_block (&body, tmp);
1390 /* Add the statements for this case. */
1391 tmp = gfc_trans_code (c->next);
1392 gfc_add_expr_to_block (&body, tmp);
1394 /* Break to the end of the construct. */
1395 tmp = build1_v (GOTO_EXPR, end_label);
1396 gfc_add_expr_to_block (&body, tmp);
1399 tmp = gfc_finish_block (&body);
1400 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1401 gfc_add_expr_to_block (&block, tmp);
1403 tmp = build1_v (LABEL_EXPR, end_label);
1404 gfc_add_expr_to_block (&block, tmp);
1406 return gfc_finish_block (&block);
1410 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1412 There are only two cases possible here, even though the standard
1413 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1414 .FALSE., and DEFAULT.
1416 We never generate more than two blocks here. Instead, we always
1417 try to eliminate the DEFAULT case. This way, we can translate this
1418 kind of SELECT construct to a simple
1420 if {} else {};
1422 expression in GENERIC. */
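/* Editor's note (illustrative): for example

       SELECT CASE (flag)
       CASE (.TRUE.)
         CALL a
       CASE DEFAULT
         CALL b
       END SELECT

   the DEFAULT block is adopted as the missing .FALSE. case, giving a single
   COND_EXPR (flag ? a : b).  */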
1424 static tree
1425 gfc_trans_logical_select (gfc_code * code)
1427 gfc_code *c;
1428 gfc_code *t, *f, *d;
1429 gfc_case *cp;
1430 gfc_se se;
1431 stmtblock_t block;
1433 /* Assume we don't have any cases at all. */
1434 t = f = d = NULL;
1436 /* Now see which ones we actually do have. We can have at most two
1437 cases in a single case list: one for .TRUE. and one for .FALSE.
1438 The default case is always separate. If the cases for .TRUE. and
1439 .FALSE. are in the same case list, the block for that case list
1440 is always executed, and we don't generate a COND_EXPR. */
1441 for (c = code->block; c; c = c->block)
1443 for (cp = c->ext.case_list; cp; cp = cp->next)
1445 if (cp->low)
1447 if (cp->low->value.logical == 0) /* .FALSE. */
1448 f = c;
1449 else /* if (cp->value.logical != 0), thus .TRUE. */
1450 t = c;
1452 else
1453 d = c;
1457 /* Start a new block. */
1458 gfc_start_block (&block);
1460 /* Calculate the switch expression. We always need to do this
1461 because it may have side effects. */
1462 gfc_init_se (&se, NULL);
1463 gfc_conv_expr_val (&se, code->expr1);
1464 gfc_add_block_to_block (&block, &se.pre);
1466 if (t == f && t != NULL)
1468 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1469 translate the code for these cases, append it to the current
1470 block. */
1471 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1473 else
1475 tree true_tree, false_tree, stmt;
1477 true_tree = build_empty_stmt (input_location);
1478 false_tree = build_empty_stmt (input_location);
1480 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1481 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1482 make the missing case the default case. */
1483 if (t != NULL && f != NULL)
1484 d = NULL;
1485 else if (d != NULL)
1487 if (t == NULL)
1488 t = d;
1489 else
1490 f = d;
1493 /* Translate the code for each of these blocks, and append it to
1494 the current block. */
1495 if (t != NULL)
1496 true_tree = gfc_trans_code (t->next);
1498 if (f != NULL)
1499 false_tree = gfc_trans_code (f->next);
1501 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1502 true_tree, false_tree);
1503 gfc_add_expr_to_block (&block, stmt);
1506 return gfc_finish_block (&block);
1510 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1511 Instead of generating compares and jumps, it is far simpler to
1512 generate a data structure describing the cases in order and call a
1513 library subroutine that locates the right case.
1514 This is particularly true because this is the only case where we
1515 might have to dispose of a temporary.
1516 The library subroutine returns an integer identifying the selected case;
1517 the code below dispatches on that value with a SWITCH_EXPR. */
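/* Editor's note (summarizing the code below, not part of the original
   comment): each jump-table entry is a record
   { string1, string1_len, string2, string2_len, target } describing one case
   range; the table, its length, and the selector string are passed to
   gfor_fndecl_select_string (or the _char4 variant for kind=4).  */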
1519 static tree
1520 gfc_trans_character_select (gfc_code *code)
1522 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1523 stmtblock_t block, body;
1524 gfc_case *cp, *d;
1525 gfc_code *c;
1526 gfc_se se;
1527 int n, k;
1529 /* The jump table types are stored in static variables to avoid
1530 constructing them from scratch every single time. */
1531 static tree select_struct[2];
1532 static tree ss_string1[2], ss_string1_len[2];
1533 static tree ss_string2[2], ss_string2_len[2];
1534 static tree ss_target[2];
1536 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1538 if (code->expr1->ts.kind == 1)
1539 k = 0;
1540 else if (code->expr1->ts.kind == 4)
1541 k = 1;
1542 else
1543 gcc_unreachable ();
1545 if (select_struct[k] == NULL)
1547 select_struct[k] = make_node (RECORD_TYPE);
1549 if (code->expr1->ts.kind == 1)
1550 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1551 else if (code->expr1->ts.kind == 4)
1552 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1553 else
1554 gcc_unreachable ();
1556 #undef ADD_FIELD
1557 #define ADD_FIELD(NAME, TYPE) \
1558 ss_##NAME[k] = gfc_add_field_to_struct \
1559 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1560 get_identifier (stringize(NAME)), TYPE)
1562 ADD_FIELD (string1, pchartype);
1563 ADD_FIELD (string1_len, gfc_charlen_type_node);
1565 ADD_FIELD (string2, pchartype);
1566 ADD_FIELD (string2_len, gfc_charlen_type_node);
1568 ADD_FIELD (target, integer_type_node);
1569 #undef ADD_FIELD
1571 gfc_finish_type (select_struct[k]);
1574 cp = code->block->ext.case_list;
1575 while (cp->left != NULL)
1576 cp = cp->left;
1578 n = 0;
1579 for (d = cp; d; d = d->right)
1580 d->n = n++;
1582 end_label = gfc_build_label_decl (NULL_TREE);
1584 /* Generate the body */
1585 gfc_start_block (&block);
1586 gfc_init_block (&body);
1588 for (c = code->block; c; c = c->block)
1590 for (d = c->ext.case_list; d; d = d->next)
1592 label = gfc_build_label_decl (NULL_TREE);
1593 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1594 build_int_cst (NULL_TREE, d->n),
1595 build_int_cst (NULL_TREE, d->n), label);
1596 gfc_add_expr_to_block (&body, tmp);
1599 tmp = gfc_trans_code (c->next);
1600 gfc_add_expr_to_block (&body, tmp);
1602 tmp = build1_v (GOTO_EXPR, end_label);
1603 gfc_add_expr_to_block (&body, tmp);
1606 /* Generate the structure describing the branches */
1607 init = NULL_TREE;
1609 for(d = cp; d; d = d->right)
1611 node = NULL_TREE;
1613 gfc_init_se (&se, NULL);
1615 if (d->low == NULL)
1617 node = tree_cons (ss_string1[k], null_pointer_node, node);
1618 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1620 else
1622 gfc_conv_expr_reference (&se, d->low);
1624 node = tree_cons (ss_string1[k], se.expr, node);
1625 node = tree_cons (ss_string1_len[k], se.string_length, node);
1628 if (d->high == NULL)
1630 node = tree_cons (ss_string2[k], null_pointer_node, node);
1631 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1633 else
1635 gfc_init_se (&se, NULL);
1636 gfc_conv_expr_reference (&se, d->high);
1638 node = tree_cons (ss_string2[k], se.expr, node);
1639 node = tree_cons (ss_string2_len[k], se.string_length, node);
1642 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1643 node);
1645 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1646 init = tree_cons (NULL_TREE, tmp, init);
1649 type = build_array_type (select_struct[k],
1650 build_index_type (build_int_cst (NULL_TREE, n-1)));
1652 init = build_constructor_from_list (type, nreverse(init));
1653 TREE_CONSTANT (init) = 1;
1654 TREE_STATIC (init) = 1;
1655 /* Create a static variable to hold the jump table. */
1656 tmp = gfc_create_var (type, "jumptable");
1657 TREE_CONSTANT (tmp) = 1;
1658 TREE_STATIC (tmp) = 1;
1659 TREE_READONLY (tmp) = 1;
1660 DECL_INITIAL (tmp) = init;
1661 init = tmp;
1663 /* Build the library call */
1664 init = gfc_build_addr_expr (pvoid_type_node, init);
1666 gfc_init_se (&se, NULL);
1667 gfc_conv_expr_reference (&se, code->expr1);
1669 gfc_add_block_to_block (&block, &se.pre);
1671 if (code->expr1->ts.kind == 1)
1672 fndecl = gfor_fndecl_select_string;
1673 else if (code->expr1->ts.kind == 4)
1674 fndecl = gfor_fndecl_select_string_char4;
1675 else
1676 gcc_unreachable ();
1678 tmp = build_call_expr_loc (input_location,
1679 fndecl, 4, init, build_int_cst (NULL_TREE, n),
1680 se.expr, se.string_length);
1681 case_num = gfc_create_var (integer_type_node, "case_num");
1682 gfc_add_modify (&block, case_num, tmp);
1684 gfc_add_block_to_block (&block, &se.post);
1686 tmp = gfc_finish_block (&body);
1687 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1688 gfc_add_expr_to_block (&block, tmp);
1690 tmp = build1_v (LABEL_EXPR, end_label);
1691 gfc_add_expr_to_block (&block, tmp);
1693 return gfc_finish_block (&block);
1697 /* Translate the three variants of the SELECT CASE construct.
1699 SELECT CASEs with INTEGER case expressions can be translated to an
1700 equivalent GENERIC switch statement, and for LOGICAL case
1701 expressions we build one or two if-else compares.
1703 SELECT CASEs with CHARACTER case expressions are a whole different
1704 story, because they don't exist in GENERIC. So we sort them and
1705 do a binary search at runtime.
1707 Fortran has no BREAK statement, and it does not allow jumps from
1708 one case block to another. That makes things a lot easier for
1709 the optimizers. */
1711 tree
1712 gfc_trans_select (gfc_code * code)
1714 gcc_assert (code && code->expr1);
1716 /* Empty SELECT constructs are legal. */
1717 if (code->block == NULL)
1718 return build_empty_stmt (input_location);
1720 /* Select the correct translation function. */
1721 switch (code->expr1->ts.type)
1723 case BT_LOGICAL: return gfc_trans_logical_select (code);
1724 case BT_INTEGER: return gfc_trans_integer_select (code);
1725 case BT_CHARACTER: return gfc_trans_character_select (code);
1726 default:
1727 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1728 /* Not reached */
1733 /* Traversal function to substitute a replacement symtree if the symbol
1734 in the expression is the same as that passed. f == 2 signals that
1735 that variable itself is not to be checked - only the references.
1736 This group of functions is used when the variable expression in a
1737 FORALL assignment has internal references. For example:
1738 FORALL (i = 1:4) p(p(i)) = i
1739 The only recourse here is to store a copy of 'p' for the index
1740 expression. */
1742 static gfc_symtree *new_symtree;
1743 static gfc_symtree *old_symtree;
1745 static bool
1746 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1748 if (expr->expr_type != EXPR_VARIABLE)
1749 return false;
1751 if (*f == 2)
1752 *f = 1;
1753 else if (expr->symtree->n.sym == sym)
1754 expr->symtree = new_symtree;
1756 return false;
1759 static void
1760 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1762 gfc_traverse_expr (e, sym, forall_replace, f);
1765 static bool
1766 forall_restore (gfc_expr *expr,
1767 gfc_symbol *sym ATTRIBUTE_UNUSED,
1768 int *f ATTRIBUTE_UNUSED)
1770 if (expr->expr_type != EXPR_VARIABLE)
1771 return false;
1773 if (expr->symtree == new_symtree)
1774 expr->symtree = old_symtree;
1776 return false;
1779 static void
1780 forall_restore_symtree (gfc_expr *e)
1782 gfc_traverse_expr (e, NULL, forall_restore, 0);
1785 static void
1786 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1788 gfc_se tse;
1789 gfc_se rse;
1790 gfc_expr *e;
1791 gfc_symbol *new_sym;
1792 gfc_symbol *old_sym;
1793 gfc_symtree *root;
1794 tree tmp;
1796 /* Build a copy of the lvalue. */
1797 old_symtree = c->expr1->symtree;
1798 old_sym = old_symtree->n.sym;
1799 e = gfc_lval_expr_from_sym (old_sym);
1800 if (old_sym->attr.dimension)
1802 gfc_init_se (&tse, NULL);
1803 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1804 gfc_add_block_to_block (pre, &tse.pre);
1805 gfc_add_block_to_block (post, &tse.post);
1806 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
1808 if (e->ts.type != BT_CHARACTER)
1810 /* Use the variable offset for the temporary. */
1811 tmp = gfc_conv_array_offset (old_sym->backend_decl);
1812 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
1815 else
1817 gfc_init_se (&tse, NULL);
1818 gfc_init_se (&rse, NULL);
1819 gfc_conv_expr (&rse, e);
1820 if (e->ts.type == BT_CHARACTER)
1822 tse.string_length = rse.string_length;
1823 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1824 tse.string_length);
1825 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1826 rse.string_length);
1827 gfc_add_block_to_block (pre, &tse.pre);
1828 gfc_add_block_to_block (post, &tse.post);
1830 else
1832 tmp = gfc_typenode_for_spec (&e->ts);
1833 tse.expr = gfc_create_var (tmp, "temp");
1836 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1837 e->expr_type == EXPR_VARIABLE);
1838 gfc_add_expr_to_block (pre, tmp);
1840 gfc_free_expr (e);
1842 /* Create a new symbol to represent the lvalue. */
1843 new_sym = gfc_new_symbol (old_sym->name, NULL);
1844 new_sym->ts = old_sym->ts;
1845 new_sym->attr.referenced = 1;
1846 new_sym->attr.temporary = 1;
1847 new_sym->attr.dimension = old_sym->attr.dimension;
1848 new_sym->attr.flavor = old_sym->attr.flavor;
1850 /* Use the temporary as the backend_decl. */
1851 new_sym->backend_decl = tse.expr;
1853 /* Create a fake symtree for it. */
1854 root = NULL;
1855 new_symtree = gfc_new_symtree (&root, old_sym->name);
1856 new_symtree->n.sym = new_sym;
1857 gcc_assert (new_symtree == root);
1859 /* Go through the expression reference replacing the old_symtree
1860 with the new. */
1861 forall_replace_symtree (c->expr1, old_sym, 2);
1863 /* Now we have made this temporary, we might as well use it for
1864 the right hand side. */
1865 forall_replace_symtree (c->expr2, old_sym, 1);
1869 /* Handles dependencies in forall assignments. */
1870 static int
1871 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1873 gfc_ref *lref;
1874 gfc_ref *rref;
1875 int need_temp;
1876 gfc_symbol *lsym;
1878 lsym = c->expr1->symtree->n.sym;
1879 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
1881 /* Now check for dependencies within the 'variable'
1882 expression itself. These are treated by making a complete
1883 copy of variable and changing all the references to it
1884 point to the copy instead. Note that the shallow copy of
1885 the variable will not suffice for derived types with
1886 pointer components. We therefore leave these to their
1887 own devices. */
1888 if (lsym->ts.type == BT_DERIVED
1889 && lsym->ts.u.derived->attr.pointer_comp)
1890 return need_temp;
1892 new_symtree = NULL;
1893 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
1895 forall_make_variable_temp (c, pre, post);
1896 need_temp = 0;
1899 /* Substrings with dependencies are treated in the same
1900 way. */
1901 if (c->expr1->ts.type == BT_CHARACTER
1902 && c->expr1->ref
1903 && c->expr2->expr_type == EXPR_VARIABLE
1904 && lsym == c->expr2->symtree->n.sym)
1906 for (lref = c->expr1->ref; lref; lref = lref->next)
1907 if (lref->type == REF_SUBSTRING)
1908 break;
1909 for (rref = c->expr2->ref; rref; rref = rref->next)
1910 if (rref->type == REF_SUBSTRING)
1911 break;
1913 if (rref && lref
1914 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1916 forall_make_variable_temp (c, pre, post);
1917 need_temp = 0;
1920 return need_temp;
1924 static void
1925 cleanup_forall_symtrees (gfc_code *c)
1927 forall_restore_symtree (c->expr1);
1928 forall_restore_symtree (c->expr2);
1929 gfc_free (new_symtree->n.sym);
1930 gfc_free (new_symtree);
1934 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1935 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1936 indicates whether we should generate code to test the FORALL's mask
1937 array. OUTER is the loop header to be used for initializing mask
1938 indices.
1940 The generated loop format is:
1941 count = (end - start + step) / step
1942 loopvar = start
1943 while (1)
1945 if (count <= 0)
1946 goto end_of_loop
1947 <body>
1948 loopvar += step
1949 count--
1951 end_of_loop: */
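/* Editor's worked example (illustrative): for  FORALL (i = 1:10:3)  the
   formula above gives  count = (10 - 1 + 3) / 3 = 4,  matching the index
   values i = 1, 4, 7, 10.  */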
1953 static tree
1954 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1955 int mask_flag, stmtblock_t *outer)
1957 int n, nvar;
1958 tree tmp;
1959 tree cond;
1960 stmtblock_t block;
1961 tree exit_label;
1962 tree count;
1963 tree var, start, end, step;
1964 iter_info *iter;
1966 /* Initialize the mask index outside the FORALL nest. */
1967 if (mask_flag && forall_tmp->mask)
1968 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1970 iter = forall_tmp->this_loop;
1971 nvar = forall_tmp->nvar;
1972 for (n = 0; n < nvar; n++)
1974 var = iter->var;
1975 start = iter->start;
1976 end = iter->end;
1977 step = iter->step;
1979 exit_label = gfc_build_label_decl (NULL_TREE);
1980 TREE_USED (exit_label) = 1;
1982 /* The loop counter. */
1983 count = gfc_create_var (TREE_TYPE (var), "count");
1985 /* The body of the loop. */
1986 gfc_init_block (&block);
1988 /* The exit condition. */
1989 cond = fold_build2 (LE_EXPR, boolean_type_node,
1990 count, build_int_cst (TREE_TYPE (count), 0));
1991 tmp = build1_v (GOTO_EXPR, exit_label);
1992 tmp = fold_build3 (COND_EXPR, void_type_node,
1993 cond, tmp, build_empty_stmt (input_location));
1994 gfc_add_expr_to_block (&block, tmp);
1996 /* The main loop body. */
1997 gfc_add_expr_to_block (&block, body);
1999 /* Increment the loop variable. */
2000 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
2001 gfc_add_modify (&block, var, tmp);
2003 /* Advance to the next mask element. Only do this for the
2004 innermost loop. */
2005 if (n == 0 && mask_flag && forall_tmp->mask)
2007 tree maskindex = forall_tmp->maskindex;
2008 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2009 maskindex, gfc_index_one_node);
2010 gfc_add_modify (&block, maskindex, tmp);
2013 /* Decrement the loop counter. */
2014 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
2015 build_int_cst (TREE_TYPE (var), 1));
2016 gfc_add_modify (&block, count, tmp);
2018 body = gfc_finish_block (&block);
2020 /* Loop var initialization. */
2021 gfc_init_block (&block);
2022 gfc_add_modify (&block, var, start);
2025 /* Initialize the loop counter. */
2026 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
2027 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
2028 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
2029 gfc_add_modify (&block, count, tmp);
2031 /* The loop expression. */
2032 tmp = build1_v (LOOP_EXPR, body);
2033 gfc_add_expr_to_block (&block, tmp);
2035 /* The exit label. */
2036 tmp = build1_v (LABEL_EXPR, exit_label);
2037 gfc_add_expr_to_block (&block, tmp);
2039 body = gfc_finish_block (&block);
2040 iter = iter->next;
2042 return body;
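/* A minimal sketch of the count-driven loop shape documented above
   gfc_trans_forall_loop, written in plain C rather than as trees.  The
   function name and parameters are hypothetical and STEP is assumed to
   be nonzero; the point is only that the trip count
   (end - start + step) / step is computed once, so the loop terminates
   correctly even when STEP does not evenly divide the range or is
   negative.  */
#if 0
static void
forall_loop_shape_sketch (int start, int end, int step, void (*body) (int))
{
  int count = (end - start + step) / step;	/* Trip count, computed once.  */
  int loopvar = start;

  while (1)
    {
      if (count <= 0)
	break;			/* Corresponds to "goto end_of_loop".  */
      body (loopvar);		/* <body>  */
      loopvar += step;
      count--;
    }
}
#endif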
2046 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2047 is nonzero, the body is controlled by all masks in the forall nest.
2048 Otherwise, the innermost loop is not controlled by its mask. This
2049 is used for initializing that mask. */
2051 static tree
2052 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2053 int mask_flag)
2055 tree tmp;
2056 stmtblock_t header;
2057 forall_info *forall_tmp;
2058 tree mask, maskindex;
2060 gfc_start_block (&header);
2062 forall_tmp = nested_forall_info;
2063 while (forall_tmp != NULL)
2065 /* Generate body with masks' control. */
2066 if (mask_flag)
2068 mask = forall_tmp->mask;
2069 maskindex = forall_tmp->maskindex;
2071 /* If a mask was specified make the assignment conditional. */
2072 if (mask)
2074 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2075 body = build3_v (COND_EXPR, tmp, body,
2076 build_empty_stmt (input_location));
2079 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2080 forall_tmp = forall_tmp->prev_nest;
2081 mask_flag = 1;
2084 gfc_add_expr_to_block (&header, body);
2085 return gfc_finish_block (&header);
2089 /* Allocate data for holding a temporary array. Returns either a local
2090 temporary array or a pointer variable. */
2092 static tree
2093 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2094 tree elem_type)
2096 tree tmpvar;
2097 tree type;
2098 tree tmp;
2100 if (INTEGER_CST_P (size))
2102 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2103 gfc_index_one_node);
2105 else
2106 tmp = NULL_TREE;
2108 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2109 type = build_array_type (elem_type, type);
2110 if (gfc_can_put_var_on_stack (bytesize))
2112 gcc_assert (INTEGER_CST_P (size));
2113 tmpvar = gfc_create_var (type, "temp");
2114 *pdata = NULL_TREE;
2116 else
2118 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2119 *pdata = convert (pvoid_type_node, tmpvar);
2121 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2122 gfc_add_modify (pblock, tmpvar, tmp);
2124 return tmpvar;
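/* A minimal run-time analogue of the allocation policy in gfc_do_allocate
   above: temporaries whose byte size is known and small enough stay on
   the stack (so there is nothing to free), anything else comes from the
   heap and must be freed by the caller through *PDATA.  The helper name
   and the STACK_BUF/STACK_LIMIT parameters are hypothetical.  */
#if 0
#include <stdlib.h>

static void *
do_allocate_sketch (size_t bytesize, size_t stack_limit,
		    void *stack_buf, void **pdata)
{
  if (stack_buf != NULL && bytesize <= stack_limit)
    {
      *pdata = NULL;		/* Local temporary: nothing to free later.  */
      return stack_buf;
    }

  *pdata = malloc (bytesize);	/* Heap temporary, analogous to gfc_call_malloc.  */
  return *pdata;
}
#endif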
2128 /* Generate code to copy the temporary to the actual lhs. */
2130 static tree
2131 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2132 tree count1, tree wheremask, bool invert)
2134 gfc_ss *lss;
2135 gfc_se lse, rse;
2136 stmtblock_t block, body;
2137 gfc_loopinfo loop1;
2138 tree tmp;
2139 tree wheremaskexpr;
2141 /* Walk the lhs. */
2142 lss = gfc_walk_expr (expr);
2144 if (lss == gfc_ss_terminator)
2146 gfc_start_block (&block);
2148 gfc_init_se (&lse, NULL);
2150 /* Translate the expression. */
2151 gfc_conv_expr (&lse, expr);
2153 /* Form the expression for the temporary. */
2154 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2156 /* Use the scalar assignment as is. */
2157 gfc_add_block_to_block (&block, &lse.pre);
2158 gfc_add_modify (&block, lse.expr, tmp);
2159 gfc_add_block_to_block (&block, &lse.post);
2161 /* Increment the count1. */
2162 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2163 gfc_index_one_node);
2164 gfc_add_modify (&block, count1, tmp);
2166 tmp = gfc_finish_block (&block);
2168 else
2170 gfc_start_block (&block);
2172 gfc_init_loopinfo (&loop1);
2173 gfc_init_se (&rse, NULL);
2174 gfc_init_se (&lse, NULL);
2176 /* Associate the lss with the loop. */
2177 gfc_add_ss_to_loop (&loop1, lss);
2179 /* Calculate the bounds of the scalarization. */
2180 gfc_conv_ss_startstride (&loop1);
2181 /* Setup the scalarizing loops. */
2182 gfc_conv_loop_setup (&loop1, &expr->where);
2184 gfc_mark_ss_chain_used (lss, 1);
2186 /* Start the scalarized loop body. */
2187 gfc_start_scalarized_body (&loop1, &body);
2189 /* Setup the gfc_se structures. */
2190 gfc_copy_loopinfo_to_se (&lse, &loop1);
2191 lse.ss = lss;
2193 /* Form the expression of the temporary. */
2194 if (lss != gfc_ss_terminator)
2195 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2196 /* Translate expr. */
2197 gfc_conv_expr (&lse, expr);
2199 /* Use the scalar assignment. */
2200 rse.string_length = lse.string_length;
2201 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2203 /* Form the mask expression according to the mask tree list. */
2204 if (wheremask)
2206 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2207 if (invert)
2208 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2209 TREE_TYPE (wheremaskexpr),
2210 wheremaskexpr);
2211 tmp = fold_build3 (COND_EXPR, void_type_node,
2212 wheremaskexpr, tmp,
2213 build_empty_stmt (input_location));
2216 gfc_add_expr_to_block (&body, tmp);
2218 /* Increment count1. */
2219 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2220 count1, gfc_index_one_node);
2221 gfc_add_modify (&body, count1, tmp);
2223 /* Increment count3. */
2224 if (count3)
2226 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2227 count3, gfc_index_one_node);
2228 gfc_add_modify (&body, count3, tmp);
2231 /* Generate the copying loops. */
2232 gfc_trans_scalarizing_loops (&loop1, &body);
2233 gfc_add_block_to_block (&block, &loop1.pre);
2234 gfc_add_block_to_block (&block, &loop1.post);
2235 gfc_cleanup_loop (&loop1);
2237 tmp = gfc_finish_block (&block);
2239 return tmp;
2243 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
2244 the temporary; LSS and RSS are formed in compute_inner_temp_size ()
2245 and should not be freed. WHEREMASK is the conditional execution mask
2246 whose sense may be inverted by INVERT. */
2248 static tree
2249 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2250 tree count1, gfc_ss *lss, gfc_ss *rss,
2251 tree wheremask, bool invert)
2253 stmtblock_t block, body1;
2254 gfc_loopinfo loop;
2255 gfc_se lse;
2256 gfc_se rse;
2257 tree tmp;
2258 tree wheremaskexpr;
2260 gfc_start_block (&block);
2262 gfc_init_se (&rse, NULL);
2263 gfc_init_se (&lse, NULL);
2265 if (lss == gfc_ss_terminator)
2267 gfc_init_block (&body1);
2268 gfc_conv_expr (&rse, expr2);
2269 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2271 else
2273 /* Initialize the loop. */
2274 gfc_init_loopinfo (&loop);
2276 /* We may need LSS to determine the shape of the expression. */
2277 gfc_add_ss_to_loop (&loop, lss);
2278 gfc_add_ss_to_loop (&loop, rss);
2280 gfc_conv_ss_startstride (&loop);
2281 gfc_conv_loop_setup (&loop, &expr2->where);
2283 gfc_mark_ss_chain_used (rss, 1);
2284 /* Start the loop body. */
2285 gfc_start_scalarized_body (&loop, &body1);
2287 /* Translate the expression. */
2288 gfc_copy_loopinfo_to_se (&rse, &loop);
2289 rse.ss = rss;
2290 gfc_conv_expr (&rse, expr2);
2292 /* Form the expression of the temporary. */
2293 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2296 /* Use the scalar assignment. */
2297 lse.string_length = rse.string_length;
2298 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2299 expr2->expr_type == EXPR_VARIABLE);
2301 /* Form the mask expression according to the mask tree list. */
2302 if (wheremask)
2304 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2305 if (invert)
2306 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2307 TREE_TYPE (wheremaskexpr),
2308 wheremaskexpr);
2309 tmp = fold_build3 (COND_EXPR, void_type_node,
2310 wheremaskexpr, tmp, build_empty_stmt (input_location));
2313 gfc_add_expr_to_block (&body1, tmp);
2315 if (lss == gfc_ss_terminator)
2317 gfc_add_block_to_block (&block, &body1);
2319 /* Increment count1. */
2320 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2321 gfc_index_one_node);
2322 gfc_add_modify (&block, count1, tmp);
2324 else
2326 /* Increment count1. */
2327 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2328 count1, gfc_index_one_node);
2329 gfc_add_modify (&body1, count1, tmp);
2331 /* Increment count3. */
2332 if (count3)
2334 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2335 count3, gfc_index_one_node);
2336 gfc_add_modify (&body1, count3, tmp);
2339 /* Generate the copying loops. */
2340 gfc_trans_scalarizing_loops (&loop, &body1);
2342 gfc_add_block_to_block (&block, &loop.pre);
2343 gfc_add_block_to_block (&block, &loop.post);
2345 gfc_cleanup_loop (&loop);
2346 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2347 as tree nodes in SS may not be valid in a different scope. */
2350 tmp = gfc_finish_block (&block);
2351 return tmp;
2355 /* Calculate the size of the temporary needed in the assignment inside the forall.
2356 LSS and RSS are filled in this function. */
2358 static tree
2359 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2360 stmtblock_t * pblock,
2361 gfc_ss **lss, gfc_ss **rss)
2363 gfc_loopinfo loop;
2364 tree size;
2365 int i;
2366 int save_flag;
2367 tree tmp;
2369 *lss = gfc_walk_expr (expr1);
2370 *rss = NULL;
2372 size = gfc_index_one_node;
2373 if (*lss != gfc_ss_terminator)
2375 gfc_init_loopinfo (&loop);
2377 /* Walk the RHS of the expression. */
2378 *rss = gfc_walk_expr (expr2);
2379 if (*rss == gfc_ss_terminator)
2381 /* The rhs is scalar. Add a ss for the expression. */
2382 *rss = gfc_get_ss ();
2383 (*rss)->next = gfc_ss_terminator;
2384 (*rss)->type = GFC_SS_SCALAR;
2385 (*rss)->expr = expr2;
2388 /* Associate the SS with the loop. */
2389 gfc_add_ss_to_loop (&loop, *lss);
2390 /* We don't actually need to add the rhs at this point, but it might
2391 make guessing the loop bounds a bit easier. */
2392 gfc_add_ss_to_loop (&loop, *rss);
2394 /* We only want the shape of the expression, not the rest of the junk
2395 generated by the scalarizer. */
2396 loop.array_parameter = 1;
2398 /* Calculate the bounds of the scalarization. */
2399 save_flag = gfc_option.rtcheck;
2400 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2401 gfc_conv_ss_startstride (&loop);
2402 gfc_option.rtcheck = save_flag;
2403 gfc_conv_loop_setup (&loop, &expr2->where);
2405 /* Figure out how many elements we need. */
2406 for (i = 0; i < loop.dimen; i++)
2408 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2409 gfc_index_one_node, loop.from[i]);
2410 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2411 tmp, loop.to[i]);
2412 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2414 gfc_add_block_to_block (pblock, &loop.pre);
2415 size = gfc_evaluate_now (size, pblock);
2416 gfc_add_block_to_block (pblock, &loop.post);
2418 /* TODO: write a function that cleans up a loopinfo without freeing
2419 the SS chains. Currently a NOP. */
2422 return size;
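/* The size computed above is simply the element count of the scalarized
   assignment: the product over all loop dimensions of (to - from + 1).
   A minimal sketch of that arithmetic with hypothetical names, using
   plain integers instead of trees:  */
#if 0
static long
inner_temp_size_sketch (int dimen, const long *from, const long *to)
{
  long size = 1;
  int i;

  for (i = 0; i < dimen; i++)
    size *= to[i] - from[i] + 1;	/* Same folding as (1 - from) + to above.  */

  return size;
}
#endif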
2426 /* Calculate the overall iterator number of the nested forall construct.
2427 This routine actually calculates the number of times the body of the
2428 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2429 that by the expression INNER_SIZE. The BLOCK argument specifies the
2430 block in which to calculate the result, and the optional INNER_SIZE_BODY
2431 argument contains any statements that need to be executed (inside the loop)
2432 to initialize or calculate INNER_SIZE. */
2434 static tree
2435 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2436 stmtblock_t *inner_size_body, stmtblock_t *block)
2438 forall_info *forall_tmp = nested_forall_info;
2439 tree tmp, number;
2440 stmtblock_t body;
2442 /* We can eliminate the innermost unconditional loops with constant
2443 array bounds. */
2444 if (INTEGER_CST_P (inner_size))
2446 while (forall_tmp
2447 && !forall_tmp->mask
2448 && INTEGER_CST_P (forall_tmp->size))
2450 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2451 inner_size, forall_tmp->size);
2452 forall_tmp = forall_tmp->prev_nest;
2455 /* If there are no loops left, we have our constant result. */
2456 if (!forall_tmp)
2457 return inner_size;
2460 /* Otherwise, create a temporary variable to compute the result. */
2461 number = gfc_create_var (gfc_array_index_type, "num");
2462 gfc_add_modify (block, number, gfc_index_zero_node);
2464 gfc_start_block (&body);
2465 if (inner_size_body)
2466 gfc_add_block_to_block (&body, inner_size_body);
2467 if (forall_tmp)
2468 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2469 number, inner_size);
2470 else
2471 tmp = inner_size;
2472 gfc_add_modify (&body, number, tmp);
2473 tmp = gfc_finish_block (&body);
2475 /* Generate loops. */
2476 if (forall_tmp != NULL)
2477 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2479 gfc_add_expr_to_block (block, tmp);
2481 return number;
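/* When a FORALL level carries a mask, its contribution to the overall
   iteration count cannot be folded, so the count is accumulated by
   actually running the nest and adding INNER_SIZE for every iteration
   whose mask element is set.  A minimal single-level sketch with
   hypothetical names (MASK may be null for an unconditional level):  */
#if 0
static long
overall_iter_number_sketch (const unsigned char *mask, long level_size,
			    long inner_size)
{
  long number = 0;
  long i;

  for (i = 0; i < level_size; i++)
    if (mask == NULL || mask[i])
      number += inner_size;

  return number;
}
#endif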
2485 /* Allocate a temporary for a forall construct. SIZE is the size of the
2486 temporary needed. PTEMP1 is returned so that the space can be freed later. */
2488 static tree
2489 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2490 tree * ptemp1)
2492 tree bytesize;
2493 tree unit;
2494 tree tmp;
2496 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2497 if (!integer_onep (unit))
2498 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2499 else
2500 bytesize = size;
2502 *ptemp1 = NULL;
2503 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2505 if (*ptemp1)
2506 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2507 return tmp;
2511 /* Allocate a temporary for a forall construct according to the information
2512 in nested_forall_info. INNER_SIZE is the size of the temporary needed in
2513 the assignment inside the forall. PTEMP1 is returned so that the space can be freed later. */
2515 static tree
2516 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2517 tree inner_size, stmtblock_t * inner_size_body,
2518 stmtblock_t * block, tree * ptemp1)
2520 tree size;
2522 /* Calculate the total size of temporary needed in forall construct. */
2523 size = compute_overall_iter_number (nested_forall_info, inner_size,
2524 inner_size_body, block);
2526 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2530 /* Handle assignments inside forall which need a temporary.
2532 forall (i=start:end:stride; maskexpr)
2533 e<i> = f<i>
2534 end forall
2535 (where e,f<i> are arbitrary expressions possibly involving i
2536 and there is a dependency between e<i> and f<i>)
2537 Translates to:
2538 masktmp(:) = maskexpr(:)
2540 maskindex = 0;
2541 count1 = 0;
2542 num = 0;
2543 for (i = start; i <= end; i += stride)
2544 num += SIZE (f<i>)
2545 count1 = 0;
2546 ALLOCATE (tmp(num))
2547 for (i = start; i <= end; i += stride)
2549 if (masktmp[maskindex++])
2550 tmp[count1++] = f<i>
2552 maskindex = 0;
2553 count1 = 0;
2554 for (i = start; i <= end; i += stride)
2556 if (masktmp[maskindex++])
2557 e<i> = tmp[count1++]
2559 DEALLOCATE (tmp)
2561 static void
2562 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2563 tree wheremask, bool invert,
2564 forall_info * nested_forall_info,
2565 stmtblock_t * block)
2567 tree type;
2568 tree inner_size;
2569 gfc_ss *lss, *rss;
2570 tree count, count1;
2571 tree tmp, tmp1;
2572 tree ptemp1;
2573 stmtblock_t inner_size_body;
2575 /* Create vars. count1 is the current iterator number of the nested
2576 forall. */
2577 count1 = gfc_create_var (gfc_array_index_type, "count1");
2579 /* Count is the wheremask index. */
2580 if (wheremask)
2582 count = gfc_create_var (gfc_array_index_type, "count");
2583 gfc_add_modify (block, count, gfc_index_zero_node);
2585 else
2586 count = NULL;
2588 /* Initialize count1. */
2589 gfc_add_modify (block, count1, gfc_index_zero_node);
2591 /* Calculate the size of the temporary needed in the assignment. Return lss
2592 and rss, which are used in generate_loop_for_rhs_to_temp(). */
2593 gfc_init_block (&inner_size_body);
2594 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2595 &lss, &rss);
2597 /* The type of the LHS. Used in allocate_temp_for_forall_nest (). */
2598 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2600 if (!expr1->ts.u.cl->backend_decl)
2602 gfc_se tse;
2603 gfc_init_se (&tse, NULL);
2604 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2605 expr1->ts.u.cl->backend_decl = tse.expr;
2607 type = gfc_get_character_type_len (gfc_default_character_kind,
2608 expr1->ts.u.cl->backend_decl);
2610 else
2611 type = gfc_typenode_for_spec (&expr1->ts);
2613 /* Allocate temporary for nested forall construct according to the
2614 information in nested_forall_info and inner_size. */
2615 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2616 &inner_size_body, block, &ptemp1);
2618 /* Generate code to copy the rhs to the temporary. */
2619 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2620 wheremask, invert);
2622 /* Generate body and loops according to the information in
2623 nested_forall_info. */
2624 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2625 gfc_add_expr_to_block (block, tmp);
2627 /* Reset count1. */
2628 gfc_add_modify (block, count1, gfc_index_zero_node);
2630 /* Reset count. */
2631 if (wheremask)
2632 gfc_add_modify (block, count, gfc_index_zero_node);
2634 /* Generate code to copy the temporary to the lhs. */
2635 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2636 wheremask, invert);
2638 /* Generate body and loops according to the information in
2639 nested_forall_info. */
2640 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2641 gfc_add_expr_to_block (block, tmp);
2643 if (ptemp1)
2645 /* Free the temporary. */
2646 tmp = gfc_call_free (ptemp1);
2647 gfc_add_expr_to_block (block, tmp);
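/* A minimal, one-dimensional sketch of the two-pass scheme implemented
   above, with hypothetical names and plain C arrays: pass one evaluates
   every masked rhs element into a linear temporary, pass two replays the
   same iteration order and stores into the lhs, so the assignment behaves
   as if all rhs values had been evaluated before any lhs element changed.  */
#if 0
static void
forall_assign_via_temp_sketch (double *e, const double *f,
			       const unsigned char *mask, int n, double *tmp)
{
  int i, count1;

  /* Copy the rhs to the temporary.  */
  count1 = 0;
  for (i = 0; i < n; i++)
    if (mask[i])
      tmp[count1++] = f[i];

  /* Copy the temporary to the lhs, replaying the same order.  */
  count1 = 0;
  for (i = 0; i < n; i++)
    if (mask[i])
      e[i] = tmp[count1++];
}
#endif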
2652 /* Translate a pointer assignment inside FORALL which needs a temporary. */
2654 static void
2655 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2656 forall_info * nested_forall_info,
2657 stmtblock_t * block)
2659 tree type;
2660 tree inner_size;
2661 gfc_ss *lss, *rss;
2662 gfc_se lse;
2663 gfc_se rse;
2664 gfc_ss_info *info;
2665 gfc_loopinfo loop;
2666 tree desc;
2667 tree parm;
2668 tree parmtype;
2669 stmtblock_t body;
2670 tree count;
2671 tree tmp, tmp1, ptemp1;
2673 count = gfc_create_var (gfc_array_index_type, "count");
2674 gfc_add_modify (block, count, gfc_index_zero_node);
2676 inner_size = integer_one_node;
2677 lss = gfc_walk_expr (expr1);
2678 rss = gfc_walk_expr (expr2);
2679 if (lss == gfc_ss_terminator)
2681 type = gfc_typenode_for_spec (&expr1->ts);
2682 type = build_pointer_type (type);
2684 /* Allocate temporary for nested forall construct according to the
2685 information in nested_forall_info and inner_size. */
2686 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2687 inner_size, NULL, block, &ptemp1);
2688 gfc_start_block (&body);
2689 gfc_init_se (&lse, NULL);
2690 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2691 gfc_init_se (&rse, NULL);
2692 rse.want_pointer = 1;
2693 gfc_conv_expr (&rse, expr2);
2694 gfc_add_block_to_block (&body, &rse.pre);
2695 gfc_add_modify (&body, lse.expr,
2696 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2697 gfc_add_block_to_block (&body, &rse.post);
2699 /* Increment count. */
2700 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2701 count, gfc_index_one_node);
2702 gfc_add_modify (&body, count, tmp);
2704 tmp = gfc_finish_block (&body);
2706 /* Generate body and loops according to the information in
2707 nested_forall_info. */
2708 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2709 gfc_add_expr_to_block (block, tmp);
2711 /* Reset count. */
2712 gfc_add_modify (block, count, gfc_index_zero_node);
2714 gfc_start_block (&body);
2715 gfc_init_se (&lse, NULL);
2716 gfc_init_se (&rse, NULL);
2717 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2718 lse.want_pointer = 1;
2719 gfc_conv_expr (&lse, expr1);
2720 gfc_add_block_to_block (&body, &lse.pre);
2721 gfc_add_modify (&body, lse.expr, rse.expr);
2722 gfc_add_block_to_block (&body, &lse.post);
2723 /* Increment count. */
2724 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2725 count, gfc_index_one_node);
2726 gfc_add_modify (&body, count, tmp);
2727 tmp = gfc_finish_block (&body);
2729 /* Generate body and loops according to the information in
2730 nested_forall_info. */
2731 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2732 gfc_add_expr_to_block (block, tmp);
2734 else
2736 gfc_init_loopinfo (&loop);
2738 /* Associate the SS with the loop. */
2739 gfc_add_ss_to_loop (&loop, rss);
2741 /* Setup the scalarizing loops and bounds. */
2742 gfc_conv_ss_startstride (&loop);
2744 gfc_conv_loop_setup (&loop, &expr2->where);
2746 info = &rss->data.info;
2747 desc = info->descriptor;
2749 /* Make a new descriptor. */
2750 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2751 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2752 loop.from, loop.to, 1,
2753 GFC_ARRAY_UNKNOWN, true);
2755 /* Allocate temporary for nested forall construct. */
2756 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2757 inner_size, NULL, block, &ptemp1);
2758 gfc_start_block (&body);
2759 gfc_init_se (&lse, NULL);
2760 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2761 lse.direct_byref = 1;
2762 rss = gfc_walk_expr (expr2);
2763 gfc_conv_expr_descriptor (&lse, expr2, rss);
2765 gfc_add_block_to_block (&body, &lse.pre);
2766 gfc_add_block_to_block (&body, &lse.post);
2768 /* Increment count. */
2769 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2770 count, gfc_index_one_node);
2771 gfc_add_modify (&body, count, tmp);
2773 tmp = gfc_finish_block (&body);
2775 /* Generate body and loops according to the information in
2776 nested_forall_info. */
2777 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2778 gfc_add_expr_to_block (block, tmp);
2780 /* Reset count. */
2781 gfc_add_modify (block, count, gfc_index_zero_node);
2783 parm = gfc_build_array_ref (tmp1, count, NULL);
2784 lss = gfc_walk_expr (expr1);
2785 gfc_init_se (&lse, NULL);
2786 gfc_conv_expr_descriptor (&lse, expr1, lss);
2787 gfc_add_modify (&lse.pre, lse.expr, parm);
2788 gfc_start_block (&body);
2789 gfc_add_block_to_block (&body, &lse.pre);
2790 gfc_add_block_to_block (&body, &lse.post);
2792 /* Increment count. */
2793 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2794 count, gfc_index_one_node);
2795 gfc_add_modify (&body, count, tmp);
2797 tmp = gfc_finish_block (&body);
2799 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2800 gfc_add_expr_to_block (block, tmp);
2802 /* Free the temporary. */
2803 if (ptemp1)
2805 tmp = gfc_call_free (ptemp1);
2806 gfc_add_expr_to_block (block, tmp);
2811 /* FORALL and WHERE statements are really nasty, especially when you nest
2812 them. The rhs of every forall assignment must be evaluated before the
2813 actual assignments are performed. Presumably this also applies to all the
2814 assignments in an inner where statement. */
2816 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2817 linear array, relying on the fact that we process in the same order in all
2818 loops.
2820 forall (i=start:end:stride; maskexpr)
2821 e<i> = f<i>
2822 g<i> = h<i>
2823 end forall
2824 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2825 Translates to:
2826 count = ((end - start + stride) / stride)
2827 masktmp(:) = maskexpr(:)
2829 maskindex = 0;
2830 for (i = start; i <= end; i += stride)
2832 if (masktmp[maskindex++])
2833 e<i> = f<i>
2835 maskindex = 0;
2836 for (i = start; i <= end; i += stride)
2838 if (masktmp[maskindex++])
2839 g<i> = h<i>
2842 Note that this code only works when there are no dependencies.
2843 Forall loops with array assignments and data dependencies are a real pain,
2844 because the size of the temporary cannot always be determined before the
2845 loop is executed. This problem is compounded by the presence of nested
2846 FORALL constructs.
2849 static tree
2850 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2852 stmtblock_t pre;
2853 stmtblock_t post;
2854 stmtblock_t block;
2855 stmtblock_t body;
2856 tree *var;
2857 tree *start;
2858 tree *end;
2859 tree *step;
2860 gfc_expr **varexpr;
2861 tree tmp;
2862 tree assign;
2863 tree size;
2864 tree maskindex;
2865 tree mask;
2866 tree pmask;
2867 int n;
2868 int nvar;
2869 int need_temp;
2870 gfc_forall_iterator *fa;
2871 gfc_se se;
2872 gfc_code *c;
2873 gfc_saved_var *saved_vars;
2874 iter_info *this_forall;
2875 forall_info *info;
2876 bool need_mask;
2878 /* Do nothing if the mask is false. */
2879 if (code->expr1
2880 && code->expr1->expr_type == EXPR_CONSTANT
2881 && !code->expr1->value.logical)
2882 return build_empty_stmt (input_location);
2884 n = 0;
2885 /* Count the number of FORALL indices. */
2886 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2887 n++;
2888 nvar = n;
2890 /* Allocate the space for var, start, end, step, varexpr. */
2891 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2892 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2893 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2894 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2895 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2896 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2898 /* Allocate the space for info. */
2899 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2901 gfc_start_block (&pre);
2902 gfc_init_block (&post);
2903 gfc_init_block (&block);
2905 n = 0;
2906 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2908 gfc_symbol *sym = fa->var->symtree->n.sym;
2910 /* Allocate space for this_forall. */
2911 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2913 /* Create a temporary variable for the FORALL index. */
2914 tmp = gfc_typenode_for_spec (&sym->ts);
2915 var[n] = gfc_create_var (tmp, sym->name);
2916 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2918 /* Record it in this_forall. */
2919 this_forall->var = var[n];
2921 /* Replace the index symbol's backend_decl with the temporary decl. */
2922 sym->backend_decl = var[n];
2924 /* Work out the start, end and stride for the loop. */
2925 gfc_init_se (&se, NULL);
2926 gfc_conv_expr_val (&se, fa->start);
2927 /* Record it in this_forall. */
2928 this_forall->start = se.expr;
2929 gfc_add_block_to_block (&block, &se.pre);
2930 start[n] = se.expr;
2932 gfc_init_se (&se, NULL);
2933 gfc_conv_expr_val (&se, fa->end);
2934 /* Record it in this_forall. */
2935 this_forall->end = se.expr;
2936 gfc_make_safe_expr (&se);
2937 gfc_add_block_to_block (&block, &se.pre);
2938 end[n] = se.expr;
2940 gfc_init_se (&se, NULL);
2941 gfc_conv_expr_val (&se, fa->stride);
2942 /* Record it in this_forall. */
2943 this_forall->step = se.expr;
2944 gfc_make_safe_expr (&se);
2945 gfc_add_block_to_block (&block, &se.pre);
2946 step[n] = se.expr;
2948 /* Set the NEXT field of this_forall to NULL. */
2949 this_forall->next = NULL;
2950 /* Link this_forall to the info construct. */
2951 if (info->this_loop)
2953 iter_info *iter_tmp = info->this_loop;
2954 while (iter_tmp->next != NULL)
2955 iter_tmp = iter_tmp->next;
2956 iter_tmp->next = this_forall;
2958 else
2959 info->this_loop = this_forall;
2961 n++;
2963 nvar = n;
2965 /* Calculate the size needed for the current forall level. */
2966 size = gfc_index_one_node;
2967 for (n = 0; n < nvar; n++)
2969 /* size = (end + step - start) / step. */
2970 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2971 step[n], start[n]);
2972 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2974 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2975 tmp = convert (gfc_array_index_type, tmp);
2977 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2980 /* Record the nvar and size of current forall level. */
2981 info->nvar = nvar;
2982 info->size = size;
2984 if (code->expr1)
2986 /* If the mask is .true., consider the FORALL unconditional. */
2987 if (code->expr1->expr_type == EXPR_CONSTANT
2988 && code->expr1->value.logical)
2989 need_mask = false;
2990 else
2991 need_mask = true;
2993 else
2994 need_mask = false;
2996 /* First we need to allocate the mask. */
2997 if (need_mask)
2999 /* As the mask array can be very big, prefer compact boolean types. */
3000 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3001 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3002 size, NULL, &block, &pmask);
3003 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3005 /* Record them in the info structure. */
3006 info->maskindex = maskindex;
3007 info->mask = mask;
3009 else
3011 /* No mask was specified. */
3012 maskindex = NULL_TREE;
3013 mask = pmask = NULL_TREE;
3016 /* Link the current forall level to nested_forall_info. */
3017 info->prev_nest = nested_forall_info;
3018 nested_forall_info = info;
3020 /* Copy the mask into a temporary variable if required.
3021 For now we assume a mask temporary is needed. */
3022 if (need_mask)
3024 /* As the mask array can be very big, prefer compact boolean types. */
3025 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3027 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3029 /* Start of mask assignment loop body. */
3030 gfc_start_block (&body);
3032 /* Evaluate the mask expression. */
3033 gfc_init_se (&se, NULL);
3034 gfc_conv_expr_val (&se, code->expr1);
3035 gfc_add_block_to_block (&body, &se.pre);
3037 /* Store the mask. */
3038 se.expr = convert (mask_type, se.expr);
3040 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3041 gfc_add_modify (&body, tmp, se.expr);
3043 /* Advance to the next mask element. */
3044 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3045 maskindex, gfc_index_one_node);
3046 gfc_add_modify (&body, maskindex, tmp);
3048 /* Generate the loops. */
3049 tmp = gfc_finish_block (&body);
3050 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3051 gfc_add_expr_to_block (&block, tmp);
3054 c = code->block->next;
3056 /* TODO: loop merging in FORALL statements. */
3057 /* Now that we've got a copy of the mask, generate the assignment loops. */
3058 while (c)
3060 switch (c->op)
3062 case EXEC_ASSIGN:
3063 /* A scalar or array assignment. Do the simple check for
3064 lhs to rhs dependencies. These make a temporary for the
3065 rhs and form a second forall block to copy it to the variable. */
3066 need_temp = check_forall_dependencies(c, &pre, &post);
3068 /* Temporaries due to array assignment data dependencies introduce
3069 no end of problems. */
3070 if (need_temp)
3071 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3072 nested_forall_info, &block);
3073 else
3075 /* Use the normal assignment copying routines. */
3076 assign = gfc_trans_assignment (c->expr1, c->expr2, false);
3078 /* Generate body and loops. */
3079 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3080 assign, 1);
3081 gfc_add_expr_to_block (&block, tmp);
3084 /* Clean up any temporary symtrees that have been made to deal
3085 with dependencies. */
3086 if (new_symtree)
3087 cleanup_forall_symtrees (c);
3089 break;
3091 case EXEC_WHERE:
3092 /* Translate WHERE or WHERE construct nested in FORALL. */
3093 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3094 break;
3096 /* Pointer assignment inside FORALL. */
3097 case EXEC_POINTER_ASSIGN:
3098 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3099 if (need_temp)
3100 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3101 nested_forall_info, &block);
3102 else
3104 /* Use the normal assignment copying routines. */
3105 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3107 /* Generate body and loops. */
3108 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3109 assign, 1);
3110 gfc_add_expr_to_block (&block, tmp);
3112 break;
3114 case EXEC_FORALL:
3115 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3116 gfc_add_expr_to_block (&block, tmp);
3117 break;
3119 /* Explicit subroutine calls are prevented by the frontend but interface
3120 assignments can legitimately produce them. */
3121 case EXEC_ASSIGN_CALL:
3122 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3123 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3124 gfc_add_expr_to_block (&block, tmp);
3125 break;
3127 default:
3128 gcc_unreachable ();
3131 c = c->next;
3134 /* Restore the original index variables. */
3135 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3136 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3138 /* Free the space for var, start, end, step, varexpr. */
3139 gfc_free (var);
3140 gfc_free (start);
3141 gfc_free (end);
3142 gfc_free (step);
3143 gfc_free (varexpr);
3144 gfc_free (saved_vars);
3146 /* Free the space for this forall_info. */
3147 gfc_free (info);
3149 if (pmask)
3151 /* Free the temporary for the mask. */
3152 tmp = gfc_call_free (pmask);
3153 gfc_add_expr_to_block (&block, tmp);
3155 if (maskindex)
3156 pushdecl (maskindex);
3158 gfc_add_block_to_block (&pre, &block);
3159 gfc_add_block_to_block (&pre, &post);
3161 return gfc_finish_block (&pre);
3165 /* Translate the FORALL statement or construct. */
3167 tree gfc_trans_forall (gfc_code * code)
3169 return gfc_trans_forall_1 (code, NULL);
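/* A minimal run-time sketch of the dependency-free execution model
   described in the block comment above gfc_trans_forall_1, with
   hypothetical names and plain C arrays: the mask expression is
   evaluated exactly once into a packed temporary, and each assignment
   in the FORALL body then gets its own masked loop, so later
   assignments see the original mask values.  */
#if 0
#include <stdlib.h>

static void
forall_masked_loops_sketch (double *e, const double *f,
			    double *g, const double *h,
			    const unsigned char *maskexpr, int n)
{
  unsigned char *masktmp = (unsigned char *) malloc (n);
  int i;

  for (i = 0; i < n; i++)
    masktmp[i] = maskexpr[i];	/* masktmp(:) = maskexpr(:)  */

  for (i = 0; i < n; i++)	/* First assignment: e<i> = f<i>.  */
    if (masktmp[i])
      e[i] = f[i];

  for (i = 0; i < n; i++)	/* Second assignment: g<i> = h<i>.  */
    if (masktmp[i])
      g[i] = h[i];

  free (masktmp);
}
#endif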
3173 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3174 If the WHERE construct is nested in FORALL, compute the overall temporary
3175 needed by the WHERE mask expression multiplied by the iterator number of
3176 the nested forall.
3177 ME is the WHERE mask expression.
3178 MASK is the current execution mask upon input, whose sense may or may
3179 not be inverted as specified by the INVERT argument.
3180 CMASK is the updated execution mask on output, or NULL if not required.
3181 PMASK is the pending execution mask on output, or NULL if not required.
3182 BLOCK is the block in which to place the condition evaluation loops. */
3184 static void
3185 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3186 tree mask, bool invert, tree cmask, tree pmask,
3187 tree mask_type, stmtblock_t * block)
3189 tree tmp, tmp1;
3190 gfc_ss *lss, *rss;
3191 gfc_loopinfo loop;
3192 stmtblock_t body, body1;
3193 tree count, cond, mtmp;
3194 gfc_se lse, rse;
3196 gfc_init_loopinfo (&loop);
3198 lss = gfc_walk_expr (me);
3199 rss = gfc_walk_expr (me);
3201 /* Variable to index the temporary. */
3202 count = gfc_create_var (gfc_array_index_type, "count");
3203 /* Initialize count. */
3204 gfc_add_modify (block, count, gfc_index_zero_node);
3206 gfc_start_block (&body);
3208 gfc_init_se (&rse, NULL);
3209 gfc_init_se (&lse, NULL);
3211 if (lss == gfc_ss_terminator)
3213 gfc_init_block (&body1);
3215 else
3217 /* Initialize the loop. */
3218 gfc_init_loopinfo (&loop);
3220 /* We may need LSS to determine the shape of the expression. */
3221 gfc_add_ss_to_loop (&loop, lss);
3222 gfc_add_ss_to_loop (&loop, rss);
3224 gfc_conv_ss_startstride (&loop);
3225 gfc_conv_loop_setup (&loop, &me->where);
3227 gfc_mark_ss_chain_used (rss, 1);
3228 /* Start the loop body. */
3229 gfc_start_scalarized_body (&loop, &body1);
3231 /* Translate the expression. */
3232 gfc_copy_loopinfo_to_se (&rse, &loop);
3233 rse.ss = rss;
3234 gfc_conv_expr (&rse, me);
3237 /* Variable to evaluate mask condition. */
3238 cond = gfc_create_var (mask_type, "cond");
3239 if (mask && (cmask || pmask))
3240 mtmp = gfc_create_var (mask_type, "mask");
3241 else mtmp = NULL_TREE;
3243 gfc_add_block_to_block (&body1, &lse.pre);
3244 gfc_add_block_to_block (&body1, &rse.pre);
3246 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3248 if (mask && (cmask || pmask))
3250 tmp = gfc_build_array_ref (mask, count, NULL);
3251 if (invert)
3252 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3253 gfc_add_modify (&body1, mtmp, tmp);
3256 if (cmask)
3258 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3259 tmp = cond;
3260 if (mask)
3261 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3262 gfc_add_modify (&body1, tmp1, tmp);
3265 if (pmask)
3267 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3268 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3269 if (mask)
3270 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3271 gfc_add_modify (&body1, tmp1, tmp);
3274 gfc_add_block_to_block (&body1, &lse.post);
3275 gfc_add_block_to_block (&body1, &rse.post);
3277 if (lss == gfc_ss_terminator)
3279 gfc_add_block_to_block (&body, &body1);
3281 else
3283 /* Increment count. */
3284 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3285 gfc_index_one_node);
3286 gfc_add_modify (&body1, count, tmp1);
3288 /* Generate the copying loops. */
3289 gfc_trans_scalarizing_loops (&loop, &body1);
3291 gfc_add_block_to_block (&body, &loop.pre);
3292 gfc_add_block_to_block (&body, &loop.post);
3294 gfc_cleanup_loop (&loop);
3295 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3296 as tree nodes in SS may not be valid in a different scope. */
3299 tmp1 = gfc_finish_block (&body);
3300 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3301 if (nested_forall_info != NULL)
3302 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3304 gfc_add_expr_to_block (block, tmp1);
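/* Stripped of the scalarizer, the mask bookkeeping above reduces to a
   per-element boolean update: the control mask selects the elements this
   clause assigns, the pending mask carries the elements left over for
   later ELSEWHERE clauses.  A minimal sketch with hypothetical names:  */
#if 0
static void
evaluate_where_mask_sketch (const unsigned char *mask, int invert,
			    const unsigned char *cond, int n,
			    unsigned char *cmask, unsigned char *pmask)
{
  int i;

  for (i = 0; i < n; i++)
    {
      unsigned char m = mask ? (invert ? !mask[i] : mask[i]) : 1;

      if (cmask)
	cmask[i] = m && cond[i];	/* Elements assigned by this clause.  */
      if (pmask)
	pmask[i] = m && !cond[i];	/* Elements pending for ELSEWHERE.  */
    }
}
#endif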
3308 /* Translate an assignment statement in a WHERE statement or construct
3309 statement. The MASK expression is used to control which elements
3310 of EXPR1 shall be assigned. The sense of MASK is specified by
3311 INVERT. */
3313 static tree
3314 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3315 tree mask, bool invert,
3316 tree count1, tree count2,
3317 gfc_code *cnext)
3319 gfc_se lse;
3320 gfc_se rse;
3321 gfc_ss *lss;
3322 gfc_ss *lss_section;
3323 gfc_ss *rss;
3325 gfc_loopinfo loop;
3326 tree tmp;
3327 stmtblock_t block;
3328 stmtblock_t body;
3329 tree index, maskexpr;
3331 /* A defined assignment. */
3332 if (cnext && cnext->resolved_sym)
3333 return gfc_trans_call (cnext, true, mask, count1, invert);
3335 #if 0
3336 /* TODO: handle this special case.
3337 Special case a single function returning an array. */
3338 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3340 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3341 if (tmp)
3342 return tmp;
3344 #endif
3346 /* Assignment of the form lhs = rhs. */
3347 gfc_start_block (&block);
3349 gfc_init_se (&lse, NULL);
3350 gfc_init_se (&rse, NULL);
3352 /* Walk the lhs. */
3353 lss = gfc_walk_expr (expr1);
3354 rss = NULL;
3356 /* In each where-assign-stmt, the mask-expr and the variable being
3357 defined shall be arrays of the same shape. */
3358 gcc_assert (lss != gfc_ss_terminator);
3360 /* The assignment needs scalarization. */
3361 lss_section = lss;
3363 /* Find a non-scalar SS from the lhs. */
3364 while (lss_section != gfc_ss_terminator
3365 && lss_section->type != GFC_SS_SECTION)
3366 lss_section = lss_section->next;
3368 gcc_assert (lss_section != gfc_ss_terminator);
3370 /* Initialize the scalarizer. */
3371 gfc_init_loopinfo (&loop);
3373 /* Walk the rhs. */
3374 rss = gfc_walk_expr (expr2);
3375 if (rss == gfc_ss_terminator)
3377 /* The rhs is scalar. Add a ss for the expression. */
3378 rss = gfc_get_ss ();
3379 rss->where = 1;
3380 rss->next = gfc_ss_terminator;
3381 rss->type = GFC_SS_SCALAR;
3382 rss->expr = expr2;
3385 /* Associate the SS with the loop. */
3386 gfc_add_ss_to_loop (&loop, lss);
3387 gfc_add_ss_to_loop (&loop, rss);
3389 /* Calculate the bounds of the scalarization. */
3390 gfc_conv_ss_startstride (&loop);
3392 /* Resolve any data dependencies in the statement. */
3393 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3395 /* Setup the scalarizing loops. */
3396 gfc_conv_loop_setup (&loop, &expr2->where);
3398 /* Setup the gfc_se structures. */
3399 gfc_copy_loopinfo_to_se (&lse, &loop);
3400 gfc_copy_loopinfo_to_se (&rse, &loop);
3402 rse.ss = rss;
3403 gfc_mark_ss_chain_used (rss, 1);
3404 if (loop.temp_ss == NULL)
3406 lse.ss = lss;
3407 gfc_mark_ss_chain_used (lss, 1);
3409 else
3411 lse.ss = loop.temp_ss;
3412 gfc_mark_ss_chain_used (lss, 3);
3413 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3416 /* Start the scalarized loop body. */
3417 gfc_start_scalarized_body (&loop, &body);
3419 /* Translate the expression. */
3420 gfc_conv_expr (&rse, expr2);
3421 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3423 gfc_conv_tmp_array_ref (&lse);
3424 gfc_advance_se_ss_chain (&lse);
3426 else
3427 gfc_conv_expr (&lse, expr1);
3429 /* Form the mask expression according to the mask. */
3430 index = count1;
3431 maskexpr = gfc_build_array_ref (mask, index, NULL);
3432 if (invert)
3433 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3435 /* Use the scalar assignment as is. */
3436 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3437 loop.temp_ss != NULL, false);
3439 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3441 gfc_add_expr_to_block (&body, tmp);
3443 if (lss == gfc_ss_terminator)
3445 /* Increment count1. */
3446 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3447 count1, gfc_index_one_node);
3448 gfc_add_modify (&body, count1, tmp);
3450 /* Use the scalar assignment as is. */
3451 gfc_add_block_to_block (&block, &body);
3453 else
3455 gcc_assert (lse.ss == gfc_ss_terminator
3456 && rse.ss == gfc_ss_terminator);
3458 if (loop.temp_ss != NULL)
3460 /* Increment count1 before finishing the main body of a scalarized
3461 expression. */
3462 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3463 count1, gfc_index_one_node);
3464 gfc_add_modify (&body, count1, tmp);
3465 gfc_trans_scalarized_loop_boundary (&loop, &body);
3467 /* We need to copy the temporary to the actual lhs. */
3468 gfc_init_se (&lse, NULL);
3469 gfc_init_se (&rse, NULL);
3470 gfc_copy_loopinfo_to_se (&lse, &loop);
3471 gfc_copy_loopinfo_to_se (&rse, &loop);
3473 rse.ss = loop.temp_ss;
3474 lse.ss = lss;
3476 gfc_conv_tmp_array_ref (&rse);
3477 gfc_advance_se_ss_chain (&rse);
3478 gfc_conv_expr (&lse, expr1);
3480 gcc_assert (lse.ss == gfc_ss_terminator
3481 && rse.ss == gfc_ss_terminator);
3483 /* Form the mask expression according to the mask tree list. */
3484 index = count2;
3485 maskexpr = gfc_build_array_ref (mask, index, NULL);
3486 if (invert)
3487 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3488 maskexpr);
3490 /* Use the scalar assignment as is. */
3491 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3492 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3493 build_empty_stmt (input_location));
3494 gfc_add_expr_to_block (&body, tmp);
3496 /* Increment count2. */
3497 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3498 count2, gfc_index_one_node);
3499 gfc_add_modify (&body, count2, tmp);
3501 else
3503 /* Increment count1. */
3504 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3505 count1, gfc_index_one_node);
3506 gfc_add_modify (&body, count1, tmp);
3509 /* Generate the copying loops. */
3510 gfc_trans_scalarizing_loops (&loop, &body);
3512 /* Wrap the whole thing up. */
3513 gfc_add_block_to_block (&block, &loop.pre);
3514 gfc_add_block_to_block (&block, &loop.post);
3515 gfc_cleanup_loop (&loop);
3518 return gfc_finish_block (&block);
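/* A minimal sketch of a single masked assignment as generated above,
   with hypothetical names and a one-dimensional lhs: COUNT1 walks the
   linear mask temporary in step with the scalarized loop, and INVERT
   selects the ELSEWHERE sense of the mask.  */
#if 0
static void
where_assign_sketch (double *lhs, const double *rhs,
		     const unsigned char *mask, int invert, int n)
{
  int i, count1 = 0;

  for (i = 0; i < n; i++, count1++)
    if (invert ? !mask[count1] : mask[count1])
      lhs[i] = rhs[i];
}
#endif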
3522 /* Translate the WHERE construct or statement.
3523 This function can be called recursively to translate nested WHERE
3524 constructs or statements.
3525 MASK is the control mask. */
3527 static void
3528 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3529 forall_info * nested_forall_info, stmtblock_t * block)
3531 stmtblock_t inner_size_body;
3532 tree inner_size, size;
3533 gfc_ss *lss, *rss;
3534 tree mask_type;
3535 gfc_expr *expr1;
3536 gfc_expr *expr2;
3537 gfc_code *cblock;
3538 gfc_code *cnext;
3539 tree tmp;
3540 tree cond;
3541 tree count1, count2;
3542 bool need_cmask;
3543 bool need_pmask;
3544 int need_temp;
3545 tree pcmask = NULL_TREE;
3546 tree ppmask = NULL_TREE;
3547 tree cmask = NULL_TREE;
3548 tree pmask = NULL_TREE;
3549 gfc_actual_arglist *arg;
3551 /* The WHERE statement or the WHERE construct statement. */
3552 cblock = code->block;
3554 /* As the mask array can be very big, prefer compact boolean types. */
3555 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3557 /* Determine which temporary masks are needed. */
3558 if (!cblock->block)
3560 /* One clause: No ELSEWHEREs. */
3561 need_cmask = (cblock->next != 0);
3562 need_pmask = false;
3564 else if (cblock->block->block)
3566 /* Three or more clauses: Conditional ELSEWHEREs. */
3567 need_cmask = true;
3568 need_pmask = true;
3570 else if (cblock->next)
3572 /* Two clauses, the first non-empty. */
3573 need_cmask = true;
3574 need_pmask = (mask != NULL_TREE
3575 && cblock->block->next != 0);
3577 else if (!cblock->block->next)
3579 /* Two clauses, both empty. */
3580 need_cmask = false;
3581 need_pmask = false;
3583 /* Two clauses, the first empty, the second non-empty. */
3584 else if (mask)
3586 need_cmask = (cblock->block->expr1 != 0);
3587 need_pmask = true;
3589 else
3591 need_cmask = true;
3592 need_pmask = false;
3595 if (need_cmask || need_pmask)
3597 /* Calculate the size of the temporary needed by the mask-expr. */
3598 gfc_init_block (&inner_size_body);
3599 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3600 &inner_size_body, &lss, &rss);
3602 /* Calculate the total size of the temporary needed. */
3603 size = compute_overall_iter_number (nested_forall_info, inner_size,
3604 &inner_size_body, block);
3606 /* If the size is not positive, use zero instead. */
3607 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3608 gfc_index_zero_node);
3609 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3610 gfc_index_zero_node, size);
3611 size = gfc_evaluate_now (size, block);
3613 /* Allocate temporary for WHERE mask if needed. */
3614 if (need_cmask)
3615 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3616 &pcmask);
3618 /* Allocate temporary for !mask if needed. */
3619 if (need_pmask)
3620 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3621 &ppmask);
3624 while (cblock)
3626 /* Each time around this loop, the where clause is conditional
3627 on the value of mask and invert, which are updated at the
3628 bottom of the loop. */
3630 /* Has mask-expr. */
3631 if (cblock->expr1)
3633 /* Ensure that the WHERE mask will be evaluated exactly once.
3634 If there are no statements in this WHERE/ELSEWHERE clause,
3635 then we don't need to update the control mask (cmask).
3636 If this is the last clause of the WHERE construct, then
3637 we don't need to update the pending control mask (pmask). */
3638 if (mask)
3639 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3640 mask, invert,
3641 cblock->next ? cmask : NULL_TREE,
3642 cblock->block ? pmask : NULL_TREE,
3643 mask_type, block);
3644 else
3645 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3646 NULL_TREE, false,
3647 (cblock->next || cblock->block)
3648 ? cmask : NULL_TREE,
3649 NULL_TREE, mask_type, block);
3651 invert = false;
3653 /* It's a final elsewhere-stmt. No mask-expr is present. */
3654 else
3655 cmask = mask;
3657 /* The body of this where clause is controlled by cmask, with the
3658 sense specified by invert. */
3660 /* Get the assignment statement of a WHERE statement, or the first
3661 statement in the where-body-construct of a WHERE construct. */
3662 cnext = cblock->next;
3663 while (cnext)
3665 switch (cnext->op)
3667 /* WHERE assignment statement. */
3668 case EXEC_ASSIGN_CALL:
3670 arg = cnext->ext.actual;
3671 expr1 = expr2 = NULL;
3672 for (; arg; arg = arg->next)
3674 if (!arg->expr)
3675 continue;
3676 if (expr1 == NULL)
3677 expr1 = arg->expr;
3678 else
3679 expr2 = arg->expr;
3681 goto evaluate;
3683 case EXEC_ASSIGN:
3684 expr1 = cnext->expr1;
3685 expr2 = cnext->expr2;
3686 evaluate:
3687 if (nested_forall_info != NULL)
3689 need_temp = gfc_check_dependency (expr1, expr2, 0);
3690 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3691 gfc_trans_assign_need_temp (expr1, expr2,
3692 cmask, invert,
3693 nested_forall_info, block);
3694 else
3696 /* Variables to control maskexpr. */
3697 count1 = gfc_create_var (gfc_array_index_type, "count1");
3698 count2 = gfc_create_var (gfc_array_index_type, "count2");
3699 gfc_add_modify (block, count1, gfc_index_zero_node);
3700 gfc_add_modify (block, count2, gfc_index_zero_node);
3702 tmp = gfc_trans_where_assign (expr1, expr2,
3703 cmask, invert,
3704 count1, count2,
3705 cnext);
3707 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3708 tmp, 1);
3709 gfc_add_expr_to_block (block, tmp);
3712 else
3714 /* Variables to control maskexpr. */
3715 count1 = gfc_create_var (gfc_array_index_type, "count1");
3716 count2 = gfc_create_var (gfc_array_index_type, "count2");
3717 gfc_add_modify (block, count1, gfc_index_zero_node);
3718 gfc_add_modify (block, count2, gfc_index_zero_node);
3720 tmp = gfc_trans_where_assign (expr1, expr2,
3721 cmask, invert,
3722 count1, count2,
3723 cnext);
3724 gfc_add_expr_to_block (block, tmp);
3727 break;
3729 /* WHERE or WHERE construct is part of a where-body-construct. */
3730 case EXEC_WHERE:
3731 gfc_trans_where_2 (cnext, cmask, invert,
3732 nested_forall_info, block);
3733 break;
3735 default:
3736 gcc_unreachable ();
3739 /* The next statement within the same where-body-construct. */
3740 cnext = cnext->next;
3742 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3743 cblock = cblock->block;
3744 if (mask == NULL_TREE)
3746 /* If we're the initial WHERE, we can simply invert the sense
3747 of the current mask to obtain the "mask" for the remaining
3748 ELSEWHEREs. */
3749 invert = true;
3750 mask = cmask;
3752 else
3754 /* Otherwise, for nested WHEREs we need to use the pending mask. */
3755 invert = false;
3756 mask = pmask;
3760 /* If we allocated a pending mask array, deallocate it now. */
3761 if (ppmask)
3763 tmp = gfc_call_free (ppmask);
3764 gfc_add_expr_to_block (block, tmp);
3767 /* If we allocated a current mask array, deallocate it now. */
3768 if (pcmask)
3770 tmp = gfc_call_free (pcmask);
3771 gfc_add_expr_to_block (block, tmp);
3775 /* Translate a simple WHERE construct or statement without dependencies.
3776 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR1
3777 is the mask condition, and EBLOCK, if non-NULL, is the "else" clause.
3778 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
3780 static tree
3781 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3783 stmtblock_t block, body;
3784 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3785 tree tmp, cexpr, tstmt, estmt;
3786 gfc_ss *css, *tdss, *tsss;
3787 gfc_se cse, tdse, tsse, edse, esse;
3788 gfc_loopinfo loop;
3789 gfc_ss *edss = 0;
3790 gfc_ss *esss = 0;
3792 /* Allow the scalarizer to workshare simple where loops. */
3793 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3794 ompws_flags |= OMPWS_SCALARIZER_WS;
3796 cond = cblock->expr1;
3797 tdst = cblock->next->expr1;
3798 tsrc = cblock->next->expr2;
3799 edst = eblock ? eblock->next->expr1 : NULL;
3800 esrc = eblock ? eblock->next->expr2 : NULL;
3802 gfc_start_block (&block);
3803 gfc_init_loopinfo (&loop);
3805 /* Handle the condition. */
3806 gfc_init_se (&cse, NULL);
3807 css = gfc_walk_expr (cond);
3808 gfc_add_ss_to_loop (&loop, css);
3810 /* Handle the then-clause. */
3811 gfc_init_se (&tdse, NULL);
3812 gfc_init_se (&tsse, NULL);
3813 tdss = gfc_walk_expr (tdst);
3814 tsss = gfc_walk_expr (tsrc);
3815 if (tsss == gfc_ss_terminator)
3817 tsss = gfc_get_ss ();
3818 tsss->where = 1;
3819 tsss->next = gfc_ss_terminator;
3820 tsss->type = GFC_SS_SCALAR;
3821 tsss->expr = tsrc;
3823 gfc_add_ss_to_loop (&loop, tdss);
3824 gfc_add_ss_to_loop (&loop, tsss);
3826 if (eblock)
3828 /* Handle the else clause. */
3829 gfc_init_se (&edse, NULL);
3830 gfc_init_se (&esse, NULL);
3831 edss = gfc_walk_expr (edst);
3832 esss = gfc_walk_expr (esrc);
3833 if (esss == gfc_ss_terminator)
3835 esss = gfc_get_ss ();
3836 esss->where = 1;
3837 esss->next = gfc_ss_terminator;
3838 esss->type = GFC_SS_SCALAR;
3839 esss->expr = esrc;
3841 gfc_add_ss_to_loop (&loop, edss);
3842 gfc_add_ss_to_loop (&loop, esss);
3845 gfc_conv_ss_startstride (&loop);
3846 gfc_conv_loop_setup (&loop, &tdst->where);
3848 gfc_mark_ss_chain_used (css, 1);
3849 gfc_mark_ss_chain_used (tdss, 1);
3850 gfc_mark_ss_chain_used (tsss, 1);
3851 if (eblock)
3853 gfc_mark_ss_chain_used (edss, 1);
3854 gfc_mark_ss_chain_used (esss, 1);
3857 gfc_start_scalarized_body (&loop, &body);
3859 gfc_copy_loopinfo_to_se (&cse, &loop);
3860 gfc_copy_loopinfo_to_se (&tdse, &loop);
3861 gfc_copy_loopinfo_to_se (&tsse, &loop);
3862 cse.ss = css;
3863 tdse.ss = tdss;
3864 tsse.ss = tsss;
3865 if (eblock)
3867 gfc_copy_loopinfo_to_se (&edse, &loop);
3868 gfc_copy_loopinfo_to_se (&esse, &loop);
3869 edse.ss = edss;
3870 esse.ss = esss;
3873 gfc_conv_expr (&cse, cond);
3874 gfc_add_block_to_block (&body, &cse.pre);
3875 cexpr = cse.expr;
3877 gfc_conv_expr (&tsse, tsrc);
3878 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3880 gfc_conv_tmp_array_ref (&tdse);
3881 gfc_advance_se_ss_chain (&tdse);
3883 else
3884 gfc_conv_expr (&tdse, tdst);
3886 if (eblock)
3888 gfc_conv_expr (&esse, esrc);
3889 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3891 gfc_conv_tmp_array_ref (&edse);
3892 gfc_advance_se_ss_chain (&edse);
3894 else
3895 gfc_conv_expr (&edse, edst);
3898 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3899 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3900 : build_empty_stmt (input_location);
3901 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3902 gfc_add_expr_to_block (&body, tmp);
3903 gfc_add_block_to_block (&body, &cse.post);
3905 gfc_trans_scalarizing_loops (&loop, &body);
3906 gfc_add_block_to_block (&block, &loop.pre);
3907 gfc_add_block_to_block (&block, &loop.post);
3908 gfc_cleanup_loop (&loop);
3910 return gfc_finish_block (&block);
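/* A minimal sketch of the single coalesced loop that gfc_trans_where_3
   produces for "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 END WHERE", with
   hypothetical names and plain C arrays.  Folding both clauses into one
   loop is only legal because gfc_trans_where below has already ruled out
   any dependency between the two assignments and the mask.  */
#if 0
static void
where_3_sketch (const unsigned char *cond, double *x1, const double *y1,
		double *x2, const double *y2, int n)
{
  int i;

  for (i = 0; i < n; i++)
    {
      if (cond[i])
	x1[i] = y1[i];
      else
	x2[i] = y2[i];
    }
}
#endif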
3913 /* As the WHERE or WHERE construct statement can be nested, we call
3914 gfc_trans_where_2 to do the translation, and pass the initial
3915 NULL values for both the control mask and the pending control mask. */
3917 tree
3918 gfc_trans_where (gfc_code * code)
3920 stmtblock_t block;
3921 gfc_code *cblock;
3922 gfc_code *eblock;
3924 cblock = code->block;
3925 if (cblock->next
3926 && cblock->next->op == EXEC_ASSIGN
3927 && !cblock->next->next)
3929 eblock = cblock->block;
3930 if (!eblock)
3932 /* A simple "WHERE (cond) x = y" statement or block is
3933 dependence free if cond is not dependent upon writing x,
3934 and the source y is unaffected by the destination x. */
3935 if (!gfc_check_dependency (cblock->next->expr1,
3936 cblock->expr1, 0)
3937 && !gfc_check_dependency (cblock->next->expr1,
3938 cblock->next->expr2, 0))
3939 return gfc_trans_where_3 (cblock, NULL);
3941 else if (!eblock->expr1
3942 && !eblock->block
3943 && eblock->next
3944 && eblock->next->op == EXEC_ASSIGN
3945 && !eblock->next->next)
3947 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3948 block is dependence free if cond is not dependent on writes
3949 to x1 and x2, y1 is not dependent on writes to x2, and y2
3950 is not dependent on writes to x1, and both y's are not
3951 dependent upon their own x's. In addition to this, the
3952 final two dependency checks below exclude all but the same
3953 array reference if the where and elsewhere destinations
3954 are the same. In short, this is VERY conservative, and it
3955 is needed because the two loops required by the standard
3956 are coalesced in gfc_trans_where_3. */
3957 if (!gfc_check_dependency(cblock->next->expr1,
3958 cblock->expr1, 0)
3959 && !gfc_check_dependency(eblock->next->expr1,
3960 cblock->expr1, 0)
3961 && !gfc_check_dependency(cblock->next->expr1,
3962 eblock->next->expr2, 1)
3963 && !gfc_check_dependency(eblock->next->expr1,
3964 cblock->next->expr2, 1)
3965 && !gfc_check_dependency(cblock->next->expr1,
3966 cblock->next->expr2, 1)
3967 && !gfc_check_dependency(eblock->next->expr1,
3968 eblock->next->expr2, 1)
3969 && !gfc_check_dependency(cblock->next->expr1,
3970 eblock->next->expr1, 0)
3971 && !gfc_check_dependency(eblock->next->expr1,
3972 cblock->next->expr1, 0))
3973 return gfc_trans_where_3 (cblock, eblock);
3977 gfc_start_block (&block);
3979 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3981 return gfc_finish_block (&block);
3985 /* CYCLE a DO loop. The label decl has already been created by
3986 gfc_trans_do(); it is stored in TREE_PURPOSE (backend_decl) of the
3987 gfc_code node at the head of the loop. We must mark the label as used. */
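/* Illustrative sketch: in a loop such as
       DO i = 1, n
         IF (skip(i)) CYCLE
         IF (done(i)) EXIT
         total = total + x(i)
       END DO
   gfc_trans_do places the cycle label at the bottom of the loop body,
   just before the increment, and the exit label right after the loop,
   so both statements reduce to plain GOTOs to those labels. */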
3989 tree
3990 gfc_trans_cycle (gfc_code * code)
3991 {
3992 tree cycle_label;
3994 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3995 TREE_USED (cycle_label) = 1;
3996 return build1_v (GOTO_EXPR, cycle_label);
3997 }
4000 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4001 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4002 loop. */
4004 tree
4005 gfc_trans_exit (gfc_code * code)
4006 {
4007 tree exit_label;
4009 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
4010 TREE_USED (exit_label) = 1;
4011 return build1_v (GOTO_EXPR, exit_label);
4012 }
4015 /* Translate the ALLOCATE statement. */
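/* Illustrative (not exhaustive) examples of the forms handled below:
       ALLOCATE (x(n), STAT=ierr, ERRMSG=msg)
       ALLOCATE (y, SOURCE=z)
       ALLOCATE (some_type :: p)
   i.e. array and scalar allocation, STAT=/ERRMSG= reporting, SOURCE=
   initialization, default initializers, and typed allocation of CLASS
   objects, whose vptr must be set as well. */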
4017 tree
4018 gfc_trans_allocate (gfc_code * code)
4019 {
4020 gfc_alloc *al;
4021 gfc_expr *expr, *init_e;
4022 gfc_se se;
4023 tree tmp;
4024 tree parm;
4025 tree stat;
4026 tree pstat;
4027 tree error_label;
4028 tree memsz;
4029 stmtblock_t block;
4031 if (!code->ext.alloc.list)
4032 return NULL_TREE;
4034 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4036 gfc_start_block (&block);
4038 /* STAT= and/or ERRMSG= is present. */
4039 if (code->expr1 || code->expr2)
4040 {
4041 tree gfc_int4_type_node = gfc_get_int_type (4);
4043 stat = gfc_create_var (gfc_int4_type_node, "stat");
4044 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4046 error_label = gfc_build_label_decl (NULL_TREE);
4047 TREE_USED (error_label) = 1;
4048 }
4050 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4051 {
4052 expr = gfc_copy_expr (al->expr);
4054 if (expr->ts.type == BT_CLASS)
4055 gfc_add_component_ref (expr, "$data");
4057 gfc_init_se (&se, NULL);
4058 gfc_start_block (&se.pre);
4060 se.want_pointer = 1;
4061 se.descriptor_only = 1;
4062 gfc_conv_expr (&se, expr);
4064 if (!gfc_array_allocate (&se, expr, pstat))
4065 {
4066 /* A scalar or derived type. */
4068 /* Determine allocate size. */
4069 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4070 {
4071 gfc_expr *sz;
4072 gfc_se se_sz;
4073 sz = gfc_copy_expr (code->expr3);
4074 gfc_add_component_ref (sz, "$vptr");
4075 gfc_add_component_ref (sz, "$size");
4076 gfc_init_se (&se_sz, NULL);
4077 gfc_conv_expr (&se_sz, sz);
4078 gfc_free_expr (sz);
4079 memsz = se_sz.expr;
4080 }
4081 else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
4082 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4083 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4084 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4085 else
4086 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4088 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4089 memsz = se.string_length;
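/* For instance (illustrative): with ALLOCATE (x, SOURCE=y) and CLASS y,
   the size is taken from Y's dynamic type via $vptr->$size; with a
   non-CLASS SOURCE= or an explicit type-spec it is the static type
   size; and a CHARACTER scalar whose type size is unknown at compile
   time falls back to its string length. */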
4091 /* Allocate - for non-pointers with re-alloc checking. */
4092 {
4093 gfc_ref *ref;
4094 bool allocatable;
4096 ref = expr->ref;
4098 /* Find the last reference in the chain. */
4099 while (ref && ref->next != NULL)
4100 {
4101 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT);
4102 ref = ref->next;
4103 }
4105 if (!ref)
4106 allocatable = expr->symtree->n.sym->attr.allocatable;
4107 else
4108 allocatable = ref->u.c.component->attr.allocatable;
4110 if (allocatable)
4111 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4112 pstat, expr);
4113 else
4114 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4115 }
4117 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4118 fold_convert (TREE_TYPE (se.expr), tmp));
4119 gfc_add_expr_to_block (&se.pre, tmp);
4121 if (code->expr1 || code->expr2)
4122 {
4123 tmp = build1_v (GOTO_EXPR, error_label);
4124 parm = fold_build2 (NE_EXPR, boolean_type_node,
4125 stat, build_int_cst (TREE_TYPE (stat), 0));
4126 tmp = fold_build3 (COND_EXPR, void_type_node,
4127 parm, tmp, build_empty_stmt (input_location));
4128 gfc_add_expr_to_block (&se.pre, tmp);
4129 }
4131 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4132 {
4133 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4134 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4135 gfc_add_expr_to_block (&se.pre, tmp);
4136 }
4137 }
4140 tmp = gfc_finish_block (&se.pre);
4141 gfc_add_expr_to_block (&block, tmp);
4143 /* Initialization via SOURCE block. */
4144 if (code->expr3)
4145 {
4146 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4147 if (al->expr->ts.type == BT_CLASS)
4148 {
4149 gfc_se dst, src;
4150 if (rhs->ts.type == BT_CLASS)
4151 gfc_add_component_ref (rhs, "$data");
4152 gfc_init_se (&dst, NULL);
4153 gfc_init_se (&src, NULL);
4154 gfc_conv_expr (&dst, expr);
4155 gfc_conv_expr (&src, rhs);
4156 gfc_add_block_to_block (&block, &src.pre);
4157 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4158 }
4159 else
4160 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4161 rhs, false);
4162 gfc_free_expr (rhs);
4163 gfc_add_expr_to_block (&block, tmp);
4164 }
4165 /* Default initializer for CLASS variables. */
4166 else if (al->expr->ts.type == BT_CLASS
4167 && code->ext.alloc.ts.type == BT_DERIVED
4168 && (init_e = gfc_default_initializer (&code->ext.alloc.ts)))
4169 {
4170 gfc_se dst, src;
4171 gfc_init_se (&dst, NULL);
4172 gfc_init_se (&src, NULL);
4173 gfc_conv_expr (&dst, expr);
4174 gfc_conv_expr (&src, init_e);
4175 gfc_add_block_to_block (&block, &src.pre);
4176 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4177 gfc_add_expr_to_block (&block, tmp);
4178 }
4179 /* Add default initializers for those derived types that need them. */
4180 else if (expr->ts.type == BT_DERIVED
4181 && (init_e = gfc_default_initializer (&expr->ts)))
4182 {
4183 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4184 init_e, true);
4185 gfc_add_expr_to_block (&block, tmp);
4186 }
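/* In short (illustrative): ALLOCATE (x, SOURCE=y) copies Y into the new
   object, allocation of a derived or CLASS object whose type carries
   default initializers applies those instead, and anything else is left
   uninitialized apart from the nullification of allocatable components
   above. */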
4188 /* Allocation of CLASS entities. */
4189 gfc_free_expr (expr);
4190 expr = al->expr;
4191 if (expr->ts.type == BT_CLASS)
4192 {
4193 gfc_expr *lhs, *rhs;
4194 gfc_se lse;
4196 /* Initialize VPTR for CLASS objects. */
4197 lhs = gfc_expr_to_initialize (expr);
4198 gfc_add_component_ref (lhs, "$vptr");
4199 rhs = NULL;
4200 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4201 {
4202 /* VPTR must be determined at run time. */
4203 rhs = gfc_copy_expr (code->expr3);
4204 gfc_add_component_ref (rhs, "$vptr");
4205 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4206 gfc_add_expr_to_block (&block, tmp);
4207 gfc_free_expr (rhs);
4208 }
4209 else
4210 {
4211 /* VPTR is fixed at compile time. */
4212 gfc_symbol *vtab;
4213 gfc_typespec *ts;
4214 if (code->expr3)
4215 ts = &code->expr3->ts;
4216 else if (expr->ts.type == BT_DERIVED)
4217 ts = &expr->ts;
4218 else if (code->ext.alloc.ts.type == BT_DERIVED)
4219 ts = &code->ext.alloc.ts;
4220 else if (expr->ts.type == BT_CLASS)
4221 ts = &expr->ts.u.derived->components->ts;
4222 else
4223 ts = &expr->ts;
4225 if (ts->type == BT_DERIVED)
4226 {
4227 vtab = gfc_find_derived_vtab (ts->u.derived);
4228 gcc_assert (vtab);
4229 gfc_init_se (&lse, NULL);
4230 lse.want_pointer = 1;
4231 gfc_conv_expr (&lse, lhs);
4232 tmp = gfc_build_addr_expr (NULL_TREE,
4233 gfc_get_symbol_decl (vtab));
4234 gfc_add_modify (&block, lse.expr,
4235 fold_convert (TREE_TYPE (lse.expr), tmp));
4236 }
4237 }
4238 }
4239 }
4242 /* STAT block. */
4243 if (code->expr1)
4244 {
4245 tmp = build1_v (LABEL_EXPR, error_label);
4246 gfc_add_expr_to_block (&block, tmp);
4248 gfc_init_se (&se, NULL);
4249 gfc_conv_expr_lhs (&se, code->expr1);
4250 tmp = convert (TREE_TYPE (se.expr), stat);
4251 gfc_add_modify (&block, se.expr, tmp);
4252 }
4254 /* ERRMSG block. */
4255 if (code->expr2)
4256 {
4257 /* A better error message may be possible, but not required. */
4258 const char *msg = "Attempt to allocate an allocated object";
4259 tree errmsg, slen, dlen;
4261 gfc_init_se (&se, NULL);
4262 gfc_conv_expr_lhs (&se, code->expr2);
4264 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4266 gfc_add_modify (&block, errmsg,
4267 gfc_build_addr_expr (pchar_type_node,
4268 gfc_build_localized_cstring_const (msg)));
4270 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4271 dlen = gfc_get_expr_charlen (code->expr2);
4272 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4274 dlen = build_call_expr_loc (input_location,
4275 built_in_decls[BUILT_IN_MEMCPY], 3,
4276 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4278 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4279 build_int_cst (TREE_TYPE (stat), 0));
4281 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4283 gfc_add_expr_to_block (&block, tmp);
4284 }
4286 return gfc_finish_block (&block);
4287 }
4290 /* Translate a DEALLOCATE statement. */
4292 tree
4293 gfc_trans_deallocate (gfc_code *code)
4294 {
4295 gfc_se se;
4296 gfc_alloc *al;
4297 gfc_expr *expr;
4298 tree apstat, astat, pstat, stat, tmp;
4299 stmtblock_t block;
4301 pstat = apstat = stat = astat = tmp = NULL_TREE;
4303 gfc_start_block (&block);
4305 /* Count the number of failed deallocations. If DEALLOCATE was
4306 called with STAT=, then set STAT to the count. If it was called
4307 with ERRMSG=, then set ERRMSG to a string. */
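/* For example (illustrative):
       DEALLOCATE (a, b, c, STAT=ierr, ERRMSG=msg)
   attempts all three deallocations even if one of them fails; IERR
   receives the number of failures and MSG is only overwritten when that
   count is nonzero. */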
4308 if (code->expr1 || code->expr2)
4309 {
4310 tree gfc_int4_type_node = gfc_get_int_type (4);
4312 stat = gfc_create_var (gfc_int4_type_node, "stat");
4313 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4315 /* Running total of possible deallocation failures. */
4316 astat = gfc_create_var (gfc_int4_type_node, "astat");
4317 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4319 /* Initialize astat to 0. */
4320 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4321 }
4323 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4324 {
4325 expr = al->expr;
4326 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4328 gfc_init_se (&se, NULL);
4329 gfc_start_block (&se.pre);
4331 se.want_pointer = 1;
4332 se.descriptor_only = 1;
4333 gfc_conv_expr (&se, expr);
4335 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4336 {
4337 gfc_ref *ref;
4338 gfc_ref *last = NULL;
4339 for (ref = expr->ref; ref; ref = ref->next)
4340 if (ref->type == REF_COMPONENT)
4341 last = ref;
4343 /* Do not deallocate the components of a derived type's
4344 ultimate pointer component. */
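/* E.g. (illustrative) for DEALLOCATE (x%p), where P is a POINTER
   component whose type has allocatable components, the target's
   components are not freed here; gfc_deallocate_alloc_comp is applied
   to non-pointer objects only. */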
4345 if (!(last && last->u.c.component->attr.pointer)
4346 && !(!last && expr->symtree->n.sym->attr.pointer))
4347 {
4348 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4349 expr->rank);
4350 gfc_add_expr_to_block (&se.pre, tmp);
4351 }
4352 }
4354 if (expr->rank)
4355 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4356 else
4357 {
4358 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4359 gfc_add_expr_to_block (&se.pre, tmp);
4361 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4362 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4363 }
4365 gfc_add_expr_to_block (&se.pre, tmp);
4367 /* Keep track of the number of failed deallocations by adding stat
4368 of the last deallocation to the running total. */
4369 if (code->expr1 || code->expr2)
4370 {
4371 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4372 gfc_add_modify (&se.pre, astat, apstat);
4373 }
4375 tmp = gfc_finish_block (&se.pre);
4376 gfc_add_expr_to_block (&block, tmp);
4377 }
4380 /* Set STAT. */
4381 if (code->expr1)
4382 {
4383 gfc_init_se (&se, NULL);
4384 gfc_conv_expr_lhs (&se, code->expr1);
4385 tmp = convert (TREE_TYPE (se.expr), astat);
4386 gfc_add_modify (&block, se.expr, tmp);
4387 }
4389 /* Set ERRMSG. */
4390 if (code->expr2)
4391 {
4392 /* A better error message may be possible, but not required. */
4393 const char *msg = "Attempt to deallocate an unallocated object";
4394 tree errmsg, slen, dlen;
4396 gfc_init_se (&se, NULL);
4397 gfc_conv_expr_lhs (&se, code->expr2);
4399 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4401 gfc_add_modify (&block, errmsg,
4402 gfc_build_addr_expr (pchar_type_node,
4403 gfc_build_localized_cstring_const (msg)));
4405 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4406 dlen = gfc_get_expr_charlen (code->expr2);
4407 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4409 dlen = build_call_expr_loc (input_location,
4410 built_in_decls[BUILT_IN_MEMCPY], 3,
4411 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4413 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4414 build_int_cst (TREE_TYPE (astat), 0));
4416 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4418 gfc_add_expr_to_block (&block, tmp);
4419 }
4421 return gfc_finish_block (&block);