/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "ggc.h"
#include "toplev.h"
#include "real.h"
#include "gfortran.h"
#include "flags.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"
#include "dependency.h"
typedef struct iter_info
{
  tree var;
  tree start;
  tree end;
  tree step;
  struct iter_info *next;
}
iter_info;

typedef struct forall_info
{
  iter_info *this_loop;
  tree mask;
  tree maskindex;
  int nvar;
  tree size;
  struct forall_info *prev_nest;
}
forall_info;

static void gfc_trans_where_2 (gfc_code *, tree, bool,
                               forall_info *, stmtblock_t *);
/* Translate a F95 label number to a LABEL_EXPR.  */

tree
gfc_trans_label_here (gfc_code * code)
{
  return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
}
/* Given a variable expression which has been ASSIGNed to, find the decl
   containing the auxiliary variables.  For variables in common blocks this
   is a field_decl.  */

void
gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
{
  gcc_assert (expr->symtree->n.sym->attr.assign == 1);
  gfc_conv_expr (se, expr);
  /* Deals with variable in common block.  Get the field declaration.  */
  if (TREE_CODE (se->expr) == COMPONENT_REF)
    se->expr = TREE_OPERAND (se->expr, 1);
  /* Deals with dummy argument.  Get the parameter declaration.  */
  else if (TREE_CODE (se->expr) == INDIRECT_REF)
    se->expr = TREE_OPERAND (se->expr, 0);
}
/* Translate a label assignment statement.  */

tree
gfc_trans_label_assign (gfc_code * code)
{
  tree label_tree;
  gfc_se se;
  tree len;
  tree addr;
  tree len_tree;
  int label_len;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr1);

  len = GFC_DECL_STRING_LEN (se.expr);
  addr = GFC_DECL_ASSIGN_ADDR (se.expr);

  label_tree = gfc_get_label_decl (code->label1);

  if (code->label1->defined == ST_LABEL_TARGET)
    {
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
      len_tree = integer_minus_one_node;
    }
  else
    {
      gfc_expr *format = code->label1->format;

      label_len = format->value.character.length;
      len_tree = build_int_cst (NULL_TREE, label_len);
      label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
                                                format->value.character.string);
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
    }

  gfc_add_modify (&se.pre, len, len_tree);
  gfc_add_modify (&se.pre, addr, label_tree);

  return gfc_finish_block (&se.pre);
}
/* Translate a GOTO statement.  */

tree
gfc_trans_goto (gfc_code * code)
{
  locus loc = code->loc;
  tree assigned_goto;
  tree target;
  tree tmp;
  gfc_se se;

  if (code->label1 != NULL)
    return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));

  /* ASSIGNED GOTO.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr1);
  tmp = GFC_DECL_STRING_LEN (se.expr);
  tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
                     build_int_cst (TREE_TYPE (tmp), -1));
  gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
                           "Assigned label is not a target label");

  assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);

  /* We're going to ignore a label list.  It does not really change the
     statement's semantics (because it is just a further restriction on
     what's legal code); before, we were comparing label addresses here, but
     that's a very fragile business and may break with optimization.  So
     just ignore it.  */

  target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
  gfc_add_expr_to_block (&se.pre, target);
  return gfc_finish_block (&se.pre);
}
/* Translate an ENTRY statement.  Just adds a label for this entry point.  */

tree
gfc_trans_entry (gfc_code * code)
{
  return build1_v (LABEL_EXPR, code->ext.entry->label);
}
/* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
   elemental subroutines.  Make temporaries for output arguments if any such
   dependencies are found.  Output arguments are chosen because internal_unpack
   can be used, as is, to copy the result back to the variable.  */
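/* For illustration, assuming a hypothetical elemental subroutine

       ELEMENTAL SUBROUTINE esub (x, y)   ! x is INTENT(IN), y is INTENT(OUT)

   a call such as  CALL esub (a(1:4), a(2:5))  has overlapping actual
   arguments, so the INTENT(OUT) argument is given an array temporary and
   internal_unpack copies that temporary back into a(2:5) once the
   scalarized call has finished.  */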
static void
gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
                                 gfc_symbol * sym, gfc_actual_arglist * arg,
                                 gfc_dep_check check_variable)
{
  gfc_actual_arglist *arg0;
  gfc_expr *e;
  gfc_formal_arglist *formal;
  gfc_loopinfo tmp_loop;
  gfc_se parmse;
  gfc_ss *ss;
  gfc_ss_info *info;
  gfc_symbol *fsym;
  int n;
  tree data;
  tree offset;
  tree size;
  tree tmp;

  if (loopse->ss == NULL)
    return;

  ss = loopse->ss;
  arg0 = arg;
  formal = sym->formal;

  /* Loop over all the arguments testing for dependencies.  */
  for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
    {
      e = arg->expr;
      if (e == NULL)
        continue;

      /* Obtain the info structure for the current argument.  */
      info = NULL;
      for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
        {
          if (ss->expr != e)
            continue;
          info = &ss->data.info;
          break;
        }

      /* If there is a dependency, create a temporary and use it
         instead of the variable.  */
      fsym = formal ? formal->sym : NULL;
      if (e->expr_type == EXPR_VARIABLE
            && e->rank && fsym
            && fsym->attr.intent != INTENT_IN
            && gfc_check_fncall_dependency (e, fsym->attr.intent,
                                            sym, arg0, check_variable))
        {
          tree initial, temptype;
          stmtblock_t temp_post;

          /* Make a local loopinfo for the temporary creation, so that
             none of the other ss->info's have to be renormalized.  */
          gfc_init_loopinfo (&tmp_loop);
          for (n = 0; n < info->dimen; n++)
            {
              tmp_loop.to[n] = loopse->loop->to[n];
              tmp_loop.from[n] = loopse->loop->from[n];
              tmp_loop.order[n] = loopse->loop->order[n];
            }

          /* Obtain the argument descriptor for unpacking.  */
          gfc_init_se (&parmse, NULL);
          parmse.want_pointer = 1;
          gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
          gfc_add_block_to_block (&se->pre, &parmse.pre);

          /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
             initialize the array temporary with a copy of the values.  */
          if (fsym->attr.intent == INTENT_INOUT
                || (fsym->ts.type == BT_DERIVED
                      && fsym->attr.intent == INTENT_OUT))
            initial = parmse.expr;
          else
            initial = NULL_TREE;

          /* Find the type of the temporary to create; we don't use the type
             of e itself as this breaks for subcomponent-references in e (where
             the type of e is that of the final reference, but parmse.expr's
             type corresponds to the full derived-type).  */
          /* TODO: Fix this somehow so we don't need a temporary of the whole
             array but instead only the components referenced.  */
          temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor.  */
          gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
          temptype = TREE_TYPE (temptype);
          temptype = gfc_get_element_type (temptype);

          /* Generate the temporary.  Cleaning up the temporary should be the
             very last thing done, so we add the code to a new block and add it
             to se->post as last instructions.  */
          size = gfc_create_var (gfc_array_index_type, NULL);
          data = gfc_create_var (pvoid_type_node, NULL);
          gfc_init_block (&temp_post);
          tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
                                             &tmp_loop, info, temptype,
                                             initial,
                                             false, true, false,
                                             &arg->expr->where);
          gfc_add_modify (&se->pre, size, tmp);
          tmp = fold_convert (pvoid_type_node, info->data);
          gfc_add_modify (&se->pre, data, tmp);

          /* Calculate the offset for the temporary.  */
          offset = gfc_index_zero_node;
          for (n = 0; n < info->dimen; n++)
            {
              tmp = gfc_conv_descriptor_stride_get (info->descriptor,
                                                    gfc_rank_cst[n]);
              tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                 loopse->loop->from[n], tmp);
              offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                                    offset, tmp);
            }
          info->offset = gfc_create_var (gfc_array_index_type, NULL);
          gfc_add_modify (&se->pre, info->offset, offset);

          /* Copy the result back using unpack.  */
          tmp = build_call_expr_loc (input_location,
                                     gfor_fndecl_in_unpack, 2, parmse.expr, data);
          gfc_add_expr_to_block (&se->post, tmp);

          /* parmse.pre is already added above.  */
          gfc_add_block_to_block (&se->post, &parmse.post);
          gfc_add_block_to_block (&se->post, &temp_post);
        }
    }
}
/* Translate the CALL statement.  Builds a call to an F95 subroutine.  */

tree
gfc_trans_call (gfc_code * code, bool dependency_check,
                tree mask, tree count1, bool invert)
{
  gfc_se se;
  gfc_ss * ss;
  int has_alternate_specifier;
  gfc_dep_check check_variable;
  tree index = NULL_TREE;
  tree maskexpr = NULL_TREE;
  tree tmp;

  /* A CALL starts a new block because the actual arguments may have to
     be evaluated first.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  gcc_assert (code->resolved_sym);

  ss = gfc_ss_terminator;
  if (code->resolved_sym->attr.elemental)
    ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);

  /* Is not an elemental subroutine call with array valued arguments.  */
  if (ss == gfc_ss_terminator)
    {
      /* Translate the call.  */
      has_alternate_specifier
        = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
                                   code->expr1, NULL_TREE);

      /* A subroutine without side-effect, by definition, does nothing!  */
      TREE_SIDE_EFFECTS (se.expr) = 1;

      /* Chain the pieces together and return the block.  */
      if (has_alternate_specifier)
        {
          gfc_code *select_code;
          gfc_symbol *sym;
          select_code = code->next;
          gcc_assert(select_code->op == EXEC_SELECT);
          sym = select_code->expr1->symtree->n.sym;
          se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
          if (sym->backend_decl == NULL)
            sym->backend_decl = gfc_get_symbol_decl (sym);
          gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
        }
      else
        gfc_add_expr_to_block (&se.pre, se.expr);

      gfc_add_block_to_block (&se.pre, &se.post);
    }

  else
    {
      /* An elemental subroutine call with array valued arguments has
         to be scalarized.  */
      gfc_loopinfo loop;
      stmtblock_t body;
      stmtblock_t block;
      gfc_se loopse;
      gfc_se depse;

      /* gfc_walk_elemental_function_args renders the ss chain in the
         reverse order to the actual argument order.  */
      ss = gfc_reverse_ss (ss);

      /* Initialize the loop.  */
      gfc_init_se (&loopse, NULL);
      gfc_init_loopinfo (&loop);
      gfc_add_ss_to_loop (&loop, ss);

      gfc_conv_ss_startstride (&loop);
      /* TODO: gfc_conv_loop_setup generates a temporary for vector
         subscripts.  This could be prevented in the elemental case
         as temporaries are handled separately
         (below in gfc_conv_elemental_dependencies).  */
      gfc_conv_loop_setup (&loop, &code->expr1->where);
      gfc_mark_ss_chain_used (ss, 1);

      /* Convert the arguments, checking for dependencies.  */
      gfc_copy_loopinfo_to_se (&loopse, &loop);
      loopse.ss = ss;

      /* For operator assignment, do dependency checking.  */
      if (dependency_check)
        check_variable = ELEM_CHECK_VARIABLE;
      else
        check_variable = ELEM_DONT_CHECK_VARIABLE;

      gfc_init_se (&depse, NULL);
      gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
                                       code->ext.actual, check_variable);

      gfc_add_block_to_block (&loop.pre,  &depse.pre);
      gfc_add_block_to_block (&loop.post, &depse.post);

      /* Generate the loop body.  */
      gfc_start_scalarized_body (&loop, &body);
      gfc_init_block (&block);

      if (mask && count1)
        {
          /* Form the mask expression according to the mask.  */
          index = count1;
          maskexpr = gfc_build_array_ref (mask, index, NULL);
          if (invert)
            maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
                                    maskexpr);
        }

      /* Add the subroutine call to the block.  */
      gfc_conv_procedure_call (&loopse, code->resolved_sym,
                               code->ext.actual, code->expr1,
                               NULL_TREE);

      if (mask && count1)
        {
          tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
                          build_empty_stmt (input_location));
          gfc_add_expr_to_block (&loopse.pre, tmp);
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count1, gfc_index_one_node);
          gfc_add_modify (&loopse.pre, count1, tmp);
        }
      else
        gfc_add_expr_to_block (&loopse.pre, loopse.expr);

      gfc_add_block_to_block (&block, &loopse.pre);
      gfc_add_block_to_block (&block, &loopse.post);

      /* Finish up the loop block and the loop.  */
      gfc_add_expr_to_block (&body, gfc_finish_block (&block));
      gfc_trans_scalarizing_loops (&loop, &body);
      gfc_add_block_to_block (&se.pre, &loop.pre);
      gfc_add_block_to_block (&se.pre, &loop.post);
      gfc_add_block_to_block (&se.pre, &se.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&se.pre);
}
/* Translate the RETURN statement.  */

tree
gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
{
  if (code->expr1)
    {
      gfc_se se;
      tree tmp;
      tree result;

      /* If code->expr is not NULL, this return statement must appear
         in a subroutine and current_fake_result_decl has already
         been generated.  */

      result = gfc_get_fake_result_decl (NULL, 0);
      if (!result)
        {
          gfc_warning ("An alternate return at %L without a * dummy argument",
                       &code->expr1->where);
          return build1_v (GOTO_EXPR, gfc_get_return_label ());
        }

      /* Start a new block for this statement.  */
      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      gfc_conv_expr (&se, code->expr1);

      tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
                         fold_convert (TREE_TYPE (result), se.expr));
      gfc_add_expr_to_block (&se.pre, tmp);

      tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
      gfc_add_expr_to_block (&se.pre, tmp);
      gfc_add_block_to_block (&se.pre, &se.post);
      return gfc_finish_block (&se.pre);
    }
  else
    return build1_v (GOTO_EXPR, gfc_get_return_label ());
}
/* Translate the PAUSE statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_pause (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr1 == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
      tmp = build_call_expr_loc (input_location,
                                 gfor_fndecl_pause_numeric, 1, tmp);
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
                                 gfor_fndecl_pause_string, 2,
                                 se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}
/* Translate the STOP statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_stop (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr1 == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
      tmp = build_call_expr_loc (input_location,
                                 gfor_fndecl_stop_numeric, 1, tmp);
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
                                 gfor_fndecl_stop_string, 2,
                                 se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}
/* Generate GENERIC for the IF construct.  This function also deals with
   the simple IF statement, because the front end translates the IF
   statement into an IF construct.

   We translate:

        IF (cond) THEN
           then_clause
        ELSEIF (cond2)
           elseif_clause
        ELSE
           else_clause
        ENDIF

   into:

        pre_cond_s;
        if (cond_s)
          {
            then_clause;
          }
        else
          {
            pre_cond_s
            if (cond_s)
              {
                elseif_clause
              }
            else
              {
                else_clause;
              }
          }

   where COND_S is the simplified version of the predicate.  PRE_COND_S
   are the pre side-effects produced by the translation of the
   conditional.
   We need to build the chain recursively otherwise we run into
   problems with folding incomplete statements.  */

static tree
gfc_trans_if_1 (gfc_code * code)
{
  gfc_se if_se;
  tree stmt, elsestmt;

  /* Check for an unconditional ELSE clause.  */
  if (!code->expr1)
    return gfc_trans_code (code->next);

  /* Initialize a statement builder for each block.  Puts in NULL_TREEs.  */
  gfc_init_se (&if_se, NULL);
  gfc_start_block (&if_se.pre);

  /* Calculate the IF condition expression.  */
  gfc_conv_expr_val (&if_se, code->expr1);

  /* Translate the THEN clause.  */
  stmt = gfc_trans_code (code->next);

  /* Translate the ELSE clause.  */
  if (code->block)
    elsestmt = gfc_trans_if_1 (code->block);
  else
    elsestmt = build_empty_stmt (input_location);

  /* Build the condition expression and add it to the condition block.  */
  stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);

  gfc_add_expr_to_block (&if_se.pre, stmt);

  /* Finish off this statement.  */
  return gfc_finish_block (&if_se.pre);
}
tree
gfc_trans_if (gfc_code * code)
{
  /* Ignore the top EXEC_IF, it only announces an IF construct.  The
     actual code we must translate is in code->block.  */

  return gfc_trans_if_1 (code->block);
}
/* Translate an arithmetic IF expression.

   IF (cond) label1, label2, label3 translates to

     if (cond <= 0)
       {
         if (cond < 0)
           goto label1;
         else // cond == 0
           goto label2;
       }
     else // cond > 0
       goto label3;

   An optimized version can be generated in case of equal labels.
   E.g., if label1 is equal to label2, we can translate it to

     if (cond <= 0)
       goto label1;
     else
       goto label3;
*/

tree
gfc_trans_arithmetic_if (gfc_code * code)
{
  gfc_se se;
  tree tmp;
  tree branch1;
  tree branch2;
  tree zero;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  /* Pre-evaluate COND.  */
  gfc_conv_expr_val (&se, code->expr1);
  se.expr = gfc_evaluate_now (se.expr, &se.pre);

  /* Build something to compare with.  */
  zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);

  if (code->label1->value != code->label2->value)
    {
      /* If (cond < 0) take branch1 else take branch2.
         First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases.  */
      branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));

      if (code->label1->value != code->label3->value)
        tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
      else
        tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);

      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }
  else
    branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));

  if (code->label1->value != code->label3->value
      && code->label2->value != code->label3->value)
    {
      /* if (cond <= 0) take branch1 else take branch2.  */
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
      tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
      branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
    }

  /* Append the COND_EXPR to the evaluation of COND, and return.  */
  gfc_add_expr_to_block (&se.pre, branch1);
  return gfc_finish_block (&se.pre);
}
/* Translate a BLOCK construct.  This is basically what we would do for a
   procedure body.  */

tree
gfc_trans_block_construct (gfc_code* code)
{
  gfc_namespace* ns;
  gfc_symbol* sym;
  stmtblock_t body;
  tree tmp;

  ns = code->ext.ns;
  gcc_assert (ns);
  sym = ns->proc_name;
  gcc_assert (sym);

  gcc_assert (!sym->tlink);
  sym->tlink = sym;

  gfc_start_block (&body);
  gfc_process_block_locals (ns);

  tmp = gfc_trans_code (ns->code);
  tmp = gfc_trans_deferred_vars (sym, tmp);

  gfc_add_expr_to_block (&body, tmp);
  return gfc_finish_block (&body);
}
/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect
   results.
   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [Evaluate loop bounds and step]
   dovar = from;
   if ((step > 0) ? (dovar <= to) : (dovar >= to))
     {
       for (;;)
         {
           body;
cycle_label:
           cond = (dovar == to);
           dovar += step;
           if (cond) goto end_label;
         }
     }
   end_label:

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */
static tree
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
                     tree from, tree to, tree step)
{
  stmtblock_t body;
  tree type;
  tree cond;
  tree tmp;
  tree saved_dovar = NULL;
  tree cycle_label;
  tree exit_label;

  type = TREE_TYPE (dovar);

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (pblock, dovar, from);

  /* Save value for do-tinkering checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify (pblock, saved_dovar, dovar);
    }

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
                               "Loop variable has been modified");
    }

  /* Evaluate the loop condition.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
  cond = gfc_evaluate_now (cond, &body);

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify (&body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify (&body, saved_dovar, dovar);

  /* The loop exit.  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
                     cond, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&body, tmp);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = build1_v (LOOP_EXPR, tmp);

  /* Only execute the loop if the number of iterations is positive.  */
  if (tree_int_cst_sgn (step) > 0)
    cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
  else
    cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
  tmp = fold_build3 (COND_EXPR, void_type_node,
                     cond, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
}
/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially
   so for Fortran.

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;
   dovar = from;
   if (empty) goto exit_label;
   for (;;)
     {
       body;
cycle_label:
       dovar += step;
       if (countm1 == 0) goto exit_label;
       countm1--;
     }
exit_label:

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */
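/* For illustration (hypothetical example): with a 32-bit INTEGER loop
   variable, a loop spanning the whole value range, e.g.

       DO dovar = -HUGE(dovar) - 1, HUGE(dovar)

   executes 2**32 times.  That count does not fit in the 32-bit unsigned
   counter type, but countm1 = 2**32 - 1 does.  */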
tree
gfc_trans_do (gfc_code * code)
{
  gfc_se se;
  tree dovar;
  tree saved_dovar = NULL;
  tree from;
  tree to;
  tree step;
  tree countm1;
  tree type;
  tree utype;
  tree cond;
  tree cycle_label;
  tree exit_label;
  tree tmp;
  tree pos_step;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (&block, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (&block, &se.pre);
  from = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (&block, &se.pre);
  to = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (&block, &se.pre);
  step = gfc_evaluate_now (se.expr, &block);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
                         fold_convert (type, integer_zero_node));
      gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
                               "DO step value is zero");
    }

  /* Special case simple loops.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (integer_onep (step)
          || tree_int_cst_equal (step, integer_minus_one_node)))
    return gfc_trans_simple_do (code, &block, dovar, from, to, step);

  pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
                          fold_convert (type, integer_zero_node));

  if (TREE_CODE (type) == INTEGER_TYPE)
    utype = unsigned_type_for (type);
  else
    utype = unsigned_type_for (gfc_array_index_type);
  countm1 = gfc_create_var (utype, "countm1");

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);
  TREE_USED (exit_label) = 1;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (&block, dovar, from);

  /* Save value for do-tinkering checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify (&block, saved_dovar, dovar);
    }

  /* Initialize loop count and jump to exit label if the loop is empty.
     This code is executed before we enter the loop body.  We generate:
     if (step > 0)
       {
         if (to < from) goto exit_label;
         countm1 = (to - from) / step;
       }
     else
       {
         if (to > from) goto exit_label;
         countm1 = (from - to) / -step;
       }  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      tree pos, neg;

      tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
      pos = fold_build3 (COND_EXPR, void_type_node, tmp,
                         build1_v (GOTO_EXPR, exit_label),
                         build_empty_stmt (input_location));
      tmp = fold_build2 (MINUS_EXPR, type, to, from);
      tmp = fold_convert (utype, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
                         fold_convert (utype, step));
      tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
      pos = fold_build2 (COMPOUND_EXPR, void_type_node, pos, tmp);

      tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
      neg = fold_build3 (COND_EXPR, void_type_node, tmp,
                         build1_v (GOTO_EXPR, exit_label),
                         build_empty_stmt (input_location));
      tmp = fold_build2 (MINUS_EXPR, type, from, to);
      tmp = fold_convert (utype, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
                         fold_convert (utype, fold_build1 (NEGATE_EXPR,
                                                           type, step)));
      tmp = fold_build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
      neg = fold_build2 (COMPOUND_EXPR, void_type_node, neg, tmp);

      tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
      gfc_add_expr_to_block (&block, tmp);
    }
  else
    {
      /* TODO: We could use the same width as the real type.
         This would probably cause more problems than it solves
         when we implement "long double" types.  */

      tmp = fold_build2 (MINUS_EXPR, type, to, from);
      tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
      tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
      gfc_add_modify (&block, countm1, tmp);

      /* We need a special check for empty loops:
         empty = (step > 0 ? to < from : to > from);  */
      tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
                         fold_build2 (LT_EXPR, boolean_type_node, to, from),
                         fold_build2 (GT_EXPR, boolean_type_node, to, from));
      /* If the loop is empty, go directly to the exit label.  */
      tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
                         build1_v (GOTO_EXPR, exit_label),
                         build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Loop body.  */
  gfc_start_block (&body);

  /* Put these labels where they can be found later.  We put the
     labels in a TREE_LIST node (because TREE_CHAIN is already
     used).  cycle_label goes in TREE_PURPOSE (backend_decl), exit
     label in TREE_VALUE (backend_decl).  */

  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Main loop body.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
                               "Loop variable has been modified");
    }

  /* Increment the loop variable.  */
  tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
  gfc_add_modify (&body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify (&body, saved_dovar, dovar);

  /* End with the loop condition.  Loop until countm1 == 0.  */
  cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
                      build_int_cst (utype, 0));
  tmp = build1_v (GOTO_EXPR, exit_label);
  tmp = fold_build3 (COND_EXPR, void_type_node,
                     cond, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&body, tmp);

  /* Decrement the loop count.  */
  tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
  gfc_add_modify (&body, countm1, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&body);

  /* The for loop itself.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the DO WHILE construct.

   We translate

   DO WHILE (cond)
      body
   END DO

   to:

   for ( ; ; )
     {
       pre_cond;
       if (! cond) goto exit_label;
       body;
cycle_label:
     }
   exit_label:

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */
tree
gfc_trans_do_while (gfc_code * code)
{
  gfc_se cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;
  stmtblock_t block;

  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);

  /* Create a GIMPLE version of the exit condition.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr1);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3 (COND_EXPR, void_type_node,
                     cond.expr, tmp, build_empty_stmt (input_location));
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the loop.  */
  tmp = build1_v (LOOP_EXPR, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for INTEGER case expressions,
   without killing all potential optimizations.  The problem is that
   Fortran allows unbounded cases, but the back-end does not, so we
   need to intercept those before we enter the equivalent SWITCH_EXPR
   we can build.

   For example, we translate this,

   SELECT CASE (expr)
      CASE (:100,101,105:115)
         block_1
      CASE (190:199,200:)
         block_2
      CASE (300)
         block_3
      CASE DEFAULT
         block_4
   END SELECT

   to the GENERIC equivalent,

     switch (expr)
       {
         case (minimum value for typeof(expr)) ... 100:
         case 101:
         case 105 ... 115:
           block_1;
           goto end_label;

         case 200 ... (maximum value for typeof(expr)):
         case 190 ... 199:
           block_2;
           goto end_label;

         case 300:
           block_3;
           goto end_label;

         default:
           block_4;
           goto end_label;
       }

     end_label:  */
static tree
gfc_trans_integer_select (gfc_code * code)
{
  gfc_code *c;
  gfc_case *cp;
  tree end_label;
  tree tmp;
  gfc_se se;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Calculate the switch expression.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
        {
          tree low, high;
          tree label;

          /* Assume it's the default case.  */
          low = high = NULL_TREE;

          if (cp->low)
            {
              low = gfc_conv_mpz_to_tree (cp->low->value.integer,
                                          cp->low->ts.kind);

              /* If there's only a lower bound, set the high bound to the
                 maximum value of the case expression.  */
              if (!cp->high)
                high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
            }

          if (cp->high)
            {
              /* Three cases are possible here:

                 1) There is no lower bound, e.g. CASE (:N).
                 2) There is a lower bound .NE. high bound, that is
                    a case range, e.g. CASE (N:M) where M>N (we make
                    sure that M>N during type resolution).
                 3) There is a lower bound, and it has the same value
                    as the high bound, e.g. CASE (N:N).  This is our
                    internal representation of CASE(N).

                 In the first and second case, we need to set a value for
                 high.  In the third case, we don't because the GCC middle
                 end represents a single case value by just letting high be
                 a NULL_TREE.  We can't do that because we need to be able
                 to represent unbounded cases.  */

              if (!cp->low
                  || (cp->low
                      && mpz_cmp (cp->low->value.integer,
                                  cp->high->value.integer) != 0))
                high = gfc_conv_mpz_to_tree (cp->high->value.integer,
                                             cp->high->ts.kind);

              /* Unbounded case.  */
              if (!cp->low)
                low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
            }

          /* Build a label.  */
          label = gfc_build_label_decl (NULL_TREE);

          /* Add this case label.
             Add parameter 'label', make it match GCC backend.  */
          tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
                             low, high, label);
          gfc_add_expr_to_block (&body, tmp);
        }

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for LOGICAL case expressions.

   There are only two cases possible here, even though the standard
   does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
   .FALSE., and DEFAULT.

   We never generate more than two blocks here.  Instead, we always
   try to eliminate the DEFAULT case.  This way, we can translate this
   kind of SELECT construct to a simple

   if {} else {};

   expression in GENERIC.  */
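/* For illustration, assuming a hypothetical logical selector FLAG,

       SELECT CASE (flag)
       CASE (.TRUE.)
          block_t
       CASE DEFAULT
          block_d
       END SELECT

   is emitted as the GENERIC equivalent of  if (flag) block_t; else block_d;
   with the DEFAULT block standing in for the missing .FALSE. case.  */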
static tree
gfc_trans_logical_select (gfc_code * code)
{
  gfc_code *c;
  gfc_code *t, *f, *d;
  gfc_case *cp;
  gfc_se se;
  stmtblock_t block;

  /* Assume we don't have any cases at all.  */
  t = f = d = NULL;

  /* Now see which ones we actually do have.  We can have at most two
     cases in a single case list: one for .TRUE. and one for .FALSE.
     The default case is always separate.  If the cases for .TRUE. and
     .FALSE. are in the same case list, the block for that case list is
     always executed, and we don't generate a COND_EXPR.  */
  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.case_list; cp; cp = cp->next)
        {
          if (cp->low)
            {
              if (cp->low->value.logical == 0) /* .FALSE.  */
                f = c;
              else /* if (cp->value.logical != 0), thus .TRUE.  */
                t = c;
            }
          else
            d = c;
        }
    }

  /* Start a new block.  */
  gfc_start_block (&block);

  /* Calculate the switch expression.  We always need to do this
     because it may have side effects.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  if (t == f && t != NULL)
    {
      /* Cases for .TRUE. and .FALSE. are in the same block.  Just
         translate the code for these cases, append it to the current
         block.  */
      gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
    }
  else
    {
      tree true_tree, false_tree, stmt;

      true_tree = build_empty_stmt (input_location);
      false_tree = build_empty_stmt (input_location);

      /* If we have a case for .TRUE. and for .FALSE., discard the default case.
         Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
         make the missing case the default case.  */
      if (t != NULL && f != NULL)
        d = NULL;
      else if (d != NULL)
        {
          if (t == NULL)
            t = d;
          else
            f = d;
        }

      /* Translate the code for each of these blocks, and append it to
         the current block.  */
      if (t != NULL)
        true_tree = gfc_trans_code (t->next);

      if (f != NULL)
        false_tree = gfc_trans_code (f->next);

      stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
                          true_tree, false_tree);
      gfc_add_expr_to_block (&block, stmt);
    }

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for CHARACTER case expressions.
   Instead of generating compares and jumps, it is far simpler to
   generate a data structure describing the cases in order and call a
   library subroutine that locates the right case.
   This is particularly true because this is the only case where we
   might have to dispose of a temporary.
   The library subroutine returns the index of the matching case, which
   is then used as the selector of a SWITCH_EXPR below.  */
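/* For illustration, assuming a hypothetical character selector S,

       SELECT CASE (s)
       CASE ('a' : 'f')
          block_1
       CASE DEFAULT
          block_2
       END SELECT

   is lowered, roughly, to a static "jumptable" array with one
   {string1, string1_len, string2, string2_len, target} entry per case
   (null strings marking unbounded or default entries), a call to the
   select_string library routine that yields the target index of the
   matching entry, and a SWITCH_EXPR on that index whose arms contain
   block_1 and block_2 followed by a goto to the common end label.  */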
static tree
gfc_trans_character_select (gfc_code *code)
{
  tree init, node, end_label, tmp, type, case_num, label, fndecl;
  stmtblock_t block, body;
  gfc_case *cp, *d;
  gfc_code *c;
  gfc_se se;
  int n, k;

  /* The jump table types are stored in static variables to avoid
     constructing them from scratch every single time.  */
  static tree select_struct[2];
  static tree ss_string1[2], ss_string1_len[2];
  static tree ss_string2[2], ss_string2_len[2];
  static tree ss_target[2];

  tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);

  if (code->expr1->ts.kind == 1)
    k = 0;
  else if (code->expr1->ts.kind == 4)
    k = 1;
  else
    gcc_unreachable ();

  if (select_struct[k] == NULL)
    {
      select_struct[k] = make_node (RECORD_TYPE);

      if (code->expr1->ts.kind == 1)
        TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
      else if (code->expr1->ts.kind == 4)
        TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
      else
        gcc_unreachable ();

#undef ADD_FIELD
#define ADD_FIELD(NAME, TYPE)                                   \
  ss_##NAME[k] = gfc_add_field_to_struct                        \
     (&(TYPE_FIELDS (select_struct[k])), select_struct[k],      \
      get_identifier (stringize(NAME)), TYPE)

      ADD_FIELD (string1, pchartype);
      ADD_FIELD (string1_len, gfc_charlen_type_node);

      ADD_FIELD (string2, pchartype);
      ADD_FIELD (string2_len, gfc_charlen_type_node);

      ADD_FIELD (target, integer_type_node);
#undef ADD_FIELD

      gfc_finish_type (select_struct[k]);
    }

  cp = code->block->ext.case_list;
  while (cp->left != NULL)
    cp = cp->left;

  n = 0;
  for (d = cp; d; d = d->right)
    d->n = n++;

  end_label = gfc_build_label_decl (NULL_TREE);

  /* Generate the body */
  gfc_start_block (&block);
  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (d = c->ext.case_list; d; d = d->next)
        {
          label = gfc_build_label_decl (NULL_TREE);
          tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL_TREE, d->n),
                             build_int_cst (NULL_TREE, d->n), label);
          gfc_add_expr_to_block (&body, tmp);
        }

      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Generate the structure describing the branches */
  init = NULL_TREE;

  for(d = cp; d; d = d->right)
    {
      node = NULL_TREE;

      gfc_init_se (&se, NULL);

      if (d->low == NULL)
        {
          node = tree_cons (ss_string1[k], null_pointer_node, node);
          node = tree_cons (ss_string1_len[k], integer_zero_node, node);
        }
      else
        {
          gfc_conv_expr_reference (&se, d->low);

          node = tree_cons (ss_string1[k], se.expr, node);
          node = tree_cons (ss_string1_len[k], se.string_length, node);
        }

      if (d->high == NULL)
        {
          node = tree_cons (ss_string2[k], null_pointer_node, node);
          node = tree_cons (ss_string2_len[k], integer_zero_node, node);
        }
      else
        {
          gfc_init_se (&se, NULL);
          gfc_conv_expr_reference (&se, d->high);

          node = tree_cons (ss_string2[k], se.expr, node);
          node = tree_cons (ss_string2_len[k], se.string_length, node);
        }

      node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
                        node);

      tmp = build_constructor_from_list (select_struct[k], nreverse (node));
      init = tree_cons (NULL_TREE, tmp, init);
    }

  type = build_array_type (select_struct[k],
                           build_index_type (build_int_cst (NULL_TREE, n-1)));

  init = build_constructor_from_list (type, nreverse(init));
  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;
  /* Create a static variable to hold the jump table.  */
  tmp = gfc_create_var (type, "jumptable");
  TREE_CONSTANT (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;
  init = tmp;

  /* Build the library call */
  init = gfc_build_addr_expr (pvoid_type_node, init);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_reference (&se, code->expr1);

  gfc_add_block_to_block (&block, &se.pre);

  if (code->expr1->ts.kind == 1)
    fndecl = gfor_fndecl_select_string;
  else if (code->expr1->ts.kind == 4)
    fndecl = gfor_fndecl_select_string_char4;
  else
    gcc_unreachable ();

  tmp = build_call_expr_loc (input_location,
                             fndecl, 4, init, build_int_cst (NULL_TREE, n),
                             se.expr, se.string_length);
  case_num = gfc_create_var (integer_type_node, "case_num");
  gfc_add_modify (&block, case_num, tmp);

  gfc_add_block_to_block (&block, &se.post);

  tmp = gfc_finish_block (&body);
  tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the three variants of the SELECT CASE construct.

   SELECT CASEs with INTEGER case expressions can be translated to an
   equivalent GENERIC switch statement, and for LOGICAL case
   expressions we build one or two if-else compares.

   SELECT CASEs with CHARACTER case expressions are a whole different
   story, because they don't exist in GENERIC.  So we sort them and
   do a binary search at runtime.

   Fortran has no BREAK statement, and it does not allow jumps from
   one case block to another.  That makes things a lot easier for
   the optimizers.  */

tree
gfc_trans_select (gfc_code * code)
{
  gcc_assert (code && code->expr1);

  /* Empty SELECT constructs are legal.  */
  if (code->block == NULL)
    return build_empty_stmt (input_location);

  /* Select the correct translation function.  */
  switch (code->expr1->ts.type)
    {
    case BT_LOGICAL:	return gfc_trans_logical_select (code);
    case BT_INTEGER:	return gfc_trans_integer_select (code);
    case BT_CHARACTER:	return gfc_trans_character_select (code);
    default:
      gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
      /* Not reached */
    }
}
/* Traversal function to substitute a replacement symtree if the symbol
   in the expression is the same as that passed.  f == 2 signals that
   that variable itself is not to be checked - only the references.
   This group of functions is used when the variable expression in a
   FORALL assignment has internal references.  For example:
                FORALL (i = 1:4) p(p(i)) = i
   The only recourse here is to store a copy of 'p' for the index
   expression.  */

static gfc_symtree *new_symtree;
static gfc_symtree *old_symtree;

static bool
forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
{
  if (expr->expr_type != EXPR_VARIABLE)
    return false;

  if (*f == 2)
    *f = 1;
  else if (expr->symtree->n.sym == sym)
    expr->symtree = new_symtree;

  return false;
}

static void
forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
{
  gfc_traverse_expr (e, sym, forall_replace, f);
}

static bool
forall_restore (gfc_expr *expr,
                gfc_symbol *sym ATTRIBUTE_UNUSED,
                int *f ATTRIBUTE_UNUSED)
{
  if (expr->expr_type != EXPR_VARIABLE)
    return false;

  if (expr->symtree == new_symtree)
    expr->symtree = old_symtree;

  return false;
}

static void
forall_restore_symtree (gfc_expr *e)
{
  gfc_traverse_expr (e, NULL, forall_restore, 0);
}
static void
forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_se tse;
  gfc_se rse;
  gfc_expr *e;
  gfc_symbol *new_sym;
  gfc_symbol *old_sym;
  gfc_symtree *root;
  tree tmp;

  /* Build a copy of the lvalue.  */
  old_symtree = c->expr1->symtree;
  old_sym = old_symtree->n.sym;
  e = gfc_lval_expr_from_sym (old_sym);
  if (old_sym->attr.dimension)
    {
      gfc_init_se (&tse, NULL);
      gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
      gfc_add_block_to_block (pre, &tse.pre);
      gfc_add_block_to_block (post, &tse.post);
      tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);

      if (e->ts.type != BT_CHARACTER)
        {
          /* Use the variable offset for the temporary.  */
          tmp = gfc_conv_array_offset (old_sym->backend_decl);
          gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
        }
    }
  else
    {
      gfc_init_se (&tse, NULL);
      gfc_init_se (&rse, NULL);
      gfc_conv_expr (&rse, e);
      if (e->ts.type == BT_CHARACTER)
        {
          tse.string_length = rse.string_length;
          tmp = gfc_get_character_type_len (gfc_default_character_kind,
                                            tse.string_length);
          tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
                                          rse.string_length);
          gfc_add_block_to_block (pre, &tse.pre);
          gfc_add_block_to_block (post, &tse.post);
        }
      else
        {
          tmp = gfc_typenode_for_spec (&e->ts);
          tse.expr = gfc_create_var (tmp, "temp");
        }

      tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
                                     e->expr_type == EXPR_VARIABLE);
      gfc_add_expr_to_block (pre, tmp);
    }
  gfc_free_expr (e);

  /* Create a new symbol to represent the lvalue.  */
  new_sym = gfc_new_symbol (old_sym->name, NULL);
  new_sym->ts = old_sym->ts;
  new_sym->attr.referenced = 1;
  new_sym->attr.temporary = 1;
  new_sym->attr.dimension = old_sym->attr.dimension;
  new_sym->attr.flavor = old_sym->attr.flavor;

  /* Use the temporary as the backend_decl.  */
  new_sym->backend_decl = tse.expr;

  /* Create a fake symtree for it.  */
  root = NULL;
  new_symtree = gfc_new_symtree (&root, old_sym->name);
  new_symtree->n.sym = new_sym;
  gcc_assert (new_symtree == root);

  /* Go through the expression reference replacing the old_symtree
     with the new.  */
  forall_replace_symtree (c->expr1, old_sym, 2);

  /* Now we have made this temporary, we might as well use it for
     the right hand side.  */
  forall_replace_symtree (c->expr2, old_sym, 1);
}
/* Handles dependencies in forall assignments.  */

static int
check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_ref *lref;
  gfc_ref *rref;
  int need_temp;
  gfc_symbol *lsym;

  lsym = c->expr1->symtree->n.sym;
  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);

  /* Now check for dependencies within the 'variable'
     expression itself.  These are treated by making a complete
     copy of variable and changing all the references to it
     point to the copy instead.  Note that the shallow copy of
     the variable will not suffice for derived types with
     pointer components.  We therefore leave these to their
     own devices.  */
  if (lsym->ts.type == BT_DERIVED
        && lsym->ts.u.derived->attr.pointer_comp)
    return need_temp;

  new_symtree = NULL;
  if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
    {
      forall_make_variable_temp (c, pre, post);
      need_temp = 0;
    }

  /* Substrings with dependencies are treated in the same
     way.  */
  if (c->expr1->ts.type == BT_CHARACTER
        && c->expr1->ref
        && c->expr2->expr_type == EXPR_VARIABLE
        && lsym == c->expr2->symtree->n.sym)
    {
      for (lref = c->expr1->ref; lref; lref = lref->next)
        if (lref->type == REF_SUBSTRING)
          break;
      for (rref = c->expr2->ref; rref; rref = rref->next)
        if (rref->type == REF_SUBSTRING)
          break;

      if (rref && lref
            && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
        {
          forall_make_variable_temp (c, pre, post);
          need_temp = 0;
        }
    }
  return need_temp;
}


static void
cleanup_forall_symtrees (gfc_code *c)
{
  forall_restore_symtree (c->expr1);
  forall_restore_symtree (c->expr2);
  gfc_free (new_symtree->n.sym);
  gfc_free (new_symtree);
}
/* Generate the loops for a FORALL block, specified by FORALL_TMP.  BODY
   is the contents of the FORALL block/stmt to be iterated.  MASK_FLAG
   indicates whether we should generate code to test the FORALL's mask
   array.  OUTER is the loop header to be used for initializing mask
   indices.

   The generated loop format is:
    count = (end - start + step) / step
    loopvar = start
    while (1)
      {
        if (count <= 0)
          goto end_of_loop
        <body>
        loopvar += step
        count--
      }
    end_of_loop:  */
static tree
gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
                       int mask_flag, stmtblock_t *outer)
{
  int n, nvar;
  tree tmp;
  tree cond;
  stmtblock_t block;
  tree exit_label;
  tree count;
  tree var, start, end, step;
  iter_info *iter;

  /* Initialize the mask index outside the FORALL nest.  */
  if (mask_flag && forall_tmp->mask)
    gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);

  iter = forall_tmp->this_loop;
  nvar = forall_tmp->nvar;
  for (n = 0; n < nvar; n++)
    {
      var = iter->var;
      start = iter->start;
      end = iter->end;
      step = iter->step;

      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* The loop counter.  */
      count = gfc_create_var (TREE_TYPE (var), "count");

      /* The body of the loop.  */
      gfc_init_block (&block);

      /* The exit condition.  */
      cond = fold_build2 (LE_EXPR, boolean_type_node,
                          count, build_int_cst (TREE_TYPE (count), 0));
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3 (COND_EXPR, void_type_node,
                         cond, tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);

      /* The main loop body.  */
      gfc_add_expr_to_block (&block, body);

      /* Increment the loop variable.  */
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
      gfc_add_modify (&block, var, tmp);

      /* Advance to the next mask element.  Only do this for the
         innermost loop.  */
      if (n == 0 && mask_flag && forall_tmp->mask)
        {
          tree maskindex = forall_tmp->maskindex;
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             maskindex, gfc_index_one_node);
          gfc_add_modify (&block, maskindex, tmp);
        }

      /* Decrement the loop counter.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
                         build_int_cst (TREE_TYPE (var), 1));
      gfc_add_modify (&block, count, tmp);

      body = gfc_finish_block (&block);

      /* Loop var initialization.  */
      gfc_init_block (&block);
      gfc_add_modify (&block, var, start);

      /* Initialize the loop counter.  */
      tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
      tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
      tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
      gfc_add_modify (&block, count, tmp);

      /* The loop expression.  */
      tmp = build1_v (LOOP_EXPR, body);
      gfc_add_expr_to_block (&block, tmp);

      /* The exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&block, tmp);

      body = gfc_finish_block (&block);
      iter = iter->next;
    }
  return body;
}
/* Generate the body and loops according to MASK_FLAG.  If MASK_FLAG
   is nonzero, the body is controlled by all masks in the forall nest.
   Otherwise, the innermost loop is not controlled by its mask.  This
   is used for initializing that mask.  */

static tree
gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
                              int mask_flag)
{
  tree tmp;
  stmtblock_t header;
  forall_info *forall_tmp;
  tree mask, maskindex;

  gfc_start_block (&header);

  forall_tmp = nested_forall_info;
  while (forall_tmp != NULL)
    {
      /* Generate body with masks' control.  */
      if (mask_flag)
        {
          mask = forall_tmp->mask;
          maskindex = forall_tmp->maskindex;

          /* If a mask was specified make the assignment conditional.  */
          if (mask)
            {
              tmp = gfc_build_array_ref (mask, maskindex, NULL);
              body = build3_v (COND_EXPR, tmp, body,
                               build_empty_stmt (input_location));
            }
        }
      body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
      forall_tmp = forall_tmp->prev_nest;
      mask_flag = 1;
    }

  gfc_add_expr_to_block (&header, body);
  return gfc_finish_block (&header);
}
2047 /* Allocate data for holding a temporary array. Returns either a local
2048 temporary array or a pointer variable. */
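/* For example, if BYTESIZE folds to a constant small enough for the stack,
   the result is a local fixed-size array and *PDATA is set to NULL_TREE;
   otherwise the result is a pointer filled in by a malloc-style call and
   *PDATA receives the address so the caller can free it afterwards.  */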
2050 static tree
2051 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2052 tree elem_type)
2054 tree tmpvar;
2055 tree type;
2056 tree tmp;
2058 if (INTEGER_CST_P (size))
2060 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2061 gfc_index_one_node);
2063 else
2064 tmp = NULL_TREE;
2066 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2067 type = build_array_type (elem_type, type);
2068 if (gfc_can_put_var_on_stack (bytesize))
2070 gcc_assert (INTEGER_CST_P (size));
2071 tmpvar = gfc_create_var (type, "temp");
2072 *pdata = NULL_TREE;
2074 else
2076 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2077 *pdata = convert (pvoid_type_node, tmpvar);
2079 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2080 gfc_add_modify (pblock, tmpvar, tmp);
2082 return tmpvar;
2086 /* Generate code to copy the temporary to the actual lhs. */
2088 static tree
2089 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2090 tree count1, tree wheremask, bool invert)
2092 gfc_ss *lss;
2093 gfc_se lse, rse;
2094 stmtblock_t block, body;
2095 gfc_loopinfo loop1;
2096 tree tmp;
2097 tree wheremaskexpr;
2099 /* Walk the lhs. */
2100 lss = gfc_walk_expr (expr);
2102 if (lss == gfc_ss_terminator)
2104 gfc_start_block (&block);
2106 gfc_init_se (&lse, NULL);
2108 /* Translate the expression. */
2109 gfc_conv_expr (&lse, expr);
2111 /* Form the expression for the temporary. */
2112 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2114 /* Use the scalar assignment as is. */
2115 gfc_add_block_to_block (&block, &lse.pre);
2116 gfc_add_modify (&block, lse.expr, tmp);
2117 gfc_add_block_to_block (&block, &lse.post);
2119 /* Increment the count1. */
2120 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2121 gfc_index_one_node);
2122 gfc_add_modify (&block, count1, tmp);
2124 tmp = gfc_finish_block (&block);
2126 else
2128 gfc_start_block (&block);
2130 gfc_init_loopinfo (&loop1);
2131 gfc_init_se (&rse, NULL);
2132 gfc_init_se (&lse, NULL);
2134 /* Associate the lss with the loop. */
2135 gfc_add_ss_to_loop (&loop1, lss);
2137 /* Calculate the bounds of the scalarization. */
2138 gfc_conv_ss_startstride (&loop1);
2139 /* Setup the scalarizing loops. */
2140 gfc_conv_loop_setup (&loop1, &expr->where);
2142 gfc_mark_ss_chain_used (lss, 1);
2144 /* Start the scalarized loop body. */
2145 gfc_start_scalarized_body (&loop1, &body);
2147 /* Setup the gfc_se structures. */
2148 gfc_copy_loopinfo_to_se (&lse, &loop1);
2149 lse.ss = lss;
2151 /* Form the expression of the temporary. */
2152 if (lss != gfc_ss_terminator)
2153 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2154 /* Translate expr. */
2155 gfc_conv_expr (&lse, expr);
2157 /* Use the scalar assignment. */
2158 rse.string_length = lse.string_length;
2159 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2161 /* Form the mask expression according to the mask tree list. */
2162 if (wheremask)
2164 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2165 if (invert)
2166 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2167 TREE_TYPE (wheremaskexpr),
2168 wheremaskexpr);
2169 tmp = fold_build3 (COND_EXPR, void_type_node,
2170 wheremaskexpr, tmp,
2171 build_empty_stmt (input_location));
2174 gfc_add_expr_to_block (&body, tmp);
2176 /* Increment count1. */
2177 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2178 count1, gfc_index_one_node);
2179 gfc_add_modify (&body, count1, tmp);
2181 /* Increment count3. */
2182 if (count3)
2184 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2185 count3, gfc_index_one_node);
2186 gfc_add_modify (&body, count3, tmp);
2189 /* Generate the copying loops. */
2190 gfc_trans_scalarizing_loops (&loop1, &body);
2191 gfc_add_block_to_block (&block, &loop1.pre);
2192 gfc_add_block_to_block (&block, &loop1.post);
2193 gfc_cleanup_loop (&loop1);
2195 tmp = gfc_finish_block (&block);
2197 return tmp;
2201 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
2202 the temporary; LSS and RSS are formed in function compute_inner_temp_size()
2203 and should not be freed. WHEREMASK is the conditional execution mask
2204 whose sense may be inverted by INVERT. */
2206 static tree
2207 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2208 tree count1, gfc_ss *lss, gfc_ss *rss,
2209 tree wheremask, bool invert)
2211 stmtblock_t block, body1;
2212 gfc_loopinfo loop;
2213 gfc_se lse;
2214 gfc_se rse;
2215 tree tmp;
2216 tree wheremaskexpr;
2218 gfc_start_block (&block);
2220 gfc_init_se (&rse, NULL);
2221 gfc_init_se (&lse, NULL);
2223 if (lss == gfc_ss_terminator)
2225 gfc_init_block (&body1);
2226 gfc_conv_expr (&rse, expr2);
2227 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2229 else
2231 /* Initialize the loop. */
2232 gfc_init_loopinfo (&loop);
2234 /* We may need LSS to determine the shape of the expression. */
2235 gfc_add_ss_to_loop (&loop, lss);
2236 gfc_add_ss_to_loop (&loop, rss);
2238 gfc_conv_ss_startstride (&loop);
2239 gfc_conv_loop_setup (&loop, &expr2->where);
2241 gfc_mark_ss_chain_used (rss, 1);
2242 /* Start the loop body. */
2243 gfc_start_scalarized_body (&loop, &body1);
2245 /* Translate the expression. */
2246 gfc_copy_loopinfo_to_se (&rse, &loop);
2247 rse.ss = rss;
2248 gfc_conv_expr (&rse, expr2);
2250 /* Form the expression of the temporary. */
2251 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2254 /* Use the scalar assignment. */
2255 lse.string_length = rse.string_length;
2256 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2257 expr2->expr_type == EXPR_VARIABLE);
2259 /* Form the mask expression according to the mask tree list. */
2260 if (wheremask)
2262 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2263 if (invert)
2264 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2265 TREE_TYPE (wheremaskexpr),
2266 wheremaskexpr);
2267 tmp = fold_build3 (COND_EXPR, void_type_node,
2268 wheremaskexpr, tmp, build_empty_stmt (input_location));
2271 gfc_add_expr_to_block (&body1, tmp);
2273 if (lss == gfc_ss_terminator)
2275 gfc_add_block_to_block (&block, &body1);
2277 /* Increment count1. */
2278 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2279 gfc_index_one_node);
2280 gfc_add_modify (&block, count1, tmp);
2282 else
2284 /* Increment count1. */
2285 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2286 count1, gfc_index_one_node);
2287 gfc_add_modify (&body1, count1, tmp);
2289 /* Increment count3. */
2290 if (count3)
2292 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2293 count3, gfc_index_one_node);
2294 gfc_add_modify (&body1, count3, tmp);
2297 /* Generate the copying loops. */
2298 gfc_trans_scalarizing_loops (&loop, &body1);
2300 gfc_add_block_to_block (&block, &loop.pre);
2301 gfc_add_block_to_block (&block, &loop.post);
2303 gfc_cleanup_loop (&loop);
2304 /* TODO: Reuse lss and rss when copying temp->lhs. Care is needed, as
2305 tree nodes in the SS chains may not be valid in a different scope. */
2308 tmp = gfc_finish_block (&block);
2309 return tmp;
2313 /* Calculate the size of the temporary needed for the assignment inside a
2314 FORALL. LSS and RSS are filled in by this function. */
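/* For example, if the scalarized assignment loop has extents 5 and 3, the
   returned size is 5 * 3 = 15 elements; a scalar assignment yields 1.  */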
2316 static tree
2317 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2318 stmtblock_t * pblock,
2319 gfc_ss **lss, gfc_ss **rss)
2321 gfc_loopinfo loop;
2322 tree size;
2323 int i;
2324 int save_flag;
2325 tree tmp;
2327 *lss = gfc_walk_expr (expr1);
2328 *rss = NULL;
2330 size = gfc_index_one_node;
2331 if (*lss != gfc_ss_terminator)
2333 gfc_init_loopinfo (&loop);
2335 /* Walk the RHS of the expression. */
2336 *rss = gfc_walk_expr (expr2);
2337 if (*rss == gfc_ss_terminator)
2339 /* The rhs is scalar. Add a ss for the expression. */
2340 *rss = gfc_get_ss ();
2341 (*rss)->next = gfc_ss_terminator;
2342 (*rss)->type = GFC_SS_SCALAR;
2343 (*rss)->expr = expr2;
2346 /* Associate the SS with the loop. */
2347 gfc_add_ss_to_loop (&loop, *lss);
2348 /* We don't actually need to add the rhs at this point, but it might
2349 make guessing the loop bounds a bit easier. */
2350 gfc_add_ss_to_loop (&loop, *rss);
2352 /* We only want the shape of the expression, not the rest of the junk
2353 generated by the scalarizer. */
2354 loop.array_parameter = 1;
2356 /* Calculate the bounds of the scalarization. */
2357 save_flag = gfc_option.rtcheck;
2358 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2359 gfc_conv_ss_startstride (&loop);
2360 gfc_option.rtcheck = save_flag;
2361 gfc_conv_loop_setup (&loop, &expr2->where);
2363 /* Figure out how many elements we need. */
2364 for (i = 0; i < loop.dimen; i++)
2366 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2367 gfc_index_one_node, loop.from[i]);
2368 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2369 tmp, loop.to[i]);
2370 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2372 gfc_add_block_to_block (pblock, &loop.pre);
2373 size = gfc_evaluate_now (size, pblock);
2374 gfc_add_block_to_block (pblock, &loop.post);
2376 /* TODO: write a function that cleans up a loopinfo without freeing
2377 the SS chains. Currently a NOP. */
2380 return size;
2384 /* Calculate the overall iterator number of the nested forall construct.
2385 This routine actually calculates the number of times the body of the
2386 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2387 that by the expression INNER_SIZE. The BLOCK argument specifies the
2388 block in which to calculate the result, and the optional INNER_SIZE_BODY
2389 argument contains any statements that need to be executed (inside the loop)
2390 to initialize or calculate INNER_SIZE. */
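/* Worked example: a FORALL (i=1:4, j=1:3) level with no mask and an
   INNER_SIZE of 5 elements needs 4 * 3 * 5 = 60 elements in total; since
   all factors are compile-time constants, the constant 60 is returned
   directly and no counting loop is emitted.  */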
2392 static tree
2393 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2394 stmtblock_t *inner_size_body, stmtblock_t *block)
2396 forall_info *forall_tmp = nested_forall_info;
2397 tree tmp, number;
2398 stmtblock_t body;
2400 /* We can eliminate the innermost unconditional loops with constant
2401 array bounds. */
2402 if (INTEGER_CST_P (inner_size))
2404 while (forall_tmp
2405 && !forall_tmp->mask
2406 && INTEGER_CST_P (forall_tmp->size))
2408 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2409 inner_size, forall_tmp->size);
2410 forall_tmp = forall_tmp->prev_nest;
2413 /* If there are no loops left, we have our constant result. */
2414 if (!forall_tmp)
2415 return inner_size;
2418 /* Otherwise, create a temporary variable to compute the result. */
2419 number = gfc_create_var (gfc_array_index_type, "num");
2420 gfc_add_modify (block, number, gfc_index_zero_node);
2422 gfc_start_block (&body);
2423 if (inner_size_body)
2424 gfc_add_block_to_block (&body, inner_size_body);
2425 if (forall_tmp)
2426 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2427 number, inner_size);
2428 else
2429 tmp = inner_size;
2430 gfc_add_modify (&body, number, tmp);
2431 tmp = gfc_finish_block (&body);
2433 /* Generate loops. */
2434 if (forall_tmp != NULL)
2435 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2437 gfc_add_expr_to_block (block, tmp);
2439 return number;
2443 /* Allocate a temporary for the FORALL construct. SIZE is the size of the
2444 temporary needed. PTEMP1 is returned so the storage can be freed later. */
2446 static tree
2447 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2448 tree * ptemp1)
2450 tree bytesize;
2451 tree unit;
2452 tree tmp;
2454 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2455 if (!integer_onep (unit))
2456 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2457 else
2458 bytesize = size;
2460 *ptemp1 = NULL;
2461 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2463 if (*ptemp1)
2464 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2465 return tmp;
2469 /* Allocate a temporary for the FORALL construct according to the information
2470 in NESTED_FORALL_INFO. INNER_SIZE is the size of the temporary needed by the
2471 assignment inside the FORALL. PTEMP1 is returned so the storage can be freed later. */
2473 static tree
2474 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2475 tree inner_size, stmtblock_t * inner_size_body,
2476 stmtblock_t * block, tree * ptemp1)
2478 tree size;
2480 /* Calculate the total size of temporary needed in forall construct. */
2481 size = compute_overall_iter_number (nested_forall_info, inner_size,
2482 inner_size_body, block);
2484 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2488 /* Handle assignments inside a FORALL which need a temporary.
2490 forall (i=start:end:stride; maskexpr)
2491 e<i> = f<i>
2492 end forall
2493 (where e,f<i> are arbitrary expressions possibly involving i
2494 and there is a dependency between e<i> and f<i>)
2495 Translates to:
2496 masktmp(:) = maskexpr(:)
2498 maskindex = 0;
2499 count1 = 0;
2500 num = 0;
2501 for (i = start; i <= end; i += stride)
2502 num += SIZE (f<i>)
2503 count1 = 0;
2504 ALLOCATE (tmp(num))
2505 for (i = start; i <= end; i += stride)
2507 if (masktmp[maskindex++])
2508 tmp[count1++] = f<i>
2510 maskindex = 0;
2511 count1 = 0;
2512 for (i = start; i <= end; i += stride)
2514 if (masktmp[maskindex++])
2515 e<i> = tmp[count1++]
2517 DEALLOCATE (tmp)
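/* A typical case that takes this path is

         FORALL (i = 1:n)  a(i) = a(n+1-i)

   where the rhs overlaps the lhs, so every a(n+1-i) is copied into the
   temporary before any element of a is overwritten.  */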
2519 static void
2520 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2521 tree wheremask, bool invert,
2522 forall_info * nested_forall_info,
2523 stmtblock_t * block)
2525 tree type;
2526 tree inner_size;
2527 gfc_ss *lss, *rss;
2528 tree count, count1;
2529 tree tmp, tmp1;
2530 tree ptemp1;
2531 stmtblock_t inner_size_body;
2533 /* Create vars. count1 is the current iterator number of the nested
2534 forall. */
2535 count1 = gfc_create_var (gfc_array_index_type, "count1");
2537 /* Count is the wheremask index. */
2538 if (wheremask)
2540 count = gfc_create_var (gfc_array_index_type, "count");
2541 gfc_add_modify (block, count, gfc_index_zero_node);
2543 else
2544 count = NULL;
2546 /* Initialize count1. */
2547 gfc_add_modify (block, count1, gfc_index_zero_node);
2549 /* Calculate the size of the temporary needed in the assignment. Return lss
2550 and rss, which are used in function generate_loop_for_rhs_to_temp(). */
2551 gfc_init_block (&inner_size_body);
2552 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2553 &lss, &rss);
2555 /* The type of the LHS. Used in function allocate_temp_for_forall_nest. */
2556 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
2558 if (!expr1->ts.u.cl->backend_decl)
2560 gfc_se tse;
2561 gfc_init_se (&tse, NULL);
2562 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
2563 expr1->ts.u.cl->backend_decl = tse.expr;
2565 type = gfc_get_character_type_len (gfc_default_character_kind,
2566 expr1->ts.u.cl->backend_decl);
2568 else
2569 type = gfc_typenode_for_spec (&expr1->ts);
2571 /* Allocate temporary for nested forall construct according to the
2572 information in nested_forall_info and inner_size. */
2573 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2574 &inner_size_body, block, &ptemp1);
2576 /* Generate code to copy the rhs to the temporary. */
2577 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2578 wheremask, invert);
2580 /* Generate body and loops according to the information in
2581 nested_forall_info. */
2582 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2583 gfc_add_expr_to_block (block, tmp);
2585 /* Reset count1. */
2586 gfc_add_modify (block, count1, gfc_index_zero_node);
2588 /* Reset count. */
2589 if (wheremask)
2590 gfc_add_modify (block, count, gfc_index_zero_node);
2592 /* Generate code to copy the temporary to the lhs. */
2593 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2594 wheremask, invert);
2596 /* Generate body and loops according to the information in
2597 nested_forall_info. */
2598 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2599 gfc_add_expr_to_block (block, tmp);
2601 if (ptemp1)
2603 /* Free the temporary. */
2604 tmp = gfc_call_free (ptemp1);
2605 gfc_add_expr_to_block (block, tmp);
2610 /* Translate a pointer assignment inside FORALL which needs a temporary. */
2612 static void
2613 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2614 forall_info * nested_forall_info,
2615 stmtblock_t * block)
2617 tree type;
2618 tree inner_size;
2619 gfc_ss *lss, *rss;
2620 gfc_se lse;
2621 gfc_se rse;
2622 gfc_ss_info *info;
2623 gfc_loopinfo loop;
2624 tree desc;
2625 tree parm;
2626 tree parmtype;
2627 stmtblock_t body;
2628 tree count;
2629 tree tmp, tmp1, ptemp1;
2631 count = gfc_create_var (gfc_array_index_type, "count");
2632 gfc_add_modify (block, count, gfc_index_zero_node);
2634 inner_size = integer_one_node;
2635 lss = gfc_walk_expr (expr1);
2636 rss = gfc_walk_expr (expr2);
2637 if (lss == gfc_ss_terminator)
2639 type = gfc_typenode_for_spec (&expr1->ts);
2640 type = build_pointer_type (type);
2642 /* Allocate temporary for nested forall construct according to the
2643 information in nested_forall_info and inner_size. */
2644 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2645 inner_size, NULL, block, &ptemp1);
2646 gfc_start_block (&body);
2647 gfc_init_se (&lse, NULL);
2648 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2649 gfc_init_se (&rse, NULL);
2650 rse.want_pointer = 1;
2651 gfc_conv_expr (&rse, expr2);
2652 gfc_add_block_to_block (&body, &rse.pre);
2653 gfc_add_modify (&body, lse.expr,
2654 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2655 gfc_add_block_to_block (&body, &rse.post);
2657 /* Increment count. */
2658 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2659 count, gfc_index_one_node);
2660 gfc_add_modify (&body, count, tmp);
2662 tmp = gfc_finish_block (&body);
2664 /* Generate body and loops according to the information in
2665 nested_forall_info. */
2666 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2667 gfc_add_expr_to_block (block, tmp);
2669 /* Reset count. */
2670 gfc_add_modify (block, count, gfc_index_zero_node);
2672 gfc_start_block (&body);
2673 gfc_init_se (&lse, NULL);
2674 gfc_init_se (&rse, NULL);
2675 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2676 lse.want_pointer = 1;
2677 gfc_conv_expr (&lse, expr1);
2678 gfc_add_block_to_block (&body, &lse.pre);
2679 gfc_add_modify (&body, lse.expr, rse.expr);
2680 gfc_add_block_to_block (&body, &lse.post);
2681 /* Increment count. */
2682 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2683 count, gfc_index_one_node);
2684 gfc_add_modify (&body, count, tmp);
2685 tmp = gfc_finish_block (&body);
2687 /* Generate body and loops according to the information in
2688 nested_forall_info. */
2689 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2690 gfc_add_expr_to_block (block, tmp);
2692 else
2694 gfc_init_loopinfo (&loop);
2696 /* Associate the SS with the loop. */
2697 gfc_add_ss_to_loop (&loop, rss);
2699 /* Setup the scalarizing loops and bounds. */
2700 gfc_conv_ss_startstride (&loop);
2702 gfc_conv_loop_setup (&loop, &expr2->where);
2704 info = &rss->data.info;
2705 desc = info->descriptor;
2707 /* Make a new descriptor. */
2708 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2709 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2710 loop.from, loop.to, 1,
2711 GFC_ARRAY_UNKNOWN, true);
2713 /* Allocate temporary for nested forall construct. */
2714 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2715 inner_size, NULL, block, &ptemp1);
2716 gfc_start_block (&body);
2717 gfc_init_se (&lse, NULL);
2718 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2719 lse.direct_byref = 1;
2720 rss = gfc_walk_expr (expr2);
2721 gfc_conv_expr_descriptor (&lse, expr2, rss);
2723 gfc_add_block_to_block (&body, &lse.pre);
2724 gfc_add_block_to_block (&body, &lse.post);
2726 /* Increment count. */
2727 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2728 count, gfc_index_one_node);
2729 gfc_add_modify (&body, count, tmp);
2731 tmp = gfc_finish_block (&body);
2733 /* Generate body and loops according to the information in
2734 nested_forall_info. */
2735 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2736 gfc_add_expr_to_block (block, tmp);
2738 /* Reset count. */
2739 gfc_add_modify (block, count, gfc_index_zero_node);
2741 parm = gfc_build_array_ref (tmp1, count, NULL);
2742 lss = gfc_walk_expr (expr1);
2743 gfc_init_se (&lse, NULL);
2744 gfc_conv_expr_descriptor (&lse, expr1, lss);
2745 gfc_add_modify (&lse.pre, lse.expr, parm);
2746 gfc_start_block (&body);
2747 gfc_add_block_to_block (&body, &lse.pre);
2748 gfc_add_block_to_block (&body, &lse.post);
2750 /* Increment count. */
2751 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2752 count, gfc_index_one_node);
2753 gfc_add_modify (&body, count, tmp);
2755 tmp = gfc_finish_block (&body);
2757 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2758 gfc_add_expr_to_block (block, tmp);
2760 /* Free the temporary. */
2761 if (ptemp1)
2763 tmp = gfc_call_free (ptemp1);
2764 gfc_add_expr_to_block (block, tmp);
2769 /* FORALL and WHERE statements are really nasty, especially when you nest
2770 them. The rhs of every forall assignment must be evaluated before the
2771 actual assignments are performed. Presumably this also applies to all the
2772 assignments in an inner where statement. */
2774 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2775 linear array, relying on the fact that we process in the same order in all
2776 loops.
2778 forall (i=start:end:stride; maskexpr)
2779 e<i> = f<i>
2780 g<i> = h<i>
2781 end forall
2782 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2783 Translates to:
2784 count = ((end + 1 - start) / stride)
2785 masktmp(:) = maskexpr(:)
2787 maskindex = 0;
2788 for (i = start; i <= end; i += stride)
2790 if (masktmp[maskindex++])
2791 e<i> = f<i>
2793 maskindex = 0;
2794 for (i = start; i <= end; i += stride)
2796 if (masktmp[maskindex++])
2797 g<i> = h<i>
2800 Note that this code only works when there are no dependencies.
2801 Forall loops with array assignments and data dependencies are a real pain,
2802 because the size of the temporary cannot always be determined before the
2803 loop is executed. This problem is compounded by the presence of nested
2804 FORALL constructs.
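/* The mask temporary is effectively a flat array: maskindex is reset to zero
   before each generated pass over the FORALL body and advanced only in the
   innermost loop, so the mask elements are read back in exactly the order in
   which they were stored.  */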
2807 static tree
2808 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2810 stmtblock_t pre;
2811 stmtblock_t post;
2812 stmtblock_t block;
2813 stmtblock_t body;
2814 tree *var;
2815 tree *start;
2816 tree *end;
2817 tree *step;
2818 gfc_expr **varexpr;
2819 tree tmp;
2820 tree assign;
2821 tree size;
2822 tree maskindex;
2823 tree mask;
2824 tree pmask;
2825 int n;
2826 int nvar;
2827 int need_temp;
2828 gfc_forall_iterator *fa;
2829 gfc_se se;
2830 gfc_code *c;
2831 gfc_saved_var *saved_vars;
2832 iter_info *this_forall;
2833 forall_info *info;
2834 bool need_mask;
2836 /* Do nothing if the mask is false. */
2837 if (code->expr1
2838 && code->expr1->expr_type == EXPR_CONSTANT
2839 && !code->expr1->value.logical)
2840 return build_empty_stmt (input_location);
2842 n = 0;
2843 /* Count the number of FORALL indices. */
2844 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2845 n++;
2846 nvar = n;
2848 /* Allocate the space for var, start, end, step, varexpr. */
2849 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2850 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2851 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2852 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2853 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2854 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2856 /* Allocate the space for info. */
2857 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2859 gfc_start_block (&pre);
2860 gfc_init_block (&post);
2861 gfc_init_block (&block);
2863 n = 0;
2864 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2866 gfc_symbol *sym = fa->var->symtree->n.sym;
2868 /* Allocate space for this_forall. */
2869 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2871 /* Create a temporary variable for the FORALL index. */
2872 tmp = gfc_typenode_for_spec (&sym->ts);
2873 var[n] = gfc_create_var (tmp, sym->name);
2874 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2876 /* Record it in this_forall. */
2877 this_forall->var = var[n];
2879 /* Replace the index symbol's backend_decl with the temporary decl. */
2880 sym->backend_decl = var[n];
2882 /* Work out the start, end and stride for the loop. */
2883 gfc_init_se (&se, NULL);
2884 gfc_conv_expr_val (&se, fa->start);
2885 /* Record it in this_forall. */
2886 this_forall->start = se.expr;
2887 gfc_add_block_to_block (&block, &se.pre);
2888 start[n] = se.expr;
2890 gfc_init_se (&se, NULL);
2891 gfc_conv_expr_val (&se, fa->end);
2892 /* Record it in this_forall. */
2893 this_forall->end = se.expr;
2894 gfc_make_safe_expr (&se);
2895 gfc_add_block_to_block (&block, &se.pre);
2896 end[n] = se.expr;
2898 gfc_init_se (&se, NULL);
2899 gfc_conv_expr_val (&se, fa->stride);
2900 /* Record it in this_forall. */
2901 this_forall->step = se.expr;
2902 gfc_make_safe_expr (&se);
2903 gfc_add_block_to_block (&block, &se.pre);
2904 step[n] = se.expr;
2906 /* Set the NEXT field of this_forall to NULL. */
2907 this_forall->next = NULL;
2908 /* Link this_forall to the info construct. */
2909 if (info->this_loop)
2911 iter_info *iter_tmp = info->this_loop;
2912 while (iter_tmp->next != NULL)
2913 iter_tmp = iter_tmp->next;
2914 iter_tmp->next = this_forall;
2916 else
2917 info->this_loop = this_forall;
2919 n++;
2921 nvar = n;
2923 /* Calculate the size needed for the current forall level. */
2924 size = gfc_index_one_node;
2925 for (n = 0; n < nvar; n++)
2927 /* size = (end + step - start) / step. */
2928 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2929 step[n], start[n]);
2930 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2932 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2933 tmp = convert (gfc_array_index_type, tmp);
2935 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2938 /* Record the nvar and size of current forall level. */
2939 info->nvar = nvar;
2940 info->size = size;
2942 if (code->expr1)
2944 /* If the mask is .true., consider the FORALL unconditional. */
2945 if (code->expr1->expr_type == EXPR_CONSTANT
2946 && code->expr1->value.logical)
2947 need_mask = false;
2948 else
2949 need_mask = true;
2951 else
2952 need_mask = false;
2954 /* First we need to allocate the mask. */
2955 if (need_mask)
2957 /* As the mask array can be very big, prefer compact boolean types. */
2958 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2959 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2960 size, NULL, &block, &pmask);
2961 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2963 /* Record them in the info structure. */
2964 info->maskindex = maskindex;
2965 info->mask = mask;
2967 else
2969 /* No mask was specified. */
2970 maskindex = NULL_TREE;
2971 mask = pmask = NULL_TREE;
2974 /* Link the current forall level to nested_forall_info. */
2975 info->prev_nest = nested_forall_info;
2976 nested_forall_info = info;
2978 /* Copy the mask into a temporary variable if required.
2979 For now we assume a mask temporary is needed. */
2980 if (need_mask)
2982 /* As the mask array can be very big, prefer compact boolean types. */
2983 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2985 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2987 /* Start of mask assignment loop body. */
2988 gfc_start_block (&body);
2990 /* Evaluate the mask expression. */
2991 gfc_init_se (&se, NULL);
2992 gfc_conv_expr_val (&se, code->expr1);
2993 gfc_add_block_to_block (&body, &se.pre);
2995 /* Store the mask. */
2996 se.expr = convert (mask_type, se.expr);
2998 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2999 gfc_add_modify (&body, tmp, se.expr);
3001 /* Advance to the next mask element. */
3002 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3003 maskindex, gfc_index_one_node);
3004 gfc_add_modify (&body, maskindex, tmp);
3006 /* Generate the loops. */
3007 tmp = gfc_finish_block (&body);
3008 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3009 gfc_add_expr_to_block (&block, tmp);
3012 c = code->block->next;
3014 /* TODO: loop merging in FORALL statements. */
3015 /* Now that we've got a copy of the mask, generate the assignment loops. */
3016 while (c)
3018 switch (c->op)
3020 case EXEC_ASSIGN:
3021 /* A scalar or array assignment. Do the simple check for
3022 lhs-to-rhs dependencies. These make a temporary for the
3023 rhs and form a second forall block to copy it to the variable. */
3024 need_temp = check_forall_dependencies(c, &pre, &post);
3026 /* Temporaries due to array assignment data dependencies introduce
3027 no end of problems. */
3028 if (need_temp)
3029 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3030 nested_forall_info, &block);
3031 else
3033 /* Use the normal assignment copying routines. */
3034 assign = gfc_trans_assignment (c->expr1, c->expr2, false);
3036 /* Generate body and loops. */
3037 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3038 assign, 1);
3039 gfc_add_expr_to_block (&block, tmp);
3042 /* Clean up any temporary symtrees that have been made to deal
3043 with dependencies. */
3044 if (new_symtree)
3045 cleanup_forall_symtrees (c);
3047 break;
3049 case EXEC_WHERE:
3050 /* Translate WHERE or WHERE construct nested in FORALL. */
3051 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3052 break;
3054 /* Pointer assignment inside FORALL. */
3055 case EXEC_POINTER_ASSIGN:
3056 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3057 if (need_temp)
3058 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3059 nested_forall_info, &block);
3060 else
3062 /* Use the normal assignment copying routines. */
3063 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3065 /* Generate body and loops. */
3066 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3067 assign, 1);
3068 gfc_add_expr_to_block (&block, tmp);
3070 break;
3072 case EXEC_FORALL:
3073 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3074 gfc_add_expr_to_block (&block, tmp);
3075 break;
3077 /* Explicit subroutine calls are prevented by the frontend, but interface
3078 assignments can legitimately produce them. */
3079 case EXEC_ASSIGN_CALL:
3080 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3081 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3082 gfc_add_expr_to_block (&block, tmp);
3083 break;
3085 default:
3086 gcc_unreachable ();
3089 c = c->next;
3092 /* Restore the original index variables. */
3093 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3094 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3096 /* Free the space for var, start, end, step, varexpr. */
3097 gfc_free (var);
3098 gfc_free (start);
3099 gfc_free (end);
3100 gfc_free (step);
3101 gfc_free (varexpr);
3102 gfc_free (saved_vars);
3104 /* Free the space for this forall_info. */
3105 gfc_free (info);
3107 if (pmask)
3109 /* Free the temporary for the mask. */
3110 tmp = gfc_call_free (pmask);
3111 gfc_add_expr_to_block (&block, tmp);
3113 if (maskindex)
3114 pushdecl (maskindex);
3116 gfc_add_block_to_block (&pre, &block);
3117 gfc_add_block_to_block (&pre, &post);
3119 return gfc_finish_block (&pre);
3123 /* Translate the FORALL statement or construct. */
3125 tree gfc_trans_forall (gfc_code * code)
3127 return gfc_trans_forall_1 (code, NULL);
3131 /* Evaluate the WHERE mask expression and copy its value to a temporary.
3132 If the WHERE construct is nested in a FORALL, the overall temporary needed
3133 is the size required by the WHERE mask expression multiplied by the
3134 iteration count of the nested forall.
3135 ME is the WHERE mask expression.
3136 MASK is the current execution mask upon input, whose sense may or may
3137 not be inverted as specified by the INVERT argument.
3138 CMASK is the updated execution mask on output, or NULL if not required.
3139 PMASK is the pending execution mask on output, or NULL if not required.
3140 BLOCK is the block in which to place the condition evaluation loops. */
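/* Roughly, for each element this stores

         cmask = mask .and. ME
         pmask = mask .and. .not. ME

   (with MASK's sense flipped first when INVERT is set, and the "mask .and."
   part dropped when MASK is NULL), so CMASK guards the clause's own body
   while PMASK is what the following ELSEWHERE clauses see.  */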
3142 static void
3143 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3144 tree mask, bool invert, tree cmask, tree pmask,
3145 tree mask_type, stmtblock_t * block)
3147 tree tmp, tmp1;
3148 gfc_ss *lss, *rss;
3149 gfc_loopinfo loop;
3150 stmtblock_t body, body1;
3151 tree count, cond, mtmp;
3152 gfc_se lse, rse;
3154 gfc_init_loopinfo (&loop);
3156 lss = gfc_walk_expr (me);
3157 rss = gfc_walk_expr (me);
3159 /* Variable to index the temporary. */
3160 count = gfc_create_var (gfc_array_index_type, "count");
3161 /* Initialize count. */
3162 gfc_add_modify (block, count, gfc_index_zero_node);
3164 gfc_start_block (&body);
3166 gfc_init_se (&rse, NULL);
3167 gfc_init_se (&lse, NULL);
3169 if (lss == gfc_ss_terminator)
3171 gfc_init_block (&body1);
3173 else
3175 /* Initialize the loop. */
3176 gfc_init_loopinfo (&loop);
3178 /* We may need LSS to determine the shape of the expression. */
3179 gfc_add_ss_to_loop (&loop, lss);
3180 gfc_add_ss_to_loop (&loop, rss);
3182 gfc_conv_ss_startstride (&loop);
3183 gfc_conv_loop_setup (&loop, &me->where);
3185 gfc_mark_ss_chain_used (rss, 1);
3186 /* Start the loop body. */
3187 gfc_start_scalarized_body (&loop, &body1);
3189 /* Translate the expression. */
3190 gfc_copy_loopinfo_to_se (&rse, &loop);
3191 rse.ss = rss;
3192 gfc_conv_expr (&rse, me);
3195 /* Variable to evaluate mask condition. */
3196 cond = gfc_create_var (mask_type, "cond");
3197 if (mask && (cmask || pmask))
3198 mtmp = gfc_create_var (mask_type, "mask");
3199 else mtmp = NULL_TREE;
3201 gfc_add_block_to_block (&body1, &lse.pre);
3202 gfc_add_block_to_block (&body1, &rse.pre);
3204 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3206 if (mask && (cmask || pmask))
3208 tmp = gfc_build_array_ref (mask, count, NULL);
3209 if (invert)
3210 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3211 gfc_add_modify (&body1, mtmp, tmp);
3214 if (cmask)
3216 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3217 tmp = cond;
3218 if (mask)
3219 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3220 gfc_add_modify (&body1, tmp1, tmp);
3223 if (pmask)
3225 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3226 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3227 if (mask)
3228 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3229 gfc_add_modify (&body1, tmp1, tmp);
3232 gfc_add_block_to_block (&body1, &lse.post);
3233 gfc_add_block_to_block (&body1, &rse.post);
3235 if (lss == gfc_ss_terminator)
3237 gfc_add_block_to_block (&body, &body1);
3239 else
3241 /* Increment count. */
3242 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3243 gfc_index_one_node);
3244 gfc_add_modify (&body1, count, tmp1);
3246 /* Generate the copying loops. */
3247 gfc_trans_scalarizing_loops (&loop, &body1);
3249 gfc_add_block_to_block (&body, &loop.pre);
3250 gfc_add_block_to_block (&body, &loop.post);
3252 gfc_cleanup_loop (&loop);
3253 /* TODO: Reuse lss and rss when copying temp->lhs. Care is needed, as
3254 tree nodes in the SS chains may not be valid in a different scope. */
3257 tmp1 = gfc_finish_block (&body);
3258 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3259 if (nested_forall_info != NULL)
3260 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3262 gfc_add_expr_to_block (block, tmp1);
3266 /* Translate an assignment statement in a WHERE statement or WHERE
3267 construct. The MASK expression is used to control which elements
3268 of EXPR1 shall be assigned. The sense of MASK is specified by
3269 INVERT. */
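/* COUNT1 indexes the control mask while the rhs is evaluated and assigned;
   COUNT2 indexes it a second time in the extra copy loop that moves the
   scalarizer's temporary into the real lhs when one was required.  */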
3271 static tree
3272 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3273 tree mask, bool invert,
3274 tree count1, tree count2,
3275 gfc_code *cnext)
3277 gfc_se lse;
3278 gfc_se rse;
3279 gfc_ss *lss;
3280 gfc_ss *lss_section;
3281 gfc_ss *rss;
3283 gfc_loopinfo loop;
3284 tree tmp;
3285 stmtblock_t block;
3286 stmtblock_t body;
3287 tree index, maskexpr;
3289 /* A defined assignment. */
3290 if (cnext && cnext->resolved_sym)
3291 return gfc_trans_call (cnext, true, mask, count1, invert);
3293 #if 0
3294 /* TODO: handle this special case.
3295 Special case a single function returning an array. */
3296 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3298 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3299 if (tmp)
3300 return tmp;
3302 #endif
3304 /* Assignment of the form lhs = rhs. */
3305 gfc_start_block (&block);
3307 gfc_init_se (&lse, NULL);
3308 gfc_init_se (&rse, NULL);
3310 /* Walk the lhs. */
3311 lss = gfc_walk_expr (expr1);
3312 rss = NULL;
3314 /* In each where-assign-stmt, the mask-expr and the variable being
3315 defined shall be arrays of the same shape. */
3316 gcc_assert (lss != gfc_ss_terminator);
3318 /* The assignment needs scalarization. */
3319 lss_section = lss;
3321 /* Find a non-scalar SS from the lhs. */
3322 while (lss_section != gfc_ss_terminator
3323 && lss_section->type != GFC_SS_SECTION)
3324 lss_section = lss_section->next;
3326 gcc_assert (lss_section != gfc_ss_terminator);
3328 /* Initialize the scalarizer. */
3329 gfc_init_loopinfo (&loop);
3331 /* Walk the rhs. */
3332 rss = gfc_walk_expr (expr2);
3333 if (rss == gfc_ss_terminator)
3335 /* The rhs is scalar. Add a ss for the expression. */
3336 rss = gfc_get_ss ();
3337 rss->where = 1;
3338 rss->next = gfc_ss_terminator;
3339 rss->type = GFC_SS_SCALAR;
3340 rss->expr = expr2;
3343 /* Associate the SS with the loop. */
3344 gfc_add_ss_to_loop (&loop, lss);
3345 gfc_add_ss_to_loop (&loop, rss);
3347 /* Calculate the bounds of the scalarization. */
3348 gfc_conv_ss_startstride (&loop);
3350 /* Resolve any data dependencies in the statement. */
3351 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3353 /* Setup the scalarizing loops. */
3354 gfc_conv_loop_setup (&loop, &expr2->where);
3356 /* Setup the gfc_se structures. */
3357 gfc_copy_loopinfo_to_se (&lse, &loop);
3358 gfc_copy_loopinfo_to_se (&rse, &loop);
3360 rse.ss = rss;
3361 gfc_mark_ss_chain_used (rss, 1);
3362 if (loop.temp_ss == NULL)
3364 lse.ss = lss;
3365 gfc_mark_ss_chain_used (lss, 1);
3367 else
3369 lse.ss = loop.temp_ss;
3370 gfc_mark_ss_chain_used (lss, 3);
3371 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3374 /* Start the scalarized loop body. */
3375 gfc_start_scalarized_body (&loop, &body);
3377 /* Translate the expression. */
3378 gfc_conv_expr (&rse, expr2);
3379 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3381 gfc_conv_tmp_array_ref (&lse);
3382 gfc_advance_se_ss_chain (&lse);
3384 else
3385 gfc_conv_expr (&lse, expr1);
3387 /* Form the mask expression according to the mask. */
3388 index = count1;
3389 maskexpr = gfc_build_array_ref (mask, index, NULL);
3390 if (invert)
3391 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3393 /* Use the scalar assignment as is. */
3394 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3395 loop.temp_ss != NULL, false);
3397 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3399 gfc_add_expr_to_block (&body, tmp);
3401 if (lss == gfc_ss_terminator)
3403 /* Increment count1. */
3404 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3405 count1, gfc_index_one_node);
3406 gfc_add_modify (&body, count1, tmp);
3408 /* Use the scalar assignment as is. */
3409 gfc_add_block_to_block (&block, &body);
3411 else
3413 gcc_assert (lse.ss == gfc_ss_terminator
3414 && rse.ss == gfc_ss_terminator);
3416 if (loop.temp_ss != NULL)
3418 /* Increment count1 before finishing the main body of a
3419 scalarized expression. */
3420 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3421 count1, gfc_index_one_node);
3422 gfc_add_modify (&body, count1, tmp);
3423 gfc_trans_scalarized_loop_boundary (&loop, &body);
3425 /* We need to copy the temporary to the actual lhs. */
3426 gfc_init_se (&lse, NULL);
3427 gfc_init_se (&rse, NULL);
3428 gfc_copy_loopinfo_to_se (&lse, &loop);
3429 gfc_copy_loopinfo_to_se (&rse, &loop);
3431 rse.ss = loop.temp_ss;
3432 lse.ss = lss;
3434 gfc_conv_tmp_array_ref (&rse);
3435 gfc_advance_se_ss_chain (&rse);
3436 gfc_conv_expr (&lse, expr1);
3438 gcc_assert (lse.ss == gfc_ss_terminator
3439 && rse.ss == gfc_ss_terminator);
3441 /* Form the mask expression according to the mask tree list. */
3442 index = count2;
3443 maskexpr = gfc_build_array_ref (mask, index, NULL);
3444 if (invert)
3445 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3446 maskexpr);
3448 /* Use the scalar assignment as is. */
3449 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3450 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3451 build_empty_stmt (input_location));
3452 gfc_add_expr_to_block (&body, tmp);
3454 /* Increment count2. */
3455 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3456 count2, gfc_index_one_node);
3457 gfc_add_modify (&body, count2, tmp);
3459 else
3461 /* Increment count1. */
3462 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3463 count1, gfc_index_one_node);
3464 gfc_add_modify (&body, count1, tmp);
3467 /* Generate the copying loops. */
3468 gfc_trans_scalarizing_loops (&loop, &body);
3470 /* Wrap the whole thing up. */
3471 gfc_add_block_to_block (&block, &loop.pre);
3472 gfc_add_block_to_block (&block, &loop.post);
3473 gfc_cleanup_loop (&loop);
3476 return gfc_finish_block (&block);
3480 /* Translate the WHERE construct or statement.
3481 This function can be called recursively to translate nested WHERE
3482 constructs or statements.
3483 MASK is the control mask. */
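/* As a rough sketch, a construct with conditional ELSEWHEREs such as

         WHERE (c1)
           ...
         ELSEWHERE (c2)
           ...
         ELSEWHERE
           ...
         END WHERE

   allocates both a current mask (CMASK) and a pending mask (PMASK) temporary,
   evaluates each mask expression exactly once, and guards every contained
   assignment with the cmask in effect for its clause.  */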
3485 static void
3486 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3487 forall_info * nested_forall_info, stmtblock_t * block)
3489 stmtblock_t inner_size_body;
3490 tree inner_size, size;
3491 gfc_ss *lss, *rss;
3492 tree mask_type;
3493 gfc_expr *expr1;
3494 gfc_expr *expr2;
3495 gfc_code *cblock;
3496 gfc_code *cnext;
3497 tree tmp;
3498 tree cond;
3499 tree count1, count2;
3500 bool need_cmask;
3501 bool need_pmask;
3502 int need_temp;
3503 tree pcmask = NULL_TREE;
3504 tree ppmask = NULL_TREE;
3505 tree cmask = NULL_TREE;
3506 tree pmask = NULL_TREE;
3507 gfc_actual_arglist *arg;
3509 /* The WHERE statement or the WHERE construct statement. */
3510 cblock = code->block;
3512 /* As the mask array can be very big, prefer compact boolean types. */
3513 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3515 /* Determine which temporary masks are needed. */
3516 if (!cblock->block)
3518 /* One clause: No ELSEWHEREs. */
3519 need_cmask = (cblock->next != 0);
3520 need_pmask = false;
3522 else if (cblock->block->block)
3524 /* Three or more clauses: Conditional ELSEWHEREs. */
3525 need_cmask = true;
3526 need_pmask = true;
3528 else if (cblock->next)
3530 /* Two clauses, the first non-empty. */
3531 need_cmask = true;
3532 need_pmask = (mask != NULL_TREE
3533 && cblock->block->next != 0);
3535 else if (!cblock->block->next)
3537 /* Two clauses, both empty. */
3538 need_cmask = false;
3539 need_pmask = false;
3541 /* Two clauses, the first empty, the second non-empty. */
3542 else if (mask)
3544 need_cmask = (cblock->block->expr1 != 0);
3545 need_pmask = true;
3547 else
3549 need_cmask = true;
3550 need_pmask = false;
3553 if (need_cmask || need_pmask)
3555 /* Calculate the size of temporary needed by the mask-expr. */
3556 gfc_init_block (&inner_size_body);
3557 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
3558 &inner_size_body, &lss, &rss);
3560 /* Calculate the total size of temporary needed. */
3561 size = compute_overall_iter_number (nested_forall_info, inner_size,
3562 &inner_size_body, block);
3564 /* Check whether the size is negative. */
3565 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3566 gfc_index_zero_node);
3567 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3568 gfc_index_zero_node, size);
3569 size = gfc_evaluate_now (size, block);
3571 /* Allocate temporary for WHERE mask if needed. */
3572 if (need_cmask)
3573 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3574 &pcmask);
3576 /* Allocate temporary for !mask if needed. */
3577 if (need_pmask)
3578 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3579 &ppmask);
3582 while (cblock)
3584 /* Each time around this loop, the where clause is conditional
3585 on the value of mask and invert, which are updated at the
3586 bottom of the loop. */
3588 /* Has mask-expr. */
3589 if (cblock->expr1)
3591 /* Ensure that the WHERE mask will be evaluated exactly once.
3592 If there are no statements in this WHERE/ELSEWHERE clause,
3593 then we don't need to update the control mask (cmask).
3594 If this is the last clause of the WHERE construct, then
3595 we don't need to update the pending control mask (pmask). */
3596 if (mask)
3597 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3598 mask, invert,
3599 cblock->next ? cmask : NULL_TREE,
3600 cblock->block ? pmask : NULL_TREE,
3601 mask_type, block);
3602 else
3603 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
3604 NULL_TREE, false,
3605 (cblock->next || cblock->block)
3606 ? cmask : NULL_TREE,
3607 NULL_TREE, mask_type, block);
3609 invert = false;
3611 /* It's a final elsewhere-stmt. No mask-expr is present. */
3612 else
3613 cmask = mask;
3615 /* The body of this where clause is controlled by cmask, with its
3616 sense specified by invert. */
3618 /* Get the assignment statement of a WHERE statement, or the first
3619 statement in the where-body-construct of a WHERE construct. */
3620 cnext = cblock->next;
3621 while (cnext)
3623 switch (cnext->op)
3625 /* WHERE assignment statement. */
3626 case EXEC_ASSIGN_CALL:
3628 arg = cnext->ext.actual;
3629 expr1 = expr2 = NULL;
3630 for (; arg; arg = arg->next)
3632 if (!arg->expr)
3633 continue;
3634 if (expr1 == NULL)
3635 expr1 = arg->expr;
3636 else
3637 expr2 = arg->expr;
3639 goto evaluate;
3641 case EXEC_ASSIGN:
3642 expr1 = cnext->expr1;
3643 expr2 = cnext->expr2;
3644 evaluate:
3645 if (nested_forall_info != NULL)
3647 need_temp = gfc_check_dependency (expr1, expr2, 0);
3648 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3649 gfc_trans_assign_need_temp (expr1, expr2,
3650 cmask, invert,
3651 nested_forall_info, block);
3652 else
3654 /* Variables to control maskexpr. */
3655 count1 = gfc_create_var (gfc_array_index_type, "count1");
3656 count2 = gfc_create_var (gfc_array_index_type, "count2");
3657 gfc_add_modify (block, count1, gfc_index_zero_node);
3658 gfc_add_modify (block, count2, gfc_index_zero_node);
3660 tmp = gfc_trans_where_assign (expr1, expr2,
3661 cmask, invert,
3662 count1, count2,
3663 cnext);
3665 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3666 tmp, 1);
3667 gfc_add_expr_to_block (block, tmp);
3670 else
3672 /* Variables to control maskexpr. */
3673 count1 = gfc_create_var (gfc_array_index_type, "count1");
3674 count2 = gfc_create_var (gfc_array_index_type, "count2");
3675 gfc_add_modify (block, count1, gfc_index_zero_node);
3676 gfc_add_modify (block, count2, gfc_index_zero_node);
3678 tmp = gfc_trans_where_assign (expr1, expr2,
3679 cmask, invert,
3680 count1, count2,
3681 cnext);
3682 gfc_add_expr_to_block (block, tmp);
3685 break;
3687 /* WHERE or WHERE construct is part of a where-body-construct. */
3688 case EXEC_WHERE:
3689 gfc_trans_where_2 (cnext, cmask, invert,
3690 nested_forall_info, block);
3691 break;
3693 default:
3694 gcc_unreachable ();
3697 /* The next statement within the same where-body-construct. */
3698 cnext = cnext->next;
3700 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3701 cblock = cblock->block;
3702 if (mask == NULL_TREE)
3704 /* If we're the initial WHERE, we can simply invert the sense
3705 of the current mask to obtain the "mask" for the remaining
3706 ELSEWHEREs. */
3707 invert = true;
3708 mask = cmask;
3710 else
3712 /* Otherwise, for nested WHEREs we need to use the pending mask. */
3713 invert = false;
3714 mask = pmask;
3718 /* If we allocated a pending mask array, deallocate it now. */
3719 if (ppmask)
3721 tmp = gfc_call_free (ppmask);
3722 gfc_add_expr_to_block (block, tmp);
3725 /* If we allocated a current mask array, deallocate it now. */
3726 if (pcmask)
3728 tmp = gfc_call_free (pcmask);
3729 gfc_add_expr_to_block (block, tmp);
3733 /* Translate a simple WHERE construct or statement without dependencies.
3734 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR1
3735 is the mask condition, and EBLOCK, if non-NULL, is the "else" clause.
3736 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
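/* For instance, WHERE (m) x = y; ELSEWHERE x = z; END WHERE with no
   dependencies collapses to a single scalarized loop whose per-element body
   is roughly "if (m) x = y; else x = z;", with no mask temporary.  */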
3738 static tree
3739 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3741 stmtblock_t block, body;
3742 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3743 tree tmp, cexpr, tstmt, estmt;
3744 gfc_ss *css, *tdss, *tsss;
3745 gfc_se cse, tdse, tsse, edse, esse;
3746 gfc_loopinfo loop;
3747 gfc_ss *edss = 0;
3748 gfc_ss *esss = 0;
3750 /* Allow the scalarizer to workshare simple where loops. */
3751 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
3752 ompws_flags |= OMPWS_SCALARIZER_WS;
3754 cond = cblock->expr1;
3755 tdst = cblock->next->expr1;
3756 tsrc = cblock->next->expr2;
3757 edst = eblock ? eblock->next->expr1 : NULL;
3758 esrc = eblock ? eblock->next->expr2 : NULL;
3760 gfc_start_block (&block);
3761 gfc_init_loopinfo (&loop);
3763 /* Handle the condition. */
3764 gfc_init_se (&cse, NULL);
3765 css = gfc_walk_expr (cond);
3766 gfc_add_ss_to_loop (&loop, css);
3768 /* Handle the then-clause. */
3769 gfc_init_se (&tdse, NULL);
3770 gfc_init_se (&tsse, NULL);
3771 tdss = gfc_walk_expr (tdst);
3772 tsss = gfc_walk_expr (tsrc);
3773 if (tsss == gfc_ss_terminator)
3775 tsss = gfc_get_ss ();
3776 tsss->where = 1;
3777 tsss->next = gfc_ss_terminator;
3778 tsss->type = GFC_SS_SCALAR;
3779 tsss->expr = tsrc;
3781 gfc_add_ss_to_loop (&loop, tdss);
3782 gfc_add_ss_to_loop (&loop, tsss);
3784 if (eblock)
3786 /* Handle the else clause. */
3787 gfc_init_se (&edse, NULL);
3788 gfc_init_se (&esse, NULL);
3789 edss = gfc_walk_expr (edst);
3790 esss = gfc_walk_expr (esrc);
3791 if (esss == gfc_ss_terminator)
3793 esss = gfc_get_ss ();
3794 esss->where = 1;
3795 esss->next = gfc_ss_terminator;
3796 esss->type = GFC_SS_SCALAR;
3797 esss->expr = esrc;
3799 gfc_add_ss_to_loop (&loop, edss);
3800 gfc_add_ss_to_loop (&loop, esss);
3803 gfc_conv_ss_startstride (&loop);
3804 gfc_conv_loop_setup (&loop, &tdst->where);
3806 gfc_mark_ss_chain_used (css, 1);
3807 gfc_mark_ss_chain_used (tdss, 1);
3808 gfc_mark_ss_chain_used (tsss, 1);
3809 if (eblock)
3811 gfc_mark_ss_chain_used (edss, 1);
3812 gfc_mark_ss_chain_used (esss, 1);
3815 gfc_start_scalarized_body (&loop, &body);
3817 gfc_copy_loopinfo_to_se (&cse, &loop);
3818 gfc_copy_loopinfo_to_se (&tdse, &loop);
3819 gfc_copy_loopinfo_to_se (&tsse, &loop);
3820 cse.ss = css;
3821 tdse.ss = tdss;
3822 tsse.ss = tsss;
3823 if (eblock)
3825 gfc_copy_loopinfo_to_se (&edse, &loop);
3826 gfc_copy_loopinfo_to_se (&esse, &loop);
3827 edse.ss = edss;
3828 esse.ss = esss;
3831 gfc_conv_expr (&cse, cond);
3832 gfc_add_block_to_block (&body, &cse.pre);
3833 cexpr = cse.expr;
3835 gfc_conv_expr (&tsse, tsrc);
3836 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3838 gfc_conv_tmp_array_ref (&tdse);
3839 gfc_advance_se_ss_chain (&tdse);
3841 else
3842 gfc_conv_expr (&tdse, tdst);
3844 if (eblock)
3846 gfc_conv_expr (&esse, esrc);
3847 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3849 gfc_conv_tmp_array_ref (&edse);
3850 gfc_advance_se_ss_chain (&edse);
3852 else
3853 gfc_conv_expr (&edse, edst);
3856 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3857 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3858 : build_empty_stmt (input_location);
3859 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3860 gfc_add_expr_to_block (&body, tmp);
3861 gfc_add_block_to_block (&body, &cse.post);
3863 gfc_trans_scalarizing_loops (&loop, &body);
3864 gfc_add_block_to_block (&block, &loop.pre);
3865 gfc_add_block_to_block (&block, &loop.post);
3866 gfc_cleanup_loop (&loop);
3868 return gfc_finish_block (&block);
3871 /* As the WHERE or WHERE construct statement can be nested, we call
3872 gfc_trans_where_2 to do the translation, and pass the initial
3873 NULL values for both the control mask and the pending control mask. */
3875 tree
3876 gfc_trans_where (gfc_code * code)
3878 stmtblock_t block;
3879 gfc_code *cblock;
3880 gfc_code *eblock;
3882 cblock = code->block;
3883 if (cblock->next
3884 && cblock->next->op == EXEC_ASSIGN
3885 && !cblock->next->next)
3887 eblock = cblock->block;
3888 if (!eblock)
3890 /* A simple "WHERE (cond) x = y" statement or block is
3891 dependence free if cond is not dependent upon writing x,
3892 and the source y is unaffected by the destination x. */
3893 if (!gfc_check_dependency (cblock->next->expr1,
3894 cblock->expr1, 0)
3895 && !gfc_check_dependency (cblock->next->expr1,
3896 cblock->next->expr2, 0))
3897 return gfc_trans_where_3 (cblock, NULL);
3899 else if (!eblock->expr1
3900 && !eblock->block
3901 && eblock->next
3902 && eblock->next->op == EXEC_ASSIGN
3903 && !eblock->next->next)
3905 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3906 block is dependence free if cond is not dependent on writes
3907 to x1 and x2, y1 is not dependent on writes to x2, and y2
3908 is not dependent on writes to x1, and both y's are not
3909 dependent upon their own x's. In addition to this, the
3910 final two dependency checks below exclude all but the same
3911 array reference if the where and elsewhere destinations
3912 are the same. In short, this is VERY conservative and this
3913 is needed because the two loops, required by the standard,
3914 are coalesced in gfc_trans_where_3. */
3915 if (!gfc_check_dependency(cblock->next->expr1,
3916 cblock->expr1, 0)
3917 && !gfc_check_dependency(eblock->next->expr1,
3918 cblock->expr1, 0)
3919 && !gfc_check_dependency(cblock->next->expr1,
3920 eblock->next->expr2, 1)
3921 && !gfc_check_dependency(eblock->next->expr1,
3922 cblock->next->expr2, 1)
3923 && !gfc_check_dependency(cblock->next->expr1,
3924 cblock->next->expr2, 1)
3925 && !gfc_check_dependency(eblock->next->expr1,
3926 eblock->next->expr2, 1)
3927 && !gfc_check_dependency(cblock->next->expr1,
3928 eblock->next->expr1, 0)
3929 && !gfc_check_dependency(eblock->next->expr1,
3930 cblock->next->expr1, 0))
3931 return gfc_trans_where_3 (cblock, eblock);
3935 gfc_start_block (&block);
3937 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3939 return gfc_finish_block (&block);
3943 /* CYCLE a DO loop. The label decl has already been created by
3944 gfc_trans_do(); it's in TREE_PURPOSE (backend_decl) of the gfc_code
3945 node at the head of the loop. We must mark the label as used. */
3947 tree
3948 gfc_trans_cycle (gfc_code * code)
3950 tree cycle_label;
3952 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3953 TREE_USED (cycle_label) = 1;
3954 return build1_v (GOTO_EXPR, cycle_label);
3958 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3959 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3960 loop. */
3962 tree
3963 gfc_trans_exit (gfc_code * code)
3965 tree exit_label;
3967 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3968 TREE_USED (exit_label) = 1;
3969 return build1_v (GOTO_EXPR, exit_label);
3973 /* Translate the ALLOCATE statement. */
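/* Rough shape of the error handling below: when STAT= and/or ERRMSG= is
   given, each item's allocation writes its status into a local "stat"
   variable through PSTAT, and a nonzero status branches to a shared error
   label so the remaining work for that item is skipped.  */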
3975 tree
3976 gfc_trans_allocate (gfc_code * code)
3978 gfc_alloc *al;
3979 gfc_expr *expr, *init_e;
3980 gfc_se se;
3981 tree tmp;
3982 tree parm;
3983 tree stat;
3984 tree pstat;
3985 tree error_label;
3986 tree memsz;
3987 stmtblock_t block;
3989 if (!code->ext.alloc.list)
3990 return NULL_TREE;
3992 pstat = stat = error_label = tmp = memsz = NULL_TREE;
3994 gfc_start_block (&block);
3996 /* STAT= and/or ERRMSG= is present. */
3997 if (code->expr1 || code->expr2)
3999 tree gfc_int4_type_node = gfc_get_int_type (4);
4001 stat = gfc_create_var (gfc_int4_type_node, "stat");
4002 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4004 error_label = gfc_build_label_decl (NULL_TREE);
4005 TREE_USED (error_label) = 1;
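/* With STAT= or ERRMSG= given, each allocation below is followed by a
   check roughly of the form (a sketch, shown for the scalar path, not
   a literal tree dump)

       if (stat != 0) goto error_label;

   so that the first failure branches straight to the STAT/ERRMSG
   handling emitted at the end of the statement.  */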
4008 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4010 expr = gfc_copy_expr (al->expr);
4012 if (expr->ts.type == BT_CLASS)
4013 gfc_add_component_ref (expr, "$data");
4015 gfc_init_se (&se, NULL);
4016 gfc_start_block (&se.pre);
4018 se.want_pointer = 1;
4019 se.descriptor_only = 1;
4020 gfc_conv_expr (&se, expr);
4022 if (!gfc_array_allocate (&se, expr, pstat))
4024 /* A scalar or derived type. */
4026 /* Determine the size to allocate. */
4027 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4029 gfc_expr *sz;
4030 gfc_se se_sz;
4031 sz = gfc_copy_expr (code->expr3);
4032 gfc_add_component_ref (sz, "$size");
4033 gfc_init_se (&se_sz, NULL);
4034 gfc_conv_expr (&se_sz, sz);
4035 gfc_free_expr (sz);
4036 memsz = se_sz.expr;
4038 else if (code->expr3 && code->expr3->ts.type != BT_CLASS)
4039 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4040 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4041 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4042 else
4043 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4045 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4046 memsz = se.string_length;
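/* For example (illustrative only): with a CLASS SOURCE= the size is
   read from the source's $size component at run time; with a non-CLASS
   SOURCE= or an explicit type-spec it is the TYPE_SIZE_UNIT of that
   type; otherwise it is the size of the pointee type, or, for a
   CHARACTER object whose size is still unknown, its string length.  */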
4048 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4049 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
4050 fold_convert (TREE_TYPE (se.expr), tmp));
4051 gfc_add_expr_to_block (&se.pre, tmp);
4053 if (code->expr1 || code->expr2)
4055 tmp = build1_v (GOTO_EXPR, error_label);
4056 parm = fold_build2 (NE_EXPR, boolean_type_node,
4057 stat, build_int_cst (TREE_TYPE (stat), 0));
4058 tmp = fold_build3 (COND_EXPR, void_type_node,
4059 parm, tmp, build_empty_stmt (input_location));
4060 gfc_add_expr_to_block (&se.pre, tmp);
4063 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4065 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4066 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4067 gfc_add_expr_to_block (&se.pre, tmp);
4072 tmp = gfc_finish_block (&se.pre);
4073 gfc_add_expr_to_block (&block, tmp);
4075 /* Initialization via the SOURCE= expression. */
4076 if (code->expr3)
4078 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4079 if (al->expr->ts.type == BT_CLASS)
4081 gfc_se dst,src;
4082 if (rhs->ts.type == BT_CLASS)
4083 gfc_add_component_ref (rhs, "$data");
4084 gfc_init_se (&dst, NULL);
4085 gfc_init_se (&src, NULL);
4086 gfc_conv_expr (&dst, expr);
4087 gfc_conv_expr (&src, rhs);
4088 gfc_add_block_to_block (&block, &src.pre);
4089 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4091 else
4092 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4093 rhs, false);
4094 gfc_free_expr (rhs);
4095 gfc_add_expr_to_block (&block, tmp);
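/* Illustrative only: given "ALLOCATE (x, SOURCE=y)", a CLASS x is
   filled in by a block copy (gfc_build_memcpy_call) of memsz bytes
   from the source's data, while any other x receives an ordinary
   assignment from the SOURCE= expression, as built just above.  */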
4097 /* Default initializer for CLASS variables. */
4098 else if (al->expr->ts.type == BT_CLASS
4099 && code->ext.alloc.ts.type == BT_DERIVED
4100 && (init_e = gfc_default_initializer (&code->ext.alloc.ts)))
4102 gfc_se dst,src;
4103 gfc_init_se (&dst, NULL);
4104 gfc_init_se (&src, NULL);
4105 gfc_conv_expr (&dst, expr);
4106 gfc_conv_expr (&src, init_e);
4107 gfc_add_block_to_block (&block, &src.pre);
4108 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4109 gfc_add_expr_to_block (&block, tmp);
4111 /* Add default initializers for those derived types that need them. */
4112 else if (expr->ts.type == BT_DERIVED
4113 && (init_e = gfc_default_initializer (&expr->ts)))
4115 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4116 init_e, true);
4117 gfc_add_expr_to_block (&block, tmp);
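/* A small example (illustrative only):

       TYPE :: t
         INTEGER :: count = 0
       END TYPE
       TYPE(t), ALLOCATABLE :: x

   "ALLOCATE (x)" without SOURCE= reaches this branch and assigns the
   default initializer (count = 0) to the fresh object.  */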
4120 /* Allocation of CLASS entities. */
4121 gfc_free_expr (expr);
4122 expr = al->expr;
4123 if (expr->ts.type == BT_CLASS)
4125 gfc_expr *lhs,*rhs;
4126 gfc_se lse;
4127 /* Initialize VINDEX for CLASS objects. */
4128 lhs = gfc_expr_to_initialize (expr);
4129 gfc_add_component_ref (lhs, "$vindex");
4130 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4132 /* vindex must be determined at run time. */
4133 rhs = gfc_copy_expr (code->expr3);
4134 gfc_add_component_ref (rhs, "$vindex");
4136 else
4138 /* vindex is fixed at compile time. */
4139 int vindex;
4140 if (code->expr3)
4141 vindex = code->expr3->ts.u.derived->vindex;
4142 else if (code->ext.alloc.ts.type == BT_DERIVED)
4143 vindex = code->ext.alloc.ts.u.derived->vindex;
4144 else if (expr->ts.type == BT_CLASS)
4145 vindex = expr->ts.u.derived->components->ts.u.derived->vindex;
4146 else
4147 vindex = expr->ts.u.derived->vindex;
4148 rhs = gfc_int_expr (vindex);
4150 tmp = gfc_trans_assignment (lhs, rhs, false);
4151 gfc_free_expr (lhs);
4152 gfc_free_expr (rhs);
4153 gfc_add_expr_to_block (&block, tmp);
4155 /* Initialize SIZE for CLASS objects. */
4156 lhs = gfc_expr_to_initialize (expr);
4157 gfc_add_component_ref (lhs, "$size");
4158 gfc_init_se (&lse, NULL);
4159 gfc_conv_expr (&lse, lhs);
4160 gfc_add_modify (&block, lse.expr,
4161 fold_convert (TREE_TYPE (lse.expr), memsz));
4162 gfc_free_expr (lhs);
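/* In pseudo-code, the CLASS bookkeeping built above amounts to (a
   sketch, not a literal tree dump)

       x.$vindex = <vindex of the dynamic type>
       x.$size   = memsz

   where the vindex is either read from the SOURCE= expression's
   $vindex at run time or is a compile-time constant taken from the
   relevant derived type.  */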
4167 /* STAT block. */
4168 if (code->expr1)
4170 tmp = build1_v (LABEL_EXPR, error_label);
4171 gfc_add_expr_to_block (&block, tmp);
4173 gfc_init_se (&se, NULL);
4174 gfc_conv_expr_lhs (&se, code->expr1);
4175 tmp = convert (TREE_TYPE (se.expr), stat);
4176 gfc_add_modify (&block, se.expr, tmp);
4179 /* ERRMSG block. */
4180 if (code->expr2)
4182 /* A better error message may be possible, but not required. */
4183 const char *msg = "Attempt to allocate an allocated object";
4184 tree errmsg, slen, dlen;
4186 gfc_init_se (&se, NULL);
4187 gfc_conv_expr_lhs (&se, code->expr2);
4189 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4191 gfc_add_modify (&block, errmsg,
4192 gfc_build_addr_expr (pchar_type_node,
4193 gfc_build_localized_cstring_const (msg)));
4195 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4196 dlen = gfc_get_expr_charlen (code->expr2);
4197 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4199 dlen = build_call_expr_loc (input_location,
4200 built_in_decls[BUILT_IN_MEMCPY], 3,
4201 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4203 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4204 build_int_cst (TREE_TYPE (stat), 0));
4206 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4208 gfc_add_expr_to_block (&block, tmp);
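/* In effect (a sketch, not a literal tree dump), the block above
   produces

       if (stat != 0)
         memcpy (errmsg_variable, message,
                 min (len (errmsg_variable), strlen (message)));

   so the user's ERRMSG= variable is only written when an allocation
   failed, and the message is truncated to fit it.  */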
4211 return gfc_finish_block (&block);
4215 /* Translate a DEALLOCATE statement. */
4217 tree
4218 gfc_trans_deallocate (gfc_code *code)
4220 gfc_se se;
4221 gfc_alloc *al;
4222 gfc_expr *expr;
4223 tree apstat, astat, pstat, stat, tmp;
4224 stmtblock_t block;
4226 pstat = apstat = stat = astat = tmp = NULL_TREE;
4228 gfc_start_block (&block);
4230 /* Count the number of failed deallocations. If DEALLOCATE was
4231 called with STAT=, then set the STAT variable to that count. If it
4232 was called with ERRMSG=, then set the ERRMSG variable to a message. */
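/* For illustration (names are placeholders):

       DEALLOCATE (a, b, STAT=ierr, ERRMSG=msg)

   deallocates each object in turn, accumulates the per-object status
   values in "astat" below, and only afterwards stores that total in
   ierr and, if it is nonzero, the message in msg.  */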
4233 if (code->expr1 || code->expr2)
4235 tree gfc_int4_type_node = gfc_get_int_type (4);
4237 stat = gfc_create_var (gfc_int4_type_node, "stat");
4238 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4240 /* Running total of possible deallocation failures. */
4241 astat = gfc_create_var (gfc_int4_type_node, "astat");
4242 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4244 /* Initialize astat to 0. */
4245 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4248 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4250 expr = al->expr;
4251 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4253 gfc_init_se (&se, NULL);
4254 gfc_start_block (&se.pre);
4256 se.want_pointer = 1;
4257 se.descriptor_only = 1;
4258 gfc_conv_expr (&se, expr);
4260 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4262 gfc_ref *ref;
4263 gfc_ref *last = NULL;
4264 for (ref = expr->ref; ref; ref = ref->next)
4265 if (ref->type == REF_COMPONENT)
4266 last = ref;
4268 /* Do not deallocate the components of a derived type's
4269 ultimate pointer component. */
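/* For example (illustrative only): in "DEALLOCATE (x%p)" with a
   POINTER component p, the allocatable components of p's target are
   left alone here, presumably because the pointer need not own its
   target; only the deallocation of p itself is emitted below.  */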
4270 if (!(last && last->u.c.component->attr.pointer)
4271 && !(!last && expr->symtree->n.sym->attr.pointer))
4273 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4274 expr->rank);
4275 gfc_add_expr_to_block (&se.pre, tmp);
4279 if (expr->rank)
4280 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4281 else
4283 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4284 gfc_add_expr_to_block (&se.pre, tmp);
4286 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4287 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4290 gfc_add_expr_to_block (&se.pre, tmp);
4292 /* Keep track of the number of failed deallocations by adding the stat
4293 of the last deallocation to the running total. */
4294 if (code->expr1 || code->expr2)
4296 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4297 gfc_add_modify (&se.pre, astat, apstat);
4300 tmp = gfc_finish_block (&se.pre);
4301 gfc_add_expr_to_block (&block, tmp);
4305 /* Set STAT. */
4306 if (code->expr1)
4308 gfc_init_se (&se, NULL);
4309 gfc_conv_expr_lhs (&se, code->expr1);
4310 tmp = convert (TREE_TYPE (se.expr), astat);
4311 gfc_add_modify (&block, se.expr, tmp);
4314 /* Set ERRMSG. */
4315 if (code->expr2)
4317 /* A better error message may be possible, but not required. */
4318 const char *msg = "Attempt to deallocate an unallocated object";
4319 tree errmsg, slen, dlen;
4321 gfc_init_se (&se, NULL);
4322 gfc_conv_expr_lhs (&se, code->expr2);
4324 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4326 gfc_add_modify (&block, errmsg,
4327 gfc_build_addr_expr (pchar_type_node,
4328 gfc_build_localized_cstring_const (msg)));
4330 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4331 dlen = gfc_get_expr_charlen (code->expr2);
4332 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4334 dlen = build_call_expr_loc (input_location,
4335 built_in_decls[BUILT_IN_MEMCPY], 3,
4336 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4338 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4339 build_int_cst (TREE_TYPE (astat), 0));
4341 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4343 gfc_add_expr_to_block (&block, tmp);
4346 return gfc_finish_block (&block);