/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011
   Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gfortran.h"
#include "flags.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "arith.h"
#include "dependency.h"
#include "ggc.h"

typedef struct iter_info
{
  tree var;
  tree start;
  tree end;
  tree step;
  struct iter_info *next;
}
iter_info;

typedef struct forall_info
{
  iter_info *this_loop;
  tree mask;
  tree maskindex;
  int nvar;
  tree size;
  struct forall_info *prev_nest;
}
forall_info;

static void gfc_trans_where_2 (gfc_code *, tree, bool,
			       forall_info *, stmtblock_t *);
64 /* Translate a F95 label number to a LABEL_EXPR. */
66 tree
67 gfc_trans_label_here (gfc_code * code)
69 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
75 is a field_decl. */
77 void
78 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
80 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
81 gfc_conv_expr (se, expr);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se->expr) == COMPONENT_REF)
84 se->expr = TREE_OPERAND (se->expr, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se->expr) == INDIRECT_REF)
87 se->expr = TREE_OPERAND (se->expr, 0);
90 /* Translate a label assignment statement. */
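/* For example (illustrative only), the obsolescent statement

       ASSIGN 100 TO ivar

   stores either the address of label 100 (for a branch-target label, with
   the associated length set to -1) or the FORMAT string of label 100 and
   its length in the auxiliary variables attached to IVAR; an assigned GOTO
   or an I/O statement can then pick the right interpretation.  */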
92 tree
93 gfc_trans_label_assign (gfc_code * code)
95 tree label_tree;
96 gfc_se se;
97 tree len;
98 tree addr;
99 tree len_tree;
100 int label_len;
102 /* Start a new block. */
103 gfc_init_se (&se, NULL);
104 gfc_start_block (&se.pre);
105 gfc_conv_label_variable (&se, code->expr1);
107 len = GFC_DECL_STRING_LEN (se.expr);
108 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
110 label_tree = gfc_get_label_decl (code->label1);
112 if (code->label1->defined == ST_LABEL_TARGET)
114 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
115 len_tree = integer_minus_one_node;
117 else
119 gfc_expr *format = code->label1->format;
121 label_len = format->value.character.length;
122 len_tree = build_int_cst (NULL_TREE, label_len);
123 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
124 format->value.character.string);
125 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
128 gfc_add_modify (&se.pre, len, len_tree);
129 gfc_add_modify (&se.pre, addr, label_tree);
131 return gfc_finish_block (&se.pre);
134 /* Translate a GOTO statement. */
136 tree
137 gfc_trans_goto (gfc_code * code)
139 locus loc = code->loc;
140 tree assigned_goto;
141 tree target;
142 tree tmp;
143 gfc_se se;
145 if (code->label1 != NULL)
146 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
148 /* ASSIGNED GOTO. */
149 gfc_init_se (&se, NULL);
150 gfc_start_block (&se.pre);
151 gfc_conv_label_variable (&se, code->expr1);
152 tmp = GFC_DECL_STRING_LEN (se.expr);
153 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
154 build_int_cst (TREE_TYPE (tmp), -1));
155 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
156 "Assigned label is not a target label");
158 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
160 /* We're going to ignore a label list. It does not really change the
161 statement's semantics (because it is just a further restriction on
162 what's legal code); before, we were comparing label addresses here, but
163 that's a very fragile business and may break with optimization. So
164 just ignore it. */
166 target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
167 assigned_goto);
168 gfc_add_expr_to_block (&se.pre, target);
169 return gfc_finish_block (&se.pre);
173 /* Translate an ENTRY statement. Just adds a label for this entry point. */
174 tree
175 gfc_trans_entry (gfc_code * code)
177 return build1_v (LABEL_EXPR, code->ext.entry->label);
181 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
182 elemental subroutines. Make temporaries for output arguments if any such
183 dependencies are found. Output arguments are chosen because internal_unpack
184 can be used, as is, to copy the result back to the variable. */
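/* For example (an illustrative, hypothetical case -- ESUB is not part of
   this file): given

       ELEMENTAL SUBROUTINE esub (x, y)
	 REAL, INTENT(IN)  :: x
	 REAL, INTENT(OUT) :: y

   the call CALL esub (a(1:4), a(2:5)) makes the INTENT(OUT) actual
   argument overlap the INTENT(IN) one, so the values for the second
   argument are built in an array temporary during the scalarized loop
   and copied back to a(2:5) with internal_unpack afterwards.  */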
185 static void
186 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
187 gfc_symbol * sym, gfc_actual_arglist * arg,
188 gfc_dep_check check_variable)
190 gfc_actual_arglist *arg0;
191 gfc_expr *e;
192 gfc_formal_arglist *formal;
193 gfc_loopinfo tmp_loop;
194 gfc_se parmse;
195 gfc_ss *ss;
196 gfc_ss_info *info;
197 gfc_symbol *fsym;
198 gfc_ref *ref;
199 int n;
200 tree data;
201 tree offset;
202 tree size;
203 tree tmp;
205 if (loopse->ss == NULL)
206 return;
208 ss = loopse->ss;
209 arg0 = arg;
210 formal = sym->formal;
212 /* Loop over all the arguments testing for dependencies. */
213 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
215 e = arg->expr;
216 if (e == NULL)
217 continue;
219 /* Obtain the info structure for the current argument. */
220 info = NULL;
221 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
223 if (ss->expr != e)
224 continue;
225 info = &ss->data.info;
226 break;
229 /* If there is a dependency, create a temporary and use it
230 instead of the variable. */
231 fsym = formal ? formal->sym : NULL;
232 if (e->expr_type == EXPR_VARIABLE
233 && e->rank && fsym
234 && fsym->attr.intent != INTENT_IN
235 && gfc_check_fncall_dependency (e, fsym->attr.intent,
236 sym, arg0, check_variable))
238 tree initial, temptype;
239 stmtblock_t temp_post;
241 /* Make a local loopinfo for the temporary creation, so that
242 none of the other ss->info's have to be renormalized. */
243 gfc_init_loopinfo (&tmp_loop);
244 tmp_loop.dimen = info->dimen;
245 for (n = 0; n < info->dimen; n++)
247 tmp_loop.to[n] = loopse->loop->to[n];
248 tmp_loop.from[n] = loopse->loop->from[n];
249 tmp_loop.order[n] = loopse->loop->order[n];
252 /* Obtain the argument descriptor for unpacking. */
253 gfc_init_se (&parmse, NULL);
254 parmse.want_pointer = 1;
256 /* The scalarizer introduces some specific peculiarities when
257 handling elemental subroutines; the stride can be needed up to
258 the dim_array - 1, rather than dim_loop - 1 to calculate
259 offsets outside the loop. For this reason, we make sure that
260 the descriptor has the dimensionality of the array by converting
261 trailing elements into ranges with end = start. */
262 for (ref = e->ref; ref; ref = ref->next)
263 if (ref->type == REF_ARRAY && ref->u.ar.type == AR_SECTION)
264 break;
266 if (ref)
268 bool seen_range = false;
269 for (n = 0; n < ref->u.ar.dimen; n++)
271 if (ref->u.ar.dimen_type[n] == DIMEN_RANGE)
272 seen_range = true;
274 if (!seen_range
275 || ref->u.ar.dimen_type[n] != DIMEN_ELEMENT)
276 continue;
278 ref->u.ar.end[n] = gfc_copy_expr (ref->u.ar.start[n]);
279 ref->u.ar.dimen_type[n] = DIMEN_RANGE;
283 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
284 gfc_add_block_to_block (&se->pre, &parmse.pre);
286 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
287 initialize the array temporary with a copy of the values. */
288 if (fsym->attr.intent == INTENT_INOUT
289 || (fsym->ts.type ==BT_DERIVED
290 && fsym->attr.intent == INTENT_OUT))
291 initial = parmse.expr;
292 else
293 initial = NULL_TREE;
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e (where
297 the type of e is that of the final reference, but parmse.expr's
298 type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
303 temptype = TREE_TYPE (temptype);
304 temptype = gfc_get_element_type (temptype);
306 /* Generate the temporary. Cleaning up the temporary should be the
307 very last thing done, so we add the code to a new block and add it
308 to se->post as last instructions. */
309 size = gfc_create_var (gfc_array_index_type, NULL);
310 data = gfc_create_var (pvoid_type_node, NULL);
311 gfc_init_block (&temp_post);
312 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
313 &tmp_loop, info, temptype,
314 initial,
315 false, true, false,
316 &arg->expr->where);
317 gfc_add_modify (&se->pre, size, tmp);
318 tmp = fold_convert (pvoid_type_node, info->data);
319 gfc_add_modify (&se->pre, data, tmp);
321 /* Calculate the offset for the temporary. */
322 offset = gfc_index_zero_node;
323 for (n = 0; n < info->dimen; n++)
325 tmp = gfc_conv_descriptor_stride_get (info->descriptor,
326 gfc_rank_cst[n]);
327 tmp = fold_build2_loc (input_location, MULT_EXPR,
328 gfc_array_index_type,
329 loopse->loop->from[n], tmp);
330 offset = fold_build2_loc (input_location, MINUS_EXPR,
331 gfc_array_index_type, offset, tmp);
333 info->offset = gfc_create_var (gfc_array_index_type, NULL);
334 gfc_add_modify (&se->pre, info->offset, offset);
336 /* Copy the result back using unpack. */
337 tmp = build_call_expr_loc (input_location,
338 gfor_fndecl_in_unpack, 2, parmse.expr, data);
339 gfc_add_expr_to_block (&se->post, tmp);
341 /* parmse.pre is already added above. */
342 gfc_add_block_to_block (&se->post, &parmse.post);
343 gfc_add_block_to_block (&se->post, &temp_post);
349 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
351 tree
352 gfc_trans_call (gfc_code * code, bool dependency_check,
353 tree mask, tree count1, bool invert)
355 gfc_se se;
356 gfc_ss * ss;
357 int has_alternate_specifier;
358 gfc_dep_check check_variable;
359 tree index = NULL_TREE;
360 tree maskexpr = NULL_TREE;
361 tree tmp;
363 /* A CALL starts a new block because the actual arguments may have to
364 be evaluated first. */
365 gfc_init_se (&se, NULL);
366 gfc_start_block (&se.pre);
368 gcc_assert (code->resolved_sym);
370 ss = gfc_ss_terminator;
371 if (code->resolved_sym->attr.elemental)
372 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
  /* Not an elemental subroutine call with array-valued arguments.  */
375 if (ss == gfc_ss_terminator)
378 /* Translate the call. */
379 has_alternate_specifier
380 = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
381 code->expr1, NULL);
383 /* A subroutine without side-effect, by definition, does nothing! */
384 TREE_SIDE_EFFECTS (se.expr) = 1;
386 /* Chain the pieces together and return the block. */
387 if (has_alternate_specifier)
389 gfc_code *select_code;
390 gfc_symbol *sym;
391 select_code = code->next;
392 gcc_assert(select_code->op == EXEC_SELECT);
393 sym = select_code->expr1->symtree->n.sym;
394 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
395 if (sym->backend_decl == NULL)
396 sym->backend_decl = gfc_get_symbol_decl (sym);
397 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
399 else
400 gfc_add_expr_to_block (&se.pre, se.expr);
402 gfc_add_block_to_block (&se.pre, &se.post);
405 else
407 /* An elemental subroutine call with array valued arguments has
408 to be scalarized. */
409 gfc_loopinfo loop;
410 stmtblock_t body;
411 stmtblock_t block;
412 gfc_se loopse;
413 gfc_se depse;
415 /* gfc_walk_elemental_function_args renders the ss chain in the
416 reverse order to the actual argument order. */
417 ss = gfc_reverse_ss (ss);
419 /* Initialize the loop. */
420 gfc_init_se (&loopse, NULL);
421 gfc_init_loopinfo (&loop);
422 gfc_add_ss_to_loop (&loop, ss);
424 gfc_conv_ss_startstride (&loop);
      /* TODO: gfc_conv_loop_setup generates a temporary for vector
	 subscripts.  This could be prevented in the elemental case
	 as temporaries are handled separately
	 (below in gfc_conv_elemental_dependencies).  */
429 gfc_conv_loop_setup (&loop, &code->expr1->where);
430 gfc_mark_ss_chain_used (ss, 1);
432 /* Convert the arguments, checking for dependencies. */
433 gfc_copy_loopinfo_to_se (&loopse, &loop);
434 loopse.ss = ss;
436 /* For operator assignment, do dependency checking. */
437 if (dependency_check)
438 check_variable = ELEM_CHECK_VARIABLE;
439 else
440 check_variable = ELEM_DONT_CHECK_VARIABLE;
442 gfc_init_se (&depse, NULL);
443 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
444 code->ext.actual, check_variable);
446 gfc_add_block_to_block (&loop.pre, &depse.pre);
447 gfc_add_block_to_block (&loop.post, &depse.post);
449 /* Generate the loop body. */
450 gfc_start_scalarized_body (&loop, &body);
451 gfc_init_block (&block);
453 if (mask && count1)
455 /* Form the mask expression according to the mask. */
456 index = count1;
457 maskexpr = gfc_build_array_ref (mask, index, NULL);
458 if (invert)
459 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
460 TREE_TYPE (maskexpr), maskexpr);
463 /* Add the subroutine call to the block. */
464 gfc_conv_procedure_call (&loopse, code->resolved_sym,
465 code->ext.actual, code->expr1, NULL);
467 if (mask && count1)
469 tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
470 build_empty_stmt (input_location));
471 gfc_add_expr_to_block (&loopse.pre, tmp);
472 tmp = fold_build2_loc (input_location, PLUS_EXPR,
473 gfc_array_index_type,
474 count1, gfc_index_one_node);
475 gfc_add_modify (&loopse.pre, count1, tmp);
477 else
478 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
480 gfc_add_block_to_block (&block, &loopse.pre);
481 gfc_add_block_to_block (&block, &loopse.post);
483 /* Finish up the loop block and the loop. */
484 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
485 gfc_trans_scalarizing_loops (&loop, &body);
486 gfc_add_block_to_block (&se.pre, &loop.pre);
487 gfc_add_block_to_block (&se.pre, &loop.post);
488 gfc_add_block_to_block (&se.pre, &se.post);
489 gfc_cleanup_loop (&loop);
492 return gfc_finish_block (&se.pre);
496 /* Translate the RETURN statement. */
498 tree
499 gfc_trans_return (gfc_code * code)
501 if (code->expr1)
503 gfc_se se;
504 tree tmp;
505 tree result;
507 /* If code->expr is not NULL, this return statement must appear
508 in a subroutine and current_fake_result_decl has already
509 been generated. */
511 result = gfc_get_fake_result_decl (NULL, 0);
512 if (!result)
514 gfc_warning ("An alternate return at %L without a * dummy argument",
515 &code->expr1->where);
516 return gfc_generate_return ();
519 /* Start a new block for this statement. */
520 gfc_init_se (&se, NULL);
521 gfc_start_block (&se.pre);
523 gfc_conv_expr (&se, code->expr1);
525 /* Note that the actually returned expression is a simple value and
526 does not depend on any pointers or such; thus we can clean-up with
527 se.post before returning. */
528 tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
529 result, fold_convert (TREE_TYPE (result),
530 se.expr));
531 gfc_add_expr_to_block (&se.pre, tmp);
532 gfc_add_block_to_block (&se.pre, &se.post);
534 tmp = gfc_generate_return ();
535 gfc_add_expr_to_block (&se.pre, tmp);
536 return gfc_finish_block (&se.pre);
539 return gfc_generate_return ();
543 /* Translate the PAUSE statement. We have to translate this statement
544 to a runtime library call. */
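/* For example (a sketch of the three cases handled below): a bare PAUSE
   calls the routine behind gfor_fndecl_pause_string with a NULL string and
   length 0, PAUSE 10 calls the routine behind gfor_fndecl_pause_numeric
   with the integer value, and PAUSE 'msg' passes the character expression
   and its length to the string variant.  */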
546 tree
547 gfc_trans_pause (gfc_code * code)
549 tree gfc_int4_type_node = gfc_get_int_type (4);
550 gfc_se se;
551 tree tmp;
553 /* Start a new block for this statement. */
554 gfc_init_se (&se, NULL);
555 gfc_start_block (&se.pre);
558 if (code->expr1 == NULL)
560 tmp = build_int_cst (gfc_int4_type_node, 0);
561 tmp = build_call_expr_loc (input_location,
562 gfor_fndecl_pause_string, 2,
563 build_int_cst (pchar_type_node, 0), tmp);
565 else if (code->expr1->ts.type == BT_INTEGER)
567 gfc_conv_expr (&se, code->expr1);
568 tmp = build_call_expr_loc (input_location,
569 gfor_fndecl_pause_numeric, 1,
570 fold_convert (gfc_int4_type_node, se.expr));
572 else
574 gfc_conv_expr_reference (&se, code->expr1);
575 tmp = build_call_expr_loc (input_location,
576 gfor_fndecl_pause_string, 2,
577 se.expr, se.string_length);
580 gfc_add_expr_to_block (&se.pre, tmp);
582 gfc_add_block_to_block (&se.pre, &se.post);
584 return gfc_finish_block (&se.pre);
588 /* Translate the STOP statement. We have to translate this statement
589 to a runtime library call. */
591 tree
592 gfc_trans_stop (gfc_code *code, bool error_stop)
594 tree gfc_int4_type_node = gfc_get_int_type (4);
595 gfc_se se;
596 tree tmp;
598 /* Start a new block for this statement. */
599 gfc_init_se (&se, NULL);
600 gfc_start_block (&se.pre);
602 if (code->expr1 == NULL)
604 tmp = build_int_cst (gfc_int4_type_node, 0);
605 tmp = build_call_expr_loc (input_location,
606 error_stop ? gfor_fndecl_error_stop_string
607 : gfor_fndecl_stop_string,
608 2, build_int_cst (pchar_type_node, 0), tmp);
610 else if (code->expr1->ts.type == BT_INTEGER)
612 gfc_conv_expr (&se, code->expr1);
613 tmp = build_call_expr_loc (input_location,
614 error_stop ? gfor_fndecl_error_stop_numeric
615 : gfor_fndecl_stop_numeric_f08, 1,
616 fold_convert (gfc_int4_type_node, se.expr));
618 else
620 gfc_conv_expr_reference (&se, code->expr1);
621 tmp = build_call_expr_loc (input_location,
622 error_stop ? gfor_fndecl_error_stop_string
623 : gfor_fndecl_stop_string,
624 2, se.expr, se.string_length);
627 gfc_add_expr_to_block (&se.pre, tmp);
629 gfc_add_block_to_block (&se.pre, &se.post);
631 return gfc_finish_block (&se.pre);
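/* Translate SYNC ALL, SYNC MEMORY and SYNC IMAGES.  No actual
   synchronization is generated here: the code below only checks a scalar
   image set for validity (image number 1 is the only valid one) when
   bounds checking is enabled, and zeroes a STAT= variable if one is
   present.  For example, SYNC IMAGES (1, STAT=ist) reduces to that check
   followed by "ist = 0" (IST here is only an illustrative name).  */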
635 tree
636 gfc_trans_sync (gfc_code *code, gfc_exec_op type __attribute__ ((unused)))
638 gfc_se se;
640 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
642 gfc_init_se (&se, NULL);
643 gfc_start_block (&se.pre);
646 /* Check SYNC IMAGES(imageset) for valid image index.
647 FIXME: Add a check for image-set arrays. */
648 if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
649 && code->expr1->rank == 0)
651 tree cond;
652 gfc_conv_expr (&se, code->expr1);
653 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
654 se.expr, build_int_cst (TREE_TYPE (se.expr), 1));
655 gfc_trans_runtime_check (true, false, cond, &se.pre,
656 &code->expr1->where, "Invalid image number "
657 "%d in SYNC IMAGES",
658 fold_convert (integer_type_node, se.expr));
661 /* If STAT is present, set it to zero. */
662 if (code->expr2)
664 gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
665 gfc_conv_expr (&se, code->expr2);
666 gfc_add_modify (&se.pre, se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
669 if ((code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)) || code->expr2)
670 return gfc_finish_block (&se.pre);
672 return NULL_TREE;
/* Generate GENERIC for the IF construct.  This function also deals with
   the simple IF statement, because the front end translates the IF
   statement into an IF construct.

   We translate:

   IF (cond) THEN
      then_clause
   ELSEIF (cond2)
      elseif_clause
   ELSE
      else_clause
   ENDIF

   into:

   pre_cond_s;
   if (cond_s)
     {
       then_clause;
     }
   else
     {
       pre_cond_s
       if (cond_s)
	 {
	   elseif_clause
	 }
       else
	 {
	   else_clause;
	 }
     }

   where COND_S is the simplified version of the predicate.  PRE_COND_S
   are the pre side-effects produced by the translation of the
   conditional.
   We need to build the chain recursively otherwise we run into
   problems with folding incomplete statements.  */
716 static tree
717 gfc_trans_if_1 (gfc_code * code)
719 gfc_se if_se;
720 tree stmt, elsestmt;
721 location_t loc;
723 /* Check for an unconditional ELSE clause. */
724 if (!code->expr1)
725 return gfc_trans_code (code->next);
727 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
728 gfc_init_se (&if_se, NULL);
729 gfc_start_block (&if_se.pre);
731 /* Calculate the IF condition expression. */
732 gfc_conv_expr_val (&if_se, code->expr1);
734 /* Translate the THEN clause. */
735 stmt = gfc_trans_code (code->next);
737 /* Translate the ELSE clause. */
738 if (code->block)
739 elsestmt = gfc_trans_if_1 (code->block);
740 else
741 elsestmt = build_empty_stmt (input_location);
743 /* Build the condition expression and add it to the condition block. */
744 loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
745 stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
746 elsestmt);
748 gfc_add_expr_to_block (&if_se.pre, stmt);
750 /* Finish off this statement. */
751 return gfc_finish_block (&if_se.pre);
754 tree
755 gfc_trans_if (gfc_code * code)
757 stmtblock_t body;
758 tree exit_label;
760 /* Create exit label so it is available for trans'ing the body code. */
761 exit_label = gfc_build_label_decl (NULL_TREE);
762 code->exit_label = exit_label;
764 /* Translate the actual code in code->block. */
765 gfc_init_block (&body);
766 gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));
768 /* Add exit label. */
769 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
771 return gfc_finish_block (&body);
/* Translate an arithmetic IF expression.

   IF (cond) label1, label2, label3 translates to

   if (cond <= 0)
     {
       if (cond < 0)
	 goto label1;
       else		// cond == 0
	 goto label2;
     }
   else			// cond > 0
     goto label3;

   An optimized version can be generated in case of equal labels.
   E.g., if label1 is equal to label2, we can translate it to

   if (cond <= 0)
     goto label1;
   else
     goto label3;
*/
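/* A concrete case (illustrative): IF (x - 5.0) 10, 20, 30 jumps to label
   10 when x < 5.0, to 20 when x == 5.0 and to 30 when x > 5.0; with equal
   first labels, IF (y) 10, 10, 30 collapses to
   "if (y <= 0) goto 10; else goto 30;".  */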
798 tree
799 gfc_trans_arithmetic_if (gfc_code * code)
801 gfc_se se;
802 tree tmp;
803 tree branch1;
804 tree branch2;
805 tree zero;
807 /* Start a new block. */
808 gfc_init_se (&se, NULL);
809 gfc_start_block (&se.pre);
811 /* Pre-evaluate COND. */
812 gfc_conv_expr_val (&se, code->expr1);
813 se.expr = gfc_evaluate_now (se.expr, &se.pre);
815 /* Build something to compare with. */
816 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
818 if (code->label1->value != code->label2->value)
820 /* If (cond < 0) take branch1 else take branch2.
821 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
822 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
823 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
825 if (code->label1->value != code->label3->value)
826 tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
827 se.expr, zero);
828 else
829 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
830 se.expr, zero);
832 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
833 tmp, branch1, branch2);
835 else
836 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
838 if (code->label1->value != code->label3->value
839 && code->label2->value != code->label3->value)
841 /* if (cond <= 0) take branch1 else take branch2. */
842 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
843 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
844 se.expr, zero);
845 branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
846 tmp, branch1, branch2);
849 /* Append the COND_EXPR to the evaluation of COND, and return. */
850 gfc_add_expr_to_block (&se.pre, branch1);
851 return gfc_finish_block (&se.pre);
855 /* Translate a CRITICAL block. */
856 tree
857 gfc_trans_critical (gfc_code *code)
859 stmtblock_t block;
860 tree tmp;
862 gfc_start_block (&block);
863 tmp = gfc_trans_code (code->block->next);
864 gfc_add_expr_to_block (&block, tmp);
866 return gfc_finish_block (&block);
870 /* Do proper initialization for ASSOCIATE names. */
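/* Illustrative examples for the cases handled below (all names made up):
     ASSOCIATE (p => arr(2:5,:)) -- array section of a variable: the
	associate-name gets the target's descriptor (a pointer assignment
	with updated bounds);
     ASSOCIATE (q => a + b) -- array expression: evaluated into an array
	temporary whose descriptor is then given lower bounds of 1;
     ASSOCIATE (s => obj%scalar) -- scalar variable: a scalar pointer
	assignment;
     ASSOCIATE (t => 2.0*x) -- scalar expression: an ordinary value
	assignment.  */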
872 static void
873 trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
875 gfc_expr *e;
876 tree tmp;
878 gcc_assert (sym->assoc);
879 e = sym->assoc->target;
881 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
882 to array temporary) for arrays with either unknown shape or if associating
883 to a variable. */
884 if (sym->attr.dimension
885 && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
887 gfc_se se;
888 gfc_ss *ss;
889 tree desc;
891 desc = sym->backend_decl;
893 /* If association is to an expression, evaluate it and create temporary.
894 Otherwise, get descriptor of target for pointer assignment. */
895 gfc_init_se (&se, NULL);
896 ss = gfc_walk_expr (e);
897 if (sym->assoc->variable)
899 se.direct_byref = 1;
900 se.expr = desc;
902 gfc_conv_expr_descriptor (&se, e, ss);
904 /* If we didn't already do the pointer assignment, set associate-name
905 descriptor to the one generated for the temporary. */
906 if (!sym->assoc->variable)
908 int dim;
910 gfc_add_modify (&se.pre, desc, se.expr);
912 /* The generated descriptor has lower bound zero (as array
913 temporary), shift bounds so we get lower bounds of 1. */
914 for (dim = 0; dim < e->rank; ++dim)
915 gfc_conv_shift_descriptor_lbound (&se.pre, desc,
916 dim, gfc_index_one_node);
919 /* Done, register stuff as init / cleanup code. */
920 gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
921 gfc_finish_block (&se.post));
924 /* Do a scalar pointer assignment; this is for scalar variable targets. */
925 else if (gfc_is_associate_pointer (sym))
927 gfc_se se;
929 gcc_assert (!sym->attr.dimension);
931 gfc_init_se (&se, NULL);
932 gfc_conv_expr (&se, e);
934 tmp = TREE_TYPE (sym->backend_decl);
935 tmp = gfc_build_addr_expr (tmp, se.expr);
936 gfc_add_modify (&se.pre, sym->backend_decl, tmp);
938 gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
939 gfc_finish_block (&se.post));
942 /* Do a simple assignment. This is for scalar expressions, where we
943 can simply use expression assignment. */
944 else
946 gfc_expr *lhs;
948 lhs = gfc_lval_expr_from_sym (sym);
949 tmp = gfc_trans_assignment (lhs, e, false, true);
950 gfc_add_init_cleanup (block, tmp, NULL_TREE);
955 /* Translate a BLOCK construct. This is basically what we would do for a
956 procedure body. */
958 tree
959 gfc_trans_block_construct (gfc_code* code)
961 gfc_namespace* ns;
962 gfc_symbol* sym;
963 gfc_wrapped_block block;
964 tree exit_label;
965 stmtblock_t body;
966 gfc_association_list *ass;
968 ns = code->ext.block.ns;
969 gcc_assert (ns);
970 sym = ns->proc_name;
971 gcc_assert (sym);
973 /* Process local variables. */
974 gcc_assert (!sym->tlink);
975 sym->tlink = sym;
976 gfc_process_block_locals (ns);
978 /* Generate code including exit-label. */
979 gfc_init_block (&body);
980 exit_label = gfc_build_label_decl (NULL_TREE);
981 code->exit_label = exit_label;
982 gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
983 gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));
985 /* Finish everything. */
986 gfc_start_wrapped_block (&block, gfc_finish_block (&body));
987 gfc_trans_deferred_vars (sym, &block);
988 for (ass = code->ext.block.assoc; ass; ass = ass->next)
989 trans_associate_var (ass->st->n.sym, &block);
991 return gfc_finish_wrapped_block (&block);
/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect
   results.
   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [Evaluate loop bounds and step]
   dovar = from;
   if ((step > 0) ? (dovar <= to) : (dovar >= to))
     {
       for (;;)
	 {
	   body;
   cycle_label:
	   cond = (dovar == to);
	   dovar += step;
	   if (cond) goto end_label;
	 }
     }
   end_label:

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */
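/* As a concrete instance (step = 1), DO i = 1, n becomes

       i = 1;
       if (i <= n)
	 for (;;)
	   {
	     body;
	   cycle_label:
	     cond = (i == n);
	     i = i + 1;
	     if (cond) goto end_label;
	   }
     end_label:

   Note that the exit test is computed from the value of the DO variable
   before it is incremented.  */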
1025 static tree
1026 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
1027 tree from, tree to, tree step, tree exit_cond)
1029 stmtblock_t body;
1030 tree type;
1031 tree cond;
1032 tree tmp;
1033 tree saved_dovar = NULL;
1034 tree cycle_label;
1035 tree exit_label;
1036 location_t loc;
1038 type = TREE_TYPE (dovar);
1040 loc = code->ext.iterator->start->where.lb->location;
1042 /* Initialize the DO variable: dovar = from. */
1043 gfc_add_modify_loc (loc, pblock, dovar, from);
1045 /* Save value for do-tinkering checking. */
1046 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1048 saved_dovar = gfc_create_var (type, ".saved_dovar");
1049 gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
1052 /* Cycle and exit statements are implemented with gotos. */
1053 cycle_label = gfc_build_label_decl (NULL_TREE);
1054 exit_label = gfc_build_label_decl (NULL_TREE);
1056 /* Put the labels where they can be found later. See gfc_trans_do(). */
1057 code->cycle_label = cycle_label;
1058 code->exit_label = exit_label;
1060 /* Loop body. */
1061 gfc_start_block (&body);
1063 /* Main loop body. */
1064 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1065 gfc_add_expr_to_block (&body, tmp);
1067 /* Label for cycle statements (if needed). */
1068 if (TREE_USED (cycle_label))
1070 tmp = build1_v (LABEL_EXPR, cycle_label);
1071 gfc_add_expr_to_block (&body, tmp);
1074 /* Check whether someone has modified the loop variable. */
1075 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1077 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
1078 dovar, saved_dovar);
1079 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1080 "Loop variable has been modified");
1083 /* Exit the loop if there is an I/O result condition or error. */
1084 if (exit_cond)
1086 tmp = build1_v (GOTO_EXPR, exit_label);
1087 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1088 exit_cond, tmp,
1089 build_empty_stmt (loc));
1090 gfc_add_expr_to_block (&body, tmp);
1093 /* Evaluate the loop condition. */
1094 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
1095 to);
1096 cond = gfc_evaluate_now_loc (loc, cond, &body);
1098 /* Increment the loop variable. */
1099 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1100 gfc_add_modify_loc (loc, &body, dovar, tmp);
1102 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1103 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1105 /* The loop exit. */
1106 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1107 TREE_USED (exit_label) = 1;
1108 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1109 cond, tmp, build_empty_stmt (loc));
1110 gfc_add_expr_to_block (&body, tmp);
1112 /* Finish the loop body. */
1113 tmp = gfc_finish_block (&body);
1114 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1116 /* Only execute the loop if the number of iterations is positive. */
1117 if (tree_int_cst_sgn (step) > 0)
1118 cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
1119 to);
1120 else
1121 cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
1122 to);
1123 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
1124 build_empty_stmt (loc));
1125 gfc_add_expr_to_block (pblock, tmp);
1127 /* Add the exit label. */
1128 tmp = build1_v (LABEL_EXPR, exit_label);
1129 gfc_add_expr_to_block (pblock, tmp);
1131 return gfc_finish_block (pblock);
/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially
   so for Fortran.

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;
   dovar = from;
   if (empty) goto exit_label;
   for (;;)
     {
       body;
cycle_label:
       dovar += step;
       if (countm1 == 0) goto exit_label;
       countm1--;
     }
exit_label:

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */
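/* A worked example: for DO i = 10, 1, -2 we have step < 0 and to < from,
   so the loop is not empty.  With step_sign = -1 the formula used below
   gives countm1 = (1*(-1) - 10*(-1)) / ((-2)*(-1)) = (-1 + 10) / 2 = 4,
   i.e. the body runs countm1 + 1 = 5 times, for i = 10, 8, 6, 4, 2.  */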
1168 tree
1169 gfc_trans_do (gfc_code * code, tree exit_cond)
1171 gfc_se se;
1172 tree dovar;
1173 tree saved_dovar = NULL;
1174 tree from;
1175 tree to;
1176 tree step;
1177 tree countm1;
1178 tree type;
1179 tree utype;
1180 tree cond;
1181 tree cycle_label;
1182 tree exit_label;
1183 tree tmp;
1184 tree pos_step;
1185 stmtblock_t block;
1186 stmtblock_t body;
1187 location_t loc;
1189 gfc_start_block (&block);
1191 loc = code->ext.iterator->start->where.lb->location;
1193 /* Evaluate all the expressions in the iterator. */
1194 gfc_init_se (&se, NULL);
1195 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1196 gfc_add_block_to_block (&block, &se.pre);
1197 dovar = se.expr;
1198 type = TREE_TYPE (dovar);
1200 gfc_init_se (&se, NULL);
1201 gfc_conv_expr_val (&se, code->ext.iterator->start);
1202 gfc_add_block_to_block (&block, &se.pre);
1203 from = gfc_evaluate_now (se.expr, &block);
1205 gfc_init_se (&se, NULL);
1206 gfc_conv_expr_val (&se, code->ext.iterator->end);
1207 gfc_add_block_to_block (&block, &se.pre);
1208 to = gfc_evaluate_now (se.expr, &block);
1210 gfc_init_se (&se, NULL);
1211 gfc_conv_expr_val (&se, code->ext.iterator->step);
1212 gfc_add_block_to_block (&block, &se.pre);
1213 step = gfc_evaluate_now (se.expr, &block);
1215 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1217 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
1218 build_zero_cst (type));
1219 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
1220 "DO step value is zero");
1223 /* Special case simple loops. */
1224 if (TREE_CODE (type) == INTEGER_TYPE
1225 && (integer_onep (step)
1226 || tree_int_cst_equal (step, integer_minus_one_node)))
1227 return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);
1229 pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
1230 build_zero_cst (type));
1232 if (TREE_CODE (type) == INTEGER_TYPE)
1233 utype = unsigned_type_for (type);
1234 else
1235 utype = unsigned_type_for (gfc_array_index_type);
1236 countm1 = gfc_create_var (utype, "countm1");
1238 /* Cycle and exit statements are implemented with gotos. */
1239 cycle_label = gfc_build_label_decl (NULL_TREE);
1240 exit_label = gfc_build_label_decl (NULL_TREE);
1241 TREE_USED (exit_label) = 1;
1243 /* Put these labels where they can be found later. */
1244 code->cycle_label = cycle_label;
1245 code->exit_label = exit_label;
1247 /* Initialize the DO variable: dovar = from. */
1248 gfc_add_modify (&block, dovar, from);
1250 /* Save value for do-tinkering checking. */
1251 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1253 saved_dovar = gfc_create_var (type, ".saved_dovar");
1254 gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
  /* Initialize loop count and jump to exit label if the loop is empty.
     This code is executed before we enter the loop body.  We generate:
     step_sign = sign(1,step);
     if (step > 0)
       {
	 if (to < from)
	   goto exit_label;
       }
     else
       {
	 if (to > from)
	   goto exit_label;
       }
     countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
  */
1274 if (TREE_CODE (type) == INTEGER_TYPE)
1276 tree pos, neg, step_sign, to2, from2, step2;
1278 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1280 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
1281 build_int_cst (TREE_TYPE (step), 0));
1282 step_sign = fold_build3_loc (loc, COND_EXPR, type, tmp,
1283 build_int_cst (type, -1),
1284 build_int_cst (type, 1));
1286 tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
1287 pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1288 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1289 exit_label),
1290 build_empty_stmt (loc));
1292 tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to,
1293 from);
1294 neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1295 fold_build1_loc (loc, GOTO_EXPR, void_type_node,
1296 exit_label),
1297 build_empty_stmt (loc));
1298 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1299 pos_step, pos, neg);
1301 gfc_add_expr_to_block (&block, tmp);
1303 /* Calculate the loop count. to-from can overflow, so
1304 we cast to unsigned. */
1306 to2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, to);
1307 from2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, from);
1308 step2 = fold_build2_loc (loc, MULT_EXPR, type, step_sign, step);
1309 step2 = fold_convert (utype, step2);
1310 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to2, from2);
1311 tmp = fold_convert (utype, tmp);
1312 tmp = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype, tmp, step2);
1313 tmp = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, countm1, tmp);
1314 gfc_add_expr_to_block (&block, tmp);
1316 else
      /* TODO: We could use the same width as the real type.
	 This would probably cause more problems than it solves
	 when we implement "long double" types.  */
1322 tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
1323 tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
1324 tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
1325 gfc_add_modify (&block, countm1, tmp);
1327 /* We need a special check for empty loops:
1328 empty = (step > 0 ? to < from : to > from); */
1329 tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
1330 fold_build2_loc (loc, LT_EXPR,
1331 boolean_type_node, to, from),
1332 fold_build2_loc (loc, GT_EXPR,
1333 boolean_type_node, to, from));
1334 /* If the loop is empty, go directly to the exit label. */
1335 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
1336 build1_v (GOTO_EXPR, exit_label),
1337 build_empty_stmt (input_location));
1338 gfc_add_expr_to_block (&block, tmp);
1341 /* Loop body. */
1342 gfc_start_block (&body);
1344 /* Main loop body. */
1345 tmp = gfc_trans_code_cond (code->block->next, exit_cond);
1346 gfc_add_expr_to_block (&body, tmp);
1348 /* Label for cycle statements (if needed). */
1349 if (TREE_USED (cycle_label))
1351 tmp = build1_v (LABEL_EXPR, cycle_label);
1352 gfc_add_expr_to_block (&body, tmp);
1355 /* Check whether someone has modified the loop variable. */
1356 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1358 tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
1359 saved_dovar);
1360 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1361 "Loop variable has been modified");
1364 /* Exit the loop if there is an I/O result condition or error. */
1365 if (exit_cond)
1367 tmp = build1_v (GOTO_EXPR, exit_label);
1368 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1369 exit_cond, tmp,
1370 build_empty_stmt (input_location));
1371 gfc_add_expr_to_block (&body, tmp);
1374 /* Increment the loop variable. */
1375 tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
1376 gfc_add_modify_loc (loc, &body, dovar, tmp);
1378 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1379 gfc_add_modify_loc (loc, &body, saved_dovar, dovar);
1381 /* End with the loop condition. Loop until countm1 == 0. */
1382 cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1,
1383 build_int_cst (utype, 0));
1384 tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
1385 tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
1386 cond, tmp, build_empty_stmt (loc));
1387 gfc_add_expr_to_block (&body, tmp);
1389 /* Decrement the loop count. */
1390 tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
1391 build_int_cst (utype, 1));
1392 gfc_add_modify_loc (loc, &body, countm1, tmp);
1394 /* End of loop body. */
1395 tmp = gfc_finish_block (&body);
1397 /* The for loop itself. */
1398 tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
1399 gfc_add_expr_to_block (&block, tmp);
1401 /* Add the exit label. */
1402 tmp = build1_v (LABEL_EXPR, exit_label);
1403 gfc_add_expr_to_block (&block, tmp);
1405 return gfc_finish_block (&block);
/* Translate the DO WHILE construct.

   We translate

   DO WHILE (cond)
      body
   END DO

   to:

   for ( ; ; )
     {
       pre_cond;
       if (! cond) goto exit_label;
       body;
cycle_label:
     }
   exit_label:

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */
1432 tree
1433 gfc_trans_do_while (gfc_code * code)
1435 gfc_se cond;
1436 tree tmp;
1437 tree cycle_label;
1438 tree exit_label;
1439 stmtblock_t block;
1441 /* Everything we build here is part of the loop body. */
1442 gfc_start_block (&block);
1444 /* Cycle and exit statements are implemented with gotos. */
1445 cycle_label = gfc_build_label_decl (NULL_TREE);
1446 exit_label = gfc_build_label_decl (NULL_TREE);
1448 /* Put the labels where they can be found later. See gfc_trans_do(). */
1449 code->cycle_label = cycle_label;
1450 code->exit_label = exit_label;
1452 /* Create a GIMPLE version of the exit condition. */
1453 gfc_init_se (&cond, NULL);
1454 gfc_conv_expr_val (&cond, code->expr1);
1455 gfc_add_block_to_block (&block, &cond.pre);
1456 cond.expr = fold_build1_loc (code->expr1->where.lb->location,
1457 TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1459 /* Build "IF (! cond) GOTO exit_label". */
1460 tmp = build1_v (GOTO_EXPR, exit_label);
1461 TREE_USED (exit_label) = 1;
1462 tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
1463 void_type_node, cond.expr, tmp,
1464 build_empty_stmt (code->expr1->where.lb->location));
1465 gfc_add_expr_to_block (&block, tmp);
1467 /* The main body of the loop. */
1468 tmp = gfc_trans_code (code->block->next);
1469 gfc_add_expr_to_block (&block, tmp);
1471 /* Label for cycle statements (if needed). */
1472 if (TREE_USED (cycle_label))
1474 tmp = build1_v (LABEL_EXPR, cycle_label);
1475 gfc_add_expr_to_block (&block, tmp);
1478 /* End of loop body. */
1479 tmp = gfc_finish_block (&block);
1481 gfc_init_block (&block);
1482 /* Build the loop. */
1483 tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
1484 void_type_node, tmp);
1485 gfc_add_expr_to_block (&block, tmp);
1487 /* Add the exit label. */
1488 tmp = build1_v (LABEL_EXPR, exit_label);
1489 gfc_add_expr_to_block (&block, tmp);
1491 return gfc_finish_block (&block);
1495 /* Translate the SELECT CASE construct for INTEGER case expressions,
1496 without killing all potential optimizations. The problem is that
1497 Fortran allows unbounded cases, but the back-end does not, so we
1498 need to intercept those before we enter the equivalent SWITCH_EXPR
1499 we can build.
1501 For example, we translate this,
1503 SELECT CASE (expr)
1504 CASE (:100,101,105:115)
1505 block_1
1506 CASE (190:199,200:)
1507 block_2
1508 CASE (300)
1509 block_3
1510 CASE DEFAULT
1511 block_4
1512 END SELECT
   to the GENERIC equivalent,

     switch (expr)
       {
       case (minimum value for typeof(expr)) ... 100:
       case 101:
       case 105 ... 115:
	 block_1;
	 goto end_label;

       case 200 ... (maximum value for typeof(expr)):
       case 190 ... 199:
	 block_2;
	 goto end_label;

       case 300:
	 block_3;
	 goto end_label;

       default:
	 block_4;
	 goto end_label;
       }

     end_label:  */
1540 static tree
1541 gfc_trans_integer_select (gfc_code * code)
1543 gfc_code *c;
1544 gfc_case *cp;
1545 tree end_label;
1546 tree tmp;
1547 gfc_se se;
1548 stmtblock_t block;
1549 stmtblock_t body;
1551 gfc_start_block (&block);
1553 /* Calculate the switch expression. */
1554 gfc_init_se (&se, NULL);
1555 gfc_conv_expr_val (&se, code->expr1);
1556 gfc_add_block_to_block (&block, &se.pre);
1558 end_label = gfc_build_label_decl (NULL_TREE);
1560 gfc_init_block (&body);
1562 for (c = code->block; c; c = c->block)
1564 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1566 tree low, high;
1567 tree label;
1569 /* Assume it's the default case. */
1570 low = high = NULL_TREE;
1572 if (cp->low)
1574 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1575 cp->low->ts.kind);
1577 /* If there's only a lower bound, set the high bound to the
1578 maximum value of the case expression. */
1579 if (!cp->high)
1580 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1583 if (cp->high)
1585 /* Three cases are possible here:
1587 1) There is no lower bound, e.g. CASE (:N).
1588 2) There is a lower bound .NE. high bound, that is
1589 a case range, e.g. CASE (N:M) where M>N (we make
1590 sure that M>N during type resolution).
1591 3) There is a lower bound, and it has the same value
1592 as the high bound, e.g. CASE (N:N). This is our
1593 internal representation of CASE(N).
1595 In the first and second case, we need to set a value for
1596 high. In the third case, we don't because the GCC middle
1597 end represents a single case value by just letting high be
1598 a NULL_TREE. We can't do that because we need to be able
1599 to represent unbounded cases. */
1601 if (!cp->low
1602 || (cp->low
1603 && mpz_cmp (cp->low->value.integer,
1604 cp->high->value.integer) != 0))
1605 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1606 cp->high->ts.kind);
1608 /* Unbounded case. */
1609 if (!cp->low)
1610 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1613 /* Build a label. */
1614 label = gfc_build_label_decl (NULL_TREE);
1616 /* Add this case label.
1617 Add parameter 'label', make it match GCC backend. */
1618 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1619 void_type_node, low, high, label);
1620 gfc_add_expr_to_block (&body, tmp);
1623 /* Add the statements for this case. */
1624 tmp = gfc_trans_code (c->next);
1625 gfc_add_expr_to_block (&body, tmp);
1627 /* Break to the end of the construct. */
1628 tmp = build1_v (GOTO_EXPR, end_label);
1629 gfc_add_expr_to_block (&body, tmp);
1632 tmp = gfc_finish_block (&body);
1633 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1634 gfc_add_expr_to_block (&block, tmp);
1636 tmp = build1_v (LABEL_EXPR, end_label);
1637 gfc_add_expr_to_block (&block, tmp);
1639 return gfc_finish_block (&block);
1643 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1645 There are only two cases possible here, even though the standard
1646 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1647 .FALSE., and DEFAULT.
1649 We never generate more than two blocks here. Instead, we always
1650 try to eliminate the DEFAULT case. This way, we can translate this
1651 kind of SELECT construct to a simple
1653 if {} else {};
1655 expression in GENERIC. */
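/* For example (illustrative, FLAG is a made-up name),

       SELECT CASE (flag)
       CASE (.TRUE.)
	 block_t
       CASE DEFAULT
	 block_d
       END SELECT

   has no explicit .FALSE. case, so the default block takes its place and
   the whole construct becomes "if (flag) block_t; else block_d;".  */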
1657 static tree
1658 gfc_trans_logical_select (gfc_code * code)
1660 gfc_code *c;
1661 gfc_code *t, *f, *d;
1662 gfc_case *cp;
1663 gfc_se se;
1664 stmtblock_t block;
1666 /* Assume we don't have any cases at all. */
1667 t = f = d = NULL;
  /* Now see which ones we actually do have.  We can have at most two
     cases in a single case list: one for .TRUE. and one for .FALSE.
     The default case is always separate.  If the cases for .TRUE. and
     .FALSE. are in the same case list, the block for that case list is
     always executed, and we don't generate a COND_EXPR for it.  */
1674 for (c = code->block; c; c = c->block)
1676 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1678 if (cp->low)
1680 if (cp->low->value.logical == 0) /* .FALSE. */
1681 f = c;
1682 else /* if (cp->value.logical != 0), thus .TRUE. */
1683 t = c;
1685 else
1686 d = c;
1690 /* Start a new block. */
1691 gfc_start_block (&block);
1693 /* Calculate the switch expression. We always need to do this
1694 because it may have side effects. */
1695 gfc_init_se (&se, NULL);
1696 gfc_conv_expr_val (&se, code->expr1);
1697 gfc_add_block_to_block (&block, &se.pre);
1699 if (t == f && t != NULL)
1701 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1702 translate the code for these cases, append it to the current
1703 block. */
1704 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1706 else
1708 tree true_tree, false_tree, stmt;
1710 true_tree = build_empty_stmt (input_location);
1711 false_tree = build_empty_stmt (input_location);
1713 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1714 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1715 make the missing case the default case. */
1716 if (t != NULL && f != NULL)
1717 d = NULL;
1718 else if (d != NULL)
1720 if (t == NULL)
1721 t = d;
1722 else
1723 f = d;
1726 /* Translate the code for each of these blocks, and append it to
1727 the current block. */
1728 if (t != NULL)
1729 true_tree = gfc_trans_code (t->next);
1731 if (f != NULL)
1732 false_tree = gfc_trans_code (f->next);
1734 stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
1735 se.expr, true_tree, false_tree);
1736 gfc_add_expr_to_block (&block, stmt);
1739 return gfc_finish_block (&block);
1743 /* The jump table types are stored in static variables to avoid
1744 constructing them from scratch every single time. */
1745 static GTY(()) tree select_struct[2];
1747 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1748 Instead of generating compares and jumps, it is far simpler to
1749 generate a data structure describing the cases in order and call a
1750 library subroutine that locates the right case.
1751 This is particularly true because this is the only case where we
1752 might have to dispose of a temporary.
   The library subroutine returns an integer identifying the selected
   case; the generated switch on that value then jumps to the right
   block.  */
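/* In outline, the generated code looks like this (a sketch only; the
   actual trees are built below):

       static const <select_struct> jumptable[] =
	 { { low-string, low-length, high-string, high-length, case-no },
	   ... };
       case_num = select_string (&jumptable, n, expr, expr-length);
       switch (case_num)
	 {
	 case 0: block_0; goto end_label;
	 ...
	 default: default-block; goto end_label;
	 }
     end_label:

   where select_string stands for the library routine referenced by
   gfor_fndecl_select_string (gfor_fndecl_select_string_char4 for kind=4
   character expressions) and each jump table entry holds the low and
   high strings of one CASE range together with its case number.  */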
1756 static tree
1757 gfc_trans_character_select (gfc_code *code)
1759 tree init, end_label, tmp, type, case_num, label, fndecl;
1760 stmtblock_t block, body;
1761 gfc_case *cp, *d;
1762 gfc_code *c;
1763 gfc_se se, expr1se;
1764 int n, k;
1765 VEC(constructor_elt,gc) *inits = NULL;
1767 tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
1769 /* The jump table types are stored in static variables to avoid
1770 constructing them from scratch every single time. */
1771 static tree ss_string1[2], ss_string1_len[2];
1772 static tree ss_string2[2], ss_string2_len[2];
1773 static tree ss_target[2];
1775 cp = code->block->ext.block.case_list;
1776 while (cp->left != NULL)
1777 cp = cp->left;
1779 /* Generate the body */
1780 gfc_start_block (&block);
1781 gfc_init_se (&expr1se, NULL);
1782 gfc_conv_expr_reference (&expr1se, code->expr1);
1784 gfc_add_block_to_block (&block, &expr1se.pre);
1786 end_label = gfc_build_label_decl (NULL_TREE);
1788 gfc_init_block (&body);
1790 /* Attempt to optimize length 1 selects. */
1791 if (integer_onep (expr1se.string_length))
1793 for (d = cp; d; d = d->right)
1795 int i;
1796 if (d->low)
1798 gcc_assert (d->low->expr_type == EXPR_CONSTANT
1799 && d->low->ts.type == BT_CHARACTER);
1800 if (d->low->value.character.length > 1)
1802 for (i = 1; i < d->low->value.character.length; i++)
1803 if (d->low->value.character.string[i] != ' ')
1804 break;
1805 if (i != d->low->value.character.length)
1807 if (optimize && d->high && i == 1)
1809 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1810 && d->high->ts.type == BT_CHARACTER);
1811 if (d->high->value.character.length > 1
1812 && (d->low->value.character.string[0]
1813 == d->high->value.character.string[0])
1814 && d->high->value.character.string[1] != ' '
1815 && ((d->low->value.character.string[1] < ' ')
1816 == (d->high->value.character.string[1]
1817 < ' ')))
1818 continue;
1820 break;
1824 if (d->high)
1826 gcc_assert (d->high->expr_type == EXPR_CONSTANT
1827 && d->high->ts.type == BT_CHARACTER);
1828 if (d->high->value.character.length > 1)
1830 for (i = 1; i < d->high->value.character.length; i++)
1831 if (d->high->value.character.string[i] != ' ')
1832 break;
1833 if (i != d->high->value.character.length)
1834 break;
1838 if (d == NULL)
1840 tree ctype = gfc_get_char_type (code->expr1->ts.kind);
1842 for (c = code->block; c; c = c->block)
1844 for (cp = c->ext.block.case_list; cp; cp = cp->next)
1846 tree low, high;
1847 tree label;
1848 gfc_char_t r;
1850 /* Assume it's the default case. */
1851 low = high = NULL_TREE;
1853 if (cp->low)
1855 /* CASE ('ab') or CASE ('ab':'az') will never match
1856 any length 1 character. */
1857 if (cp->low->value.character.length > 1
1858 && cp->low->value.character.string[1] != ' ')
1859 continue;
1861 if (cp->low->value.character.length > 0)
1862 r = cp->low->value.character.string[0];
1863 else
1864 r = ' ';
1865 low = build_int_cst (ctype, r);
1867 /* If there's only a lower bound, set the high bound
1868 to the maximum value of the case expression. */
1869 if (!cp->high)
1870 high = TYPE_MAX_VALUE (ctype);
1873 if (cp->high)
1875 if (!cp->low
1876 || (cp->low->value.character.string[0]
1877 != cp->high->value.character.string[0]))
1879 if (cp->high->value.character.length > 0)
1880 r = cp->high->value.character.string[0];
1881 else
1882 r = ' ';
1883 high = build_int_cst (ctype, r);
1886 /* Unbounded case. */
1887 if (!cp->low)
1888 low = TYPE_MIN_VALUE (ctype);
1891 /* Build a label. */
1892 label = gfc_build_label_decl (NULL_TREE);
1894 /* Add this case label.
1895 Add parameter 'label', make it match GCC backend. */
1896 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1897 void_type_node, low, high, label);
1898 gfc_add_expr_to_block (&body, tmp);
1901 /* Add the statements for this case. */
1902 tmp = gfc_trans_code (c->next);
1903 gfc_add_expr_to_block (&body, tmp);
1905 /* Break to the end of the construct. */
1906 tmp = build1_v (GOTO_EXPR, end_label);
1907 gfc_add_expr_to_block (&body, tmp);
1910 tmp = gfc_string_to_single_character (expr1se.string_length,
1911 expr1se.expr,
1912 code->expr1->ts.kind);
1913 case_num = gfc_create_var (ctype, "case_num");
1914 gfc_add_modify (&block, case_num, tmp);
1916 gfc_add_block_to_block (&block, &expr1se.post);
1918 tmp = gfc_finish_block (&body);
1919 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1920 gfc_add_expr_to_block (&block, tmp);
1922 tmp = build1_v (LABEL_EXPR, end_label);
1923 gfc_add_expr_to_block (&block, tmp);
1925 return gfc_finish_block (&block);
1929 if (code->expr1->ts.kind == 1)
1930 k = 0;
1931 else if (code->expr1->ts.kind == 4)
1932 k = 1;
1933 else
1934 gcc_unreachable ();
1936 if (select_struct[k] == NULL)
1938 tree *chain = NULL;
1939 select_struct[k] = make_node (RECORD_TYPE);
1941 if (code->expr1->ts.kind == 1)
1942 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1943 else if (code->expr1->ts.kind == 4)
1944 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1945 else
1946 gcc_unreachable ();
1948 #undef ADD_FIELD
1949 #define ADD_FIELD(NAME, TYPE) \
1950 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
1951 get_identifier (stringize(NAME)), \
1952 TYPE, \
1953 &chain)
1955 ADD_FIELD (string1, pchartype);
1956 ADD_FIELD (string1_len, gfc_charlen_type_node);
1958 ADD_FIELD (string2, pchartype);
1959 ADD_FIELD (string2_len, gfc_charlen_type_node);
1961 ADD_FIELD (target, integer_type_node);
1962 #undef ADD_FIELD
1964 gfc_finish_type (select_struct[k]);
1967 n = 0;
1968 for (d = cp; d; d = d->right)
1969 d->n = n++;
1971 for (c = code->block; c; c = c->block)
1973 for (d = c->ext.block.case_list; d; d = d->next)
1975 label = gfc_build_label_decl (NULL_TREE);
1976 tmp = fold_build3_loc (input_location, CASE_LABEL_EXPR,
1977 void_type_node,
1978 (d->low == NULL && d->high == NULL)
1979 ? NULL : build_int_cst (NULL_TREE, d->n),
1980 NULL, label);
1981 gfc_add_expr_to_block (&body, tmp);
1984 tmp = gfc_trans_code (c->next);
1985 gfc_add_expr_to_block (&body, tmp);
1987 tmp = build1_v (GOTO_EXPR, end_label);
1988 gfc_add_expr_to_block (&body, tmp);
1991 /* Generate the structure describing the branches */
1992 for (d = cp; d; d = d->right)
1994 VEC(constructor_elt,gc) *node = NULL;
1996 gfc_init_se (&se, NULL);
1998 if (d->low == NULL)
2000 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
2001 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
2003 else
2005 gfc_conv_expr_reference (&se, d->low);
2007 CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
2008 CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
2011 if (d->high == NULL)
2013 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
2014 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
2016 else
2018 gfc_init_se (&se, NULL);
2019 gfc_conv_expr_reference (&se, d->high);
2021 CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
2022 CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
2025 CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
2026 build_int_cst (integer_type_node, d->n));
2028 tmp = build_constructor (select_struct[k], node);
2029 CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
2032 type = build_array_type (select_struct[k],
2033 build_index_type (build_int_cst (NULL_TREE, n-1)));
2035 init = build_constructor (type, inits);
2036 TREE_CONSTANT (init) = 1;
2037 TREE_STATIC (init) = 1;
2038 /* Create a static variable to hold the jump table. */
2039 tmp = gfc_create_var (type, "jumptable");
2040 TREE_CONSTANT (tmp) = 1;
2041 TREE_STATIC (tmp) = 1;
2042 TREE_READONLY (tmp) = 1;
2043 DECL_INITIAL (tmp) = init;
2044 init = tmp;
2046 /* Build the library call. */
2047 init = gfc_build_addr_expr (pvoid_type_node, init);
2049 if (code->expr1->ts.kind == 1)
2050 fndecl = gfor_fndecl_select_string;
2051 else if (code->expr1->ts.kind == 4)
2052 fndecl = gfor_fndecl_select_string_char4;
2053 else
2054 gcc_unreachable ();
2056 tmp = build_call_expr_loc (input_location,
2057 fndecl, 4, init, build_int_cst (NULL_TREE, n),
2058 expr1se.expr, expr1se.string_length);
2059 case_num = gfc_create_var (integer_type_node, "case_num");
2060 gfc_add_modify (&block, case_num, tmp);
2062 gfc_add_block_to_block (&block, &expr1se.post);
2064 tmp = gfc_finish_block (&body);
2065 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
2066 gfc_add_expr_to_block (&block, tmp);
2068 tmp = build1_v (LABEL_EXPR, end_label);
2069 gfc_add_expr_to_block (&block, tmp);
2071 return gfc_finish_block (&block);
2075 /* Translate the three variants of the SELECT CASE construct.
2077 SELECT CASEs with INTEGER case expressions can be translated to an
2078 equivalent GENERIC switch statement, and for LOGICAL case
2079 expressions we build one or two if-else compares.
2081 SELECT CASEs with CHARACTER case expressions are a whole different
2082 story, because they don't exist in GENERIC. So we sort them and
2083 do a binary search at runtime.
2085 Fortran has no BREAK statement, and it does not allow jumps from
2086 one case block to another. That makes things a lot easier for
2087 the optimizers. */
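/* Illustration (a sketch, not the exact generated code):

     SELECT CASE (n)                switch (n)
     CASE (1)                         {
       ...                 ==>        case 1: ...; goto end_label;
     CASE (5:10)                      case 5 ... 10: ...; goto end_label;
       ...                          }
     END SELECT                     end_label:;

   A CHARACTER selector instead builds a sorted jump table and calls the
   library search routine (via gfor_fndecl_select_string or
   gfor_fndecl_select_string_char4); the returned case number is then used
   as the selector of a GENERIC switch, as in gfc_trans_character_select
   above.  */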
2089 tree
2090 gfc_trans_select (gfc_code * code)
2092 stmtblock_t block;
2093 tree body;
2094 tree exit_label;
2096 gcc_assert (code && code->expr1);
2097 gfc_init_block (&block);
2099 /* Build the exit label and hang it in. */
2100 exit_label = gfc_build_label_decl (NULL_TREE);
2101 code->exit_label = exit_label;
2103 /* Empty SELECT constructs are legal. */
2104 if (code->block == NULL)
2105 body = build_empty_stmt (input_location);
2107 /* Select the correct translation function. */
2108 else
2109 switch (code->expr1->ts.type)
2111 case BT_LOGICAL:
2112 body = gfc_trans_logical_select (code);
2113 break;
2115 case BT_INTEGER:
2116 body = gfc_trans_integer_select (code);
2117 break;
2119 case BT_CHARACTER:
2120 body = gfc_trans_character_select (code);
2121 break;
2123 default:
2124 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2125 /* Not reached */
2128 /* Build everything together. */
2129 gfc_add_expr_to_block (&block, body);
2130 gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));
2132 return gfc_finish_block (&block);
2136 /* Traversal function to substitute a replacement symtree if the symbol
2137 in the expression is the same as that passed. f == 2 signals that
2138 the variable itself is not to be checked - only its references.
2139 This group of functions is used when the variable expression in a
2140 FORALL assignment has internal references. For example:
2141 FORALL (i = 1:4) p(p(i)) = i
2142 The only recourse here is to store a copy of 'p' for the index
2143 expression. */
2145 static gfc_symtree *new_symtree;
2146 static gfc_symtree *old_symtree;
2148 static bool
2149 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
2151 if (expr->expr_type != EXPR_VARIABLE)
2152 return false;
2154 if (*f == 2)
2155 *f = 1;
2156 else if (expr->symtree->n.sym == sym)
2157 expr->symtree = new_symtree;
2159 return false;
2162 static void
2163 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
2165 gfc_traverse_expr (e, sym, forall_replace, f);
2168 static bool
2169 forall_restore (gfc_expr *expr,
2170 gfc_symbol *sym ATTRIBUTE_UNUSED,
2171 int *f ATTRIBUTE_UNUSED)
2173 if (expr->expr_type != EXPR_VARIABLE)
2174 return false;
2176 if (expr->symtree == new_symtree)
2177 expr->symtree = old_symtree;
2179 return false;
2182 static void
2183 forall_restore_symtree (gfc_expr *e)
2185 gfc_traverse_expr (e, NULL, forall_restore, 0);
2188 static void
2189 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2191 gfc_se tse;
2192 gfc_se rse;
2193 gfc_expr *e;
2194 gfc_symbol *new_sym;
2195 gfc_symbol *old_sym;
2196 gfc_symtree *root;
2197 tree tmp;
2199 /* Build a copy of the lvalue. */
2200 old_symtree = c->expr1->symtree;
2201 old_sym = old_symtree->n.sym;
2202 e = gfc_lval_expr_from_sym (old_sym);
2203 if (old_sym->attr.dimension)
2205 gfc_init_se (&tse, NULL);
2206 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
2207 gfc_add_block_to_block (pre, &tse.pre);
2208 gfc_add_block_to_block (post, &tse.post);
2209 tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);
2211 if (e->ts.type != BT_CHARACTER)
2213 /* Use the variable offset for the temporary. */
2214 tmp = gfc_conv_array_offset (old_sym->backend_decl);
2215 gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
2218 else
2220 gfc_init_se (&tse, NULL);
2221 gfc_init_se (&rse, NULL);
2222 gfc_conv_expr (&rse, e);
2223 if (e->ts.type == BT_CHARACTER)
2225 tse.string_length = rse.string_length;
2226 tmp = gfc_get_character_type_len (gfc_default_character_kind,
2227 tse.string_length);
2228 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
2229 rse.string_length);
2230 gfc_add_block_to_block (pre, &tse.pre);
2231 gfc_add_block_to_block (post, &tse.post);
2233 else
2235 tmp = gfc_typenode_for_spec (&e->ts);
2236 tse.expr = gfc_create_var (tmp, "temp");
2239 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
2240 e->expr_type == EXPR_VARIABLE, true);
2241 gfc_add_expr_to_block (pre, tmp);
2243 gfc_free_expr (e);
2245 /* Create a new symbol to represent the lvalue. */
2246 new_sym = gfc_new_symbol (old_sym->name, NULL);
2247 new_sym->ts = old_sym->ts;
2248 new_sym->attr.referenced = 1;
2249 new_sym->attr.temporary = 1;
2250 new_sym->attr.dimension = old_sym->attr.dimension;
2251 new_sym->attr.flavor = old_sym->attr.flavor;
2253 /* Use the temporary as the backend_decl. */
2254 new_sym->backend_decl = tse.expr;
2256 /* Create a fake symtree for it. */
2257 root = NULL;
2258 new_symtree = gfc_new_symtree (&root, old_sym->name);
2259 new_symtree->n.sym = new_sym;
2260 gcc_assert (new_symtree == root);
2262 /* Go through the expression reference replacing the old_symtree
2263 with the new. */
2264 forall_replace_symtree (c->expr1, old_sym, 2);
2266 /* Now we have made this temporary, we might as well use it for
2267 the right hand side. */
2268 forall_replace_symtree (c->expr2, old_sym, 1);
2272 /* Handles dependencies in forall assignments. */
2273 static int
2274 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
2276 gfc_ref *lref;
2277 gfc_ref *rref;
2278 int need_temp;
2279 gfc_symbol *lsym;
2281 lsym = c->expr1->symtree->n.sym;
2282 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
2284 /* Now check for dependencies within the 'variable'
2285 expression itself. These are treated by making a complete
2286 copy of the variable and changing all the references to it
2287 to point to the copy instead. Note that the shallow copy of
2288 the variable will not suffice for derived types with
2289 pointer components. We therefore leave these to their
2290 own devices. */
2291 if (lsym->ts.type == BT_DERIVED
2292 && lsym->ts.u.derived->attr.pointer_comp)
2293 return need_temp;
2295 new_symtree = NULL;
2296 if (find_forall_index (c->expr1, lsym, 2) == SUCCESS)
2298 forall_make_variable_temp (c, pre, post);
2299 need_temp = 0;
2302 /* Substrings with dependencies are treated in the same
2303 way. */
2304 if (c->expr1->ts.type == BT_CHARACTER
2305 && c->expr1->ref
2306 && c->expr2->expr_type == EXPR_VARIABLE
2307 && lsym == c->expr2->symtree->n.sym)
2309 for (lref = c->expr1->ref; lref; lref = lref->next)
2310 if (lref->type == REF_SUBSTRING)
2311 break;
2312 for (rref = c->expr2->ref; rref; rref = rref->next)
2313 if (rref->type == REF_SUBSTRING)
2314 break;
2316 if (rref && lref
2317 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
2319 forall_make_variable_temp (c, pre, post);
2320 need_temp = 0;
2323 return need_temp;
2327 static void
2328 cleanup_forall_symtrees (gfc_code *c)
2330 forall_restore_symtree (c->expr1);
2331 forall_restore_symtree (c->expr2);
2332 gfc_free (new_symtree->n.sym);
2333 gfc_free (new_symtree);
2337 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2338 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2339 indicates whether we should generate code to test the FORALLs mask
2340 array. OUTER is the loop header to be used for initializing mask
2341 indices.
2343 The generated loop format is:
2344 count = (end - start + step) / step
2345 loopvar = start
2346 while (1)
2348 if (count <=0 )
2349 goto end_of_loop
2350 <body>
2351 loopvar += step
2352 count --
2354 end_of_loop: */
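/* For example (a sketch of the count computation shown above): with
   start=1, end=10 and step=3 the trip count is
     count = (end - start + step) / step = (10 - 1 + 3) / 3 = 4,
   i.e. the iterations 1, 4, 7 and 10.  */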
2356 static tree
2357 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
2358 int mask_flag, stmtblock_t *outer)
2360 int n, nvar;
2361 tree tmp;
2362 tree cond;
2363 stmtblock_t block;
2364 tree exit_label;
2365 tree count;
2366 tree var, start, end, step;
2367 iter_info *iter;
2369 /* Initialize the mask index outside the FORALL nest. */
2370 if (mask_flag && forall_tmp->mask)
2371 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
2373 iter = forall_tmp->this_loop;
2374 nvar = forall_tmp->nvar;
2375 for (n = 0; n < nvar; n++)
2377 var = iter->var;
2378 start = iter->start;
2379 end = iter->end;
2380 step = iter->step;
2382 exit_label = gfc_build_label_decl (NULL_TREE);
2383 TREE_USED (exit_label) = 1;
2385 /* The loop counter. */
2386 count = gfc_create_var (TREE_TYPE (var), "count");
2388 /* The body of the loop. */
2389 gfc_init_block (&block);
2391 /* The exit condition. */
2392 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
2393 count, build_int_cst (TREE_TYPE (count), 0));
2394 tmp = build1_v (GOTO_EXPR, exit_label);
2395 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2396 cond, tmp, build_empty_stmt (input_location));
2397 gfc_add_expr_to_block (&block, tmp);
2399 /* The main loop body. */
2400 gfc_add_expr_to_block (&block, body);
2402 /* Increment the loop variable. */
2403 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
2404 step);
2405 gfc_add_modify (&block, var, tmp);
2407 /* Advance to the next mask element. Only do this for the
2408 innermost loop. */
2409 if (n == 0 && mask_flag && forall_tmp->mask)
2411 tree maskindex = forall_tmp->maskindex;
2412 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2413 maskindex, gfc_index_one_node);
2414 gfc_add_modify (&block, maskindex, tmp);
2417 /* Decrement the loop counter. */
2418 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
2419 build_int_cst (TREE_TYPE (var), 1));
2420 gfc_add_modify (&block, count, tmp);
2422 body = gfc_finish_block (&block);
2424 /* Loop var initialization. */
2425 gfc_init_block (&block);
2426 gfc_add_modify (&block, var, start);
2429 /* Initialize the loop counter. */
2430 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
2431 start);
2432 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
2433 tmp);
2434 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
2435 tmp, step);
2436 gfc_add_modify (&block, count, tmp);
2438 /* The loop expression. */
2439 tmp = build1_v (LOOP_EXPR, body);
2440 gfc_add_expr_to_block (&block, tmp);
2442 /* The exit label. */
2443 tmp = build1_v (LABEL_EXPR, exit_label);
2444 gfc_add_expr_to_block (&block, tmp);
2446 body = gfc_finish_block (&block);
2447 iter = iter->next;
2449 return body;
2453 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2454 is nonzero, the body is controlled by all masks in the forall nest.
2455 Otherwise, the innermost loop is not controlled by its mask. This
2456 is used for initializing that mask. */
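/* Schematically (a sketch): with MASK_FLAG set, the body is wrapped as
     if (mask[maskindex]) { body; }
   and then enclosed by the iteration loops of every level of the nest,
   starting from the given forall_info and following prev_nest outwards.  */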
2458 static tree
2459 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
2460 int mask_flag)
2462 tree tmp;
2463 stmtblock_t header;
2464 forall_info *forall_tmp;
2465 tree mask, maskindex;
2467 gfc_start_block (&header);
2469 forall_tmp = nested_forall_info;
2470 while (forall_tmp != NULL)
2472 /* Generate body with masks' control. */
2473 if (mask_flag)
2475 mask = forall_tmp->mask;
2476 maskindex = forall_tmp->maskindex;
2478 /* If a mask was specified make the assignment conditional. */
2479 if (mask)
2481 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2482 body = build3_v (COND_EXPR, tmp, body,
2483 build_empty_stmt (input_location));
2486 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
2487 forall_tmp = forall_tmp->prev_nest;
2488 mask_flag = 1;
2491 gfc_add_expr_to_block (&header, body);
2492 return gfc_finish_block (&header);
2496 /* Allocate data for holding a temporary array. Returns either a local
2497 temporary array or a pointer variable. */
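/* A rough sketch of the decision made below: if BYTESIZE is constant and
   small enough for gfc_can_put_var_on_stack, a fixed-size local array is
   created and *PDATA is set to NULL_TREE; otherwise a pointer variable is
   created, the memory is obtained with gfc_call_malloc, and *PDATA holds
   the address so that the caller can free it later.  */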
2499 static tree
2500 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2501 tree elem_type)
2503 tree tmpvar;
2504 tree type;
2505 tree tmp;
2507 if (INTEGER_CST_P (size))
2508 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
2509 size, gfc_index_one_node);
2510 else
2511 tmp = NULL_TREE;
2513 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2514 type = build_array_type (elem_type, type);
2515 if (gfc_can_put_var_on_stack (bytesize))
2517 gcc_assert (INTEGER_CST_P (size));
2518 tmpvar = gfc_create_var (type, "temp");
2519 *pdata = NULL_TREE;
2521 else
2523 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2524 *pdata = convert (pvoid_type_node, tmpvar);
2526 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2527 gfc_add_modify (pblock, tmpvar, tmp);
2529 return tmpvar;
2533 /* Generate code to copy the temporary to the actual lhs. */
2535 static tree
2536 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2537 tree count1, tree wheremask, bool invert)
2539 gfc_ss *lss;
2540 gfc_se lse, rse;
2541 stmtblock_t block, body;
2542 gfc_loopinfo loop1;
2543 tree tmp;
2544 tree wheremaskexpr;
2546 /* Walk the lhs. */
2547 lss = gfc_walk_expr (expr);
2549 if (lss == gfc_ss_terminator)
2551 gfc_start_block (&block);
2553 gfc_init_se (&lse, NULL);
2555 /* Translate the expression. */
2556 gfc_conv_expr (&lse, expr);
2558 /* Form the expression for the temporary. */
2559 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2561 /* Use the scalar assignment as is. */
2562 gfc_add_block_to_block (&block, &lse.pre);
2563 gfc_add_modify (&block, lse.expr, tmp);
2564 gfc_add_block_to_block (&block, &lse.post);
2566 /* Increment the count1. */
2567 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2568 count1, gfc_index_one_node);
2569 gfc_add_modify (&block, count1, tmp);
2571 tmp = gfc_finish_block (&block);
2573 else
2575 gfc_start_block (&block);
2577 gfc_init_loopinfo (&loop1);
2578 gfc_init_se (&rse, NULL);
2579 gfc_init_se (&lse, NULL);
2581 /* Associate the lss with the loop. */
2582 gfc_add_ss_to_loop (&loop1, lss);
2584 /* Calculate the bounds of the scalarization. */
2585 gfc_conv_ss_startstride (&loop1);
2586 /* Setup the scalarizing loops. */
2587 gfc_conv_loop_setup (&loop1, &expr->where);
2589 gfc_mark_ss_chain_used (lss, 1);
2591 /* Start the scalarized loop body. */
2592 gfc_start_scalarized_body (&loop1, &body);
2594 /* Setup the gfc_se structures. */
2595 gfc_copy_loopinfo_to_se (&lse, &loop1);
2596 lse.ss = lss;
2598 /* Form the expression of the temporary. */
2599 if (lss != gfc_ss_terminator)
2600 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2601 /* Translate expr. */
2602 gfc_conv_expr (&lse, expr);
2604 /* Use the scalar assignment. */
2605 rse.string_length = lse.string_length;
2606 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);
2608 /* Form the mask expression according to the mask tree list. */
2609 if (wheremask)
2611 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2612 if (invert)
2613 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2614 TREE_TYPE (wheremaskexpr),
2615 wheremaskexpr);
2616 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2617 wheremaskexpr, tmp,
2618 build_empty_stmt (input_location));
2621 gfc_add_expr_to_block (&body, tmp);
2623 /* Increment count1. */
2624 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2625 count1, gfc_index_one_node);
2626 gfc_add_modify (&body, count1, tmp);
2628 /* Increment count3. */
2629 if (count3)
2631 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2632 gfc_array_index_type, count3,
2633 gfc_index_one_node);
2634 gfc_add_modify (&body, count3, tmp);
2637 /* Generate the copying loops. */
2638 gfc_trans_scalarizing_loops (&loop1, &body);
2639 gfc_add_block_to_block (&block, &loop1.pre);
2640 gfc_add_block_to_block (&block, &loop1.post);
2641 gfc_cleanup_loop (&loop1);
2643 tmp = gfc_finish_block (&block);
2645 return tmp;
2649 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
2650 the temporary; LSS and RSS are formed in function compute_inner_temp_size()
2651 and should not be freed. WHEREMASK is the conditional execution mask
2652 whose sense may be inverted by INVERT. */
2654 static tree
2655 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2656 tree count1, gfc_ss *lss, gfc_ss *rss,
2657 tree wheremask, bool invert)
2659 stmtblock_t block, body1;
2660 gfc_loopinfo loop;
2661 gfc_se lse;
2662 gfc_se rse;
2663 tree tmp;
2664 tree wheremaskexpr;
2666 gfc_start_block (&block);
2668 gfc_init_se (&rse, NULL);
2669 gfc_init_se (&lse, NULL);
2671 if (lss == gfc_ss_terminator)
2673 gfc_init_block (&body1);
2674 gfc_conv_expr (&rse, expr2);
2675 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2677 else
2679 /* Initialize the loop. */
2680 gfc_init_loopinfo (&loop);
2682 /* We may need LSS to determine the shape of the expression. */
2683 gfc_add_ss_to_loop (&loop, lss);
2684 gfc_add_ss_to_loop (&loop, rss);
2686 gfc_conv_ss_startstride (&loop);
2687 gfc_conv_loop_setup (&loop, &expr2->where);
2689 gfc_mark_ss_chain_used (rss, 1);
2690 /* Start the loop body. */
2691 gfc_start_scalarized_body (&loop, &body1);
2693 /* Translate the expression. */
2694 gfc_copy_loopinfo_to_se (&rse, &loop);
2695 rse.ss = rss;
2696 gfc_conv_expr (&rse, expr2);
2698 /* Form the expression of the temporary. */
2699 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2702 /* Use the scalar assignment. */
2703 lse.string_length = rse.string_length;
2704 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2705 expr2->expr_type == EXPR_VARIABLE, true);
2707 /* Form the mask expression according to the mask tree list. */
2708 if (wheremask)
2710 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2711 if (invert)
2712 wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
2713 TREE_TYPE (wheremaskexpr),
2714 wheremaskexpr);
2715 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
2716 wheremaskexpr, tmp,
2717 build_empty_stmt (input_location));
2720 gfc_add_expr_to_block (&body1, tmp);
2722 if (lss == gfc_ss_terminator)
2724 gfc_add_block_to_block (&block, &body1);
2726 /* Increment count1. */
2727 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
2728 count1, gfc_index_one_node);
2729 gfc_add_modify (&block, count1, tmp);
2731 else
2733 /* Increment count1. */
2734 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
2735 count1, gfc_index_one_node);
2736 gfc_add_modify (&body1, count1, tmp);
2738 /* Increment count3. */
2739 if (count3)
2741 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2742 gfc_array_index_type,
2743 count3, gfc_index_one_node);
2744 gfc_add_modify (&body1, count3, tmp);
2747 /* Generate the copying loops. */
2748 gfc_trans_scalarizing_loops (&loop, &body1);
2750 gfc_add_block_to_block (&block, &loop.pre);
2751 gfc_add_block_to_block (&block, &loop.post);
2753 gfc_cleanup_loop (&loop);
2754 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2755 as tree nodes in SS may not be valid in a different scope. */
2758 tmp = gfc_finish_block (&block);
2759 return tmp;
2763 /* Calculate the size of the temporary needed in the assignment inside a
2764 forall. LSS and RSS are filled in by this function. */
2766 static tree
2767 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2768 stmtblock_t * pblock,
2769 gfc_ss **lss, gfc_ss **rss)
2771 gfc_loopinfo loop;
2772 tree size;
2773 int i;
2774 int save_flag;
2775 tree tmp;
2777 *lss = gfc_walk_expr (expr1);
2778 *rss = NULL;
2780 size = gfc_index_one_node;
2781 if (*lss != gfc_ss_terminator)
2783 gfc_init_loopinfo (&loop);
2785 /* Walk the RHS of the expression. */
2786 *rss = gfc_walk_expr (expr2);
2787 if (*rss == gfc_ss_terminator)
2789 /* The rhs is scalar. Add a ss for the expression. */
2790 *rss = gfc_get_ss ();
2791 (*rss)->next = gfc_ss_terminator;
2792 (*rss)->type = GFC_SS_SCALAR;
2793 (*rss)->expr = expr2;
2796 /* Associate the SS with the loop. */
2797 gfc_add_ss_to_loop (&loop, *lss);
2798 /* We don't actually need to add the rhs at this point, but it might
2799 make guessing the loop bounds a bit easier. */
2800 gfc_add_ss_to_loop (&loop, *rss);
2802 /* We only want the shape of the expression, not the rest of the junk
2803 generated by the scalarizer. */
2804 loop.array_parameter = 1;
2806 /* Calculate the bounds of the scalarization. */
2807 save_flag = gfc_option.rtcheck;
2808 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2809 gfc_conv_ss_startstride (&loop);
2810 gfc_option.rtcheck = save_flag;
2811 gfc_conv_loop_setup (&loop, &expr2->where);
2813 /* Figure out how many elements we need. */
2814 for (i = 0; i < loop.dimen; i++)
2816 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2817 gfc_array_index_type,
2818 gfc_index_one_node, loop.from[i]);
2819 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2820 gfc_array_index_type, tmp, loop.to[i]);
2821 size = fold_build2_loc (input_location, MULT_EXPR,
2822 gfc_array_index_type, size, tmp);
2824 gfc_add_block_to_block (pblock, &loop.pre);
2825 size = gfc_evaluate_now (size, pblock);
2826 gfc_add_block_to_block (pblock, &loop.post);
2828 /* TODO: write a function that cleans up a loopinfo without freeing
2829 the SS chains. Currently a NOP. */
2832 return size;
2836 /* Calculate the overall iterator number of the nested forall construct.
2837 This routine actually calculates the number of times the body of the
2838 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2839 that by the expression INNER_SIZE. The BLOCK argument specifies the
2840 block in which to calculate the result, and the optional INNER_SIZE_BODY
2841 argument contains any statements that need to be executed (inside the loop)
2842 to initialize or calculate INNER_SIZE. */
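/* Worked example (a sketch): for FORALL (i=1:4, j=1:3) with a constant
   INNER_SIZE of 5, the loops are unconditional with constant bounds, so the
   overall count can be folded to 4 * 3 * 5 = 60 without emitting a counting
   loop.  With a mask or non-constant bounds, a "num += inner_size" loop is
   generated instead and wrapped by gfc_trans_nested_forall_loop.  */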
2844 static tree
2845 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2846 stmtblock_t *inner_size_body, stmtblock_t *block)
2848 forall_info *forall_tmp = nested_forall_info;
2849 tree tmp, number;
2850 stmtblock_t body;
2852 /* We can eliminate the innermost unconditional loops with constant
2853 array bounds. */
2854 if (INTEGER_CST_P (inner_size))
2856 while (forall_tmp
2857 && !forall_tmp->mask
2858 && INTEGER_CST_P (forall_tmp->size))
2860 inner_size = fold_build2_loc (input_location, MULT_EXPR,
2861 gfc_array_index_type,
2862 inner_size, forall_tmp->size);
2863 forall_tmp = forall_tmp->prev_nest;
2866 /* If there are no loops left, we have our constant result. */
2867 if (!forall_tmp)
2868 return inner_size;
2871 /* Otherwise, create a temporary variable to compute the result. */
2872 number = gfc_create_var (gfc_array_index_type, "num");
2873 gfc_add_modify (block, number, gfc_index_zero_node);
2875 gfc_start_block (&body);
2876 if (inner_size_body)
2877 gfc_add_block_to_block (&body, inner_size_body);
2878 if (forall_tmp)
2879 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2880 gfc_array_index_type, number, inner_size);
2881 else
2882 tmp = inner_size;
2883 gfc_add_modify (&body, number, tmp);
2884 tmp = gfc_finish_block (&body);
2886 /* Generate loops. */
2887 if (forall_tmp != NULL)
2888 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2890 gfc_add_expr_to_block (block, tmp);
2892 return number;
2896 /* Allocate a temporary for the forall construct. SIZE is the size of the
2897 temporary needed. PTEMP1 is returned so that the space can be freed later. */
2899 static tree
2900 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2901 tree * ptemp1)
2903 tree bytesize;
2904 tree unit;
2905 tree tmp;
2907 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2908 if (!integer_onep (unit))
2909 bytesize = fold_build2_loc (input_location, MULT_EXPR,
2910 gfc_array_index_type, size, unit);
2911 else
2912 bytesize = size;
2914 *ptemp1 = NULL;
2915 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2917 if (*ptemp1)
2918 tmp = build_fold_indirect_ref_loc (input_location, tmp);
2919 return tmp;
2923 /* Allocate a temporary for the forall construct according to the information
2924 in nested_forall_info. INNER_SIZE is the size of the temporary needed in
2925 the assignment inside the forall. PTEMP1 is returned so that the space can be freed later. */
2927 static tree
2928 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2929 tree inner_size, stmtblock_t * inner_size_body,
2930 stmtblock_t * block, tree * ptemp1)
2932 tree size;
2934 /* Calculate the total size of temporary needed in forall construct. */
2935 size = compute_overall_iter_number (nested_forall_info, inner_size,
2936 inner_size_body, block);
2938 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2942 /* Handle assignments inside a forall which need a temporary.
2944 forall (i=start:end:stride; maskexpr)
2945 e<i> = f<i>
2946 end forall
2947 (where e,f<i> are arbitrary expressions possibly involving i
2948 and there is a dependency between e<i> and f<i>)
2949 Translates to:
2950 masktmp(:) = maskexpr(:)
2952 maskindex = 0;
2953 count1 = 0;
2954 num = 0;
2955 for (i = start; i <= end; i += stride)
2956 num += SIZE (f<i>)
2957 count1 = 0;
2958 ALLOCATE (tmp(num))
2959 for (i = start; i <= end; i += stride)
2961 if (masktmp[maskindex++])
2962 tmp[count1++] = f<i>
2964 maskindex = 0;
2965 count1 = 0;
2966 for (i = start; i <= end; i += stride)
2968 if (masktmp[maskindex++])
2969 e<i> = tmp[count1++]
2971 DEALLOCATE (tmp)
2973 static void
2974 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2975 tree wheremask, bool invert,
2976 forall_info * nested_forall_info,
2977 stmtblock_t * block)
2979 tree type;
2980 tree inner_size;
2981 gfc_ss *lss, *rss;
2982 tree count, count1;
2983 tree tmp, tmp1;
2984 tree ptemp1;
2985 stmtblock_t inner_size_body;
2987 /* Create vars. count1 is the current iterator number of the nested
2988 forall. */
2989 count1 = gfc_create_var (gfc_array_index_type, "count1");
2991 /* Count is the wheremask index. */
2992 if (wheremask)
2994 count = gfc_create_var (gfc_array_index_type, "count");
2995 gfc_add_modify (block, count, gfc_index_zero_node);
2997 else
2998 count = NULL;
3000 /* Initialize count1. */
3001 gfc_add_modify (block, count1, gfc_index_zero_node);
3003 /* Calculate the size of the temporary needed in the assignment. Fill in
3004 lss and rss, which are used in function generate_loop_for_rhs_to_temp(). */
3005 gfc_init_block (&inner_size_body);
3006 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
3007 &lss, &rss);
3009 /* The type of the LHS. Used in function allocate_temp_for_forall_nest. */
3010 if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
3012 if (!expr1->ts.u.cl->backend_decl)
3014 gfc_se tse;
3015 gfc_init_se (&tse, NULL);
3016 gfc_conv_expr (&tse, expr1->ts.u.cl->length);
3017 expr1->ts.u.cl->backend_decl = tse.expr;
3019 type = gfc_get_character_type_len (gfc_default_character_kind,
3020 expr1->ts.u.cl->backend_decl);
3022 else
3023 type = gfc_typenode_for_spec (&expr1->ts);
3025 /* Allocate temporary for nested forall construct according to the
3026 information in nested_forall_info and inner_size. */
3027 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
3028 &inner_size_body, block, &ptemp1);
3030 /* Generate code to copy the rhs to the temporary. */
3031 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
3032 wheremask, invert);
3034 /* Generate body and loops according to the information in
3035 nested_forall_info. */
3036 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3037 gfc_add_expr_to_block (block, tmp);
3039 /* Reset count1. */
3040 gfc_add_modify (block, count1, gfc_index_zero_node);
3042 /* Reset count. */
3043 if (wheremask)
3044 gfc_add_modify (block, count, gfc_index_zero_node);
3046 /* Generate code to copy the temporary to the lhs. */
3047 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
3048 wheremask, invert);
3050 /* Generate body and loops according to the information in
3051 nested_forall_info. */
3052 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3053 gfc_add_expr_to_block (block, tmp);
3055 if (ptemp1)
3057 /* Free the temporary. */
3058 tmp = gfc_call_free (ptemp1);
3059 gfc_add_expr_to_block (block, tmp);
3064 /* Translate a pointer assignment inside FORALL which needs a temporary. */
3066 static void
3067 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
3068 forall_info * nested_forall_info,
3069 stmtblock_t * block)
3071 tree type;
3072 tree inner_size;
3073 gfc_ss *lss, *rss;
3074 gfc_se lse;
3075 gfc_se rse;
3076 gfc_ss_info *info;
3077 gfc_loopinfo loop;
3078 tree desc;
3079 tree parm;
3080 tree parmtype;
3081 stmtblock_t body;
3082 tree count;
3083 tree tmp, tmp1, ptemp1;
3085 count = gfc_create_var (gfc_array_index_type, "count");
3086 gfc_add_modify (block, count, gfc_index_zero_node);
3088 inner_size = integer_one_node;
3089 lss = gfc_walk_expr (expr1);
3090 rss = gfc_walk_expr (expr2);
3091 if (lss == gfc_ss_terminator)
3093 type = gfc_typenode_for_spec (&expr1->ts);
3094 type = build_pointer_type (type);
3096 /* Allocate temporary for nested forall construct according to the
3097 information in nested_forall_info and inner_size. */
3098 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
3099 inner_size, NULL, block, &ptemp1);
3100 gfc_start_block (&body);
3101 gfc_init_se (&lse, NULL);
3102 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3103 gfc_init_se (&rse, NULL);
3104 rse.want_pointer = 1;
3105 gfc_conv_expr (&rse, expr2);
3106 gfc_add_block_to_block (&body, &rse.pre);
3107 gfc_add_modify (&body, lse.expr,
3108 fold_convert (TREE_TYPE (lse.expr), rse.expr));
3109 gfc_add_block_to_block (&body, &rse.post);
3111 /* Increment count. */
3112 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3113 count, gfc_index_one_node);
3114 gfc_add_modify (&body, count, tmp);
3116 tmp = gfc_finish_block (&body);
3118 /* Generate body and loops according to the information in
3119 nested_forall_info. */
3120 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3121 gfc_add_expr_to_block (block, tmp);
3123 /* Reset count. */
3124 gfc_add_modify (block, count, gfc_index_zero_node);
3126 gfc_start_block (&body);
3127 gfc_init_se (&lse, NULL);
3128 gfc_init_se (&rse, NULL);
3129 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
3130 lse.want_pointer = 1;
3131 gfc_conv_expr (&lse, expr1);
3132 gfc_add_block_to_block (&body, &lse.pre);
3133 gfc_add_modify (&body, lse.expr, rse.expr);
3134 gfc_add_block_to_block (&body, &lse.post);
3135 /* Increment count. */
3136 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3137 count, gfc_index_one_node);
3138 gfc_add_modify (&body, count, tmp);
3139 tmp = gfc_finish_block (&body);
3141 /* Generate body and loops according to the information in
3142 nested_forall_info. */
3143 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3144 gfc_add_expr_to_block (block, tmp);
3146 else
3148 gfc_init_loopinfo (&loop);
3150 /* Associate the SS with the loop. */
3151 gfc_add_ss_to_loop (&loop, rss);
3153 /* Setup the scalarizing loops and bounds. */
3154 gfc_conv_ss_startstride (&loop);
3156 gfc_conv_loop_setup (&loop, &expr2->where);
3158 info = &rss->data.info;
3159 desc = info->descriptor;
3161 /* Make a new descriptor. */
3162 parmtype = gfc_get_element_type (TREE_TYPE (desc));
3163 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
3164 loop.from, loop.to, 1,
3165 GFC_ARRAY_UNKNOWN, true);
3167 /* Allocate temporary for nested forall construct. */
3168 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
3169 inner_size, NULL, block, &ptemp1);
3170 gfc_start_block (&body);
3171 gfc_init_se (&lse, NULL);
3172 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
3173 lse.direct_byref = 1;
3174 rss = gfc_walk_expr (expr2);
3175 gfc_conv_expr_descriptor (&lse, expr2, rss);
3177 gfc_add_block_to_block (&body, &lse.pre);
3178 gfc_add_block_to_block (&body, &lse.post);
3180 /* Increment count. */
3181 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3182 count, gfc_index_one_node);
3183 gfc_add_modify (&body, count, tmp);
3185 tmp = gfc_finish_block (&body);
3187 /* Generate body and loops according to the information in
3188 nested_forall_info. */
3189 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3190 gfc_add_expr_to_block (block, tmp);
3192 /* Reset count. */
3193 gfc_add_modify (block, count, gfc_index_zero_node);
3195 parm = gfc_build_array_ref (tmp1, count, NULL);
3196 lss = gfc_walk_expr (expr1);
3197 gfc_init_se (&lse, NULL);
3198 gfc_conv_expr_descriptor (&lse, expr1, lss);
3199 gfc_add_modify (&lse.pre, lse.expr, parm);
3200 gfc_start_block (&body);
3201 gfc_add_block_to_block (&body, &lse.pre);
3202 gfc_add_block_to_block (&body, &lse.post);
3204 /* Increment count. */
3205 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3206 count, gfc_index_one_node);
3207 gfc_add_modify (&body, count, tmp);
3209 tmp = gfc_finish_block (&body);
3211 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
3212 gfc_add_expr_to_block (block, tmp);
3214 /* Free the temporary. */
3215 if (ptemp1)
3217 tmp = gfc_call_free (ptemp1);
3218 gfc_add_expr_to_block (block, tmp);
3223 /* FORALL and WHERE statements are really nasty, especially when you nest
3224 them. All the rhs expressions of a forall assignment must be evaluated before the
3225 actual assignments are performed. Presumably this also applies to all the
3226 assignments in an inner where statement. */
3228 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3229 linear array, relying on the fact that we process in the same order in all
3230 loops.
3232 forall (i=start:end:stride; maskexpr)
3233 e<i> = f<i>
3234 g<i> = h<i>
3235 end forall
3236 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3237 Translates to:
3238 count = ((end + 1 - start) / stride)
3239 masktmp(:) = maskexpr(:)
3241 maskindex = 0;
3242 for (i = start; i <= end; i += stride)
3244 if (masktmp[maskindex++])
3245 e<i> = f<i>
3247 maskindex = 0;
3248 for (i = start; i <= end; i += stride)
3250 if (masktmp[maskindex++])
3251 g<i> = h<i>
3254 Note that this code only works when there are no dependencies.
3255 Forall loops with array assignments and data dependencies are a real pain,
3256 because the size of the temporary cannot always be determined before the
3257 loop is executed. This problem is compounded by the presence of nested
3258 FORALL constructs.
3261 static tree
3262 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
3264 stmtblock_t pre;
3265 stmtblock_t post;
3266 stmtblock_t block;
3267 stmtblock_t body;
3268 tree *var;
3269 tree *start;
3270 tree *end;
3271 tree *step;
3272 gfc_expr **varexpr;
3273 tree tmp;
3274 tree assign;
3275 tree size;
3276 tree maskindex;
3277 tree mask;
3278 tree pmask;
3279 int n;
3280 int nvar;
3281 int need_temp;
3282 gfc_forall_iterator *fa;
3283 gfc_se se;
3284 gfc_code *c;
3285 gfc_saved_var *saved_vars;
3286 iter_info *this_forall;
3287 forall_info *info;
3288 bool need_mask;
3290 /* Do nothing if the mask is false. */
3291 if (code->expr1
3292 && code->expr1->expr_type == EXPR_CONSTANT
3293 && !code->expr1->value.logical)
3294 return build_empty_stmt (input_location);
3296 n = 0;
3297 /* Count the number of FORALL indices. */
3298 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3299 n++;
3300 nvar = n;
3302 /* Allocate the space for var, start, end, step, varexpr. */
3303 var = (tree *) gfc_getmem (nvar * sizeof (tree));
3304 start = (tree *) gfc_getmem (nvar * sizeof (tree));
3305 end = (tree *) gfc_getmem (nvar * sizeof (tree));
3306 step = (tree *) gfc_getmem (nvar * sizeof (tree));
3307 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
3308 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
3310 /* Allocate the space for info. */
3311 info = (forall_info *) gfc_getmem (sizeof (forall_info));
3313 gfc_start_block (&pre);
3314 gfc_init_block (&post);
3315 gfc_init_block (&block);
3317 n = 0;
3318 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
3320 gfc_symbol *sym = fa->var->symtree->n.sym;
3322 /* Allocate space for this_forall. */
3323 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
3325 /* Create a temporary variable for the FORALL index. */
3326 tmp = gfc_typenode_for_spec (&sym->ts);
3327 var[n] = gfc_create_var (tmp, sym->name);
3328 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
3330 /* Record it in this_forall. */
3331 this_forall->var = var[n];
3333 /* Replace the index symbol's backend_decl with the temporary decl. */
3334 sym->backend_decl = var[n];
3336 /* Work out the start, end and stride for the loop. */
3337 gfc_init_se (&se, NULL);
3338 gfc_conv_expr_val (&se, fa->start);
3339 /* Record it in this_forall. */
3340 this_forall->start = se.expr;
3341 gfc_add_block_to_block (&block, &se.pre);
3342 start[n] = se.expr;
3344 gfc_init_se (&se, NULL);
3345 gfc_conv_expr_val (&se, fa->end);
3346 /* Record it in this_forall. */
3347 this_forall->end = se.expr;
3348 gfc_make_safe_expr (&se);
3349 gfc_add_block_to_block (&block, &se.pre);
3350 end[n] = se.expr;
3352 gfc_init_se (&se, NULL);
3353 gfc_conv_expr_val (&se, fa->stride);
3354 /* Record it in this_forall. */
3355 this_forall->step = se.expr;
3356 gfc_make_safe_expr (&se);
3357 gfc_add_block_to_block (&block, &se.pre);
3358 step[n] = se.expr;
3360 /* Set the NEXT field of this_forall to NULL. */
3361 this_forall->next = NULL;
3362 /* Link this_forall to the info construct. */
3363 if (info->this_loop)
3365 iter_info *iter_tmp = info->this_loop;
3366 while (iter_tmp->next != NULL)
3367 iter_tmp = iter_tmp->next;
3368 iter_tmp->next = this_forall;
3370 else
3371 info->this_loop = this_forall;
3373 n++;
3375 nvar = n;
3377 /* Calculate the size needed for the current forall level. */
3378 size = gfc_index_one_node;
3379 for (n = 0; n < nvar; n++)
3381 /* size = (end + step - start) / step. */
3382 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
3383 step[n], start[n]);
3384 tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
3385 end[n], tmp);
3386 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
3387 tmp, step[n]);
3388 tmp = convert (gfc_array_index_type, tmp);
3390 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3391 size, tmp);
3394 /* Record the nvar and size of current forall level. */
3395 info->nvar = nvar;
3396 info->size = size;
3398 if (code->expr1)
3400 /* If the mask is .true., consider the FORALL unconditional. */
3401 if (code->expr1->expr_type == EXPR_CONSTANT
3402 && code->expr1->value.logical)
3403 need_mask = false;
3404 else
3405 need_mask = true;
3407 else
3408 need_mask = false;
3410 /* First we need to allocate the mask. */
3411 if (need_mask)
3413 /* As the mask array can be very big, prefer compact boolean types. */
3414 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3415 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
3416 size, NULL, &block, &pmask);
3417 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
3419 /* Record them in the info structure. */
3420 info->maskindex = maskindex;
3421 info->mask = mask;
3423 else
3425 /* No mask was specified. */
3426 maskindex = NULL_TREE;
3427 mask = pmask = NULL_TREE;
3430 /* Link the current forall level to nested_forall_info. */
3431 info->prev_nest = nested_forall_info;
3432 nested_forall_info = info;
3434 /* Copy the mask into a temporary variable if required.
3435 For now we assume a mask temporary is needed. */
3436 if (need_mask)
3438 /* As the mask array can be very big, prefer compact boolean types. */
3439 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3441 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
3443 /* Start of mask assignment loop body. */
3444 gfc_start_block (&body);
3446 /* Evaluate the mask expression. */
3447 gfc_init_se (&se, NULL);
3448 gfc_conv_expr_val (&se, code->expr1);
3449 gfc_add_block_to_block (&body, &se.pre);
3451 /* Store the mask. */
3452 se.expr = convert (mask_type, se.expr);
3454 tmp = gfc_build_array_ref (mask, maskindex, NULL);
3455 gfc_add_modify (&body, tmp, se.expr);
3457 /* Advance to the next mask element. */
3458 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3459 maskindex, gfc_index_one_node);
3460 gfc_add_modify (&body, maskindex, tmp);
3462 /* Generate the loops. */
3463 tmp = gfc_finish_block (&body);
3464 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
3465 gfc_add_expr_to_block (&block, tmp);
3468 c = code->block->next;
3470 /* TODO: loop merging in FORALL statements. */
3471 /* Now that we've got a copy of the mask, generate the assignment loops. */
3472 while (c)
3474 switch (c->op)
3476 case EXEC_ASSIGN:
3477 /* A scalar or array assignment. Do the simple check for
3478 lhs to rhs dependencies. These make a temporary for the
3479 rhs and form a second forall block to copy it to the variable. */
3480 need_temp = check_forall_dependencies(c, &pre, &post);
3482 /* Temporaries due to array assignment data dependencies introduce
3483 no end of problems. */
3484 if (need_temp)
3485 gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
3486 nested_forall_info, &block);
3487 else
3489 /* Use the normal assignment copying routines. */
3490 assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);
3492 /* Generate body and loops. */
3493 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3494 assign, 1);
3495 gfc_add_expr_to_block (&block, tmp);
3498 /* Clean up any temporary symtrees that have been made to deal
3499 with dependencies. */
3500 if (new_symtree)
3501 cleanup_forall_symtrees (c);
3503 break;
3505 case EXEC_WHERE:
3506 /* Translate WHERE or WHERE construct nested in FORALL. */
3507 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3508 break;
3510 /* Pointer assignment inside FORALL. */
3511 case EXEC_POINTER_ASSIGN:
3512 need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
3513 if (need_temp)
3514 gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
3515 nested_forall_info, &block);
3516 else
3518 /* Use the normal assignment copying routines. */
3519 assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);
3521 /* Generate body and loops. */
3522 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3523 assign, 1);
3524 gfc_add_expr_to_block (&block, tmp);
3526 break;
3528 case EXEC_FORALL:
3529 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3530 gfc_add_expr_to_block (&block, tmp);
3531 break;
3533 /* Explicit subroutine calls are prevented by the frontend but interface
3534 assignments can legitimately produce them. */
3535 case EXEC_ASSIGN_CALL:
3536 assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
3537 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3538 gfc_add_expr_to_block (&block, tmp);
3539 break;
3541 default:
3542 gcc_unreachable ();
3545 c = c->next;
3548 /* Restore the original index variables. */
3549 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3550 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3552 /* Free the space for var, start, end, step, varexpr. */
3553 gfc_free (var);
3554 gfc_free (start);
3555 gfc_free (end);
3556 gfc_free (step);
3557 gfc_free (varexpr);
3558 gfc_free (saved_vars);
3560 for (this_forall = info->this_loop; this_forall;)
3562 iter_info *next = this_forall->next;
3563 gfc_free (this_forall);
3564 this_forall = next;
3567 /* Free the space for this forall_info. */
3568 gfc_free (info);
3570 if (pmask)
3572 /* Free the temporary for the mask. */
3573 tmp = gfc_call_free (pmask);
3574 gfc_add_expr_to_block (&block, tmp);
3576 if (maskindex)
3577 pushdecl (maskindex);
3579 gfc_add_block_to_block (&pre, &block);
3580 gfc_add_block_to_block (&pre, &post);
3582 return gfc_finish_block (&pre);
3586 /* Translate the FORALL statement or construct. */
3588 tree gfc_trans_forall (gfc_code * code)
3590 return gfc_trans_forall_1 (code, NULL);
3594 /* Evaluate the WHERE mask expression and copy its value to a temporary.
3595 If the WHERE construct is nested in a FORALL, the overall temporary size
3596 is the size needed by the WHERE mask expression multiplied by the
3597 iteration count of the nested forall.
3598 ME is the WHERE mask expression.
3599 MASK is the current execution mask upon input, whose sense may or may
3600 not be inverted as specified by the INVERT argument.
3601 CMASK is the updated execution mask on output, or NULL if not required.
3602 PMASK is the pending execution mask on output, or NULL if not required.
3603 BLOCK is the block in which to place the condition evaluation loops. */
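/* In boolean terms, a sketch of what the loop body below stores:
     cmask[i] = mask[i] .AND. cond          (mask read inverted if INVERT)
     pmask[i] = mask[i] .AND. .NOT. cond
   When no incoming MASK is given, cmask[i] is simply cond and pmask[i]
   simply .NOT. cond.  */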
3605 static void
3606 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3607 tree mask, bool invert, tree cmask, tree pmask,
3608 tree mask_type, stmtblock_t * block)
3610 tree tmp, tmp1;
3611 gfc_ss *lss, *rss;
3612 gfc_loopinfo loop;
3613 stmtblock_t body, body1;
3614 tree count, cond, mtmp;
3615 gfc_se lse, rse;
3617 gfc_init_loopinfo (&loop);
3619 lss = gfc_walk_expr (me);
3620 rss = gfc_walk_expr (me);
3622 /* Variable to index the temporary. */
3623 count = gfc_create_var (gfc_array_index_type, "count");
3624 /* Initialize count. */
3625 gfc_add_modify (block, count, gfc_index_zero_node);
3627 gfc_start_block (&body);
3629 gfc_init_se (&rse, NULL);
3630 gfc_init_se (&lse, NULL);
3632 if (lss == gfc_ss_terminator)
3634 gfc_init_block (&body1);
3636 else
3638 /* Initialize the loop. */
3639 gfc_init_loopinfo (&loop);
3641 /* We may need LSS to determine the shape of the expression. */
3642 gfc_add_ss_to_loop (&loop, lss);
3643 gfc_add_ss_to_loop (&loop, rss);
3645 gfc_conv_ss_startstride (&loop);
3646 gfc_conv_loop_setup (&loop, &me->where);
3648 gfc_mark_ss_chain_used (rss, 1);
3649 /* Start the loop body. */
3650 gfc_start_scalarized_body (&loop, &body1);
3652 /* Translate the expression. */
3653 gfc_copy_loopinfo_to_se (&rse, &loop);
3654 rse.ss = rss;
3655 gfc_conv_expr (&rse, me);
3658 /* Variable to evaluate mask condition. */
3659 cond = gfc_create_var (mask_type, "cond");
3660 if (mask && (cmask || pmask))
3661 mtmp = gfc_create_var (mask_type, "mask");
3662 else mtmp = NULL_TREE;
3664 gfc_add_block_to_block (&body1, &lse.pre);
3665 gfc_add_block_to_block (&body1, &rse.pre);
3667 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3669 if (mask && (cmask || pmask))
3671 tmp = gfc_build_array_ref (mask, count, NULL);
3672 if (invert)
3673 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
3674 gfc_add_modify (&body1, mtmp, tmp);
3677 if (cmask)
3679 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3680 tmp = cond;
3681 if (mask)
3682 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
3683 mtmp, tmp);
3684 gfc_add_modify (&body1, tmp1, tmp);
3687 if (pmask)
3689 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3690 tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
3691 if (mask)
3692 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
3693 tmp);
3694 gfc_add_modify (&body1, tmp1, tmp);
3697 gfc_add_block_to_block (&body1, &lse.post);
3698 gfc_add_block_to_block (&body1, &rse.post);
3700 if (lss == gfc_ss_terminator)
3702 gfc_add_block_to_block (&body, &body1);
3704 else
3706 /* Increment count. */
3707 tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3708 count, gfc_index_one_node);
3709 gfc_add_modify (&body1, count, tmp1);
3711 /* Generate the copying loops. */
3712 gfc_trans_scalarizing_loops (&loop, &body1);
3714 gfc_add_block_to_block (&body, &loop.pre);
3715 gfc_add_block_to_block (&body, &loop.post);
3717 gfc_cleanup_loop (&loop);
3718 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3719 as tree nodes in SS may not be valid in a different scope. */
3722 tmp1 = gfc_finish_block (&body);
3723 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3724 if (nested_forall_info != NULL)
3725 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3727 gfc_add_expr_to_block (block, tmp1);
3731 /* Translate an assignment statement in a WHERE statement or WHERE
3732 construct. The MASK expression is used to control which elements
3733 of EXPR1 shall be assigned. The sense of MASK is specified by
3734 INVERT. */
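/* Schematically (a sketch of the scalarized loop generated below):
     for each element:
       if (mask[count1])        (or .NOT. mask[count1] when INVERT)
         expr1 = expr2;
       count1++;
   with an extra pass through a temporary, indexed by count2, when the
   scalarizer decides a temporary is needed (loop.temp_ss != NULL).  */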
3736 static tree
3737 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3738 tree mask, bool invert,
3739 tree count1, tree count2,
3740 gfc_code *cnext)
3742 gfc_se lse;
3743 gfc_se rse;
3744 gfc_ss *lss;
3745 gfc_ss *lss_section;
3746 gfc_ss *rss;
3748 gfc_loopinfo loop;
3749 tree tmp;
3750 stmtblock_t block;
3751 stmtblock_t body;
3752 tree index, maskexpr;
3754 /* A defined assignment. */
3755 if (cnext && cnext->resolved_sym)
3756 return gfc_trans_call (cnext, true, mask, count1, invert);
3758 #if 0
3759 /* TODO: handle this special case.
3760 Special case a single function returning an array. */
3761 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3763 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3764 if (tmp)
3765 return tmp;
3767 #endif
3769 /* Assignment of the form lhs = rhs. */
3770 gfc_start_block (&block);
3772 gfc_init_se (&lse, NULL);
3773 gfc_init_se (&rse, NULL);
3775 /* Walk the lhs. */
3776 lss = gfc_walk_expr (expr1);
3777 rss = NULL;
3779 /* In each where-assign-stmt, the mask-expr and the variable being
3780 defined shall be arrays of the same shape. */
3781 gcc_assert (lss != gfc_ss_terminator);
3783 /* The assignment needs scalarization. */
3784 lss_section = lss;
3786 /* Find a non-scalar SS from the lhs. */
3787 while (lss_section != gfc_ss_terminator
3788 && lss_section->type != GFC_SS_SECTION)
3789 lss_section = lss_section->next;
3791 gcc_assert (lss_section != gfc_ss_terminator);
3793 /* Initialize the scalarizer. */
3794 gfc_init_loopinfo (&loop);
3796 /* Walk the rhs. */
3797 rss = gfc_walk_expr (expr2);
3798 if (rss == gfc_ss_terminator)
3800 /* The rhs is scalar. Add a ss for the expression. */
3801 rss = gfc_get_ss ();
3802 rss->where = 1;
3803 rss->next = gfc_ss_terminator;
3804 rss->type = GFC_SS_SCALAR;
3805 rss->expr = expr2;
3808 /* Associate the SS with the loop. */
3809 gfc_add_ss_to_loop (&loop, lss);
3810 gfc_add_ss_to_loop (&loop, rss);
3812 /* Calculate the bounds of the scalarization. */
3813 gfc_conv_ss_startstride (&loop);
3815 /* Resolve any data dependencies in the statement. */
3816 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3818 /* Setup the scalarizing loops. */
3819 gfc_conv_loop_setup (&loop, &expr2->where);
3821 /* Setup the gfc_se structures. */
3822 gfc_copy_loopinfo_to_se (&lse, &loop);
3823 gfc_copy_loopinfo_to_se (&rse, &loop);
3825 rse.ss = rss;
3826 gfc_mark_ss_chain_used (rss, 1);
3827 if (loop.temp_ss == NULL)
3829 lse.ss = lss;
3830 gfc_mark_ss_chain_used (lss, 1);
3832 else
3834 lse.ss = loop.temp_ss;
3835 gfc_mark_ss_chain_used (lss, 3);
3836 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3839 /* Start the scalarized loop body. */
3840 gfc_start_scalarized_body (&loop, &body);
3842 /* Translate the expression. */
3843 gfc_conv_expr (&rse, expr2);
3844 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3845 gfc_conv_tmp_array_ref (&lse);
3846 else
3847 gfc_conv_expr (&lse, expr1);
3849 /* Form the mask expression according to the mask. */
3850 index = count1;
3851 maskexpr = gfc_build_array_ref (mask, index, NULL);
3852 if (invert)
3853 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3854 TREE_TYPE (maskexpr), maskexpr);
3856 /* Use the scalar assignment as is. */
3857 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3858 loop.temp_ss != NULL, false, true);
3860 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));
3862 gfc_add_expr_to_block (&body, tmp);
3864 if (lss == gfc_ss_terminator)
3866 /* Increment count1. */
3867 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3868 count1, gfc_index_one_node);
3869 gfc_add_modify (&body, count1, tmp);
3871 /* Use the scalar assignment as is. */
3872 gfc_add_block_to_block (&block, &body);
3874 else
3876 gcc_assert (lse.ss == gfc_ss_terminator
3877 && rse.ss == gfc_ss_terminator);
3879 if (loop.temp_ss != NULL)
3881 /* Increment count1 before finishing the main body of a scalarized
3882 expression. */
3883 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3884 gfc_array_index_type, count1, gfc_index_one_node);
3885 gfc_add_modify (&body, count1, tmp);
3886 gfc_trans_scalarized_loop_boundary (&loop, &body);
3888 /* We need to copy the temporary to the actual lhs. */
3889 gfc_init_se (&lse, NULL);
3890 gfc_init_se (&rse, NULL);
3891 gfc_copy_loopinfo_to_se (&lse, &loop);
3892 gfc_copy_loopinfo_to_se (&rse, &loop);
3894 rse.ss = loop.temp_ss;
3895 lse.ss = lss;
3897 gfc_conv_tmp_array_ref (&rse);
3898 gfc_conv_expr (&lse, expr1);
3900 gcc_assert (lse.ss == gfc_ss_terminator
3901 && rse.ss == gfc_ss_terminator);
3903 /* Form the mask expression according to the mask tree list. */
3904 index = count2;
3905 maskexpr = gfc_build_array_ref (mask, index, NULL);
3906 if (invert)
3907 maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
3908 TREE_TYPE (maskexpr), maskexpr);
3910 /* Use the scalar assignment as is. */
3911 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
3912 true);
3913 tmp = build3_v (COND_EXPR, maskexpr, tmp,
3914 build_empty_stmt (input_location));
3915 gfc_add_expr_to_block (&body, tmp);
3917 /* Increment count2. */
3918 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3919 gfc_array_index_type, count2,
3920 gfc_index_one_node);
3921 gfc_add_modify (&body, count2, tmp);
3923 else
3925 /* Increment count1. */
3926 tmp = fold_build2_loc (input_location, PLUS_EXPR,
3927 gfc_array_index_type, count1,
3928 gfc_index_one_node);
3929 gfc_add_modify (&body, count1, tmp);
3932 /* Generate the copying loops. */
3933 gfc_trans_scalarizing_loops (&loop, &body);
3935 /* Wrap the whole thing up. */
3936 gfc_add_block_to_block (&block, &loop.pre);
3937 gfc_add_block_to_block (&block, &loop.post);
3938 gfc_cleanup_loop (&loop);
3941 return gfc_finish_block (&block);
3945 /* Translate the WHERE construct or statement.
3946 This function can be called iteratively to translate the nested WHERE
3947 construct or statement.
3948 MASK is the control mask. */
3950 static void
3951 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3952 forall_info * nested_forall_info, stmtblock_t * block)
3954 stmtblock_t inner_size_body;
3955 tree inner_size, size;
3956 gfc_ss *lss, *rss;
3957 tree mask_type;
3958 gfc_expr *expr1;
3959 gfc_expr *expr2;
3960 gfc_code *cblock;
3961 gfc_code *cnext;
3962 tree tmp;
3963 tree cond;
3964 tree count1, count2;
3965 bool need_cmask;
3966 bool need_pmask;
3967 int need_temp;
3968 tree pcmask = NULL_TREE;
3969 tree ppmask = NULL_TREE;
3970 tree cmask = NULL_TREE;
3971 tree pmask = NULL_TREE;
3972 gfc_actual_arglist *arg;
3974 /* The WHERE statement, or the first clause of a WHERE construct. */
3975 cblock = code->block;
3977 /* As the mask array can be very big, prefer compact boolean types. */
3978 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3980 /* Determine which temporary masks are needed. */
3981 if (!cblock->block)
3983 /* One clause: No ELSEWHEREs. */
3984 need_cmask = (cblock->next != 0);
3985 need_pmask = false;
3987 else if (cblock->block->block)
3989 /* Three or more clauses: Conditional ELSEWHEREs. */
3990 need_cmask = true;
3991 need_pmask = true;
3993 else if (cblock->next)
3995 /* Two clauses, the first non-empty. */
3996 need_cmask = true;
3997 need_pmask = (mask != NULL_TREE
3998 && cblock->block->next != 0);
4000 else if (!cblock->block->next)
4002 /* Two clauses, both empty. */
4003 need_cmask = false;
4004 need_pmask = false;
4006 /* Two clauses, the first empty, the second non-empty. */
4007 else if (mask)
4009 need_cmask = (cblock->block->expr1 != 0);
4010 need_pmask = true;
4012 else
4014 need_cmask = true;
4015 need_pmask = false;
4018 if (need_cmask || need_pmask)
4021 /* Calculate the size of the temporary needed by the mask-expr. */
4021 gfc_init_block (&inner_size_body);
4022 inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
4023 &inner_size_body, &lss, &rss);
4025 gfc_free_ss_chain (lss);
4026 gfc_free_ss_chain (rss);
4029 /* Calculate the total size of the temporary needed. */
4029 size = compute_overall_iter_number (nested_forall_info, inner_size,
4030 &inner_size_body, block);
4033 /* If the size is negative, use zero instead. */
4033 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
4034 gfc_index_zero_node);
4035 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
4036 cond, gfc_index_zero_node, size);
4037 size = gfc_evaluate_now (size, block);
4039 /* Allocate temporary for WHERE mask if needed. */
4040 if (need_cmask)
4041 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4042 &pcmask);
4044 /* Allocate temporary for !mask if needed. */
4045 if (need_pmask)
4046 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
4047 &ppmask);
4050 while (cblock)
4052 /* Each time around this loop, the where clause is conditional
4053 on the value of mask and invert, which are updated at the
4054 bottom of the loop. */
4056 /* Has mask-expr. */
4057 if (cblock->expr1)
4059 /* Ensure that the WHERE mask will be evaluated exactly once.
4060 If there are no statements in this WHERE/ELSEWHERE clause,
4061 then we don't need to update the control mask (cmask).
4062 If this is the last clause of the WHERE construct, then
4063 we don't need to update the pending control mask (pmask). */
4064 if (mask)
4065 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4066 mask, invert,
4067 cblock->next ? cmask : NULL_TREE,
4068 cblock->block ? pmask : NULL_TREE,
4069 mask_type, block);
4070 else
4071 gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
4072 NULL_TREE, false,
4073 (cblock->next || cblock->block)
4074 ? cmask : NULL_TREE,
4075 NULL_TREE, mask_type, block);
4077 invert = false;
4079 /* It's a final elsewhere-stmt. No mask-expr is present. */
4080 else
4081 cmask = mask;
4083 /* The body of this where clause is controlled by cmask, with the
4084 sense specified by invert. */
4086 /* Get the assignment statement of a WHERE statement, or the first
4087 statement in the where-body-construct of a WHERE construct. */
4088 cnext = cblock->next;
4089 while (cnext)
4091 switch (cnext->op)
4093 /* WHERE assignment statement. */
4094 case EXEC_ASSIGN_CALL:
4096 arg = cnext->ext.actual;
4097 expr1 = expr2 = NULL;
4098 for (; arg; arg = arg->next)
4100 if (!arg->expr)
4101 continue;
4102 if (expr1 == NULL)
4103 expr1 = arg->expr;
4104 else
4105 expr2 = arg->expr;
4107 goto evaluate;
4109 case EXEC_ASSIGN:
4110 expr1 = cnext->expr1;
4111 expr2 = cnext->expr2;
4112 evaluate:
4113 if (nested_forall_info != NULL)
4115 need_temp = gfc_check_dependency (expr1, expr2, 0);
4116 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
4117 gfc_trans_assign_need_temp (expr1, expr2,
4118 cmask, invert,
4119 nested_forall_info, block);
4120 else
4122 /* Variables to control maskexpr. */
4123 count1 = gfc_create_var (gfc_array_index_type, "count1");
4124 count2 = gfc_create_var (gfc_array_index_type, "count2");
4125 gfc_add_modify (block, count1, gfc_index_zero_node);
4126 gfc_add_modify (block, count2, gfc_index_zero_node);
4128 tmp = gfc_trans_where_assign (expr1, expr2,
4129 cmask, invert,
4130 count1, count2,
4131 cnext);
4133 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
4134 tmp, 1);
4135 gfc_add_expr_to_block (block, tmp);
4138 else
4140 /* Variables to control maskexpr. */
4141 count1 = gfc_create_var (gfc_array_index_type, "count1");
4142 count2 = gfc_create_var (gfc_array_index_type, "count2");
4143 gfc_add_modify (block, count1, gfc_index_zero_node);
4144 gfc_add_modify (block, count2, gfc_index_zero_node);
4146 tmp = gfc_trans_where_assign (expr1, expr2,
4147 cmask, invert,
4148 count1, count2,
4149 cnext);
4150 gfc_add_expr_to_block (block, tmp);
4153 break;
4155 /* WHERE or WHERE construct is part of a where-body-construct. */
4156 case EXEC_WHERE:
4157 gfc_trans_where_2 (cnext, cmask, invert,
4158 nested_forall_info, block);
4159 break;
4161 default:
4162 gcc_unreachable ();
4165 /* The next statement within the same where-body-construct. */
4166 cnext = cnext->next;
4168 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4169 cblock = cblock->block;
4170 if (mask == NULL_TREE)
4172 /* If we're the initial WHERE, we can simply invert the sense
4173 of the current mask to obtain the "mask" for the remaining
4174 ELSEWHEREs. */
4175 invert = true;
4176 mask = cmask;
4178 else
4180 /* Otherwise, for nested WHEREs we need to use the pending mask. */
4181 invert = false;
4182 mask = pmask;
4186 /* If we allocated a pending mask array, deallocate it now. */
4187 if (ppmask)
4189 tmp = gfc_call_free (ppmask);
4190 gfc_add_expr_to_block (block, tmp);
4193 /* If we allocated a current mask array, deallocate it now. */
4194 if (pcmask)
4196 tmp = gfc_call_free (pcmask);
4197 gfc_add_expr_to_block (block, tmp);
4201 /* Translate a simple WHERE construct or statement without dependencies.
4202 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR1
4203 is the mask condition, and EBLOCK, if non-NULL, is the "else" clause.
4204 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
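/* Rough sketch of the intent, not the exact generated code: with no
   dependencies there is no need for a mask temporary, so both clauses
   are fused into a single scalarized loop of the form

       DO i = ...
         IF (cond(i)) THEN
           tdst(i) = tsrc(i)
         ELSE
           edst(i) = esrc(i)
         END IF
       END DO

   i.e. one COND_EXPR per element rather than the two masked passes
   that gfc_trans_where_2 would produce.  */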
4206 static tree
4207 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
4209 stmtblock_t block, body;
4210 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
4211 tree tmp, cexpr, tstmt, estmt;
4212 gfc_ss *css, *tdss, *tsss;
4213 gfc_se cse, tdse, tsse, edse, esse;
4214 gfc_loopinfo loop;
4215 gfc_ss *edss = 0;
4216 gfc_ss *esss = 0;
4218 /* Allow the scalarizer to workshare simple where loops. */
4219 if (ompws_flags & OMPWS_WORKSHARE_FLAG)
4220 ompws_flags |= OMPWS_SCALARIZER_WS;
4222 cond = cblock->expr1;
4223 tdst = cblock->next->expr1;
4224 tsrc = cblock->next->expr2;
4225 edst = eblock ? eblock->next->expr1 : NULL;
4226 esrc = eblock ? eblock->next->expr2 : NULL;
4228 gfc_start_block (&block);
4229 gfc_init_loopinfo (&loop);
4231 /* Handle the condition. */
4232 gfc_init_se (&cse, NULL);
4233 css = gfc_walk_expr (cond);
4234 gfc_add_ss_to_loop (&loop, css);
4236 /* Handle the then-clause. */
4237 gfc_init_se (&tdse, NULL);
4238 gfc_init_se (&tsse, NULL);
4239 tdss = gfc_walk_expr (tdst);
4240 tsss = gfc_walk_expr (tsrc);
4241 if (tsss == gfc_ss_terminator)
4243 tsss = gfc_get_ss ();
4244 tsss->where = 1;
4245 tsss->next = gfc_ss_terminator;
4246 tsss->type = GFC_SS_SCALAR;
4247 tsss->expr = tsrc;
4249 gfc_add_ss_to_loop (&loop, tdss);
4250 gfc_add_ss_to_loop (&loop, tsss);
4252 if (eblock)
4254 /* Handle the else clause. */
4255 gfc_init_se (&edse, NULL);
4256 gfc_init_se (&esse, NULL);
4257 edss = gfc_walk_expr (edst);
4258 esss = gfc_walk_expr (esrc);
4259 if (esss == gfc_ss_terminator)
4261 esss = gfc_get_ss ();
4262 esss->where = 1;
4263 esss->next = gfc_ss_terminator;
4264 esss->type = GFC_SS_SCALAR;
4265 esss->expr = esrc;
4267 gfc_add_ss_to_loop (&loop, edss);
4268 gfc_add_ss_to_loop (&loop, esss);
4271 gfc_conv_ss_startstride (&loop);
4272 gfc_conv_loop_setup (&loop, &tdst->where);
4274 gfc_mark_ss_chain_used (css, 1);
4275 gfc_mark_ss_chain_used (tdss, 1);
4276 gfc_mark_ss_chain_used (tsss, 1);
4277 if (eblock)
4279 gfc_mark_ss_chain_used (edss, 1);
4280 gfc_mark_ss_chain_used (esss, 1);
4283 gfc_start_scalarized_body (&loop, &body);
4285 gfc_copy_loopinfo_to_se (&cse, &loop);
4286 gfc_copy_loopinfo_to_se (&tdse, &loop);
4287 gfc_copy_loopinfo_to_se (&tsse, &loop);
4288 cse.ss = css;
4289 tdse.ss = tdss;
4290 tsse.ss = tsss;
4291 if (eblock)
4293 gfc_copy_loopinfo_to_se (&edse, &loop);
4294 gfc_copy_loopinfo_to_se (&esse, &loop);
4295 edse.ss = edss;
4296 esse.ss = esss;
4299 gfc_conv_expr (&cse, cond);
4300 gfc_add_block_to_block (&body, &cse.pre);
4301 cexpr = cse.expr;
4303 gfc_conv_expr (&tsse, tsrc);
4304 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
4305 gfc_conv_tmp_array_ref (&tdse);
4306 else
4307 gfc_conv_expr (&tdse, tdst);
4309 if (eblock)
4311 gfc_conv_expr (&esse, esrc);
4312 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
4313 gfc_conv_tmp_array_ref (&edse);
4314 else
4315 gfc_conv_expr (&edse, edst);
4318 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
4319 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
4320 false, true)
4321 : build_empty_stmt (input_location);
4322 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
4323 gfc_add_expr_to_block (&body, tmp);
4324 gfc_add_block_to_block (&body, &cse.post);
4326 gfc_trans_scalarizing_loops (&loop, &body);
4327 gfc_add_block_to_block (&block, &loop.pre);
4328 gfc_add_block_to_block (&block, &loop.post);
4329 gfc_cleanup_loop (&loop);
4331 return gfc_finish_block (&block);
4334 /* As the WHERE or WHERE construct statement can be nested, we call
4335 gfc_trans_where_2 to do the translation, and pass the initial
4336 NULL values for both the control mask and the pending control mask. */
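/* Note: a WHERE (or WHERE/ELSEWHERE pair) consisting of single
   assignments that pass the dependency checks below is diverted to the
   cheaper gfc_trans_where_3 code path; everything else falls through to
   gfc_trans_where_2 with NULL initial masks.  */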
4338 tree
4339 gfc_trans_where (gfc_code * code)
4341 stmtblock_t block;
4342 gfc_code *cblock;
4343 gfc_code *eblock;
4345 cblock = code->block;
4346 if (cblock->next
4347 && cblock->next->op == EXEC_ASSIGN
4348 && !cblock->next->next)
4350 eblock = cblock->block;
4351 if (!eblock)
4353 /* A simple "WHERE (cond) x = y" statement or block is
4354 dependence free if cond is not dependent upon writing x,
4355 and the source y is unaffected by the destination x. */
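/* For example (purely illustrative), "WHERE (a > 0) b = a" can take the
   fast path when B and A cannot alias, whereas something like
   "WHERE (a(2:n) > 0) a(1:n-1) = a(2:n)" fails the checks, because the
   destination overlaps both the mask and the source, and falls back to
   the masked translation done by gfc_trans_where_2.  */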
4356 if (!gfc_check_dependency (cblock->next->expr1,
4357 cblock->expr1, 0)
4358 && !gfc_check_dependency (cblock->next->expr1,
4359 cblock->next->expr2, 0))
4360 return gfc_trans_where_3 (cblock, NULL);
4362 else if (!eblock->expr1
4363 && !eblock->block
4364 && eblock->next
4365 && eblock->next->op == EXEC_ASSIGN
4366 && !eblock->next->next)
4368 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4369 block is dependence free if cond is not dependent on writes
4370 to x1 and x2, y1 is not dependent on writes to x2, and y2
4371 is not dependent on writes to x1, and neither y is
4372 dependent upon its own x. In addition to this, the
4373 final two dependency checks below exclude all but the same
4374 array reference if the WHERE and ELSEWHERE destinations
4375 are the same. In short, this is VERY conservative, which
4376 is needed because the two loops required by the standard
4377 are coalesced in gfc_trans_where_3. */
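/* Purely illustrative example:

       WHERE (c > 0)
         a = c
       ELSEWHERE
         b = c
       END WHERE

   with distinct, non-overlapping A, B and C passes all of the checks,
   so both assignments share a single loop.  The two destinations may
   also be the identical array reference, but any other overlap between
   them forces the general translation.  */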
4378 if (!gfc_check_dependency(cblock->next->expr1,
4379 cblock->expr1, 0)
4380 && !gfc_check_dependency(eblock->next->expr1,
4381 cblock->expr1, 0)
4382 && !gfc_check_dependency(cblock->next->expr1,
4383 eblock->next->expr2, 1)
4384 && !gfc_check_dependency(eblock->next->expr1,
4385 cblock->next->expr2, 1)
4386 && !gfc_check_dependency(cblock->next->expr1,
4387 cblock->next->expr2, 1)
4388 && !gfc_check_dependency(eblock->next->expr1,
4389 eblock->next->expr2, 1)
4390 && !gfc_check_dependency(cblock->next->expr1,
4391 eblock->next->expr1, 0)
4392 && !gfc_check_dependency(eblock->next->expr1,
4393 cblock->next->expr1, 0))
4394 return gfc_trans_where_3 (cblock, eblock);
4398 gfc_start_block (&block);
4400 gfc_trans_where_2 (code, NULL, false, NULL, &block);
4402 return gfc_finish_block (&block);
4406 /* CYCLE a DO loop. The label decl has already been created by
4407 gfc_trans_do(); it is the cycle_label of the gfc_code node for the
4408 loop being cycled. We must mark the label as used. */
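/* For example, "IF (skip) CYCLE" inside a DO loop simply becomes a GOTO
   to this cycle label; gfc_trans_do emits the label at the end of the
   loop body, so control resumes with the next iteration.  */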
4410 tree
4411 gfc_trans_cycle (gfc_code * code)
4413 tree cycle_label;
4415 cycle_label = code->ext.which_construct->cycle_label;
4416 gcc_assert (cycle_label);
4418 TREE_USED (cycle_label) = 1;
4419 return build1_v (GOTO_EXPR, cycle_label);
4423 /* EXIT a DO loop. Similar to CYCLE, but now the label is the
4424 exit_label of the gfc_code node for the loop being exited. */
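/* For example, "IF (done) EXIT" simply becomes a GOTO to this exit
   label, which gfc_trans_do emits immediately after the loop.  */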
4427 tree
4428 gfc_trans_exit (gfc_code * code)
4430 tree exit_label;
4432 exit_label = code->ext.which_construct->exit_label;
4433 gcc_assert (exit_label);
4435 TREE_USED (exit_label) = 1;
4436 return build1_v (GOTO_EXPR, exit_label);
4440 /* Translate the ALLOCATE statement. */
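/* Rough sketch only (the names ISTAT/EMSG are just placeholders): for
   "ALLOCATE (x(n), STAT=istat, ERRMSG=emsg)" each object in the list is
   allocated through the library allocation helpers with a pointer to a
   local STAT variable; if that variable is non-zero, control jumps to a
   shared error label, after which the STAT block copies the status into
   ISTAT and the ERRMSG block copies a message into EMSG.  SOURCE=/MOLD=
   expressions, deferred character lengths and CLASS objects get
   additional size, initialization and vptr handling below.  */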
4442 tree
4443 gfc_trans_allocate (gfc_code * code)
4445 gfc_alloc *al;
4446 gfc_expr *expr;
4447 gfc_se se;
4448 tree tmp;
4449 tree parm;
4450 tree stat;
4451 tree pstat;
4452 tree error_label;
4453 tree memsz;
4454 tree expr3;
4455 tree slen3;
4456 stmtblock_t block;
4457 stmtblock_t post;
4458 gfc_expr *sz;
4459 gfc_se se_sz;
4461 if (!code->ext.alloc.list)
4462 return NULL_TREE;
4464 pstat = stat = error_label = tmp = memsz = NULL_TREE;
4466 gfc_init_block (&block);
4467 gfc_init_block (&post);
4469 /* STAT= and/or ERRMSG= is present. */
4470 if (code->expr1 || code->expr2)
4472 tree gfc_int4_type_node = gfc_get_int_type (4);
4474 stat = gfc_create_var (gfc_int4_type_node, "stat");
4475 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4477 error_label = gfc_build_label_decl (NULL_TREE);
4478 TREE_USED (error_label) = 1;
4481 expr3 = NULL_TREE;
4482 slen3 = NULL_TREE;
4484 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4486 expr = gfc_copy_expr (al->expr);
4488 if (expr->ts.type == BT_CLASS)
4489 gfc_add_data_component (expr);
4491 gfc_init_se (&se, NULL);
4493 se.want_pointer = 1;
4494 se.descriptor_only = 1;
4495 gfc_conv_expr (&se, expr);
4497 if (!gfc_array_allocate (&se, expr, pstat))
4499 /* A scalar or derived type. */
4501 /* Determine allocate size. */
4502 if (al->expr->ts.type == BT_CLASS && code->expr3)
4504 if (code->expr3->ts.type == BT_CLASS)
4506 sz = gfc_copy_expr (code->expr3);
4507 gfc_add_vptr_component (sz);
4508 gfc_add_size_component (sz);
4509 gfc_init_se (&se_sz, NULL);
4510 gfc_conv_expr (&se_sz, sz);
4511 gfc_free_expr (sz);
4512 memsz = se_sz.expr;
4514 else
4515 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
4517 else if (al->expr->ts.type == BT_CHARACTER
4518 && al->expr->ts.deferred && code->expr3)
4520 if (!code->expr3->ts.u.cl->backend_decl)
4522 /* Convert and use the length expression. */
4523 gfc_init_se (&se_sz, NULL);
4524 if (code->expr3->expr_type == EXPR_VARIABLE
4525 || code->expr3->expr_type == EXPR_CONSTANT)
4527 gfc_conv_expr (&se_sz, code->expr3);
4528 memsz = se_sz.string_length;
4530 else if (code->expr3->mold
4531 && code->expr3->ts.u.cl
4532 && code->expr3->ts.u.cl->length)
4534 gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
4535 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4536 se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
4537 gfc_add_block_to_block (&se.pre, &se_sz.post);
4538 memsz = se_sz.expr;
4540 else
4542 /* This would be inefficient and could possibly
4543 generate wrong code if the result were not stored
4544 in expr3/slen3. */
4545 if (slen3 == NULL_TREE)
4547 gfc_conv_expr (&se_sz, code->expr3);
4548 gfc_add_block_to_block (&se.pre, &se_sz.pre);
4549 expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
4550 gfc_add_block_to_block (&post, &se_sz.post);
4551 slen3 = gfc_evaluate_now (se_sz.string_length,
4552 &se.pre);
4554 memsz = slen3;
4557 else
4558 /* Otherwise use the stored string length. */
4559 memsz = code->expr3->ts.u.cl->backend_decl;
4560 tmp = al->expr->ts.u.cl->backend_decl;
4562 /* Store the string length. */
4563 if (tmp && TREE_CODE (tmp) == VAR_DECL)
4564 gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
4565 memsz));
4567 /* Convert to size in bytes, using the character KIND. */
4568 tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
4569 tmp = TYPE_SIZE_UNIT (tmp);
4570 memsz = fold_build2_loc (input_location, MULT_EXPR,
4571 TREE_TYPE (tmp), tmp,
4572 fold_convert (TREE_TYPE (tmp), memsz));
4574 else if (code->ext.alloc.ts.type != BT_UNKNOWN)
4575 memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
4576 else
4577 memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
4579 if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
4581 memsz = se.string_length;
4583 /* Convert to size in bytes, using the character KIND. */
4584 tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
4585 tmp = TYPE_SIZE_UNIT (tmp);
4586 memsz = fold_build2_loc (input_location, MULT_EXPR,
4587 TREE_TYPE (tmp), tmp,
4588 fold_convert (TREE_TYPE (tmp), memsz));
4591 /* Allocate - for non-pointers with re-alloc checking. */
4592 if (gfc_expr_attr (expr).allocatable)
4593 tmp = gfc_allocate_array_with_status (&se.pre, se.expr, memsz,
4594 pstat, expr);
4595 else
4596 tmp = gfc_allocate_with_status (&se.pre, memsz, pstat);
4598 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4599 se.expr,
4600 fold_convert (TREE_TYPE (se.expr), tmp));
4601 gfc_add_expr_to_block (&se.pre, tmp);
4603 if (code->expr1 || code->expr2)
4605 tmp = build1_v (GOTO_EXPR, error_label);
4606 parm = fold_build2_loc (input_location, NE_EXPR,
4607 boolean_type_node, stat,
4608 build_int_cst (TREE_TYPE (stat), 0));
4609 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
4610 parm, tmp,
4611 build_empty_stmt (input_location));
4612 gfc_add_expr_to_block (&se.pre, tmp);
4615 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4617 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4618 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
4619 gfc_add_expr_to_block (&se.pre, tmp);
4623 gfc_add_block_to_block (&block, &se.pre);
4625 if (code->expr3 && !code->expr3->mold)
4627 /* Initialization via SOURCE block
4628 (or static default initializer). */
4629 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4630 if (al->expr->ts.type == BT_CLASS)
4632 gfc_se call;
4633 gfc_actual_arglist *actual;
4634 gfc_expr *ppc;
4635 gfc_init_se (&call, NULL);
4636 /* Do a polymorphic deep copy. */
4637 actual = gfc_get_actual_arglist ();
4638 actual->expr = gfc_copy_expr (rhs);
4639 if (rhs->ts.type == BT_CLASS)
4640 gfc_add_data_component (actual->expr);
4641 actual->next = gfc_get_actual_arglist ();
4642 actual->next->expr = gfc_copy_expr (al->expr);
4643 gfc_add_data_component (actual->next->expr);
4644 if (rhs->ts.type == BT_CLASS)
4646 ppc = gfc_copy_expr (rhs);
4647 gfc_add_vptr_component (ppc);
4649 else
4650 ppc = gfc_lval_expr_from_sym (gfc_find_derived_vtab (rhs->ts.u.derived));
4651 gfc_add_component_ref (ppc, "_copy");
4652 gfc_conv_procedure_call (&call, ppc->symtree->n.sym, actual,
4653 ppc, NULL);
4654 gfc_add_expr_to_block (&call.pre, call.expr);
4655 gfc_add_block_to_block (&call.pre, &call.post);
4656 tmp = gfc_finish_block (&call.pre);
4658 else if (expr3 != NULL_TREE)
4660 tmp = build_fold_indirect_ref_loc (input_location, se.expr);
4661 gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
4662 slen3, expr3, code->expr3->ts.kind);
4663 tmp = NULL_TREE;
4665 else
4667 /* Switch off automatic reallocation since we have just done
4668 the ALLOCATE. */
4669 int realloc_lhs = gfc_option.flag_realloc_lhs;
4670 gfc_option.flag_realloc_lhs = 0;
4671 tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
4672 rhs, false, false);
4673 gfc_option.flag_realloc_lhs = realloc_lhs;
4675 gfc_free_expr (rhs);
4676 gfc_add_expr_to_block (&block, tmp);
4678 else if (code->expr3 && code->expr3->mold
4679 && code->expr3->ts.type == BT_CLASS)
4681 /* Default-initialization via MOLD (polymorphic). */
4682 gfc_expr *rhs = gfc_copy_expr (code->expr3);
4683 gfc_se dst,src;
4684 gfc_add_vptr_component (rhs);
4685 gfc_add_def_init_component (rhs);
4686 gfc_init_se (&dst, NULL);
4687 gfc_init_se (&src, NULL);
4688 gfc_conv_expr (&dst, expr);
4689 gfc_conv_expr (&src, rhs);
4690 gfc_add_block_to_block (&block, &src.pre);
4691 tmp = gfc_build_memcpy_call (dst.expr, src.expr, memsz);
4692 gfc_add_expr_to_block (&block, tmp);
4693 gfc_free_expr (rhs);
4696 /* Allocation of CLASS entities. */
4697 gfc_free_expr (expr);
4698 expr = al->expr;
4699 if (expr->ts.type == BT_CLASS)
4701 gfc_expr *lhs,*rhs;
4702 gfc_se lse;
4704 /* Initialize VPTR for CLASS objects. */
4705 lhs = gfc_expr_to_initialize (expr);
4706 gfc_add_vptr_component (lhs);
4707 rhs = NULL;
4708 if (code->expr3 && code->expr3->ts.type == BT_CLASS)
4710 /* Polymorphic SOURCE: VPTR must be determined at run time. */
4711 rhs = gfc_copy_expr (code->expr3);
4712 gfc_add_vptr_component (rhs);
4713 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4714 gfc_add_expr_to_block (&block, tmp);
4715 gfc_free_expr (rhs);
4717 else
4719 /* VPTR is fixed at compile time. */
4720 gfc_symbol *vtab;
4721 gfc_typespec *ts;
4722 if (code->expr3)
4723 ts = &code->expr3->ts;
4724 else if (expr->ts.type == BT_DERIVED)
4725 ts = &expr->ts;
4726 else if (code->ext.alloc.ts.type == BT_DERIVED)
4727 ts = &code->ext.alloc.ts;
4728 else if (expr->ts.type == BT_CLASS)
4729 ts = &CLASS_DATA (expr)->ts;
4730 else
4731 ts = &expr->ts;
4733 if (ts->type == BT_DERIVED)
4735 vtab = gfc_find_derived_vtab (ts->u.derived);
4736 gcc_assert (vtab);
4737 gfc_init_se (&lse, NULL);
4738 lse.want_pointer = 1;
4739 gfc_conv_expr (&lse, lhs);
4740 tmp = gfc_build_addr_expr (NULL_TREE,
4741 gfc_get_symbol_decl (vtab));
4742 gfc_add_modify (&block, lse.expr,
4743 fold_convert (TREE_TYPE (lse.expr), tmp));
4746 gfc_free_expr (lhs);
4751 /* STAT block. */
4752 if (code->expr1)
4754 tmp = build1_v (LABEL_EXPR, error_label);
4755 gfc_add_expr_to_block (&block, tmp);
4757 gfc_init_se (&se, NULL);
4758 gfc_conv_expr_lhs (&se, code->expr1);
4759 tmp = convert (TREE_TYPE (se.expr), stat);
4760 gfc_add_modify (&block, se.expr, tmp);
4763 /* ERRMSG block. */
4764 if (code->expr2)
4766 /* A better error message may be possible, but not required. */
4767 const char *msg = "Attempt to allocate an allocated object";
4768 tree errmsg, slen, dlen;
4770 gfc_init_se (&se, NULL);
4771 gfc_conv_expr_lhs (&se, code->expr2);
4773 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4775 gfc_add_modify (&block, errmsg,
4776 gfc_build_addr_expr (pchar_type_node,
4777 gfc_build_localized_cstring_const (msg)));
4779 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4780 dlen = gfc_get_expr_charlen (code->expr2);
4781 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
4782 slen);
4784 dlen = build_call_expr_loc (input_location,
4785 built_in_decls[BUILT_IN_MEMCPY], 3,
4786 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4788 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
4789 build_int_cst (TREE_TYPE (stat), 0));
4791 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4793 gfc_add_expr_to_block (&block, tmp);
4796 gfc_add_block_to_block (&block, &se.post);
4797 gfc_add_block_to_block (&block, &post);
4799 return gfc_finish_block (&block);
4803 /* Translate a DEALLOCATE statement. */
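/* Rough sketch only (ISTAT is just a placeholder name): for
   "DEALLOCATE (a, b, STAT=istat)" each object in the list is freed
   individually (gfc_array_deallocate for arrays, the scalar helper
   otherwise), the per-object status is accumulated into a running total
   (ASTAT below), and that total finally supplies the STAT= value and
   guards the ERRMSG= copy.  */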
4805 tree
4806 gfc_trans_deallocate (gfc_code *code)
4808 gfc_se se;
4809 gfc_alloc *al;
4810 tree apstat, astat, pstat, stat, tmp;
4811 stmtblock_t block;
4813 pstat = apstat = stat = astat = tmp = NULL_TREE;
4815 gfc_start_block (&block);
4817 /* Count the number of failed deallocations. If DEALLOCATE was
4818 called with STAT=, then set STAT to the count. If DEALLOCATE
4819 was called with ERRMSG=, then set ERRMSG to a string. */
4820 if (code->expr1 || code->expr2)
4822 tree gfc_int4_type_node = gfc_get_int_type (4);
4824 stat = gfc_create_var (gfc_int4_type_node, "stat");
4825 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4827 /* Running total of possible deallocation failures. */
4828 astat = gfc_create_var (gfc_int4_type_node, "astat");
4829 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4831 /* Initialize astat to 0. */
4832 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4835 for (al = code->ext.alloc.list; al != NULL; al = al->next)
4837 gfc_expr *expr = gfc_copy_expr (al->expr);
4838 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4840 if (expr->ts.type == BT_CLASS)
4841 gfc_add_data_component (expr);
4843 gfc_init_se (&se, NULL);
4844 gfc_start_block (&se.pre);
4846 se.want_pointer = 1;
4847 se.descriptor_only = 1;
4848 gfc_conv_expr (&se, expr);
4850 if (expr->rank)
4852 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp)
4854 gfc_ref *ref;
4855 gfc_ref *last = NULL;
4856 for (ref = expr->ref; ref; ref = ref->next)
4857 if (ref->type == REF_COMPONENT)
4858 last = ref;
4860 /* Do not deallocate the components of a derived type
4861 ultimate pointer component. */
4862 if (!(last && last->u.c.component->attr.pointer)
4863 && !(!last && expr->symtree->n.sym->attr.pointer))
4865 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
4866 expr->rank);
4867 gfc_add_expr_to_block (&se.pre, tmp);
4870 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4871 gfc_add_expr_to_block (&se.pre, tmp);
4873 else
4875 tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
4876 expr, expr->ts);
4877 gfc_add_expr_to_block (&se.pre, tmp);
4879 /* Set to zero after deallocation. */
4880 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
4881 se.expr,
4882 build_int_cst (TREE_TYPE (se.expr), 0));
4883 gfc_add_expr_to_block (&se.pre, tmp);
4885 if (al->expr->ts.type == BT_CLASS)
4887 /* Reset _vptr component to declared type. */
4888 gfc_expr *rhs, *lhs = gfc_copy_expr (al->expr);
4889 gfc_symbol *vtab = gfc_find_derived_vtab (al->expr->ts.u.derived);
4890 gfc_add_vptr_component (lhs);
4891 rhs = gfc_lval_expr_from_sym (vtab);
4892 tmp = gfc_trans_pointer_assignment (lhs, rhs);
4893 gfc_add_expr_to_block (&se.pre, tmp);
4894 gfc_free_expr (lhs);
4895 gfc_free_expr (rhs);
4899 /* Keep track of the number of failed deallocations by adding the stat
4900 of the last deallocation to the running total. */
4901 if (code->expr1 || code->expr2)
4903 apstat = fold_build2_loc (input_location, PLUS_EXPR,
4904 TREE_TYPE (stat), astat, stat);
4905 gfc_add_modify (&se.pre, astat, apstat);
4908 tmp = gfc_finish_block (&se.pre);
4909 gfc_add_expr_to_block (&block, tmp);
4910 gfc_free_expr (expr);
4913 /* Set STAT. */
4914 if (code->expr1)
4916 gfc_init_se (&se, NULL);
4917 gfc_conv_expr_lhs (&se, code->expr1);
4918 tmp = convert (TREE_TYPE (se.expr), astat);
4919 gfc_add_modify (&block, se.expr, tmp);
4922 /* Set ERRMSG. */
4923 if (code->expr2)
4925 /* A better error message may be possible, but not required. */
4926 const char *msg = "Attempt to deallocate an unallocated object";
4927 tree errmsg, slen, dlen;
4929 gfc_init_se (&se, NULL);
4930 gfc_conv_expr_lhs (&se, code->expr2);
4932 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4934 gfc_add_modify (&block, errmsg,
4935 gfc_build_addr_expr (pchar_type_node,
4936 gfc_build_localized_cstring_const (msg)));
4938 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4939 dlen = gfc_get_expr_charlen (code->expr2);
4940 slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
4941 slen);
4943 dlen = build_call_expr_loc (input_location,
4944 built_in_decls[BUILT_IN_MEMCPY], 3,
4945 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4947 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, astat,
4948 build_int_cst (TREE_TYPE (astat), 0));
4950 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));
4952 gfc_add_expr_to_block (&block, tmp);
4955 return gfc_finish_block (&block);
4958 #include "gt-fortran-trans-stmt.h"