1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
27 #include "coretypes.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
37 #include "dependency.h"
40 typedef struct iter_info
46 struct iter_info
*next
;
50 typedef struct forall_info
57 struct forall_info
*prev_nest
;
61 static void gfc_trans_where_2 (gfc_code
*, tree
, bool,
62 forall_info
*, stmtblock_t
*);
64 /* Translate a F95 label number to a LABEL_EXPR. */
67 gfc_trans_label_here (gfc_code
* code
)
69 return build1_v (LABEL_EXPR
, gfc_get_label_decl (code
->here
));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
78 gfc_conv_label_variable (gfc_se
* se
, gfc_expr
* expr
)
80 gcc_assert (expr
->symtree
->n
.sym
->attr
.assign
== 1);
81 gfc_conv_expr (se
, expr
);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se
->expr
) == COMPONENT_REF
)
84 se
->expr
= TREE_OPERAND (se
->expr
, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se
->expr
) == INDIRECT_REF
)
87 se
->expr
= TREE_OPERAND (se
->expr
, 0);
90 /* Translate a label assignment statement. */
93 gfc_trans_label_assign (gfc_code
* code
)
102 /* Start a new block. */
103 gfc_init_se (&se
, NULL
);
104 gfc_start_block (&se
.pre
);
105 gfc_conv_label_variable (&se
, code
->expr1
);
107 len
= GFC_DECL_STRING_LEN (se
.expr
);
108 addr
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
110 label_tree
= gfc_get_label_decl (code
->label1
);
112 if (code
->label1
->defined
== ST_LABEL_TARGET
)
114 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
115 len_tree
= integer_minus_one_node
;
119 gfc_expr
*format
= code
->label1
->format
;
121 label_len
= format
->value
.character
.length
;
122 len_tree
= build_int_cst (NULL_TREE
, label_len
);
123 label_tree
= gfc_build_wide_string_const (format
->ts
.kind
, label_len
+ 1,
124 format
->value
.character
.string
);
125 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
128 gfc_add_modify (&se
.pre
, len
, len_tree
);
129 gfc_add_modify (&se
.pre
, addr
, label_tree
);
131 return gfc_finish_block (&se
.pre
);
134 /* Translate a GOTO statement. */
137 gfc_trans_goto (gfc_code
* code
)
139 locus loc
= code
->loc
;
145 if (code
->label1
!= NULL
)
146 return build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
149 gfc_init_se (&se
, NULL
);
150 gfc_start_block (&se
.pre
);
151 gfc_conv_label_variable (&se
, code
->expr1
);
152 tmp
= GFC_DECL_STRING_LEN (se
.expr
);
153 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, tmp
,
154 build_int_cst (TREE_TYPE (tmp
), -1));
155 gfc_trans_runtime_check (true, false, tmp
, &se
.pre
, &loc
,
156 "Assigned label is not a target label");
158 assigned_goto
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
160 /* We're going to ignore a label list. It does not really change the
161 statement's semantics (because it is just a further restriction on
162 what's legal code); before, we were comparing label addresses here, but
163 that's a very fragile business and may break with optimization. So
166 target
= fold_build1_loc (input_location
, GOTO_EXPR
, void_type_node
,
168 gfc_add_expr_to_block (&se
.pre
, target
);
169 return gfc_finish_block (&se
.pre
);
173 /* Translate an ENTRY statement. Just adds a label for this entry point. */
175 gfc_trans_entry (gfc_code
* code
)
177 return build1_v (LABEL_EXPR
, code
->ext
.entry
->label
);
181 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
182 elemental subroutines. Make temporaries for output arguments if any such
183 dependencies are found. Output arguments are chosen because internal_unpack
184 can be used, as is, to copy the result back to the variable. */
186 gfc_conv_elemental_dependencies (gfc_se
* se
, gfc_se
* loopse
,
187 gfc_symbol
* sym
, gfc_actual_arglist
* arg
,
188 gfc_dep_check check_variable
)
190 gfc_actual_arglist
*arg0
;
192 gfc_formal_arglist
*formal
;
193 gfc_loopinfo tmp_loop
;
205 if (loopse
->ss
== NULL
)
210 formal
= sym
->formal
;
212 /* Loop over all the arguments testing for dependencies. */
213 for (; arg
!= NULL
; arg
= arg
->next
, formal
= formal
? formal
->next
: NULL
)
219 /* Obtain the info structure for the current argument. */
221 for (ss
= loopse
->ss
; ss
&& ss
!= gfc_ss_terminator
; ss
= ss
->next
)
225 info
= &ss
->data
.info
;
229 /* If there is a dependency, create a temporary and use it
230 instead of the variable. */
231 fsym
= formal
? formal
->sym
: NULL
;
232 if (e
->expr_type
== EXPR_VARIABLE
234 && fsym
->attr
.intent
!= INTENT_IN
235 && gfc_check_fncall_dependency (e
, fsym
->attr
.intent
,
236 sym
, arg0
, check_variable
))
238 tree initial
, temptype
;
239 stmtblock_t temp_post
;
241 /* Make a local loopinfo for the temporary creation, so that
242 none of the other ss->info's have to be renormalized. */
243 gfc_init_loopinfo (&tmp_loop
);
244 tmp_loop
.dimen
= info
->dimen
;
245 for (n
= 0; n
< info
->dimen
; n
++)
247 tmp_loop
.to
[n
] = loopse
->loop
->to
[n
];
248 tmp_loop
.from
[n
] = loopse
->loop
->from
[n
];
249 tmp_loop
.order
[n
] = loopse
->loop
->order
[n
];
252 /* Obtain the argument descriptor for unpacking. */
253 gfc_init_se (&parmse
, NULL
);
254 parmse
.want_pointer
= 1;
256 /* The scalarizer introduces some specific peculiarities when
257 handling elemental subroutines; the stride can be needed up to
258 the dim_array - 1, rather than dim_loop - 1 to calculate
259 offsets outside the loop. For this reason, we make sure that
260 the descriptor has the dimensionality of the array by converting
261 trailing elements into ranges with end = start. */
262 for (ref
= e
->ref
; ref
; ref
= ref
->next
)
263 if (ref
->type
== REF_ARRAY
&& ref
->u
.ar
.type
== AR_SECTION
)
268 bool seen_range
= false;
269 for (n
= 0; n
< ref
->u
.ar
.dimen
; n
++)
271 if (ref
->u
.ar
.dimen_type
[n
] == DIMEN_RANGE
)
275 || ref
->u
.ar
.dimen_type
[n
] != DIMEN_ELEMENT
)
278 ref
->u
.ar
.end
[n
] = gfc_copy_expr (ref
->u
.ar
.start
[n
]);
279 ref
->u
.ar
.dimen_type
[n
] = DIMEN_RANGE
;
283 gfc_conv_expr_descriptor (&parmse
, e
, gfc_walk_expr (e
));
284 gfc_add_block_to_block (&se
->pre
, &parmse
.pre
);
286 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
287 initialize the array temporary with a copy of the values. */
288 if (fsym
->attr
.intent
== INTENT_INOUT
289 || (fsym
->ts
.type
==BT_DERIVED
290 && fsym
->attr
.intent
== INTENT_OUT
))
291 initial
= parmse
.expr
;
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e (where
297 the type of e is that of the final reference, but parmse.expr's
298 type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype
= TREE_TYPE (parmse
.expr
); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype
) == POINTER_TYPE
);
303 temptype
= TREE_TYPE (temptype
);
304 temptype
= gfc_get_element_type (temptype
);
306 /* Generate the temporary. Cleaning up the temporary should be the
307 very last thing done, so we add the code to a new block and add it
308 to se->post as last instructions. */
309 size
= gfc_create_var (gfc_array_index_type
, NULL
);
310 data
= gfc_create_var (pvoid_type_node
, NULL
);
311 gfc_init_block (&temp_post
);
312 tmp
= gfc_trans_create_temp_array (&se
->pre
, &temp_post
,
313 &tmp_loop
, info
, temptype
,
317 gfc_add_modify (&se
->pre
, size
, tmp
);
318 tmp
= fold_convert (pvoid_type_node
, info
->data
);
319 gfc_add_modify (&se
->pre
, data
, tmp
);
321 /* Calculate the offset for the temporary. */
322 offset
= gfc_index_zero_node
;
323 for (n
= 0; n
< info
->dimen
; n
++)
325 tmp
= gfc_conv_descriptor_stride_get (info
->descriptor
,
327 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
328 gfc_array_index_type
,
329 loopse
->loop
->from
[n
], tmp
);
330 offset
= fold_build2_loc (input_location
, MINUS_EXPR
,
331 gfc_array_index_type
, offset
, tmp
);
333 info
->offset
= gfc_create_var (gfc_array_index_type
, NULL
);
334 gfc_add_modify (&se
->pre
, info
->offset
, offset
);
336 /* Copy the result back using unpack. */
337 tmp
= build_call_expr_loc (input_location
,
338 gfor_fndecl_in_unpack
, 2, parmse
.expr
, data
);
339 gfc_add_expr_to_block (&se
->post
, tmp
);
341 /* parmse.pre is already added above. */
342 gfc_add_block_to_block (&se
->post
, &parmse
.post
);
343 gfc_add_block_to_block (&se
->post
, &temp_post
);
349 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
352 gfc_trans_call (gfc_code
* code
, bool dependency_check
,
353 tree mask
, tree count1
, bool invert
)
357 int has_alternate_specifier
;
358 gfc_dep_check check_variable
;
359 tree index
= NULL_TREE
;
360 tree maskexpr
= NULL_TREE
;
363 /* A CALL starts a new block because the actual arguments may have to
364 be evaluated first. */
365 gfc_init_se (&se
, NULL
);
366 gfc_start_block (&se
.pre
);
368 gcc_assert (code
->resolved_sym
);
370 ss
= gfc_ss_terminator
;
371 if (code
->resolved_sym
->attr
.elemental
)
372 ss
= gfc_walk_elemental_function_args (ss
, code
->ext
.actual
, GFC_SS_REFERENCE
);
374 /* Is not an elemental subroutine call with array valued arguments. */
375 if (ss
== gfc_ss_terminator
)
378 /* Translate the call. */
379 has_alternate_specifier
380 = gfc_conv_procedure_call (&se
, code
->resolved_sym
, code
->ext
.actual
,
383 /* A subroutine without side-effect, by definition, does nothing! */
384 TREE_SIDE_EFFECTS (se
.expr
) = 1;
386 /* Chain the pieces together and return the block. */
387 if (has_alternate_specifier
)
389 gfc_code
*select_code
;
391 select_code
= code
->next
;
392 gcc_assert(select_code
->op
== EXEC_SELECT
);
393 sym
= select_code
->expr1
->symtree
->n
.sym
;
394 se
.expr
= convert (gfc_typenode_for_spec (&sym
->ts
), se
.expr
);
395 if (sym
->backend_decl
== NULL
)
396 sym
->backend_decl
= gfc_get_symbol_decl (sym
);
397 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
400 gfc_add_expr_to_block (&se
.pre
, se
.expr
);
402 gfc_add_block_to_block (&se
.pre
, &se
.post
);
407 /* An elemental subroutine call with array valued arguments has
415 /* gfc_walk_elemental_function_args renders the ss chain in the
416 reverse order to the actual argument order. */
417 ss
= gfc_reverse_ss (ss
);
419 /* Initialize the loop. */
420 gfc_init_se (&loopse
, NULL
);
421 gfc_init_loopinfo (&loop
);
422 gfc_add_ss_to_loop (&loop
, ss
);
424 gfc_conv_ss_startstride (&loop
);
425 /* TODO: gfc_conv_loop_setup generates a temporary for vector
426 subscripts. This could be prevented in the elemental case
427 as temporaries are handled separatedly
428 (below in gfc_conv_elemental_dependencies). */
429 gfc_conv_loop_setup (&loop
, &code
->expr1
->where
);
430 gfc_mark_ss_chain_used (ss
, 1);
432 /* Convert the arguments, checking for dependencies. */
433 gfc_copy_loopinfo_to_se (&loopse
, &loop
);
436 /* For operator assignment, do dependency checking. */
437 if (dependency_check
)
438 check_variable
= ELEM_CHECK_VARIABLE
;
440 check_variable
= ELEM_DONT_CHECK_VARIABLE
;
442 gfc_init_se (&depse
, NULL
);
443 gfc_conv_elemental_dependencies (&depse
, &loopse
, code
->resolved_sym
,
444 code
->ext
.actual
, check_variable
);
446 gfc_add_block_to_block (&loop
.pre
, &depse
.pre
);
447 gfc_add_block_to_block (&loop
.post
, &depse
.post
);
449 /* Generate the loop body. */
450 gfc_start_scalarized_body (&loop
, &body
);
451 gfc_init_block (&block
);
455 /* Form the mask expression according to the mask. */
457 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
459 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
460 TREE_TYPE (maskexpr
), maskexpr
);
463 /* Add the subroutine call to the block. */
464 gfc_conv_procedure_call (&loopse
, code
->resolved_sym
,
465 code
->ext
.actual
, code
->expr1
, NULL
);
469 tmp
= build3_v (COND_EXPR
, maskexpr
, loopse
.expr
,
470 build_empty_stmt (input_location
));
471 gfc_add_expr_to_block (&loopse
.pre
, tmp
);
472 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
473 gfc_array_index_type
,
474 count1
, gfc_index_one_node
);
475 gfc_add_modify (&loopse
.pre
, count1
, tmp
);
478 gfc_add_expr_to_block (&loopse
.pre
, loopse
.expr
);
480 gfc_add_block_to_block (&block
, &loopse
.pre
);
481 gfc_add_block_to_block (&block
, &loopse
.post
);
483 /* Finish up the loop block and the loop. */
484 gfc_add_expr_to_block (&body
, gfc_finish_block (&block
));
485 gfc_trans_scalarizing_loops (&loop
, &body
);
486 gfc_add_block_to_block (&se
.pre
, &loop
.pre
);
487 gfc_add_block_to_block (&se
.pre
, &loop
.post
);
488 gfc_add_block_to_block (&se
.pre
, &se
.post
);
489 gfc_cleanup_loop (&loop
);
492 return gfc_finish_block (&se
.pre
);
496 /* Translate the RETURN statement. */
499 gfc_trans_return (gfc_code
* code
)
507 /* If code->expr is not NULL, this return statement must appear
508 in a subroutine and current_fake_result_decl has already
511 result
= gfc_get_fake_result_decl (NULL
, 0);
514 gfc_warning ("An alternate return at %L without a * dummy argument",
515 &code
->expr1
->where
);
516 return gfc_generate_return ();
519 /* Start a new block for this statement. */
520 gfc_init_se (&se
, NULL
);
521 gfc_start_block (&se
.pre
);
523 gfc_conv_expr (&se
, code
->expr1
);
525 /* Note that the actually returned expression is a simple value and
526 does not depend on any pointers or such; thus we can clean-up with
527 se.post before returning. */
528 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, TREE_TYPE (result
),
529 result
, fold_convert (TREE_TYPE (result
),
531 gfc_add_expr_to_block (&se
.pre
, tmp
);
532 gfc_add_block_to_block (&se
.pre
, &se
.post
);
534 tmp
= gfc_generate_return ();
535 gfc_add_expr_to_block (&se
.pre
, tmp
);
536 return gfc_finish_block (&se
.pre
);
539 return gfc_generate_return ();
543 /* Translate the PAUSE statement. We have to translate this statement
544 to a runtime library call. */
547 gfc_trans_pause (gfc_code
* code
)
549 tree gfc_int4_type_node
= gfc_get_int_type (4);
553 /* Start a new block for this statement. */
554 gfc_init_se (&se
, NULL
);
555 gfc_start_block (&se
.pre
);
558 if (code
->expr1
== NULL
)
560 tmp
= build_int_cst (gfc_int4_type_node
, 0);
561 tmp
= build_call_expr_loc (input_location
,
562 gfor_fndecl_pause_string
, 2,
563 build_int_cst (pchar_type_node
, 0), tmp
);
565 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
567 gfc_conv_expr (&se
, code
->expr1
);
568 tmp
= build_call_expr_loc (input_location
,
569 gfor_fndecl_pause_numeric
, 1,
570 fold_convert (gfc_int4_type_node
, se
.expr
));
574 gfc_conv_expr_reference (&se
, code
->expr1
);
575 tmp
= build_call_expr_loc (input_location
,
576 gfor_fndecl_pause_string
, 2,
577 se
.expr
, se
.string_length
);
580 gfc_add_expr_to_block (&se
.pre
, tmp
);
582 gfc_add_block_to_block (&se
.pre
, &se
.post
);
584 return gfc_finish_block (&se
.pre
);
588 /* Translate the STOP statement. We have to translate this statement
589 to a runtime library call. */
592 gfc_trans_stop (gfc_code
*code
, bool error_stop
)
594 tree gfc_int4_type_node
= gfc_get_int_type (4);
598 /* Start a new block for this statement. */
599 gfc_init_se (&se
, NULL
);
600 gfc_start_block (&se
.pre
);
602 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
&& !error_stop
)
604 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
605 tmp
= built_in_decls
[BUILT_IN_SYNCHRONIZE
];
606 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
607 gfc_add_expr_to_block (&se
.pre
, tmp
);
609 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_finalize
, 0);
610 gfc_add_expr_to_block (&se
.pre
, tmp
);
613 if (code
->expr1
== NULL
)
615 tmp
= build_int_cst (gfc_int4_type_node
, 0);
616 tmp
= build_call_expr_loc (input_location
,
618 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
619 ? gfor_fndecl_caf_error_stop_str
620 : gfor_fndecl_error_stop_string
)
621 : gfor_fndecl_stop_string
,
622 2, build_int_cst (pchar_type_node
, 0), tmp
);
624 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
626 gfc_conv_expr (&se
, code
->expr1
);
627 tmp
= build_call_expr_loc (input_location
,
629 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
630 ? gfor_fndecl_caf_error_stop
631 : gfor_fndecl_error_stop_numeric
)
632 : gfor_fndecl_stop_numeric_f08
, 1,
633 fold_convert (gfc_int4_type_node
, se
.expr
));
637 gfc_conv_expr_reference (&se
, code
->expr1
);
638 tmp
= build_call_expr_loc (input_location
,
640 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
641 ? gfor_fndecl_caf_error_stop_str
642 : gfor_fndecl_error_stop_string
)
643 : gfor_fndecl_stop_string
,
644 2, se
.expr
, se
.string_length
);
647 gfc_add_expr_to_block (&se
.pre
, tmp
);
649 gfc_add_block_to_block (&se
.pre
, &se
.post
);
651 return gfc_finish_block (&se
.pre
);
656 gfc_trans_sync (gfc_code
*code
, gfc_exec_op type
)
660 tree images
= NULL_TREE
, stat
= NULL_TREE
,
661 errmsg
= NULL_TREE
, errmsglen
= NULL_TREE
;
663 /* Short cut: For single images without bound checking or without STAT=,
664 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
665 if (!code
->expr2
&& !(gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
666 && gfc_option
.coarray
!= GFC_FCOARRAY_LIB
)
669 gfc_init_se (&se
, NULL
);
670 gfc_start_block (&se
.pre
);
672 if (code
->expr1
&& code
->expr1
->rank
== 0)
674 gfc_init_se (&argse
, NULL
);
675 gfc_conv_expr_val (&argse
, code
->expr1
);
681 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
682 gfc_init_se (&argse
, NULL
);
683 gfc_conv_expr_val (&argse
, code
->expr2
);
687 if (code
->expr3
&& gfc_option
.coarray
== GFC_FCOARRAY_LIB
688 && type
!= EXEC_SYNC_MEMORY
)
690 gcc_assert (code
->expr3
->expr_type
== EXPR_VARIABLE
);
691 gfc_init_se (&argse
, NULL
);
692 gfc_conv_expr (&argse
, code
->expr3
);
693 gfc_conv_string_parameter (&argse
);
695 errmsglen
= argse
.string_length
;
697 else if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
&& type
!= EXEC_SYNC_MEMORY
)
699 errmsg
= null_pointer_node
;
700 errmsglen
= build_int_cst (integer_type_node
, 0);
703 /* Check SYNC IMAGES(imageset) for valid image index.
704 FIXME: Add a check for image-set arrays. */
705 if (code
->expr1
&& (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
706 && code
->expr1
->rank
== 0)
709 if (gfc_option
.coarray
!= GFC_FCOARRAY_LIB
)
710 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
711 images
, build_int_cst (TREE_TYPE (images
), 1));
715 cond
= fold_build2_loc (input_location
, GE_EXPR
, boolean_type_node
,
716 images
, gfort_gvar_caf_num_images
);
717 cond2
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
719 build_int_cst (TREE_TYPE (images
), 1));
720 cond
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
,
721 boolean_type_node
, cond
, cond2
);
723 gfc_trans_runtime_check (true, false, cond
, &se
.pre
,
724 &code
->expr1
->where
, "Invalid image number "
726 fold_convert (integer_type_node
, se
.expr
));
729 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
730 image control statements SYNC IMAGES and SYNC ALL. */
731 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
733 tmp
= built_in_decls
[BUILT_IN_SYNCHRONIZE
];
734 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
735 gfc_add_expr_to_block (&se
.pre
, tmp
);
738 if (gfc_option
.coarray
!= GFC_FCOARRAY_LIB
|| type
== EXEC_SYNC_MEMORY
)
740 /* Set STAT to zero. */
742 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
744 else if (type
== EXEC_SYNC_ALL
)
746 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
747 2, errmsg
, errmsglen
);
749 gfc_add_modify (&se
.pre
, stat
, fold_convert (TREE_TYPE (stat
), tmp
));
751 gfc_add_expr_to_block (&se
.pre
, tmp
);
757 gcc_assert (type
== EXEC_SYNC_IMAGES
);
761 len
= build_int_cst (integer_type_node
, -1);
762 images
= null_pointer_node
;
764 else if (code
->expr1
->rank
== 0)
766 len
= build_int_cst (integer_type_node
, 1);
767 images
= gfc_build_addr_expr (NULL_TREE
, images
);
772 if (code
->expr1
->ts
.kind
!= gfc_c_int_kind
)
773 gfc_fatal_error ("Sorry, only support for integer kind %d "
774 "implemented for image-set at %L",
775 gfc_c_int_kind
, &code
->expr1
->where
);
777 gfc_conv_array_parameter (&se
, code
->expr1
,
778 gfc_walk_expr (code
->expr1
), true, NULL
,
782 tmp
= gfc_typenode_for_spec (&code
->expr1
->ts
);
783 if (GFC_ARRAY_TYPE_P (tmp
) || GFC_DESCRIPTOR_TYPE_P (tmp
))
784 tmp
= gfc_get_element_type (tmp
);
786 len
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
,
787 TREE_TYPE (len
), len
,
788 fold_convert (TREE_TYPE (len
),
789 TYPE_SIZE_UNIT (tmp
)));
790 len
= fold_convert (integer_type_node
, len
);
793 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
, 4,
794 fold_convert (integer_type_node
, len
), images
,
797 gfc_add_modify (&se
.pre
, stat
, fold_convert (TREE_TYPE (stat
), tmp
));
799 gfc_add_expr_to_block (&se
.pre
, tmp
);
802 return gfc_finish_block (&se
.pre
);
806 /* Generate GENERIC for the IF construct. This function also deals with
807 the simple IF statement, because the front end translates the IF
808 statement into an IF construct.
840 where COND_S is the simplified version of the predicate. PRE_COND_S
841 are the pre side-effects produced by the translation of the
843 We need to build the chain recursively otherwise we run into
844 problems with folding incomplete statements. */
847 gfc_trans_if_1 (gfc_code
* code
)
854 /* Check for an unconditional ELSE clause. */
856 return gfc_trans_code (code
->next
);
858 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
859 gfc_init_se (&if_se
, NULL
);
860 gfc_start_block (&if_se
.pre
);
862 /* Calculate the IF condition expression. */
863 if (code
->expr1
->where
.lb
)
865 gfc_save_backend_locus (&saved_loc
);
866 gfc_set_backend_locus (&code
->expr1
->where
);
869 gfc_conv_expr_val (&if_se
, code
->expr1
);
871 if (code
->expr1
->where
.lb
)
872 gfc_restore_backend_locus (&saved_loc
);
874 /* Translate the THEN clause. */
875 stmt
= gfc_trans_code (code
->next
);
877 /* Translate the ELSE clause. */
879 elsestmt
= gfc_trans_if_1 (code
->block
);
881 elsestmt
= build_empty_stmt (input_location
);
883 /* Build the condition expression and add it to the condition block. */
884 loc
= code
->expr1
->where
.lb
? code
->expr1
->where
.lb
->location
: input_location
;
885 stmt
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, if_se
.expr
, stmt
,
888 gfc_add_expr_to_block (&if_se
.pre
, stmt
);
890 /* Finish off this statement. */
891 return gfc_finish_block (&if_se
.pre
);
895 gfc_trans_if (gfc_code
* code
)
900 /* Create exit label so it is available for trans'ing the body code. */
901 exit_label
= gfc_build_label_decl (NULL_TREE
);
902 code
->exit_label
= exit_label
;
904 /* Translate the actual code in code->block. */
905 gfc_init_block (&body
);
906 gfc_add_expr_to_block (&body
, gfc_trans_if_1 (code
->block
));
908 /* Add exit label. */
909 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
911 return gfc_finish_block (&body
);
915 /* Translate an arithmetic IF expression.
917 IF (cond) label1, label2, label3 translates to
929 An optimized version can be generated in case of equal labels.
930 E.g., if label1 is equal to label2, we can translate it to
939 gfc_trans_arithmetic_if (gfc_code
* code
)
947 /* Start a new block. */
948 gfc_init_se (&se
, NULL
);
949 gfc_start_block (&se
.pre
);
951 /* Pre-evaluate COND. */
952 gfc_conv_expr_val (&se
, code
->expr1
);
953 se
.expr
= gfc_evaluate_now (se
.expr
, &se
.pre
);
955 /* Build something to compare with. */
956 zero
= gfc_build_const (TREE_TYPE (se
.expr
), integer_zero_node
);
958 if (code
->label1
->value
!= code
->label2
->value
)
960 /* If (cond < 0) take branch1 else take branch2.
961 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
962 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
963 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label2
));
965 if (code
->label1
->value
!= code
->label3
->value
)
966 tmp
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
969 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
972 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
973 tmp
, branch1
, branch2
);
976 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
978 if (code
->label1
->value
!= code
->label3
->value
979 && code
->label2
->value
!= code
->label3
->value
)
981 /* if (cond <= 0) take branch1 else take branch2. */
982 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label3
));
983 tmp
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
985 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
986 tmp
, branch1
, branch2
);
989 /* Append the COND_EXPR to the evaluation of COND, and return. */
990 gfc_add_expr_to_block (&se
.pre
, branch1
);
991 return gfc_finish_block (&se
.pre
);
995 /* Translate a CRITICAL block. */
997 gfc_trans_critical (gfc_code
*code
)
1002 gfc_start_block (&block
);
1004 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
1006 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_critical
, 0);
1007 gfc_add_expr_to_block (&block
, tmp
);
1010 tmp
= gfc_trans_code (code
->block
->next
);
1011 gfc_add_expr_to_block (&block
, tmp
);
1013 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
1015 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_end_critical
,
1017 gfc_add_expr_to_block (&block
, tmp
);
1021 return gfc_finish_block (&block
);
1025 /* Do proper initialization for ASSOCIATE names. */
1028 trans_associate_var (gfc_symbol
*sym
, gfc_wrapped_block
*block
)
1033 gcc_assert (sym
->assoc
);
1034 e
= sym
->assoc
->target
;
1036 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1037 to array temporary) for arrays with either unknown shape or if associating
1039 if (sym
->attr
.dimension
1040 && (sym
->as
->type
== AS_DEFERRED
|| sym
->assoc
->variable
))
1046 desc
= sym
->backend_decl
;
1048 /* If association is to an expression, evaluate it and create temporary.
1049 Otherwise, get descriptor of target for pointer assignment. */
1050 gfc_init_se (&se
, NULL
);
1051 ss
= gfc_walk_expr (e
);
1052 if (sym
->assoc
->variable
)
1054 se
.direct_byref
= 1;
1057 gfc_conv_expr_descriptor (&se
, e
, ss
);
1059 /* If we didn't already do the pointer assignment, set associate-name
1060 descriptor to the one generated for the temporary. */
1061 if (!sym
->assoc
->variable
)
1065 gfc_add_modify (&se
.pre
, desc
, se
.expr
);
1067 /* The generated descriptor has lower bound zero (as array
1068 temporary), shift bounds so we get lower bounds of 1. */
1069 for (dim
= 0; dim
< e
->rank
; ++dim
)
1070 gfc_conv_shift_descriptor_lbound (&se
.pre
, desc
,
1071 dim
, gfc_index_one_node
);
1074 /* Done, register stuff as init / cleanup code. */
1075 gfc_add_init_cleanup (block
, gfc_finish_block (&se
.pre
),
1076 gfc_finish_block (&se
.post
));
1079 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1080 else if (gfc_is_associate_pointer (sym
))
1084 gcc_assert (!sym
->attr
.dimension
);
1086 gfc_init_se (&se
, NULL
);
1087 gfc_conv_expr (&se
, e
);
1089 tmp
= TREE_TYPE (sym
->backend_decl
);
1090 tmp
= gfc_build_addr_expr (tmp
, se
.expr
);
1091 gfc_add_modify (&se
.pre
, sym
->backend_decl
, tmp
);
1093 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1094 gfc_finish_block (&se
.post
));
1097 /* Do a simple assignment. This is for scalar expressions, where we
1098 can simply use expression assignment. */
1103 lhs
= gfc_lval_expr_from_sym (sym
);
1104 tmp
= gfc_trans_assignment (lhs
, e
, false, true);
1105 gfc_add_init_cleanup (block
, tmp
, NULL_TREE
);
1110 /* Translate a BLOCK construct. This is basically what we would do for a
1114 gfc_trans_block_construct (gfc_code
* code
)
1118 gfc_wrapped_block block
;
1121 gfc_association_list
*ass
;
1123 ns
= code
->ext
.block
.ns
;
1125 sym
= ns
->proc_name
;
1128 /* Process local variables. */
1129 gcc_assert (!sym
->tlink
);
1131 gfc_process_block_locals (ns
);
1133 /* Generate code including exit-label. */
1134 gfc_init_block (&body
);
1135 exit_label
= gfc_build_label_decl (NULL_TREE
);
1136 code
->exit_label
= exit_label
;
1137 gfc_add_expr_to_block (&body
, gfc_trans_code (ns
->code
));
1138 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
1140 /* Finish everything. */
1141 gfc_start_wrapped_block (&block
, gfc_finish_block (&body
));
1142 gfc_trans_deferred_vars (sym
, &block
);
1143 for (ass
= code
->ext
.block
.assoc
; ass
; ass
= ass
->next
)
1144 trans_associate_var (ass
->st
->n
.sym
, &block
);
1146 return gfc_finish_wrapped_block (&block
);
1150 /* Translate the simple DO construct. This is where the loop variable has
1151 integer type and step +-1. We can't use this in the general case
1152 because integer overflow and floating point errors could give incorrect
1154 We translate a do loop from:
1156 DO dovar = from, to, step
1162 [Evaluate loop bounds and step]
1164 if ((step > 0) ? (dovar <= to) : (dovar => to))
1170 cond = (dovar == to);
1172 if (cond) goto end_label;
1177 This helps the optimizers by avoiding the extra induction variable
1178 used in the general case. */
1181 gfc_trans_simple_do (gfc_code
* code
, stmtblock_t
*pblock
, tree dovar
,
1182 tree from
, tree to
, tree step
, tree exit_cond
)
1188 tree saved_dovar
= NULL
;
1193 type
= TREE_TYPE (dovar
);
1195 loc
= code
->ext
.iterator
->start
->where
.lb
->location
;
1197 /* Initialize the DO variable: dovar = from. */
1198 gfc_add_modify_loc (loc
, pblock
, dovar
, from
);
1200 /* Save value for do-tinkering checking. */
1201 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1203 saved_dovar
= gfc_create_var (type
, ".saved_dovar");
1204 gfc_add_modify_loc (loc
, pblock
, saved_dovar
, dovar
);
1207 /* Cycle and exit statements are implemented with gotos. */
1208 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1209 exit_label
= gfc_build_label_decl (NULL_TREE
);
1211 /* Put the labels where they can be found later. See gfc_trans_do(). */
1212 code
->cycle_label
= cycle_label
;
1213 code
->exit_label
= exit_label
;
1216 gfc_start_block (&body
);
1218 /* Main loop body. */
1219 tmp
= gfc_trans_code_cond (code
->block
->next
, exit_cond
);
1220 gfc_add_expr_to_block (&body
, tmp
);
1222 /* Label for cycle statements (if needed). */
1223 if (TREE_USED (cycle_label
))
1225 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1226 gfc_add_expr_to_block (&body
, tmp
);
1229 /* Check whether someone has modified the loop variable. */
1230 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1232 tmp
= fold_build2_loc (loc
, NE_EXPR
, boolean_type_node
,
1233 dovar
, saved_dovar
);
1234 gfc_trans_runtime_check (true, false, tmp
, &body
, &code
->loc
,
1235 "Loop variable has been modified");
1238 /* Exit the loop if there is an I/O result condition or error. */
1241 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1242 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1244 build_empty_stmt (loc
));
1245 gfc_add_expr_to_block (&body
, tmp
);
1248 /* Evaluate the loop condition. */
1249 cond
= fold_build2_loc (loc
, EQ_EXPR
, boolean_type_node
, dovar
,
1251 cond
= gfc_evaluate_now_loc (loc
, cond
, &body
);
1253 /* Increment the loop variable. */
1254 tmp
= fold_build2_loc (loc
, PLUS_EXPR
, type
, dovar
, step
);
1255 gfc_add_modify_loc (loc
, &body
, dovar
, tmp
);
1257 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1258 gfc_add_modify_loc (loc
, &body
, saved_dovar
, dovar
);
1260 /* The loop exit. */
1261 tmp
= fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
, exit_label
);
1262 TREE_USED (exit_label
) = 1;
1263 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1264 cond
, tmp
, build_empty_stmt (loc
));
1265 gfc_add_expr_to_block (&body
, tmp
);
1267 /* Finish the loop body. */
1268 tmp
= gfc_finish_block (&body
);
1269 tmp
= fold_build1_loc (loc
, LOOP_EXPR
, void_type_node
, tmp
);
1271 /* Only execute the loop if the number of iterations is positive. */
1272 if (tree_int_cst_sgn (step
) > 0)
1273 cond
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
, dovar
,
1276 cond
= fold_build2_loc (loc
, GE_EXPR
, boolean_type_node
, dovar
,
1278 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
, tmp
,
1279 build_empty_stmt (loc
));
1280 gfc_add_expr_to_block (pblock
, tmp
);
1282 /* Add the exit label. */
1283 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1284 gfc_add_expr_to_block (pblock
, tmp
);
1286 return gfc_finish_block (pblock
);
1289 /* Translate the DO construct. This obviously is one of the most
1290 important ones to get right with any compiler, but especially
1293 We special case some loop forms as described in gfc_trans_simple_do.
1294 For other cases we implement them with a separate loop count,
1295 as described in the standard.
1297 We translate a do loop from:
1299 DO dovar = from, to, step
1305 [evaluate loop bounds and step]
1306 empty = (step > 0 ? to < from : to > from);
1307 countm1 = (to - from) / step;
1309 if (empty) goto exit_label;
1315 if (countm1 ==0) goto exit_label;
1320 countm1 is an unsigned integer. It is equal to the loop count minus one,
1321 because the loop count itself can overflow. */
1324 gfc_trans_do (gfc_code
* code
, tree exit_cond
)
1328 tree saved_dovar
= NULL
;
1344 gfc_start_block (&block
);
1346 loc
= code
->ext
.iterator
->start
->where
.lb
->location
;
1348 /* Evaluate all the expressions in the iterator. */
1349 gfc_init_se (&se
, NULL
);
1350 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
1351 gfc_add_block_to_block (&block
, &se
.pre
);
1353 type
= TREE_TYPE (dovar
);
1355 gfc_init_se (&se
, NULL
);
1356 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
1357 gfc_add_block_to_block (&block
, &se
.pre
);
1358 from
= gfc_evaluate_now (se
.expr
, &block
);
1360 gfc_init_se (&se
, NULL
);
1361 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
1362 gfc_add_block_to_block (&block
, &se
.pre
);
1363 to
= gfc_evaluate_now (se
.expr
, &block
);
1365 gfc_init_se (&se
, NULL
);
1366 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
1367 gfc_add_block_to_block (&block
, &se
.pre
);
1368 step
= gfc_evaluate_now (se
.expr
, &block
);
1370 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1372 tmp
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
, step
,
1373 build_zero_cst (type
));
1374 gfc_trans_runtime_check (true, false, tmp
, &block
, &code
->loc
,
1375 "DO step value is zero");
1378 /* Special case simple loops. */
1379 if (TREE_CODE (type
) == INTEGER_TYPE
1380 && (integer_onep (step
)
1381 || tree_int_cst_equal (step
, integer_minus_one_node
)))
1382 return gfc_trans_simple_do (code
, &block
, dovar
, from
, to
, step
, exit_cond
);
1384 pos_step
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
, step
,
1385 build_zero_cst (type
));
1387 if (TREE_CODE (type
) == INTEGER_TYPE
)
1388 utype
= unsigned_type_for (type
);
1390 utype
= unsigned_type_for (gfc_array_index_type
);
1391 countm1
= gfc_create_var (utype
, "countm1");
1393 /* Cycle and exit statements are implemented with gotos. */
1394 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1395 exit_label
= gfc_build_label_decl (NULL_TREE
);
1396 TREE_USED (exit_label
) = 1;
1398 /* Put these labels where they can be found later. */
1399 code
->cycle_label
= cycle_label
;
1400 code
->exit_label
= exit_label
;
1402 /* Initialize the DO variable: dovar = from. */
1403 gfc_add_modify (&block
, dovar
, from
);
1405 /* Save value for do-tinkering checking. */
1406 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1408 saved_dovar
= gfc_create_var (type
, ".saved_dovar");
1409 gfc_add_modify_loc (loc
, &block
, saved_dovar
, dovar
);
1412 /* Initialize loop count and jump to exit label if the loop is empty.
1413 This code is executed before we enter the loop body. We generate:
1414 step_sign = sign(1,step);
1425 countm1 = (to*step_sign - from*step_sign) / (step*step_sign);
1429 if (TREE_CODE (type
) == INTEGER_TYPE
)
1431 tree pos
, neg
, step_sign
, to2
, from2
, step2
;
1433 /* Calculate SIGN (1,step), as (step < 0 ? -1 : 1) */
1435 tmp
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, step
,
1436 build_int_cst (TREE_TYPE (step
), 0));
1437 step_sign
= fold_build3_loc (loc
, COND_EXPR
, type
, tmp
,
1438 build_int_cst (type
, -1),
1439 build_int_cst (type
, 1));
1441 tmp
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, to
, from
);
1442 pos
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1443 fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
,
1445 build_empty_stmt (loc
));
1447 tmp
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
, to
,
1449 neg
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1450 fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
,
1452 build_empty_stmt (loc
));
1453 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1454 pos_step
, pos
, neg
);
1456 gfc_add_expr_to_block (&block
, tmp
);
1458 /* Calculate the loop count. to-from can overflow, so
1459 we cast to unsigned. */
1461 to2
= fold_build2_loc (loc
, MULT_EXPR
, type
, step_sign
, to
);
1462 from2
= fold_build2_loc (loc
, MULT_EXPR
, type
, step_sign
, from
);
1463 step2
= fold_build2_loc (loc
, MULT_EXPR
, type
, step_sign
, step
);
1464 step2
= fold_convert (utype
, step2
);
1465 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, type
, to2
, from2
);
1466 tmp
= fold_convert (utype
, tmp
);
1467 tmp
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, utype
, tmp
, step2
);
1468 tmp
= fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, countm1
, tmp
);
1469 gfc_add_expr_to_block (&block
, tmp
);
1473 /* TODO: We could use the same width as the real type.
1474 This would probably cause more problems that it solves
1475 when we implement "long double" types. */
1477 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, type
, to
, from
);
1478 tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, tmp
, step
);
1479 tmp
= fold_build1_loc (loc
, FIX_TRUNC_EXPR
, utype
, tmp
);
1480 gfc_add_modify (&block
, countm1
, tmp
);
1482 /* We need a special check for empty loops:
1483 empty = (step > 0 ? to < from : to > from); */
1484 tmp
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
, pos_step
,
1485 fold_build2_loc (loc
, LT_EXPR
,
1486 boolean_type_node
, to
, from
),
1487 fold_build2_loc (loc
, GT_EXPR
,
1488 boolean_type_node
, to
, from
));
1489 /* If the loop is empty, go directly to the exit label. */
1490 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1491 build1_v (GOTO_EXPR
, exit_label
),
1492 build_empty_stmt (input_location
));
1493 gfc_add_expr_to_block (&block
, tmp
);
1497 gfc_start_block (&body
);
1499 /* Main loop body. */
1500 tmp
= gfc_trans_code_cond (code
->block
->next
, exit_cond
);
1501 gfc_add_expr_to_block (&body
, tmp
);
1503 /* Label for cycle statements (if needed). */
1504 if (TREE_USED (cycle_label
))
1506 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1507 gfc_add_expr_to_block (&body
, tmp
);
1510 /* Check whether someone has modified the loop variable. */
1511 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1513 tmp
= fold_build2_loc (loc
, NE_EXPR
, boolean_type_node
, dovar
,
1515 gfc_trans_runtime_check (true, false, tmp
, &body
, &code
->loc
,
1516 "Loop variable has been modified");
1519 /* Exit the loop if there is an I/O result condition or error. */
1522 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1523 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1525 build_empty_stmt (input_location
));
1526 gfc_add_expr_to_block (&body
, tmp
);
1529 /* Increment the loop variable. */
1530 tmp
= fold_build2_loc (loc
, PLUS_EXPR
, type
, dovar
, step
);
1531 gfc_add_modify_loc (loc
, &body
, dovar
, tmp
);
1533 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1534 gfc_add_modify_loc (loc
, &body
, saved_dovar
, dovar
);
1536 /* End with the loop condition. Loop until countm1 == 0. */
1537 cond
= fold_build2_loc (loc
, EQ_EXPR
, boolean_type_node
, countm1
,
1538 build_int_cst (utype
, 0));
1539 tmp
= fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
, exit_label
);
1540 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1541 cond
, tmp
, build_empty_stmt (loc
));
1542 gfc_add_expr_to_block (&body
, tmp
);
1544 /* Decrement the loop count. */
1545 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, utype
, countm1
,
1546 build_int_cst (utype
, 1));
1547 gfc_add_modify_loc (loc
, &body
, countm1
, tmp
);
1549 /* End of loop body. */
1550 tmp
= gfc_finish_block (&body
);
1552 /* The for loop itself. */
1553 tmp
= fold_build1_loc (loc
, LOOP_EXPR
, void_type_node
, tmp
);
1554 gfc_add_expr_to_block (&block
, tmp
);
1556 /* Add the exit label. */
1557 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1558 gfc_add_expr_to_block (&block
, tmp
);
1560 return gfc_finish_block (&block
);
1564 /* Translate the DO WHILE construct.
1577 if (! cond) goto exit_label;
1583 Because the evaluation of the exit condition `cond' may have side
1584 effects, we can't do much for empty loop bodies. The backend optimizers
1585 should be smart enough to eliminate any dead loops. */
1588 gfc_trans_do_while (gfc_code
* code
)
1596 /* Everything we build here is part of the loop body. */
1597 gfc_start_block (&block
);
1599 /* Cycle and exit statements are implemented with gotos. */
1600 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1601 exit_label
= gfc_build_label_decl (NULL_TREE
);
1603 /* Put the labels where they can be found later. See gfc_trans_do(). */
1604 code
->cycle_label
= cycle_label
;
1605 code
->exit_label
= exit_label
;
1607 /* Create a GIMPLE version of the exit condition. */
1608 gfc_init_se (&cond
, NULL
);
1609 gfc_conv_expr_val (&cond
, code
->expr1
);
1610 gfc_add_block_to_block (&block
, &cond
.pre
);
1611 cond
.expr
= fold_build1_loc (code
->expr1
->where
.lb
->location
,
1612 TRUTH_NOT_EXPR
, boolean_type_node
, cond
.expr
);
1614 /* Build "IF (! cond) GOTO exit_label". */
1615 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1616 TREE_USED (exit_label
) = 1;
1617 tmp
= fold_build3_loc (code
->expr1
->where
.lb
->location
, COND_EXPR
,
1618 void_type_node
, cond
.expr
, tmp
,
1619 build_empty_stmt (code
->expr1
->where
.lb
->location
));
1620 gfc_add_expr_to_block (&block
, tmp
);
1622 /* The main body of the loop. */
1623 tmp
= gfc_trans_code (code
->block
->next
);
1624 gfc_add_expr_to_block (&block
, tmp
);
1626 /* Label for cycle statements (if needed). */
1627 if (TREE_USED (cycle_label
))
1629 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1630 gfc_add_expr_to_block (&block
, tmp
);
1633 /* End of loop body. */
1634 tmp
= gfc_finish_block (&block
);
1636 gfc_init_block (&block
);
1637 /* Build the loop. */
1638 tmp
= fold_build1_loc (code
->expr1
->where
.lb
->location
, LOOP_EXPR
,
1639 void_type_node
, tmp
);
1640 gfc_add_expr_to_block (&block
, tmp
);
1642 /* Add the exit label. */
1643 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1644 gfc_add_expr_to_block (&block
, tmp
);
1646 return gfc_finish_block (&block
);
1650 /* Translate the SELECT CASE construct for INTEGER case expressions,
1651 without killing all potential optimizations. The problem is that
1652 Fortran allows unbounded cases, but the back-end does not, so we
1653 need to intercept those before we enter the equivalent SWITCH_EXPR
1656 For example, we translate this,
1659 CASE (:100,101,105:115)
1669 to the GENERIC equivalent,
1673 case (minimum value for typeof(expr) ... 100:
1679 case 200 ... (maximum value for typeof(expr):
1696 gfc_trans_integer_select (gfc_code
* code
)
1706 gfc_start_block (&block
);
1708 /* Calculate the switch expression. */
1709 gfc_init_se (&se
, NULL
);
1710 gfc_conv_expr_val (&se
, code
->expr1
);
1711 gfc_add_block_to_block (&block
, &se
.pre
);
1713 end_label
= gfc_build_label_decl (NULL_TREE
);
1715 gfc_init_block (&body
);
1717 for (c
= code
->block
; c
; c
= c
->block
)
1719 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
1724 /* Assume it's the default case. */
1725 low
= high
= NULL_TREE
;
1729 low
= gfc_conv_mpz_to_tree (cp
->low
->value
.integer
,
1732 /* If there's only a lower bound, set the high bound to the
1733 maximum value of the case expression. */
1735 high
= TYPE_MAX_VALUE (TREE_TYPE (se
.expr
));
1740 /* Three cases are possible here:
1742 1) There is no lower bound, e.g. CASE (:N).
1743 2) There is a lower bound .NE. high bound, that is
1744 a case range, e.g. CASE (N:M) where M>N (we make
1745 sure that M>N during type resolution).
1746 3) There is a lower bound, and it has the same value
1747 as the high bound, e.g. CASE (N:N). This is our
1748 internal representation of CASE(N).
1750 In the first and second case, we need to set a value for
1751 high. In the third case, we don't because the GCC middle
1752 end represents a single case value by just letting high be
1753 a NULL_TREE. We can't do that because we need to be able
1754 to represent unbounded cases. */
1758 && mpz_cmp (cp
->low
->value
.integer
,
1759 cp
->high
->value
.integer
) != 0))
1760 high
= gfc_conv_mpz_to_tree (cp
->high
->value
.integer
,
1763 /* Unbounded case. */
1765 low
= TYPE_MIN_VALUE (TREE_TYPE (se
.expr
));
1768 /* Build a label. */
1769 label
= gfc_build_label_decl (NULL_TREE
);
1771 /* Add this case label.
1772 Add parameter 'label', make it match GCC backend. */
1773 tmp
= fold_build3_loc (input_location
, CASE_LABEL_EXPR
,
1774 void_type_node
, low
, high
, label
);
1775 gfc_add_expr_to_block (&body
, tmp
);
1778 /* Add the statements for this case. */
1779 tmp
= gfc_trans_code (c
->next
);
1780 gfc_add_expr_to_block (&body
, tmp
);
1782 /* Break to the end of the construct. */
1783 tmp
= build1_v (GOTO_EXPR
, end_label
);
1784 gfc_add_expr_to_block (&body
, tmp
);
1787 tmp
= gfc_finish_block (&body
);
1788 tmp
= build3_v (SWITCH_EXPR
, se
.expr
, tmp
, NULL_TREE
);
1789 gfc_add_expr_to_block (&block
, tmp
);
1791 tmp
= build1_v (LABEL_EXPR
, end_label
);
1792 gfc_add_expr_to_block (&block
, tmp
);
1794 return gfc_finish_block (&block
);
1798 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1800 There are only two cases possible here, even though the standard
1801 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1802 .FALSE., and DEFAULT.
1804 We never generate more than two blocks here. Instead, we always
1805 try to eliminate the DEFAULT case. This way, we can translate this
1806 kind of SELECT construct to a simple
1810 expression in GENERIC. */
1813 gfc_trans_logical_select (gfc_code
* code
)
1816 gfc_code
*t
, *f
, *d
;
1821 /* Assume we don't have any cases at all. */
1824 /* Now see which ones we actually do have. We can have at most two
1825 cases in a single case list: one for .TRUE. and one for .FALSE.
1826 The default case is always separate. If the cases for .TRUE. and
1827 .FALSE. are in the same case list, the block for that case list
1828 always executed, and we don't generate code a COND_EXPR. */
1829 for (c
= code
->block
; c
; c
= c
->block
)
1831 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
1835 if (cp
->low
->value
.logical
== 0) /* .FALSE. */
1837 else /* if (cp->value.logical != 0), thus .TRUE. */
1845 /* Start a new block. */
1846 gfc_start_block (&block
);
1848 /* Calculate the switch expression. We always need to do this
1849 because it may have side effects. */
1850 gfc_init_se (&se
, NULL
);
1851 gfc_conv_expr_val (&se
, code
->expr1
);
1852 gfc_add_block_to_block (&block
, &se
.pre
);
1854 if (t
== f
&& t
!= NULL
)
1856 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1857 translate the code for these cases, append it to the current
1859 gfc_add_expr_to_block (&block
, gfc_trans_code (t
->next
));
1863 tree true_tree
, false_tree
, stmt
;
1865 true_tree
= build_empty_stmt (input_location
);
1866 false_tree
= build_empty_stmt (input_location
);
1868 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1869 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1870 make the missing case the default case. */
1871 if (t
!= NULL
&& f
!= NULL
)
1881 /* Translate the code for each of these blocks, and append it to
1882 the current block. */
1884 true_tree
= gfc_trans_code (t
->next
);
1887 false_tree
= gfc_trans_code (f
->next
);
1889 stmt
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1890 se
.expr
, true_tree
, false_tree
);
1891 gfc_add_expr_to_block (&block
, stmt
);
1894 return gfc_finish_block (&block
);
1898 /* The jump table types are stored in static variables to avoid
1899 constructing them from scratch every single time. */
1900 static GTY(()) tree select_struct
[2];
1902 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1903 Instead of generating compares and jumps, it is far simpler to
1904 generate a data structure describing the cases in order and call a
1905 library subroutine that locates the right case.
1906 This is particularly true because this is the only case where we
1907 might have to dispose of a temporary.
1908 The library subroutine returns a pointer to jump to or NULL if no
1909 branches are to be taken. */
1912 gfc_trans_character_select (gfc_code
*code
)
1914 tree init
, end_label
, tmp
, type
, case_num
, label
, fndecl
;
1915 stmtblock_t block
, body
;
1920 VEC(constructor_elt
,gc
) *inits
= NULL
;
1922 tree pchartype
= gfc_get_pchar_type (code
->expr1
->ts
.kind
);
1924 /* The jump table types are stored in static variables to avoid
1925 constructing them from scratch every single time. */
1926 static tree ss_string1
[2], ss_string1_len
[2];
1927 static tree ss_string2
[2], ss_string2_len
[2];
1928 static tree ss_target
[2];
1930 cp
= code
->block
->ext
.block
.case_list
;
1931 while (cp
->left
!= NULL
)
1934 /* Generate the body */
1935 gfc_start_block (&block
);
1936 gfc_init_se (&expr1se
, NULL
);
1937 gfc_conv_expr_reference (&expr1se
, code
->expr1
);
1939 gfc_add_block_to_block (&block
, &expr1se
.pre
);
1941 end_label
= gfc_build_label_decl (NULL_TREE
);
1943 gfc_init_block (&body
);
1945 /* Attempt to optimize length 1 selects. */
1946 if (integer_onep (expr1se
.string_length
))
1948 for (d
= cp
; d
; d
= d
->right
)
1953 gcc_assert (d
->low
->expr_type
== EXPR_CONSTANT
1954 && d
->low
->ts
.type
== BT_CHARACTER
);
1955 if (d
->low
->value
.character
.length
> 1)
1957 for (i
= 1; i
< d
->low
->value
.character
.length
; i
++)
1958 if (d
->low
->value
.character
.string
[i
] != ' ')
1960 if (i
!= d
->low
->value
.character
.length
)
1962 if (optimize
&& d
->high
&& i
== 1)
1964 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
1965 && d
->high
->ts
.type
== BT_CHARACTER
);
1966 if (d
->high
->value
.character
.length
> 1
1967 && (d
->low
->value
.character
.string
[0]
1968 == d
->high
->value
.character
.string
[0])
1969 && d
->high
->value
.character
.string
[1] != ' '
1970 && ((d
->low
->value
.character
.string
[1] < ' ')
1971 == (d
->high
->value
.character
.string
[1]
1981 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
1982 && d
->high
->ts
.type
== BT_CHARACTER
);
1983 if (d
->high
->value
.character
.length
> 1)
1985 for (i
= 1; i
< d
->high
->value
.character
.length
; i
++)
1986 if (d
->high
->value
.character
.string
[i
] != ' ')
1988 if (i
!= d
->high
->value
.character
.length
)
1995 tree ctype
= gfc_get_char_type (code
->expr1
->ts
.kind
);
1997 for (c
= code
->block
; c
; c
= c
->block
)
1999 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2005 /* Assume it's the default case. */
2006 low
= high
= NULL_TREE
;
2010 /* CASE ('ab') or CASE ('ab':'az') will never match
2011 any length 1 character. */
2012 if (cp
->low
->value
.character
.length
> 1
2013 && cp
->low
->value
.character
.string
[1] != ' ')
2016 if (cp
->low
->value
.character
.length
> 0)
2017 r
= cp
->low
->value
.character
.string
[0];
2020 low
= build_int_cst (ctype
, r
);
2022 /* If there's only a lower bound, set the high bound
2023 to the maximum value of the case expression. */
2025 high
= TYPE_MAX_VALUE (ctype
);
2031 || (cp
->low
->value
.character
.string
[0]
2032 != cp
->high
->value
.character
.string
[0]))
2034 if (cp
->high
->value
.character
.length
> 0)
2035 r
= cp
->high
->value
.character
.string
[0];
2038 high
= build_int_cst (ctype
, r
);
2041 /* Unbounded case. */
2043 low
= TYPE_MIN_VALUE (ctype
);
2046 /* Build a label. */
2047 label
= gfc_build_label_decl (NULL_TREE
);
2049 /* Add this case label.
2050 Add parameter 'label', make it match GCC backend. */
2051 tmp
= fold_build3_loc (input_location
, CASE_LABEL_EXPR
,
2052 void_type_node
, low
, high
, label
);
2053 gfc_add_expr_to_block (&body
, tmp
);
2056 /* Add the statements for this case. */
2057 tmp
= gfc_trans_code (c
->next
);
2058 gfc_add_expr_to_block (&body
, tmp
);
2060 /* Break to the end of the construct. */
2061 tmp
= build1_v (GOTO_EXPR
, end_label
);
2062 gfc_add_expr_to_block (&body
, tmp
);
2065 tmp
= gfc_string_to_single_character (expr1se
.string_length
,
2067 code
->expr1
->ts
.kind
);
2068 case_num
= gfc_create_var (ctype
, "case_num");
2069 gfc_add_modify (&block
, case_num
, tmp
);
2071 gfc_add_block_to_block (&block
, &expr1se
.post
);
2073 tmp
= gfc_finish_block (&body
);
2074 tmp
= build3_v (SWITCH_EXPR
, case_num
, tmp
, NULL_TREE
);
2075 gfc_add_expr_to_block (&block
, tmp
);
2077 tmp
= build1_v (LABEL_EXPR
, end_label
);
2078 gfc_add_expr_to_block (&block
, tmp
);
2080 return gfc_finish_block (&block
);
2084 if (code
->expr1
->ts
.kind
== 1)
2086 else if (code
->expr1
->ts
.kind
== 4)
2091 if (select_struct
[k
] == NULL
)
2094 select_struct
[k
] = make_node (RECORD_TYPE
);
2096 if (code
->expr1
->ts
.kind
== 1)
2097 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char1");
2098 else if (code
->expr1
->ts
.kind
== 4)
2099 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char4");
2104 #define ADD_FIELD(NAME, TYPE) \
2105 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2106 get_identifier (stringize(NAME)), \
2110 ADD_FIELD (string1
, pchartype
);
2111 ADD_FIELD (string1_len
, gfc_charlen_type_node
);
2113 ADD_FIELD (string2
, pchartype
);
2114 ADD_FIELD (string2_len
, gfc_charlen_type_node
);
2116 ADD_FIELD (target
, integer_type_node
);
2119 gfc_finish_type (select_struct
[k
]);
2123 for (d
= cp
; d
; d
= d
->right
)
2126 for (c
= code
->block
; c
; c
= c
->block
)
2128 for (d
= c
->ext
.block
.case_list
; d
; d
= d
->next
)
2130 label
= gfc_build_label_decl (NULL_TREE
);
2131 tmp
= fold_build3_loc (input_location
, CASE_LABEL_EXPR
,
2133 (d
->low
== NULL
&& d
->high
== NULL
)
2134 ? NULL
: build_int_cst (NULL_TREE
, d
->n
),
2136 gfc_add_expr_to_block (&body
, tmp
);
2139 tmp
= gfc_trans_code (c
->next
);
2140 gfc_add_expr_to_block (&body
, tmp
);
2142 tmp
= build1_v (GOTO_EXPR
, end_label
);
2143 gfc_add_expr_to_block (&body
, tmp
);
2146 /* Generate the structure describing the branches */
2147 for (d
= cp
; d
; d
= d
->right
)
2149 VEC(constructor_elt
,gc
) *node
= NULL
;
2151 gfc_init_se (&se
, NULL
);
2155 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], null_pointer_node
);
2156 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], integer_zero_node
);
2160 gfc_conv_expr_reference (&se
, d
->low
);
2162 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], se
.expr
);
2163 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], se
.string_length
);
2166 if (d
->high
== NULL
)
2168 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], null_pointer_node
);
2169 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], integer_zero_node
);
2173 gfc_init_se (&se
, NULL
);
2174 gfc_conv_expr_reference (&se
, d
->high
);
2176 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], se
.expr
);
2177 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], se
.string_length
);
2180 CONSTRUCTOR_APPEND_ELT (node
, ss_target
[k
],
2181 build_int_cst (integer_type_node
, d
->n
));
2183 tmp
= build_constructor (select_struct
[k
], node
);
2184 CONSTRUCTOR_APPEND_ELT (inits
, NULL_TREE
, tmp
);
2187 type
= build_array_type (select_struct
[k
],
2188 build_index_type (build_int_cst (NULL_TREE
, n
-1)));
2190 init
= build_constructor (type
, inits
);
2191 TREE_CONSTANT (init
) = 1;
2192 TREE_STATIC (init
) = 1;
2193 /* Create a static variable to hold the jump table. */
2194 tmp
= gfc_create_var (type
, "jumptable");
2195 TREE_CONSTANT (tmp
) = 1;
2196 TREE_STATIC (tmp
) = 1;
2197 TREE_READONLY (tmp
) = 1;
2198 DECL_INITIAL (tmp
) = init
;
2201 /* Build the library call */
2202 init
= gfc_build_addr_expr (pvoid_type_node
, init
);
2204 if (code
->expr1
->ts
.kind
== 1)
2205 fndecl
= gfor_fndecl_select_string
;
2206 else if (code
->expr1
->ts
.kind
== 4)
2207 fndecl
= gfor_fndecl_select_string_char4
;
2211 tmp
= build_call_expr_loc (input_location
,
2212 fndecl
, 4, init
, build_int_cst (NULL_TREE
, n
),
2213 expr1se
.expr
, expr1se
.string_length
);
2214 case_num
= gfc_create_var (integer_type_node
, "case_num");
2215 gfc_add_modify (&block
, case_num
, tmp
);
2217 gfc_add_block_to_block (&block
, &expr1se
.post
);
2219 tmp
= gfc_finish_block (&body
);
2220 tmp
= build3_v (SWITCH_EXPR
, case_num
, tmp
, NULL_TREE
);
2221 gfc_add_expr_to_block (&block
, tmp
);
2223 tmp
= build1_v (LABEL_EXPR
, end_label
);
2224 gfc_add_expr_to_block (&block
, tmp
);
2226 return gfc_finish_block (&block
);
2230 /* Translate the three variants of the SELECT CASE construct.
2232 SELECT CASEs with INTEGER case expressions can be translated to an
2233 equivalent GENERIC switch statement, and for LOGICAL case
2234 expressions we build one or two if-else compares.
2236 SELECT CASEs with CHARACTER case expressions are a whole different
2237 story, because they don't exist in GENERIC. So we sort them and
2238 do a binary search at runtime.
2240 Fortran has no BREAK statement, and it does not allow jumps from
2241 one case block to another. That makes things a lot easier for
2245 gfc_trans_select (gfc_code
* code
)
2251 gcc_assert (code
&& code
->expr1
);
2252 gfc_init_block (&block
);
2254 /* Build the exit label and hang it in. */
2255 exit_label
= gfc_build_label_decl (NULL_TREE
);
2256 code
->exit_label
= exit_label
;
2258 /* Empty SELECT constructs are legal. */
2259 if (code
->block
== NULL
)
2260 body
= build_empty_stmt (input_location
);
2262 /* Select the correct translation function. */
2264 switch (code
->expr1
->ts
.type
)
2267 body
= gfc_trans_logical_select (code
);
2271 body
= gfc_trans_integer_select (code
);
2275 body
= gfc_trans_character_select (code
);
2279 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2283 /* Build everything together. */
2284 gfc_add_expr_to_block (&block
, body
);
2285 gfc_add_expr_to_block (&block
, build1_v (LABEL_EXPR
, exit_label
));
2287 return gfc_finish_block (&block
);
2291 /* Traversal function to substitute a replacement symtree if the symbol
2292 in the expression is the same as that passed. f == 2 signals that
2293 that variable itself is not to be checked - only the references.
2294 This group of functions is used when the variable expression in a
2295 FORALL assignment has internal references. For example:
2296 FORALL (i = 1:4) p(p(i)) = i
2297 The only recourse here is to store a copy of 'p' for the index
2300 static gfc_symtree
*new_symtree
;
2301 static gfc_symtree
*old_symtree
;
2304 forall_replace (gfc_expr
*expr
, gfc_symbol
*sym
, int *f
)
2306 if (expr
->expr_type
!= EXPR_VARIABLE
)
2311 else if (expr
->symtree
->n
.sym
== sym
)
2312 expr
->symtree
= new_symtree
;
2318 forall_replace_symtree (gfc_expr
*e
, gfc_symbol
*sym
, int f
)
2320 gfc_traverse_expr (e
, sym
, forall_replace
, f
);
2324 forall_restore (gfc_expr
*expr
,
2325 gfc_symbol
*sym ATTRIBUTE_UNUSED
,
2326 int *f ATTRIBUTE_UNUSED
)
2328 if (expr
->expr_type
!= EXPR_VARIABLE
)
2331 if (expr
->symtree
== new_symtree
)
2332 expr
->symtree
= old_symtree
;
2338 forall_restore_symtree (gfc_expr
*e
)
2340 gfc_traverse_expr (e
, NULL
, forall_restore
, 0);
2344 forall_make_variable_temp (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2349 gfc_symbol
*new_sym
;
2350 gfc_symbol
*old_sym
;
2354 /* Build a copy of the lvalue. */
2355 old_symtree
= c
->expr1
->symtree
;
2356 old_sym
= old_symtree
->n
.sym
;
2357 e
= gfc_lval_expr_from_sym (old_sym
);
2358 if (old_sym
->attr
.dimension
)
2360 gfc_init_se (&tse
, NULL
);
2361 gfc_conv_subref_array_arg (&tse
, e
, 0, INTENT_IN
, false);
2362 gfc_add_block_to_block (pre
, &tse
.pre
);
2363 gfc_add_block_to_block (post
, &tse
.post
);
2364 tse
.expr
= build_fold_indirect_ref_loc (input_location
, tse
.expr
);
2366 if (e
->ts
.type
!= BT_CHARACTER
)
2368 /* Use the variable offset for the temporary. */
2369 tmp
= gfc_conv_array_offset (old_sym
->backend_decl
);
2370 gfc_conv_descriptor_offset_set (pre
, tse
.expr
, tmp
);
2375 gfc_init_se (&tse
, NULL
);
2376 gfc_init_se (&rse
, NULL
);
2377 gfc_conv_expr (&rse
, e
);
2378 if (e
->ts
.type
== BT_CHARACTER
)
2380 tse
.string_length
= rse
.string_length
;
2381 tmp
= gfc_get_character_type_len (gfc_default_character_kind
,
2383 tse
.expr
= gfc_conv_string_tmp (&tse
, build_pointer_type (tmp
),
2385 gfc_add_block_to_block (pre
, &tse
.pre
);
2386 gfc_add_block_to_block (post
, &tse
.post
);
2390 tmp
= gfc_typenode_for_spec (&e
->ts
);
2391 tse
.expr
= gfc_create_var (tmp
, "temp");
2394 tmp
= gfc_trans_scalar_assign (&tse
, &rse
, e
->ts
, true,
2395 e
->expr_type
== EXPR_VARIABLE
, true);
2396 gfc_add_expr_to_block (pre
, tmp
);
2400 /* Create a new symbol to represent the lvalue. */
2401 new_sym
= gfc_new_symbol (old_sym
->name
, NULL
);
2402 new_sym
->ts
= old_sym
->ts
;
2403 new_sym
->attr
.referenced
= 1;
2404 new_sym
->attr
.temporary
= 1;
2405 new_sym
->attr
.dimension
= old_sym
->attr
.dimension
;
2406 new_sym
->attr
.flavor
= old_sym
->attr
.flavor
;
2408 /* Use the temporary as the backend_decl. */
2409 new_sym
->backend_decl
= tse
.expr
;
2411 /* Create a fake symtree for it. */
2413 new_symtree
= gfc_new_symtree (&root
, old_sym
->name
);
2414 new_symtree
->n
.sym
= new_sym
;
2415 gcc_assert (new_symtree
== root
);
2417 /* Go through the expression reference replacing the old_symtree
2419 forall_replace_symtree (c
->expr1
, old_sym
, 2);
2421 /* Now we have made this temporary, we might as well use it for
2422 the right hand side. */
2423 forall_replace_symtree (c
->expr2
, old_sym
, 1);
2427 /* Handles dependencies in forall assignments. */
2429 check_forall_dependencies (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2436 lsym
= c
->expr1
->symtree
->n
.sym
;
2437 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
2439 /* Now check for dependencies within the 'variable'
2440 expression itself. These are treated by making a complete
2441 copy of variable and changing all the references to it
2442 point to the copy instead. Note that the shallow copy of
2443 the variable will not suffice for derived types with
2444 pointer components. We therefore leave these to their
2446 if (lsym
->ts
.type
== BT_DERIVED
2447 && lsym
->ts
.u
.derived
->attr
.pointer_comp
)
2451 if (find_forall_index (c
->expr1
, lsym
, 2) == SUCCESS
)
2453 forall_make_variable_temp (c
, pre
, post
);
2457 /* Substrings with dependencies are treated in the same
2459 if (c
->expr1
->ts
.type
== BT_CHARACTER
2461 && c
->expr2
->expr_type
== EXPR_VARIABLE
2462 && lsym
== c
->expr2
->symtree
->n
.sym
)
2464 for (lref
= c
->expr1
->ref
; lref
; lref
= lref
->next
)
2465 if (lref
->type
== REF_SUBSTRING
)
2467 for (rref
= c
->expr2
->ref
; rref
; rref
= rref
->next
)
2468 if (rref
->type
== REF_SUBSTRING
)
2472 && gfc_dep_compare_expr (rref
->u
.ss
.start
, lref
->u
.ss
.start
) < 0)
2474 forall_make_variable_temp (c
, pre
, post
);
2483 cleanup_forall_symtrees (gfc_code
*c
)
2485 forall_restore_symtree (c
->expr1
);
2486 forall_restore_symtree (c
->expr2
);
2487 free (new_symtree
->n
.sym
);
2492 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2493 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2494 indicates whether we should generate code to test the FORALLs mask
2495 array. OUTER is the loop header to be used for initializing mask
2498 The generated loop format is:
2499 count = (end - start + step) / step
2512 gfc_trans_forall_loop (forall_info
*forall_tmp
, tree body
,
2513 int mask_flag
, stmtblock_t
*outer
)
2521 tree var
, start
, end
, step
;
2524 /* Initialize the mask index outside the FORALL nest. */
2525 if (mask_flag
&& forall_tmp
->mask
)
2526 gfc_add_modify (outer
, forall_tmp
->maskindex
, gfc_index_zero_node
);
2528 iter
= forall_tmp
->this_loop
;
2529 nvar
= forall_tmp
->nvar
;
2530 for (n
= 0; n
< nvar
; n
++)
2533 start
= iter
->start
;
2537 exit_label
= gfc_build_label_decl (NULL_TREE
);
2538 TREE_USED (exit_label
) = 1;
2540 /* The loop counter. */
2541 count
= gfc_create_var (TREE_TYPE (var
), "count");
2543 /* The body of the loop. */
2544 gfc_init_block (&block
);
2546 /* The exit condition. */
2547 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
2548 count
, build_int_cst (TREE_TYPE (count
), 0));
2549 tmp
= build1_v (GOTO_EXPR
, exit_label
);
2550 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2551 cond
, tmp
, build_empty_stmt (input_location
));
2552 gfc_add_expr_to_block (&block
, tmp
);
2554 /* The main loop body. */
2555 gfc_add_expr_to_block (&block
, body
);
2557 /* Increment the loop variable. */
2558 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), var
,
2560 gfc_add_modify (&block
, var
, tmp
);
2562 /* Advance to the next mask element. Only do this for the
2564 if (n
== 0 && mask_flag
&& forall_tmp
->mask
)
2566 tree maskindex
= forall_tmp
->maskindex
;
2567 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2568 maskindex
, gfc_index_one_node
);
2569 gfc_add_modify (&block
, maskindex
, tmp
);
2572 /* Decrement the loop counter. */
2573 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), count
,
2574 build_int_cst (TREE_TYPE (var
), 1));
2575 gfc_add_modify (&block
, count
, tmp
);
2577 body
= gfc_finish_block (&block
);
2579 /* Loop var initialization. */
2580 gfc_init_block (&block
);
2581 gfc_add_modify (&block
, var
, start
);
2584 /* Initialize the loop counter. */
2585 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), step
,
2587 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), end
,
2589 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, TREE_TYPE (var
),
2591 gfc_add_modify (&block
, count
, tmp
);
2593 /* The loop expression. */
2594 tmp
= build1_v (LOOP_EXPR
, body
);
2595 gfc_add_expr_to_block (&block
, tmp
);
2597 /* The exit label. */
2598 tmp
= build1_v (LABEL_EXPR
, exit_label
);
2599 gfc_add_expr_to_block (&block
, tmp
);
2601 body
= gfc_finish_block (&block
);
2608 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2609 is nonzero, the body is controlled by all masks in the forall nest.
2610 Otherwise, the innermost loop is not controlled by it's mask. This
2611 is used for initializing that mask. */
2614 gfc_trans_nested_forall_loop (forall_info
* nested_forall_info
, tree body
,
2619 forall_info
*forall_tmp
;
2620 tree mask
, maskindex
;
2622 gfc_start_block (&header
);
2624 forall_tmp
= nested_forall_info
;
2625 while (forall_tmp
!= NULL
)
2627 /* Generate body with masks' control. */
2630 mask
= forall_tmp
->mask
;
2631 maskindex
= forall_tmp
->maskindex
;
2633 /* If a mask was specified make the assignment conditional. */
2636 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
2637 body
= build3_v (COND_EXPR
, tmp
, body
,
2638 build_empty_stmt (input_location
));
2641 body
= gfc_trans_forall_loop (forall_tmp
, body
, mask_flag
, &header
);
2642 forall_tmp
= forall_tmp
->prev_nest
;
2646 gfc_add_expr_to_block (&header
, body
);
2647 return gfc_finish_block (&header
);
2651 /* Allocate data for holding a temporary array. Returns either a local
2652 temporary array or a pointer variable. */
2655 gfc_do_allocate (tree bytesize
, tree size
, tree
* pdata
, stmtblock_t
* pblock
,
2662 if (INTEGER_CST_P (size
))
2663 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
2664 size
, gfc_index_one_node
);
2668 type
= build_range_type (gfc_array_index_type
, gfc_index_zero_node
, tmp
);
2669 type
= build_array_type (elem_type
, type
);
2670 if (gfc_can_put_var_on_stack (bytesize
))
2672 gcc_assert (INTEGER_CST_P (size
));
2673 tmpvar
= gfc_create_var (type
, "temp");
2678 tmpvar
= gfc_create_var (build_pointer_type (type
), "temp");
2679 *pdata
= convert (pvoid_type_node
, tmpvar
);
2681 tmp
= gfc_call_malloc (pblock
, TREE_TYPE (tmpvar
), bytesize
);
2682 gfc_add_modify (pblock
, tmpvar
, tmp
);
2688 /* Generate codes to copy the temporary to the actual lhs. */
2691 generate_loop_for_temp_to_lhs (gfc_expr
*expr
, tree tmp1
, tree count3
,
2692 tree count1
, tree wheremask
, bool invert
)
2696 stmtblock_t block
, body
;
2702 lss
= gfc_walk_expr (expr
);
2704 if (lss
== gfc_ss_terminator
)
2706 gfc_start_block (&block
);
2708 gfc_init_se (&lse
, NULL
);
2710 /* Translate the expression. */
2711 gfc_conv_expr (&lse
, expr
);
2713 /* Form the expression for the temporary. */
2714 tmp
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2716 /* Use the scalar assignment as is. */
2717 gfc_add_block_to_block (&block
, &lse
.pre
);
2718 gfc_add_modify (&block
, lse
.expr
, tmp
);
2719 gfc_add_block_to_block (&block
, &lse
.post
);
2721 /* Increment the count1. */
2722 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
2723 count1
, gfc_index_one_node
);
2724 gfc_add_modify (&block
, count1
, tmp
);
2726 tmp
= gfc_finish_block (&block
);
2730 gfc_start_block (&block
);
2732 gfc_init_loopinfo (&loop1
);
2733 gfc_init_se (&rse
, NULL
);
2734 gfc_init_se (&lse
, NULL
);
2736 /* Associate the lss with the loop. */
2737 gfc_add_ss_to_loop (&loop1
, lss
);
2739 /* Calculate the bounds of the scalarization. */
2740 gfc_conv_ss_startstride (&loop1
);
2741 /* Setup the scalarizing loops. */
2742 gfc_conv_loop_setup (&loop1
, &expr
->where
);
2744 gfc_mark_ss_chain_used (lss
, 1);
2746 /* Start the scalarized loop body. */
2747 gfc_start_scalarized_body (&loop1
, &body
);
2749 /* Setup the gfc_se structures. */
2750 gfc_copy_loopinfo_to_se (&lse
, &loop1
);
2753 /* Form the expression of the temporary. */
2754 if (lss
!= gfc_ss_terminator
)
2755 rse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2756 /* Translate expr. */
2757 gfc_conv_expr (&lse
, expr
);
2759 /* Use the scalar assignment. */
2760 rse
.string_length
= lse
.string_length
;
2761 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr
->ts
, false, true, true);
2763 /* Form the mask expression according to the mask tree list. */
2766 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
2768 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
2769 TREE_TYPE (wheremaskexpr
),
2771 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2773 build_empty_stmt (input_location
));
2776 gfc_add_expr_to_block (&body
, tmp
);
2778 /* Increment count1. */
2779 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2780 count1
, gfc_index_one_node
);
2781 gfc_add_modify (&body
, count1
, tmp
);
2783 /* Increment count3. */
2786 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
2787 gfc_array_index_type
, count3
,
2788 gfc_index_one_node
);
2789 gfc_add_modify (&body
, count3
, tmp
);
2792 /* Generate the copying loops. */
2793 gfc_trans_scalarizing_loops (&loop1
, &body
);
2794 gfc_add_block_to_block (&block
, &loop1
.pre
);
2795 gfc_add_block_to_block (&block
, &loop1
.post
);
2796 gfc_cleanup_loop (&loop1
);
2798 tmp
= gfc_finish_block (&block
);
2804 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
2805 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
2806 and should not be freed. WHEREMASK is the conditional execution mask
2807 whose sense may be inverted by INVERT. */
2810 generate_loop_for_rhs_to_temp (gfc_expr
*expr2
, tree tmp1
, tree count3
,
2811 tree count1
, gfc_ss
*lss
, gfc_ss
*rss
,
2812 tree wheremask
, bool invert
)
2814 stmtblock_t block
, body1
;
2821 gfc_start_block (&block
);
2823 gfc_init_se (&rse
, NULL
);
2824 gfc_init_se (&lse
, NULL
);
2826 if (lss
== gfc_ss_terminator
)
2828 gfc_init_block (&body1
);
2829 gfc_conv_expr (&rse
, expr2
);
2830 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2834 /* Initialize the loop. */
2835 gfc_init_loopinfo (&loop
);
2837 /* We may need LSS to determine the shape of the expression. */
2838 gfc_add_ss_to_loop (&loop
, lss
);
2839 gfc_add_ss_to_loop (&loop
, rss
);
2841 gfc_conv_ss_startstride (&loop
);
2842 gfc_conv_loop_setup (&loop
, &expr2
->where
);
2844 gfc_mark_ss_chain_used (rss
, 1);
2845 /* Start the loop body. */
2846 gfc_start_scalarized_body (&loop
, &body1
);
2848 /* Translate the expression. */
2849 gfc_copy_loopinfo_to_se (&rse
, &loop
);
2851 gfc_conv_expr (&rse
, expr2
);
2853 /* Form the expression of the temporary. */
2854 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2857 /* Use the scalar assignment. */
2858 lse
.string_length
= rse
.string_length
;
2859 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr2
->ts
, true,
2860 expr2
->expr_type
== EXPR_VARIABLE
, true);
2862 /* Form the mask expression according to the mask tree list. */
2865 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
2867 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
2868 TREE_TYPE (wheremaskexpr
),
2870 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2872 build_empty_stmt (input_location
));
2875 gfc_add_expr_to_block (&body1
, tmp
);
2877 if (lss
== gfc_ss_terminator
)
2879 gfc_add_block_to_block (&block
, &body1
);
2881 /* Increment count1. */
2882 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
2883 count1
, gfc_index_one_node
);
2884 gfc_add_modify (&block
, count1
, tmp
);
2888 /* Increment count1. */
2889 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2890 count1
, gfc_index_one_node
);
2891 gfc_add_modify (&body1
, count1
, tmp
);
2893 /* Increment count3. */
2896 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
2897 gfc_array_index_type
,
2898 count3
, gfc_index_one_node
);
2899 gfc_add_modify (&body1
, count3
, tmp
);
2902 /* Generate the copying loops. */
2903 gfc_trans_scalarizing_loops (&loop
, &body1
);
2905 gfc_add_block_to_block (&block
, &loop
.pre
);
2906 gfc_add_block_to_block (&block
, &loop
.post
);
2908 gfc_cleanup_loop (&loop
);
2909 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2910 as tree nodes in SS may not be valid in different scope. */
2913 tmp
= gfc_finish_block (&block
);
2918 /* Calculate the size of temporary needed in the assignment inside forall.
2919 LSS and RSS are filled in this function. */
2922 compute_inner_temp_size (gfc_expr
*expr1
, gfc_expr
*expr2
,
2923 stmtblock_t
* pblock
,
2924 gfc_ss
**lss
, gfc_ss
**rss
)
2932 *lss
= gfc_walk_expr (expr1
);
2935 size
= gfc_index_one_node
;
2936 if (*lss
!= gfc_ss_terminator
)
2938 gfc_init_loopinfo (&loop
);
2940 /* Walk the RHS of the expression. */
2941 *rss
= gfc_walk_expr (expr2
);
2942 if (*rss
== gfc_ss_terminator
)
2944 /* The rhs is scalar. Add a ss for the expression. */
2945 *rss
= gfc_get_ss ();
2946 (*rss
)->next
= gfc_ss_terminator
;
2947 (*rss
)->type
= GFC_SS_SCALAR
;
2948 (*rss
)->expr
= expr2
;
2951 /* Associate the SS with the loop. */
2952 gfc_add_ss_to_loop (&loop
, *lss
);
2953 /* We don't actually need to add the rhs at this point, but it might
2954 make guessing the loop bounds a bit easier. */
2955 gfc_add_ss_to_loop (&loop
, *rss
);
2957 /* We only want the shape of the expression, not rest of the junk
2958 generated by the scalarizer. */
2959 loop
.array_parameter
= 1;
2961 /* Calculate the bounds of the scalarization. */
2962 save_flag
= gfc_option
.rtcheck
;
2963 gfc_option
.rtcheck
&= !GFC_RTCHECK_BOUNDS
;
2964 gfc_conv_ss_startstride (&loop
);
2965 gfc_option
.rtcheck
= save_flag
;
2966 gfc_conv_loop_setup (&loop
, &expr2
->where
);
2968 /* Figure out how many elements we need. */
2969 for (i
= 0; i
< loop
.dimen
; i
++)
2971 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
2972 gfc_array_index_type
,
2973 gfc_index_one_node
, loop
.from
[i
]);
2974 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
2975 gfc_array_index_type
, tmp
, loop
.to
[i
]);
2976 size
= fold_build2_loc (input_location
, MULT_EXPR
,
2977 gfc_array_index_type
, size
, tmp
);
2979 gfc_add_block_to_block (pblock
, &loop
.pre
);
2980 size
= gfc_evaluate_now (size
, pblock
);
2981 gfc_add_block_to_block (pblock
, &loop
.post
);
2983 /* TODO: write a function that cleans up a loopinfo without freeing
2984 the SS chains. Currently a NOP. */
2991 /* Calculate the overall iterator number of the nested forall construct.
2992 This routine actually calculates the number of times the body of the
2993 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2994 that by the expression INNER_SIZE. The BLOCK argument specifies the
2995 block in which to calculate the result, and the optional INNER_SIZE_BODY
2996 argument contains any statements that need to executed (inside the loop)
2997 to initialize or calculate INNER_SIZE. */
3000 compute_overall_iter_number (forall_info
*nested_forall_info
, tree inner_size
,
3001 stmtblock_t
*inner_size_body
, stmtblock_t
*block
)
3003 forall_info
*forall_tmp
= nested_forall_info
;
3007 /* We can eliminate the innermost unconditional loops with constant
3009 if (INTEGER_CST_P (inner_size
))
3012 && !forall_tmp
->mask
3013 && INTEGER_CST_P (forall_tmp
->size
))
3015 inner_size
= fold_build2_loc (input_location
, MULT_EXPR
,
3016 gfc_array_index_type
,
3017 inner_size
, forall_tmp
->size
);
3018 forall_tmp
= forall_tmp
->prev_nest
;
3021 /* If there are no loops left, we have our constant result. */
3026 /* Otherwise, create a temporary variable to compute the result. */
3027 number
= gfc_create_var (gfc_array_index_type
, "num");
3028 gfc_add_modify (block
, number
, gfc_index_zero_node
);
3030 gfc_start_block (&body
);
3031 if (inner_size_body
)
3032 gfc_add_block_to_block (&body
, inner_size_body
);
3034 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3035 gfc_array_index_type
, number
, inner_size
);
3038 gfc_add_modify (&body
, number
, tmp
);
3039 tmp
= gfc_finish_block (&body
);
3041 /* Generate loops. */
3042 if (forall_tmp
!= NULL
)
3043 tmp
= gfc_trans_nested_forall_loop (forall_tmp
, tmp
, 1);
3045 gfc_add_expr_to_block (block
, tmp
);
3051 /* Allocate temporary for forall construct. SIZE is the size of temporary
3052 needed. PTEMP1 is returned for space free. */
3055 allocate_temp_for_forall_nest_1 (tree type
, tree size
, stmtblock_t
* block
,
3062 unit
= fold_convert (gfc_array_index_type
, TYPE_SIZE_UNIT (type
));
3063 if (!integer_onep (unit
))
3064 bytesize
= fold_build2_loc (input_location
, MULT_EXPR
,
3065 gfc_array_index_type
, size
, unit
);
3070 tmp
= gfc_do_allocate (bytesize
, size
, ptemp1
, block
, type
);
3073 tmp
= build_fold_indirect_ref_loc (input_location
, tmp
);
3078 /* Allocate temporary for forall construct according to the information in
3079 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3080 assignment inside forall. PTEMP1 is returned for space free. */
3083 allocate_temp_for_forall_nest (forall_info
* nested_forall_info
, tree type
,
3084 tree inner_size
, stmtblock_t
* inner_size_body
,
3085 stmtblock_t
* block
, tree
* ptemp1
)
3089 /* Calculate the total size of temporary needed in forall construct. */
3090 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
3091 inner_size_body
, block
);
3093 return allocate_temp_for_forall_nest_1 (type
, size
, block
, ptemp1
);
3097 /* Handle assignments inside forall which need temporary.
3099 forall (i=start:end:stride; maskexpr)
3102 (where e,f<i> are arbitrary expressions possibly involving i
3103 and there is a dependency between e<i> and f<i>)
3105 masktmp(:) = maskexpr(:)
3110 for (i = start; i <= end; i += stride)
3114 for (i = start; i <= end; i += stride)
3116 if (masktmp[maskindex++])
3117 tmp[count1++] = f<i>
3121 for (i = start; i <= end; i += stride)
3123 if (masktmp[maskindex++])
3124 e<i> = tmp[count1++]
3129 gfc_trans_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3130 tree wheremask
, bool invert
,
3131 forall_info
* nested_forall_info
,
3132 stmtblock_t
* block
)
3140 stmtblock_t inner_size_body
;
3142 /* Create vars. count1 is the current iterator number of the nested
3144 count1
= gfc_create_var (gfc_array_index_type
, "count1");
3146 /* Count is the wheremask index. */
3149 count
= gfc_create_var (gfc_array_index_type
, "count");
3150 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3155 /* Initialize count1. */
3156 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3158 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3159 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3160 gfc_init_block (&inner_size_body
);
3161 inner_size
= compute_inner_temp_size (expr1
, expr2
, &inner_size_body
,
3164 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3165 if (expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.u
.cl
->length
)
3167 if (!expr1
->ts
.u
.cl
->backend_decl
)
3170 gfc_init_se (&tse
, NULL
);
3171 gfc_conv_expr (&tse
, expr1
->ts
.u
.cl
->length
);
3172 expr1
->ts
.u
.cl
->backend_decl
= tse
.expr
;
3174 type
= gfc_get_character_type_len (gfc_default_character_kind
,
3175 expr1
->ts
.u
.cl
->backend_decl
);
3178 type
= gfc_typenode_for_spec (&expr1
->ts
);
3180 /* Allocate temporary for nested forall construct according to the
3181 information in nested_forall_info and inner_size. */
3182 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
, inner_size
,
3183 &inner_size_body
, block
, &ptemp1
);
3185 /* Generate codes to copy rhs to the temporary . */
3186 tmp
= generate_loop_for_rhs_to_temp (expr2
, tmp1
, count
, count1
, lss
, rss
,
3189 /* Generate body and loops according to the information in
3190 nested_forall_info. */
3191 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3192 gfc_add_expr_to_block (block
, tmp
);
3195 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3199 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3201 /* Generate codes to copy the temporary to lhs. */
3202 tmp
= generate_loop_for_temp_to_lhs (expr1
, tmp1
, count
, count1
,
3205 /* Generate body and loops according to the information in
3206 nested_forall_info. */
3207 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3208 gfc_add_expr_to_block (block
, tmp
);
3212 /* Free the temporary. */
3213 tmp
= gfc_call_free (ptemp1
);
3214 gfc_add_expr_to_block (block
, tmp
);
3219 /* Translate pointer assignment inside FORALL which need temporary. */
3222 gfc_trans_pointer_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3223 forall_info
* nested_forall_info
,
3224 stmtblock_t
* block
)
3238 tree tmp
, tmp1
, ptemp1
;
3240 count
= gfc_create_var (gfc_array_index_type
, "count");
3241 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3243 inner_size
= integer_one_node
;
3244 lss
= gfc_walk_expr (expr1
);
3245 rss
= gfc_walk_expr (expr2
);
3246 if (lss
== gfc_ss_terminator
)
3248 type
= gfc_typenode_for_spec (&expr1
->ts
);
3249 type
= build_pointer_type (type
);
3251 /* Allocate temporary for nested forall construct according to the
3252 information in nested_forall_info and inner_size. */
3253 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
,
3254 inner_size
, NULL
, block
, &ptemp1
);
3255 gfc_start_block (&body
);
3256 gfc_init_se (&lse
, NULL
);
3257 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3258 gfc_init_se (&rse
, NULL
);
3259 rse
.want_pointer
= 1;
3260 gfc_conv_expr (&rse
, expr2
);
3261 gfc_add_block_to_block (&body
, &rse
.pre
);
3262 gfc_add_modify (&body
, lse
.expr
,
3263 fold_convert (TREE_TYPE (lse
.expr
), rse
.expr
));
3264 gfc_add_block_to_block (&body
, &rse
.post
);
3266 /* Increment count. */
3267 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3268 count
, gfc_index_one_node
);
3269 gfc_add_modify (&body
, count
, tmp
);
3271 tmp
= gfc_finish_block (&body
);
3273 /* Generate body and loops according to the information in
3274 nested_forall_info. */
3275 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3276 gfc_add_expr_to_block (block
, tmp
);
3279 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3281 gfc_start_block (&body
);
3282 gfc_init_se (&lse
, NULL
);
3283 gfc_init_se (&rse
, NULL
);
3284 rse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3285 lse
.want_pointer
= 1;
3286 gfc_conv_expr (&lse
, expr1
);
3287 gfc_add_block_to_block (&body
, &lse
.pre
);
3288 gfc_add_modify (&body
, lse
.expr
, rse
.expr
);
3289 gfc_add_block_to_block (&body
, &lse
.post
);
3290 /* Increment count. */
3291 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3292 count
, gfc_index_one_node
);
3293 gfc_add_modify (&body
, count
, tmp
);
3294 tmp
= gfc_finish_block (&body
);
3296 /* Generate body and loops according to the information in
3297 nested_forall_info. */
3298 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3299 gfc_add_expr_to_block (block
, tmp
);
3303 gfc_init_loopinfo (&loop
);
3305 /* Associate the SS with the loop. */
3306 gfc_add_ss_to_loop (&loop
, rss
);
3308 /* Setup the scalarizing loops and bounds. */
3309 gfc_conv_ss_startstride (&loop
);
3311 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3313 info
= &rss
->data
.info
;
3314 desc
= info
->descriptor
;
3316 /* Make a new descriptor. */
3317 parmtype
= gfc_get_element_type (TREE_TYPE (desc
));
3318 parmtype
= gfc_get_array_type_bounds (parmtype
, loop
.dimen
, 0,
3319 loop
.from
, loop
.to
, 1,
3320 GFC_ARRAY_UNKNOWN
, true);
3322 /* Allocate temporary for nested forall construct. */
3323 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, parmtype
,
3324 inner_size
, NULL
, block
, &ptemp1
);
3325 gfc_start_block (&body
);
3326 gfc_init_se (&lse
, NULL
);
3327 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3328 lse
.direct_byref
= 1;
3329 rss
= gfc_walk_expr (expr2
);
3330 gfc_conv_expr_descriptor (&lse
, expr2
, rss
);
3332 gfc_add_block_to_block (&body
, &lse
.pre
);
3333 gfc_add_block_to_block (&body
, &lse
.post
);
3335 /* Increment count. */
3336 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3337 count
, gfc_index_one_node
);
3338 gfc_add_modify (&body
, count
, tmp
);
3340 tmp
= gfc_finish_block (&body
);
3342 /* Generate body and loops according to the information in
3343 nested_forall_info. */
3344 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3345 gfc_add_expr_to_block (block
, tmp
);
3348 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3350 parm
= gfc_build_array_ref (tmp1
, count
, NULL
);
3351 lss
= gfc_walk_expr (expr1
);
3352 gfc_init_se (&lse
, NULL
);
3353 gfc_conv_expr_descriptor (&lse
, expr1
, lss
);
3354 gfc_add_modify (&lse
.pre
, lse
.expr
, parm
);
3355 gfc_start_block (&body
);
3356 gfc_add_block_to_block (&body
, &lse
.pre
);
3357 gfc_add_block_to_block (&body
, &lse
.post
);
3359 /* Increment count. */
3360 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3361 count
, gfc_index_one_node
);
3362 gfc_add_modify (&body
, count
, tmp
);
3364 tmp
= gfc_finish_block (&body
);
3366 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3367 gfc_add_expr_to_block (block
, tmp
);
3369 /* Free the temporary. */
3372 tmp
= gfc_call_free (ptemp1
);
3373 gfc_add_expr_to_block (block
, tmp
);
3378 /* FORALL and WHERE statements are really nasty, especially when you nest
3379 them. All the rhs of a forall assignment must be evaluated before the
3380 actual assignments are performed. Presumably this also applies to all the
3381 assignments in an inner where statement. */
3383 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3384 linear array, relying on the fact that we process in the same order in all
3387 forall (i=start:end:stride; maskexpr)
3391 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3393 count = ((end + 1 - start) / stride)
3394 masktmp(:) = maskexpr(:)
3397 for (i = start; i <= end; i += stride)
3399 if (masktmp[maskindex++])
3403 for (i = start; i <= end; i += stride)
3405 if (masktmp[maskindex++])
3409 Note that this code only works when there are no dependencies.
3410 Forall loop with array assignments and data dependencies are a real pain,
3411 because the size of the temporary cannot always be determined before the
3412 loop is executed. This problem is compounded by the presence of nested
3417 gfc_trans_forall_1 (gfc_code
* code
, forall_info
* nested_forall_info
)
3437 gfc_forall_iterator
*fa
;
3440 gfc_saved_var
*saved_vars
;
3441 iter_info
*this_forall
;
3445 /* Do nothing if the mask is false. */
3447 && code
->expr1
->expr_type
== EXPR_CONSTANT
3448 && !code
->expr1
->value
.logical
)
3449 return build_empty_stmt (input_location
);
3452 /* Count the FORALL index number. */
3453 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3457 /* Allocate the space for var, start, end, step, varexpr. */
3458 var
= XCNEWVEC (tree
, nvar
);
3459 start
= XCNEWVEC (tree
, nvar
);
3460 end
= XCNEWVEC (tree
, nvar
);
3461 step
= XCNEWVEC (tree
, nvar
);
3462 varexpr
= XCNEWVEC (gfc_expr
*, nvar
);
3463 saved_vars
= XCNEWVEC (gfc_saved_var
, nvar
);
3465 /* Allocate the space for info. */
3466 info
= XCNEW (forall_info
);
3468 gfc_start_block (&pre
);
3469 gfc_init_block (&post
);
3470 gfc_init_block (&block
);
3473 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3475 gfc_symbol
*sym
= fa
->var
->symtree
->n
.sym
;
3477 /* Allocate space for this_forall. */
3478 this_forall
= XCNEW (iter_info
);
3480 /* Create a temporary variable for the FORALL index. */
3481 tmp
= gfc_typenode_for_spec (&sym
->ts
);
3482 var
[n
] = gfc_create_var (tmp
, sym
->name
);
3483 gfc_shadow_sym (sym
, var
[n
], &saved_vars
[n
]);
3485 /* Record it in this_forall. */
3486 this_forall
->var
= var
[n
];
3488 /* Replace the index symbol's backend_decl with the temporary decl. */
3489 sym
->backend_decl
= var
[n
];
3491 /* Work out the start, end and stride for the loop. */
3492 gfc_init_se (&se
, NULL
);
3493 gfc_conv_expr_val (&se
, fa
->start
);
3494 /* Record it in this_forall. */
3495 this_forall
->start
= se
.expr
;
3496 gfc_add_block_to_block (&block
, &se
.pre
);
3499 gfc_init_se (&se
, NULL
);
3500 gfc_conv_expr_val (&se
, fa
->end
);
3501 /* Record it in this_forall. */
3502 this_forall
->end
= se
.expr
;
3503 gfc_make_safe_expr (&se
);
3504 gfc_add_block_to_block (&block
, &se
.pre
);
3507 gfc_init_se (&se
, NULL
);
3508 gfc_conv_expr_val (&se
, fa
->stride
);
3509 /* Record it in this_forall. */
3510 this_forall
->step
= se
.expr
;
3511 gfc_make_safe_expr (&se
);
3512 gfc_add_block_to_block (&block
, &se
.pre
);
3515 /* Set the NEXT field of this_forall to NULL. */
3516 this_forall
->next
= NULL
;
3517 /* Link this_forall to the info construct. */
3518 if (info
->this_loop
)
3520 iter_info
*iter_tmp
= info
->this_loop
;
3521 while (iter_tmp
->next
!= NULL
)
3522 iter_tmp
= iter_tmp
->next
;
3523 iter_tmp
->next
= this_forall
;
3526 info
->this_loop
= this_forall
;
3532 /* Calculate the size needed for the current forall level. */
3533 size
= gfc_index_one_node
;
3534 for (n
= 0; n
< nvar
; n
++)
3536 /* size = (end + step - start) / step. */
3537 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (start
[n
]),
3539 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (end
[n
]),
3541 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
, TREE_TYPE (tmp
),
3543 tmp
= convert (gfc_array_index_type
, tmp
);
3545 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
3549 /* Record the nvar and size of current forall level. */
3555 /* If the mask is .true., consider the FORALL unconditional. */
3556 if (code
->expr1
->expr_type
== EXPR_CONSTANT
3557 && code
->expr1
->value
.logical
)
3565 /* First we need to allocate the mask. */
3568 /* As the mask array can be very big, prefer compact boolean types. */
3569 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3570 mask
= allocate_temp_for_forall_nest (nested_forall_info
, mask_type
,
3571 size
, NULL
, &block
, &pmask
);
3572 maskindex
= gfc_create_var_np (gfc_array_index_type
, "mi");
3574 /* Record them in the info structure. */
3575 info
->maskindex
= maskindex
;
3580 /* No mask was specified. */
3581 maskindex
= NULL_TREE
;
3582 mask
= pmask
= NULL_TREE
;
3585 /* Link the current forall level to nested_forall_info. */
3586 info
->prev_nest
= nested_forall_info
;
3587 nested_forall_info
= info
;
3589 /* Copy the mask into a temporary variable if required.
3590 For now we assume a mask temporary is needed. */
3593 /* As the mask array can be very big, prefer compact boolean types. */
3594 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3596 gfc_add_modify (&block
, maskindex
, gfc_index_zero_node
);
3598 /* Start of mask assignment loop body. */
3599 gfc_start_block (&body
);
3601 /* Evaluate the mask expression. */
3602 gfc_init_se (&se
, NULL
);
3603 gfc_conv_expr_val (&se
, code
->expr1
);
3604 gfc_add_block_to_block (&body
, &se
.pre
);
3606 /* Store the mask. */
3607 se
.expr
= convert (mask_type
, se
.expr
);
3609 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
3610 gfc_add_modify (&body
, tmp
, se
.expr
);
3612 /* Advance to the next mask element. */
3613 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3614 maskindex
, gfc_index_one_node
);
3615 gfc_add_modify (&body
, maskindex
, tmp
);
3617 /* Generate the loops. */
3618 tmp
= gfc_finish_block (&body
);
3619 tmp
= gfc_trans_nested_forall_loop (info
, tmp
, 0);
3620 gfc_add_expr_to_block (&block
, tmp
);
3623 c
= code
->block
->next
;
3625 /* TODO: loop merging in FORALL statements. */
3626 /* Now that we've got a copy of the mask, generate the assignment loops. */
3632 /* A scalar or array assignment. DO the simple check for
3633 lhs to rhs dependencies. These make a temporary for the
3634 rhs and form a second forall block to copy to variable. */
3635 need_temp
= check_forall_dependencies(c
, &pre
, &post
);
3637 /* Temporaries due to array assignment data dependencies introduce
3638 no end of problems. */
3640 gfc_trans_assign_need_temp (c
->expr1
, c
->expr2
, NULL
, false,
3641 nested_forall_info
, &block
);
3644 /* Use the normal assignment copying routines. */
3645 assign
= gfc_trans_assignment (c
->expr1
, c
->expr2
, false, true);
3647 /* Generate body and loops. */
3648 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3650 gfc_add_expr_to_block (&block
, tmp
);
3653 /* Cleanup any temporary symtrees that have been made to deal
3654 with dependencies. */
3656 cleanup_forall_symtrees (c
);
3661 /* Translate WHERE or WHERE construct nested in FORALL. */
3662 gfc_trans_where_2 (c
, NULL
, false, nested_forall_info
, &block
);
3665 /* Pointer assignment inside FORALL. */
3666 case EXEC_POINTER_ASSIGN
:
3667 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
3669 gfc_trans_pointer_assign_need_temp (c
->expr1
, c
->expr2
,
3670 nested_forall_info
, &block
);
3673 /* Use the normal assignment copying routines. */
3674 assign
= gfc_trans_pointer_assignment (c
->expr1
, c
->expr2
);
3676 /* Generate body and loops. */
3677 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3679 gfc_add_expr_to_block (&block
, tmp
);
3684 tmp
= gfc_trans_forall_1 (c
, nested_forall_info
);
3685 gfc_add_expr_to_block (&block
, tmp
);
3688 /* Explicit subroutine calls are prevented by the frontend but interface
3689 assignments can legitimately produce them. */
3690 case EXEC_ASSIGN_CALL
:
3691 assign
= gfc_trans_call (c
, true, NULL_TREE
, NULL_TREE
, false);
3692 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, assign
, 1);
3693 gfc_add_expr_to_block (&block
, tmp
);
3703 /* Restore the original index variables. */
3704 for (fa
= code
->ext
.forall_iterator
, n
= 0; fa
; fa
= fa
->next
, n
++)
3705 gfc_restore_sym (fa
->var
->symtree
->n
.sym
, &saved_vars
[n
]);
3707 /* Free the space for var, start, end, step, varexpr. */
3715 for (this_forall
= info
->this_loop
; this_forall
;)
3717 iter_info
*next
= this_forall
->next
;
3722 /* Free the space for this forall_info. */
3727 /* Free the temporary for the mask. */
3728 tmp
= gfc_call_free (pmask
);
3729 gfc_add_expr_to_block (&block
, tmp
);
3732 pushdecl (maskindex
);
3734 gfc_add_block_to_block (&pre
, &block
);
3735 gfc_add_block_to_block (&pre
, &post
);
3737 return gfc_finish_block (&pre
);
3741 /* Translate the FORALL statement or construct. */
3743 tree
gfc_trans_forall (gfc_code
* code
)
3745 return gfc_trans_forall_1 (code
, NULL
);
/* NOTE(review): the text of this function was mangled during extraction --
   statements are split across many physical lines, stray upstream line
   numbers (3749, 3761, ...) are fused into the code, and gaps in that
   embedded numbering (e.g. 3826 -> 3828, 3837 -> 3839) show that whole
   source lines (braces, guards, operands, declarations) are missing.
   This pass adds comments only; the code bytes are left untouched.
   Restore the statement text from the upstream file before compiling.  */
3749 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3750 If the WHERE construct is nested in FORALL, compute the overall temporary
3751 needed by the WHERE mask expression multiplied by the iterator number of
3753 ME is the WHERE mask expression.
3754 MASK is the current execution mask upon input, whose sense may or may
3755 not be inverted as specified by the INVERT argument.
3756 CMASK is the updated execution mask on output, or NULL if not required.
3757 PMASK is the pending execution mask on output, or NULL if not required.
3758 BLOCK is the block in which to place the condition evaluation loops. */
3761 gfc_evaluate_where_mask (gfc_expr
* me
, forall_info
* nested_forall_info
,
3762 tree mask
, bool invert
, tree cmask
, tree pmask
,
3763 tree mask_type
, stmtblock_t
* block
)
3768 stmtblock_t body
, body1
;
3769 tree count
, cond
, mtmp
;
3772 gfc_init_loopinfo (&loop
);
3774 lss
= gfc_walk_expr (me
);
3775 rss
= gfc_walk_expr (me
);
3777 /* Variable to index the temporary. */
3778 count
= gfc_create_var (gfc_array_index_type
, "count");
3779 /* Initialize count. */
3780 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3782 gfc_start_block (&body
);
3784 gfc_init_se (&rse
, NULL
);
3785 gfc_init_se (&lse
, NULL
);
/* NOTE(review): lss == gfc_ss_terminator presumably means gfc_walk_expr
   found nothing to scalarize, i.e. the mask expression is scalar -- confirm
   upstream (cf. the "rhs is scalar" handling later in this file).  */
3787 if (lss
== gfc_ss_terminator
)
3789 gfc_init_block (&body1
);
3793 /* Initialize the loop. */
3794 gfc_init_loopinfo (&loop
);
3796 /* We may need LSS to determine the shape of the expression. */
3797 gfc_add_ss_to_loop (&loop
, lss
);
3798 gfc_add_ss_to_loop (&loop
, rss
);
3800 gfc_conv_ss_startstride (&loop
);
3801 gfc_conv_loop_setup (&loop
, &me
->where
);
3803 gfc_mark_ss_chain_used (rss
, 1);
3804 /* Start the loop body. */
3805 gfc_start_scalarized_body (&loop
, &body1
);
3807 /* Translate the expression. */
3808 gfc_copy_loopinfo_to_se (&rse
, &loop
);
3810 gfc_conv_expr (&rse
, me
);
3813 /* Variable to evaluate mask condition. */
3814 cond
= gfc_create_var (mask_type
, "cond");
3815 if (mask
&& (cmask
|| pmask
))
3816 mtmp
= gfc_create_var (mask_type
, "mask");
3817 else mtmp
= NULL_TREE
;
3819 gfc_add_block_to_block (&body1
, &lse
.pre
);
3820 gfc_add_block_to_block (&body1
, &rse
.pre
);
3822 gfc_add_modify (&body1
, cond
, fold_convert (mask_type
, rse
.expr
));
3824 if (mask
&& (cmask
|| pmask
))
3826 tmp
= gfc_build_array_ref (mask
, count
, NULL
);
/* NOTE(review): the numbering gap (3826 -> 3828) suggests a guard
   (likely "if (invert)") was dropped by the extraction here.  */
3828 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, tmp
);
3829 gfc_add_modify (&body1
, mtmp
, tmp
);
3834 tmp1
= gfc_build_array_ref (cmask
, count
, NULL
);
3837 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
,
3839 gfc_add_modify (&body1
, tmp1
, tmp
);
3844 tmp1
= gfc_build_array_ref (pmask
, count
, NULL
);
3845 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, cond
);
3847 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
, mtmp
,
3849 gfc_add_modify (&body1
, tmp1
, tmp
);
3852 gfc_add_block_to_block (&body1
, &lse
.post
);
3853 gfc_add_block_to_block (&body1
, &rse
.post
);
3855 if (lss
== gfc_ss_terminator
)
3857 gfc_add_block_to_block (&body
, &body1
);
3861 /* Increment count. */
3862 tmp1
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3863 count
, gfc_index_one_node
);
3864 gfc_add_modify (&body1
, count
, tmp1
);
3866 /* Generate the copying loops. */
3867 gfc_trans_scalarizing_loops (&loop
, &body1
);
3869 gfc_add_block_to_block (&body
, &loop
.pre
);
3870 gfc_add_block_to_block (&body
, &loop
.post
);
3872 gfc_cleanup_loop (&loop
);
3873 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3874 as tree nodes in SS may not be valid in different scope. */
3877 tmp1
= gfc_finish_block (&body
);
3878 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3879 if (nested_forall_info
!= NULL
)
3880 tmp1
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp1
, 1);
3882 gfc_add_expr_to_block (block
, tmp1
);
/* NOTE(review): this function's text was mangled during extraction --
   statements split across lines, stray upstream line numbers fused in,
   and gaps in the embedded numbering show missing source lines.  Only
   comments are added here; restore the code from upstream before
   compiling.  */
3886 /* Translate an assignment statement in a WHERE statement or construct
3887 statement. The MASK expression is used to control which elements
3888 of EXPR1 shall be assigned. The sense of MASK is specified by
/* NOTE(review): comment truncated by extraction; INVERT presumably
   selects the sense in which MASK is applied -- confirm upstream.  */
3892 gfc_trans_where_assign (gfc_expr
*expr1
, gfc_expr
*expr2
,
3893 tree mask
, bool invert
,
3894 tree count1
, tree count2
,
3900 gfc_ss
*lss_section
;
3907 tree index
, maskexpr
;
3909 /* A defined assignment. */
3910 if (cnext
&& cnext
->resolved_sym
)
3911 return gfc_trans_call (cnext
, true, mask
, count1
, invert
);
3914 /* TODO: handle this special case.
3915 Special case a single function returning an array. */
3916 if (expr2
->expr_type
== EXPR_FUNCTION
&& expr2
->rank
> 0)
3918 tmp
= gfc_trans_arrayfunc_assign (expr1
, expr2
);
3924 /* Assignment of the form lhs = rhs. */
3925 gfc_start_block (&block
);
3927 gfc_init_se (&lse
, NULL
);
3928 gfc_init_se (&rse
, NULL
);
3931 lss
= gfc_walk_expr (expr1
);
3934 /* In each where-assign-stmt, the mask-expr and the variable being
3935 defined shall be arrays of the same shape. */
3936 gcc_assert (lss
!= gfc_ss_terminator
);
3938 /* The assignment needs scalarization. */
3941 /* Find a non-scalar SS from the lhs. */
3942 while (lss_section
!= gfc_ss_terminator
3943 && lss_section
->type
!= GFC_SS_SECTION
)
3944 lss_section
= lss_section
->next
;
3946 gcc_assert (lss_section
!= gfc_ss_terminator
);
3948 /* Initialize the scalarizer. */
3949 gfc_init_loopinfo (&loop
);
3952 rss
= gfc_walk_expr (expr2
);
3953 if (rss
== gfc_ss_terminator
)
3955 /* The rhs is scalar. Add a ss for the expression. */
3956 rss
= gfc_get_ss ();
3958 rss
->next
= gfc_ss_terminator
;
3959 rss
->type
= GFC_SS_SCALAR
;
3963 /* Associate the SS with the loop. */
3964 gfc_add_ss_to_loop (&loop
, lss
);
3965 gfc_add_ss_to_loop (&loop
, rss
);
3967 /* Calculate the bounds of the scalarization. */
3968 gfc_conv_ss_startstride (&loop
);
3970 /* Resolve any data dependencies in the statement. */
3971 gfc_conv_resolve_dependencies (&loop
, lss_section
, rss
);
3973 /* Setup the scalarizing loops. */
3974 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3976 /* Setup the gfc_se structures. */
3977 gfc_copy_loopinfo_to_se (&lse
, &loop
);
3978 gfc_copy_loopinfo_to_se (&rse
, &loop
);
3981 gfc_mark_ss_chain_used (rss
, 1);
3982 if (loop
.temp_ss
== NULL
)
3985 gfc_mark_ss_chain_used (lss
, 1);
3989 lse
.ss
= loop
.temp_ss
;
3990 gfc_mark_ss_chain_used (lss
, 3);
3991 gfc_mark_ss_chain_used (loop
.temp_ss
, 3);
3994 /* Start the scalarized loop body. */
3995 gfc_start_scalarized_body (&loop
, &body
);
3997 /* Translate the expression. */
3998 gfc_conv_expr (&rse
, expr2
);
3999 if (lss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4000 gfc_conv_tmp_array_ref (&lse
);
4002 gfc_conv_expr (&lse
, expr1
);
4004 /* Form the mask expression according to the mask. */
4006 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
/* NOTE(review): the 4006 -> 4008 numbering gap suggests a guard (likely
   "if (invert)") was dropped before this TRUTH_NOT -- confirm upstream.  */
4008 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4009 TREE_TYPE (maskexpr
), maskexpr
);
4011 /* Use the scalar assignment as is. */
4012 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
,
4013 loop
.temp_ss
!= NULL
, false, true);
4015 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
, build_empty_stmt (input_location
));
4017 gfc_add_expr_to_block (&body
, tmp
);
4019 if (lss
== gfc_ss_terminator
)
4021 /* Increment count1. */
4022 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4023 count1
, gfc_index_one_node
);
4024 gfc_add_modify (&body
, count1
, tmp
);
4026 /* Use the scalar assignment as is. */
4027 gfc_add_block_to_block (&block
, &body
);
4031 gcc_assert (lse
.ss
== gfc_ss_terminator
4032 && rse
.ss
== gfc_ss_terminator
);
4034 if (loop
.temp_ss
!= NULL
)
4036 /* Increment count1 before finish the main body of a scalarized
4038 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4039 gfc_array_index_type
, count1
, gfc_index_one_node
);
4040 gfc_add_modify (&body
, count1
, tmp
);
4041 gfc_trans_scalarized_loop_boundary (&loop
, &body
);
4043 /* We need to copy the temporary to the actual lhs. */
4044 gfc_init_se (&lse
, NULL
);
4045 gfc_init_se (&rse
, NULL
);
4046 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4047 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4049 rse
.ss
= loop
.temp_ss
;
4052 gfc_conv_tmp_array_ref (&rse
);
4053 gfc_conv_expr (&lse
, expr1
);
4055 gcc_assert (lse
.ss
== gfc_ss_terminator
4056 && rse
.ss
== gfc_ss_terminator
);
4058 /* Form the mask expression according to the mask tree list. */
4060 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4062 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4063 TREE_TYPE (maskexpr
), maskexpr
);
4065 /* Use the scalar assignment as is. */
4066 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
, false, false,
4068 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
,
4069 build_empty_stmt (input_location
));
4070 gfc_add_expr_to_block (&body
, tmp
);
4072 /* Increment count2. */
4073 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4074 gfc_array_index_type
, count2
,
4075 gfc_index_one_node
);
4076 gfc_add_modify (&body
, count2
, tmp
);
4080 /* Increment count1. */
4081 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4082 gfc_array_index_type
, count1
,
4083 gfc_index_one_node
);
4084 gfc_add_modify (&body
, count1
, tmp
);
4087 /* Generate the copying loops. */
4088 gfc_trans_scalarizing_loops (&loop
, &body
);
4090 /* Wrap the whole thing up. */
4091 gfc_add_block_to_block (&block
, &loop
.pre
);
4092 gfc_add_block_to_block (&block
, &loop
.post
);
4093 gfc_cleanup_loop (&loop
);
4096 return gfc_finish_block (&block
);
/* NOTE(review): this function's text was mangled during extraction --
   statements split across lines, stray upstream line numbers fused in,
   and many whole source lines missing (e.g. the switch header before the
   case labels, and the body of the for-loop at upstream 4254-4264).
   Only comments are added here; restore the code from upstream before
   compiling.  */
4100 /* Translate the WHERE construct or statement.
4101 This function can be called iteratively to translate the nested WHERE
4102 construct or statement.
4103 MASK is the control mask. */
4106 gfc_trans_where_2 (gfc_code
* code
, tree mask
, bool invert
,
4107 forall_info
* nested_forall_info
, stmtblock_t
* block
)
4109 stmtblock_t inner_size_body
;
4110 tree inner_size
, size
;
4119 tree count1
, count2
;
4123 tree pcmask
= NULL_TREE
;
4124 tree ppmask
= NULL_TREE
;
4125 tree cmask
= NULL_TREE
;
4126 tree pmask
= NULL_TREE
;
4127 gfc_actual_arglist
*arg
;
4129 /* the WHERE statement or the WHERE construct statement. */
4130 cblock
= code
->block
;
4132 /* As the mask array can be very big, prefer compact boolean types. */
4133 mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
4135 /* Determine which temporary masks are needed. */
4138 /* One clause: No ELSEWHEREs. */
4139 need_cmask
= (cblock
->next
!= 0);
4142 else if (cblock
->block
->block
)
4144 /* Three or more clauses: Conditional ELSEWHEREs. */
4148 else if (cblock
->next
)
4150 /* Two clauses, the first non-empty. */
4152 need_pmask
= (mask
!= NULL_TREE
4153 && cblock
->block
->next
!= 0);
4155 else if (!cblock
->block
->next
)
4157 /* Two clauses, both empty. */
4161 /* Two clauses, the first empty, the second non-empty. */
4164 need_cmask
= (cblock
->block
->expr1
!= 0);
4173 if (need_cmask
|| need_pmask
)
4175 /* Calculate the size of temporary needed by the mask-expr. */
4176 gfc_init_block (&inner_size_body
);
4177 inner_size
= compute_inner_temp_size (cblock
->expr1
, cblock
->expr1
,
4178 &inner_size_body
, &lss
, &rss
);
4180 gfc_free_ss_chain (lss
);
4181 gfc_free_ss_chain (rss
);
4183 /* Calculate the total size of temporary needed. */
4184 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
4185 &inner_size_body
, block
);
4187 /* Check whether the size is negative. */
4188 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
, size
,
4189 gfc_index_zero_node
);
4190 size
= fold_build3_loc (input_location
, COND_EXPR
, gfc_array_index_type
,
4191 cond
, gfc_index_zero_node
, size
);
4192 size
= gfc_evaluate_now (size
, block
);
4194 /* Allocate temporary for WHERE mask if needed. */
4196 cmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4199 /* Allocate temporary for !mask if needed. */
4201 pmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4207 /* Each time around this loop, the where clause is conditional
4208 on the value of mask and invert, which are updated at the
4209 bottom of the loop. */
4211 /* Has mask-expr. */
4214 /* Ensure that the WHERE mask will be evaluated exactly once.
4215 If there are no statements in this WHERE/ELSEWHERE clause,
4216 then we don't need to update the control mask (cmask).
4217 If this is the last clause of the WHERE construct, then
4218 we don't need to update the pending control mask (pmask). */
4220 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4222 cblock
->next
? cmask
: NULL_TREE
,
4223 cblock
->block
? pmask
: NULL_TREE
,
4226 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4228 (cblock
->next
|| cblock
->block
)
? cmask
: NULL_TREE
,
4230 NULL_TREE
, mask_type
, block
);
4234 /* It's a final elsewhere-stmt. No mask-expr is present. */
4238 /* The body of this where clause are controlled by cmask with
4239 sense specified by invert. */
4241 /* Get the assignment statement of a WHERE statement, or the first
4242 statement in where-body-construct of a WHERE construct. */
4243 cnext
= cblock
->next
;
4248 /* WHERE assignment statement. */
4249 case EXEC_ASSIGN_CALL
:
4251 arg
= cnext
->ext
.actual
;
4252 expr1
= expr2
= NULL
;
/* NOTE(review): the body of this for-loop (upstream lines 4254-4264,
   which presumably recovers lhs/rhs from the actual-argument list) is
   missing from this extraction.  */
4253 for (; arg
; arg
= arg
->next
)
4265 expr1
= cnext
->expr1
;
4266 expr2
= cnext
->expr2
;
4268 if (nested_forall_info
!= NULL
)
4270 need_temp
= gfc_check_dependency (expr1
, expr2
, 0);
4271 if (need_temp
&& cnext
->op
!= EXEC_ASSIGN_CALL
)
4272 gfc_trans_assign_need_temp (expr1
, expr2
,
4274 nested_forall_info
, block
);
4277 /* Variables to control maskexpr. */
4278 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4279 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4280 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4281 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4283 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4288 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
4290 gfc_add_expr_to_block (block
, tmp
);
4295 /* Variables to control maskexpr. */
4296 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4297 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4298 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4299 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4301 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4305 gfc_add_expr_to_block (block
, tmp
);
4310 /* WHERE or WHERE construct is part of a where-body-construct. */
4312 gfc_trans_where_2 (cnext
, cmask
, invert
,
4313 nested_forall_info
, block
);
4320 /* The next statement within the same where-body-construct. */
4321 cnext
= cnext
->next
;
4323 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4324 cblock
= cblock
->block
;
4325 if (mask
== NULL_TREE
)
4327 /* If we're the initial WHERE, we can simply invert the sense
4328 of the current mask to obtain the "mask" for the remaining
4335 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4341 /* If we allocated a pending mask array, deallocate it now. */
4344 tmp
= gfc_call_free (ppmask
);
4345 gfc_add_expr_to_block (block
, tmp
);
4348 /* If we allocated a current mask array, deallocate it now. */
4351 tmp
= gfc_call_free (pcmask
);
4352 gfc_add_expr_to_block (block
, tmp
);
/* NOTE(review): this function's text was mangled during extraction --
   statements split across lines, stray upstream line numbers fused in,
   and gaps in the embedded numbering show missing source lines (for
   example the assignment to cexpr, upstream ~4456).  Only comments are
   added here; restore the code from upstream before compiling.  */
4356 /* Translate a simple WHERE construct or statement without dependencies.
4357 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4358 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4359 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
4362 gfc_trans_where_3 (gfc_code
* cblock
, gfc_code
* eblock
)
4364 stmtblock_t block
, body
;
4365 gfc_expr
*cond
, *tdst
, *tsrc
, *edst
, *esrc
;
4366 tree tmp
, cexpr
, tstmt
, estmt
;
4367 gfc_ss
*css
, *tdss
, *tsss
;
4368 gfc_se cse
, tdse
, tsse
, edse
, esse
;
4373 /* Allow the scalarizer to workshare simple where loops. */
4374 if (ompws_flags
& OMPWS_WORKSHARE_FLAG
)
4375 ompws_flags
|= OMPWS_SCALARIZER_WS
;
4377 cond
= cblock
->expr1
;
4378 tdst
= cblock
->next
->expr1
;
4379 tsrc
= cblock
->next
->expr2
;
4380 edst
= eblock
? eblock
->next
->expr1
: NULL
;
4381 esrc
= eblock
? eblock
->next
->expr2
: NULL
;
4383 gfc_start_block (&block
);
4384 gfc_init_loopinfo (&loop
);
4386 /* Handle the condition. */
4387 gfc_init_se (&cse
, NULL
);
4388 css
= gfc_walk_expr (cond
);
4389 gfc_add_ss_to_loop (&loop
, css
);
4391 /* Handle the then-clause. */
4392 gfc_init_se (&tdse
, NULL
);
4393 gfc_init_se (&tsse
, NULL
);
4394 tdss
= gfc_walk_expr (tdst
);
4395 tsss
= gfc_walk_expr (tsrc
);
4396 if (tsss
== gfc_ss_terminator
)
4398 tsss
= gfc_get_ss ();
4400 tsss
->next
= gfc_ss_terminator
;
4401 tsss
->type
= GFC_SS_SCALAR
;
4404 gfc_add_ss_to_loop (&loop
, tdss
);
4405 gfc_add_ss_to_loop (&loop
, tsss
);
4409 /* Handle the else clause. */
4410 gfc_init_se (&edse
, NULL
);
4411 gfc_init_se (&esse
, NULL
);
4412 edss
= gfc_walk_expr (edst
);
4413 esss
= gfc_walk_expr (esrc
);
4414 if (esss
== gfc_ss_terminator
)
4416 esss
= gfc_get_ss ();
4418 esss
->next
= gfc_ss_terminator
;
4419 esss
->type
= GFC_SS_SCALAR
;
4422 gfc_add_ss_to_loop (&loop
, edss
);
4423 gfc_add_ss_to_loop (&loop
, esss
);
4426 gfc_conv_ss_startstride (&loop
);
4427 gfc_conv_loop_setup (&loop
, &tdst
->where
);
4429 gfc_mark_ss_chain_used (css
, 1);
4430 gfc_mark_ss_chain_used (tdss
, 1);
4431 gfc_mark_ss_chain_used (tsss
, 1);
4434 gfc_mark_ss_chain_used (edss
, 1);
4435 gfc_mark_ss_chain_used (esss
, 1);
4438 gfc_start_scalarized_body (&loop
, &body
);
4440 gfc_copy_loopinfo_to_se (&cse
, &loop
);
4441 gfc_copy_loopinfo_to_se (&tdse
, &loop
);
4442 gfc_copy_loopinfo_to_se (&tsse
, &loop
);
4448 gfc_copy_loopinfo_to_se (&edse
, &loop
);
4449 gfc_copy_loopinfo_to_se (&esse
, &loop
);
4454 gfc_conv_expr (&cse
, cond
);
4455 gfc_add_block_to_block (&body
, &cse
.pre
);
4458 gfc_conv_expr (&tsse
, tsrc
);
4459 if (tdss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4460 gfc_conv_tmp_array_ref (&tdse
);
4462 gfc_conv_expr (&tdse
, tdst
);
4466 gfc_conv_expr (&esse
, esrc
);
4467 if (edss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4468 gfc_conv_tmp_array_ref (&edse
);
4470 gfc_conv_expr (&edse
, edst
);
4473 tstmt
= gfc_trans_scalar_assign (&tdse
, &tsse
, tdst
->ts
, false, false, true);
4474 estmt
= eblock
? gfc_trans_scalar_assign (&edse
, &esse
, edst
->ts
, false,
4476 : build_empty_stmt (input_location
);
/* NOTE(review): cexpr is used below but its assignment (upstream
   line ~4456, presumably from cse.expr) is missing from this
   extraction.  */
4477 tmp
= build3_v (COND_EXPR
, cexpr
, tstmt
, estmt
);
4478 gfc_add_expr_to_block (&body
, tmp
);
4479 gfc_add_block_to_block (&body
, &cse
.post
);
4481 gfc_trans_scalarizing_loops (&loop
, &body
);
4482 gfc_add_block_to_block (&block
, &loop
.pre
);
4483 gfc_add_block_to_block (&block
, &loop
.post
);
4484 gfc_cleanup_loop (&loop
);
4486 return gfc_finish_block (&block
);
/* NOTE(review): this function's text was mangled during extraction --
   statements split across lines, stray upstream line numbers fused in,
   and gaps in the numbering show missing source lines.  Only comments
   are added here; restore the code from upstream before compiling.  */
4489 /* As the WHERE or WHERE construct statement can be nested, we call
4490 gfc_trans_where_2 to do the translation, and pass the initial
4491 NULL values for both the control mask and the pending control mask. */
4494 gfc_trans_where (gfc_code
* code
)
4500 cblock
= code
->block
;
/* NOTE(review): the opening of this condition (upstream line 4501,
   presumably "if (cblock->next" plus tests omitted here) is missing
   from this extraction -- confirm upstream.  */
4502 && cblock
->next
->op
== EXEC_ASSIGN
4503 && !cblock
->next
->next
)
4505 eblock
= cblock
->block
;
4508 /* A simple "WHERE (cond) x = y" statement or block is
4509 dependence free if cond is not dependent upon writing x,
4510 and the source y is unaffected by the destination x. */
4511 if (!gfc_check_dependency (cblock
->next
->expr1
,
4513 && !gfc_check_dependency (cblock
->next
->expr1
,
4514 cblock
->next
->expr2
, 0))
4515 return gfc_trans_where_3 (cblock
, NULL
);
4517 else if (!eblock
->expr1
4520 && eblock
->next
->op
== EXEC_ASSIGN
4521 && !eblock
->next
->next
)
4523 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4524 block is dependence free if cond is not dependent on writes
4525 to x1 and x2, y1 is not dependent on writes to x2, and y2
4526 is not dependent on writes to x1, and both y's are not
4527 dependent upon their own x's. In addition to this, the
4528 final two dependency checks below exclude all but the same
4529 array reference if the where and elswhere destinations
4530 are the same. In short, this is VERY conservative and this
4531 is needed because the two loops, required by the standard
4532 are coalesced in gfc_trans_where_3. */
4533 if (!gfc_check_dependency(cblock
->next
->expr1
,
4535 && !gfc_check_dependency(eblock
->next
->expr1
,
4537 && !gfc_check_dependency(cblock
->next
->expr1
,
4538 eblock
->next
->expr2
, 1)
4539 && !gfc_check_dependency(eblock
->next
->expr1
,
4540 cblock
->next
->expr2
, 1)
4541 && !gfc_check_dependency(cblock
->next
->expr1
,
4542 cblock
->next
->expr2
, 1)
4543 && !gfc_check_dependency(eblock
->next
->expr1
,
4544 eblock
->next
->expr2
, 1)
4545 && !gfc_check_dependency(cblock
->next
->expr1
,
4546 eblock
->next
->expr1
, 0)
4547 && !gfc_check_dependency(eblock
->next
->expr1
,
4548 cblock
->next
->expr1
, 0))
4549 return gfc_trans_where_3 (cblock
, eblock
);
4553 gfc_start_block (&block
);
4555 gfc_trans_where_2 (code
, NULL
, false, NULL
, &block
);
4557 return gfc_finish_block (&block
);
4561 /* CYCLE a DO loop. The label decl has already been created by
4562 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4563 node at the head of the loop. We must mark the label as used. */
4566 gfc_trans_cycle (gfc_code
* code
)
4570 cycle_label
= code
->ext
.which_construct
->cycle_label
;
4571 gcc_assert (cycle_label
);
4573 TREE_USED (cycle_label
) = 1;
4574 return build1_v (GOTO_EXPR
, cycle_label
);
4578 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4579 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4583 gfc_trans_exit (gfc_code
* code
)
4587 exit_label
= code
->ext
.which_construct
->exit_label
;
4588 gcc_assert (exit_label
);
4590 TREE_USED (exit_label
) = 1;
4591 return build1_v (GOTO_EXPR
, exit_label
);
4595 /* Translate the ALLOCATE statement. */
4598 gfc_trans_allocate (gfc_code
* code
)
4616 if (!code
->ext
.alloc
.list
)
4619 pstat
= stat
= error_label
= tmp
= memsz
= NULL_TREE
;
4621 gfc_init_block (&block
);
4622 gfc_init_block (&post
);
4624 /* Either STAT= and/or ERRMSG is present. */
4625 if (code
->expr1
|| code
->expr2
)
4627 tree gfc_int4_type_node
= gfc_get_int_type (4);
4629 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
4630 pstat
= gfc_build_addr_expr (NULL_TREE
, stat
);
4632 error_label
= gfc_build_label_decl (NULL_TREE
);
4633 TREE_USED (error_label
) = 1;
4639 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
4641 expr
= gfc_copy_expr (al
->expr
);
4643 if (expr
->ts
.type
== BT_CLASS
)
4644 gfc_add_data_component (expr
);
4646 gfc_init_se (&se
, NULL
);
4648 se
.want_pointer
= 1;
4649 se
.descriptor_only
= 1;
4650 gfc_conv_expr (&se
, expr
);
4652 if (!gfc_array_allocate (&se
, expr
, pstat
))
4654 /* A scalar or derived type. */
4656 /* Determine allocate size. */
4657 if (al
->expr
->ts
.type
== BT_CLASS
&& code
->expr3
)
4659 if (code
->expr3
->ts
.type
== BT_CLASS
)
4661 sz
= gfc_copy_expr (code
->expr3
);
4662 gfc_add_vptr_component (sz
);
4663 gfc_add_size_component (sz
);
4664 gfc_init_se (&se_sz
, NULL
);
4665 gfc_conv_expr (&se_sz
, sz
);
4670 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->expr3
->ts
));
4672 else if (al
->expr
->ts
.type
== BT_CHARACTER
4673 && al
->expr
->ts
.deferred
&& code
->expr3
)
4675 if (!code
->expr3
->ts
.u
.cl
->backend_decl
)
4677 /* Convert and use the length expression. */
4678 gfc_init_se (&se_sz
, NULL
);
4679 if (code
->expr3
->expr_type
== EXPR_VARIABLE
4680 || code
->expr3
->expr_type
== EXPR_CONSTANT
)
4682 gfc_conv_expr (&se_sz
, code
->expr3
);
4683 memsz
= se_sz
.string_length
;
4685 else if (code
->expr3
->mold
4686 && code
->expr3
->ts
.u
.cl
4687 && code
->expr3
->ts
.u
.cl
->length
)
4689 gfc_conv_expr (&se_sz
, code
->expr3
->ts
.u
.cl
->length
);
4690 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4691 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
4692 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
4697 /* This is would be inefficient and possibly could
4698 generate wrong code if the result were not stored
4700 if (slen3
== NULL_TREE
)
4702 gfc_conv_expr (&se_sz
, code
->expr3
);
4703 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4704 expr3
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
4705 gfc_add_block_to_block (&post
, &se_sz
.post
);
4706 slen3
= gfc_evaluate_now (se_sz
.string_length
,
4713 /* Otherwise use the stored string length. */
4714 memsz
= code
->expr3
->ts
.u
.cl
->backend_decl
;
4715 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
4717 /* Store the string length. */
4718 if (tmp
&& TREE_CODE (tmp
) == VAR_DECL
)
4719 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
4722 /* Convert to size in bytes, using the character KIND. */
4723 tmp
= TREE_TYPE (gfc_typenode_for_spec (&al
->expr
->ts
));
4724 tmp
= TYPE_SIZE_UNIT (tmp
);
4725 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
4726 TREE_TYPE (tmp
), tmp
,
4727 fold_convert (TREE_TYPE (tmp
), memsz
));
4729 else if (al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
4731 gcc_assert (code
->ext
.alloc
.ts
.u
.cl
&& code
->ext
.alloc
.ts
.u
.cl
->length
);
4732 gfc_init_se (&se_sz
, NULL
);
4733 gfc_conv_expr (&se_sz
, code
->ext
.alloc
.ts
.u
.cl
->length
);
4734 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4735 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
4736 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
4737 /* Store the string length. */
4738 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
4739 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
4741 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
4742 tmp
= TYPE_SIZE_UNIT (tmp
);
4743 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
4744 TREE_TYPE (tmp
), tmp
,
4745 fold_convert (TREE_TYPE (se_sz
.expr
),
4748 else if (code
->ext
.alloc
.ts
.type
!= BT_UNKNOWN
)
4749 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
4751 memsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se
.expr
)));
4753 if (expr
->ts
.type
== BT_CHARACTER
&& memsz
== NULL_TREE
)
4755 memsz
= se
.string_length
;
4757 /* Convert to size in bytes, using the character KIND. */
4758 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
4759 tmp
= TYPE_SIZE_UNIT (tmp
);
4760 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
4761 TREE_TYPE (tmp
), tmp
,
4762 fold_convert (TREE_TYPE (tmp
), memsz
));
4765 /* Allocate - for non-pointers with re-alloc checking. */
4766 if (gfc_expr_attr (expr
).allocatable
)
4767 tmp
= gfc_allocate_array_with_status (&se
.pre
, se
.expr
, memsz
,
4770 tmp
= gfc_allocate_with_status (&se
.pre
, memsz
, pstat
);
4772 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
4774 fold_convert (TREE_TYPE (se
.expr
), tmp
));
4775 gfc_add_expr_to_block (&se
.pre
, tmp
);
4777 if (code
->expr1
|| code
->expr2
)
4779 tmp
= build1_v (GOTO_EXPR
, error_label
);
4780 parm
= fold_build2_loc (input_location
, NE_EXPR
,
4781 boolean_type_node
, stat
,
4782 build_int_cst (TREE_TYPE (stat
), 0));
4783 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
4785 build_empty_stmt (input_location
));
4786 gfc_add_expr_to_block (&se
.pre
, tmp
);
4789 if (expr
->ts
.type
== BT_DERIVED
&& expr
->ts
.u
.derived
->attr
.alloc_comp
)
4791 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
4792 tmp
= gfc_nullify_alloc_comp (expr
->ts
.u
.derived
, tmp
, 0);
4793 gfc_add_expr_to_block (&se
.pre
, tmp
);
4797 gfc_add_block_to_block (&block
, &se
.pre
);
4799 if (code
->expr3
&& !code
->expr3
->mold
)
4801 /* Initialization via SOURCE block
4802 (or static default initializer). */
4803 gfc_expr
*rhs
= gfc_copy_expr (code
->expr3
);
4804 if (al
->expr
->ts
.type
== BT_CLASS
)
4807 gfc_actual_arglist
*actual
;
4809 gfc_init_se (&call
, NULL
);
4810 /* Do a polymorphic deep copy. */
4811 actual
= gfc_get_actual_arglist ();
4812 actual
->expr
= gfc_copy_expr (rhs
);
4813 if (rhs
->ts
.type
== BT_CLASS
)
4814 gfc_add_data_component (actual
->expr
);
4815 actual
->next
= gfc_get_actual_arglist ();
4816 actual
->next
->expr
= gfc_copy_expr (al
->expr
);
4817 gfc_add_data_component (actual
->next
->expr
);
4818 if (rhs
->ts
.type
== BT_CLASS
)
4820 ppc
= gfc_copy_expr (rhs
);
4821 gfc_add_vptr_component (ppc
);
4824 ppc
= gfc_lval_expr_from_sym (gfc_find_derived_vtab (rhs
->ts
.u
.derived
));
4825 gfc_add_component_ref (ppc
, "_copy");
4826 gfc_conv_procedure_call (&call
, ppc
->symtree
->n
.sym
, actual
,
4828 gfc_add_expr_to_block (&call
.pre
, call
.expr
);
4829 gfc_add_block_to_block (&call
.pre
, &call
.post
);
4830 tmp
= gfc_finish_block (&call
.pre
);
4832 else if (expr3
!= NULL_TREE
)
4834 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
4835 gfc_trans_string_copy (&block
, slen3
, tmp
, code
->expr3
->ts
.kind
,
4836 slen3
, expr3
, code
->expr3
->ts
.kind
);
4841 /* Switch off automatic reallocation since we have just done
4843 int realloc_lhs
= gfc_option
.flag_realloc_lhs
;
4844 gfc_option
.flag_realloc_lhs
= 0;
4845 tmp
= gfc_trans_assignment (gfc_expr_to_initialize (expr
),
4847 gfc_option
.flag_realloc_lhs
= realloc_lhs
;
4849 gfc_free_expr (rhs
);
4850 gfc_add_expr_to_block (&block
, tmp
);
4852 else if (code
->expr3
&& code
->expr3
->mold
4853 && code
->expr3
->ts
.type
== BT_CLASS
)
4855 /* Default-initialization via MOLD (polymorphic). */
4856 gfc_expr
*rhs
= gfc_copy_expr (code
->expr3
);
4858 gfc_add_vptr_component (rhs
);
4859 gfc_add_def_init_component (rhs
);
4860 gfc_init_se (&dst
, NULL
);
4861 gfc_init_se (&src
, NULL
);
4862 gfc_conv_expr (&dst
, expr
);
4863 gfc_conv_expr (&src
, rhs
);
4864 gfc_add_block_to_block (&block
, &src
.pre
);
4865 tmp
= gfc_build_memcpy_call (dst
.expr
, src
.expr
, memsz
);
4866 gfc_add_expr_to_block (&block
, tmp
);
4867 gfc_free_expr (rhs
);
4870 /* Allocation of CLASS entities. */
4871 gfc_free_expr (expr
);
4873 if (expr
->ts
.type
== BT_CLASS
)
4878 /* Initialize VPTR for CLASS objects. */
4879 lhs
= gfc_expr_to_initialize (expr
);
4880 gfc_add_vptr_component (lhs
);
4882 if (code
->expr3
&& code
->expr3
->ts
.type
== BT_CLASS
)
4884 /* Polymorphic SOURCE: VPTR must be determined at run time. */
4885 rhs
= gfc_copy_expr (code
->expr3
);
4886 gfc_add_vptr_component (rhs
);
4887 tmp
= gfc_trans_pointer_assignment (lhs
, rhs
);
4888 gfc_add_expr_to_block (&block
, tmp
);
4889 gfc_free_expr (rhs
);
4893 /* VPTR is fixed at compile time. */
4897 ts
= &code
->expr3
->ts
;
4898 else if (expr
->ts
.type
== BT_DERIVED
)
4900 else if (code
->ext
.alloc
.ts
.type
== BT_DERIVED
)
4901 ts
= &code
->ext
.alloc
.ts
;
4902 else if (expr
->ts
.type
== BT_CLASS
)
4903 ts
= &CLASS_DATA (expr
)->ts
;
4907 if (ts
->type
== BT_DERIVED
)
4909 vtab
= gfc_find_derived_vtab (ts
->u
.derived
);
4911 gfc_init_se (&lse
, NULL
);
4912 lse
.want_pointer
= 1;
4913 gfc_conv_expr (&lse
, lhs
);
4914 tmp
= gfc_build_addr_expr (NULL_TREE
,
4915 gfc_get_symbol_decl (vtab
));
4916 gfc_add_modify (&block
, lse
.expr
,
4917 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
4920 gfc_free_expr (lhs
);
4928 tmp
= build1_v (LABEL_EXPR
, error_label
);
4929 gfc_add_expr_to_block (&block
, tmp
);
4931 gfc_init_se (&se
, NULL
);
4932 gfc_conv_expr_lhs (&se
, code
->expr1
);
4933 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
4934 gfc_add_modify (&block
, se
.expr
, tmp
);
4940 /* A better error message may be possible, but not required. */
4941 const char *msg
= "Attempt to allocate an allocated object";
4942 tree errmsg
, slen
, dlen
;
4944 gfc_init_se (&se
, NULL
);
4945 gfc_conv_expr_lhs (&se
, code
->expr2
);
4947 errmsg
= gfc_create_var (pchar_type_node
, "ERRMSG");
4949 gfc_add_modify (&block
, errmsg
,
4950 gfc_build_addr_expr (pchar_type_node
,
4951 gfc_build_localized_cstring_const (msg
)));
4953 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
4954 dlen
= gfc_get_expr_charlen (code
->expr2
);
4955 slen
= fold_build2_loc (input_location
, MIN_EXPR
, TREE_TYPE (slen
), dlen
,
4958 dlen
= build_call_expr_loc (input_location
,
4959 built_in_decls
[BUILT_IN_MEMCPY
], 3,
4960 gfc_build_addr_expr (pvoid_type_node
, se
.expr
), errmsg
, slen
);
4962 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
4963 build_int_cst (TREE_TYPE (stat
), 0));
4965 tmp
= build3_v (COND_EXPR
, tmp
, dlen
, build_empty_stmt (input_location
));
4967 gfc_add_expr_to_block (&block
, tmp
);
4970 gfc_add_block_to_block (&block
, &se
.post
);
4971 gfc_add_block_to_block (&block
, &post
);
4973 return gfc_finish_block (&block
);
4977 /* Translate a DEALLOCATE statement. */
4980 gfc_trans_deallocate (gfc_code
*code
)
4984 tree apstat
, astat
, pstat
, stat
, tmp
;
4987 pstat
= apstat
= stat
= astat
= tmp
= NULL_TREE
;
4989 gfc_start_block (&block
);
4991 /* Count the number of failed deallocations. If deallocate() was
4992 called with STAT= , then set STAT to the count. If deallocate
4993 was called with ERRMSG, then set ERRMG to a string. */
4994 if (code
->expr1
|| code
->expr2
)
4996 tree gfc_int4_type_node
= gfc_get_int_type (4);
4998 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
4999 pstat
= gfc_build_addr_expr (NULL_TREE
, stat
);
5001 /* Running total of possible deallocation failures. */
5002 astat
= gfc_create_var (gfc_int4_type_node
, "astat");
5003 apstat
= gfc_build_addr_expr (NULL_TREE
, astat
);
5005 /* Initialize astat to 0. */
5006 gfc_add_modify (&block
, astat
, build_int_cst (TREE_TYPE (astat
), 0));
5009 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
5011 gfc_expr
*expr
= gfc_copy_expr (al
->expr
);
5012 gcc_assert (expr
->expr_type
== EXPR_VARIABLE
);
5014 if (expr
->ts
.type
== BT_CLASS
)
5015 gfc_add_data_component (expr
);
5017 gfc_init_se (&se
, NULL
);
5018 gfc_start_block (&se
.pre
);
5020 se
.want_pointer
= 1;
5021 se
.descriptor_only
= 1;
5022 gfc_conv_expr (&se
, expr
);
5026 if (expr
->ts
.type
== BT_DERIVED
&& expr
->ts
.u
.derived
->attr
.alloc_comp
)
5029 gfc_ref
*last
= NULL
;
5030 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
5031 if (ref
->type
== REF_COMPONENT
)
5034 /* Do not deallocate the components of a derived type
5035 ultimate pointer component. */
5036 if (!(last
&& last
->u
.c
.component
->attr
.pointer
)
5037 && !(!last
&& expr
->symtree
->n
.sym
->attr
.pointer
))
5039 tmp
= gfc_deallocate_alloc_comp (expr
->ts
.u
.derived
, se
.expr
,
5041 gfc_add_expr_to_block (&se
.pre
, tmp
);
5044 tmp
= gfc_array_deallocate (se
.expr
, pstat
, expr
);
5045 gfc_add_expr_to_block (&se
.pre
, tmp
);
5049 tmp
= gfc_deallocate_scalar_with_status (se
.expr
, pstat
, false,
5051 gfc_add_expr_to_block (&se
.pre
, tmp
);
5053 /* Set to zero after deallocation. */
5054 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
5056 build_int_cst (TREE_TYPE (se
.expr
), 0));
5057 gfc_add_expr_to_block (&se
.pre
, tmp
);
5059 if (al
->expr
->ts
.type
== BT_CLASS
)
5061 /* Reset _vptr component to declared type. */
5062 gfc_expr
*rhs
, *lhs
= gfc_copy_expr (al
->expr
);
5063 gfc_symbol
*vtab
= gfc_find_derived_vtab (al
->expr
->ts
.u
.derived
);
5064 gfc_add_vptr_component (lhs
);
5065 rhs
= gfc_lval_expr_from_sym (vtab
);
5066 tmp
= gfc_trans_pointer_assignment (lhs
, rhs
);
5067 gfc_add_expr_to_block (&se
.pre
, tmp
);
5068 gfc_free_expr (lhs
);
5069 gfc_free_expr (rhs
);
5073 /* Keep track of the number of failed deallocations by adding stat
5074 of the last deallocation to the running total. */
5075 if (code
->expr1
|| code
->expr2
)
5077 apstat
= fold_build2_loc (input_location
, PLUS_EXPR
,
5078 TREE_TYPE (stat
), astat
, stat
);
5079 gfc_add_modify (&se
.pre
, astat
, apstat
);
5082 tmp
= gfc_finish_block (&se
.pre
);
5083 gfc_add_expr_to_block (&block
, tmp
);
5084 gfc_free_expr (expr
);
5090 gfc_init_se (&se
, NULL
);
5091 gfc_conv_expr_lhs (&se
, code
->expr1
);
5092 tmp
= convert (TREE_TYPE (se
.expr
), astat
);
5093 gfc_add_modify (&block
, se
.expr
, tmp
);
5099 /* A better error message may be possible, but not required. */
5100 const char *msg
= "Attempt to deallocate an unallocated object";
5101 tree errmsg
, slen
, dlen
;
5103 gfc_init_se (&se
, NULL
);
5104 gfc_conv_expr_lhs (&se
, code
->expr2
);
5106 errmsg
= gfc_create_var (pchar_type_node
, "ERRMSG");
5108 gfc_add_modify (&block
, errmsg
,
5109 gfc_build_addr_expr (pchar_type_node
,
5110 gfc_build_localized_cstring_const (msg
)));
5112 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5113 dlen
= gfc_get_expr_charlen (code
->expr2
);
5114 slen
= fold_build2_loc (input_location
, MIN_EXPR
, TREE_TYPE (slen
), dlen
,
5117 dlen
= build_call_expr_loc (input_location
,
5118 built_in_decls
[BUILT_IN_MEMCPY
], 3,
5119 gfc_build_addr_expr (pvoid_type_node
, se
.expr
), errmsg
, slen
);
5121 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, astat
,
5122 build_int_cst (TREE_TYPE (astat
), 0));
5124 tmp
= build3_v (COND_EXPR
, tmp
, dlen
, build_empty_stmt (input_location
));
5126 gfc_add_expr_to_block (&block
, tmp
);
5129 return gfc_finish_block (&block
);
5132 #include "gt-fortran-trans-stmt.h"