/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002-2013 Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
25 #include "coretypes.h"
30 #include "trans-stmt.h"
31 #include "trans-types.h"
32 #include "trans-array.h"
33 #include "trans-const.h"
35 #include "dependency.h"
38 typedef struct iter_info
44 struct iter_info
*next
;
48 typedef struct forall_info
55 struct forall_info
*prev_nest
;
59 static void gfc_trans_where_2 (gfc_code
*, tree
, bool,
60 forall_info
*, stmtblock_t
*);
62 /* Translate a F95 label number to a LABEL_EXPR. */
65 gfc_trans_label_here (gfc_code
* code
)
67 return build1_v (LABEL_EXPR
, gfc_get_label_decl (code
->here
));
71 /* Given a variable expression which has been ASSIGNed to, find the decl
72 containing the auxiliary variables. For variables in common blocks this
76 gfc_conv_label_variable (gfc_se
* se
, gfc_expr
* expr
)
78 gcc_assert (expr
->symtree
->n
.sym
->attr
.assign
== 1);
79 gfc_conv_expr (se
, expr
);
80 /* Deals with variable in common block. Get the field declaration. */
81 if (TREE_CODE (se
->expr
) == COMPONENT_REF
)
82 se
->expr
= TREE_OPERAND (se
->expr
, 1);
83 /* Deals with dummy argument. Get the parameter declaration. */
84 else if (TREE_CODE (se
->expr
) == INDIRECT_REF
)
85 se
->expr
= TREE_OPERAND (se
->expr
, 0);
88 /* Translate a label assignment statement. */
91 gfc_trans_label_assign (gfc_code
* code
)
100 /* Start a new block. */
101 gfc_init_se (&se
, NULL
);
102 gfc_start_block (&se
.pre
);
103 gfc_conv_label_variable (&se
, code
->expr1
);
105 len
= GFC_DECL_STRING_LEN (se
.expr
);
106 addr
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
108 label_tree
= gfc_get_label_decl (code
->label1
);
110 if (code
->label1
->defined
== ST_LABEL_TARGET
111 || code
->label1
->defined
== ST_LABEL_DO_TARGET
)
113 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
114 len_tree
= integer_minus_one_node
;
118 gfc_expr
*format
= code
->label1
->format
;
120 label_len
= format
->value
.character
.length
;
121 len_tree
= build_int_cst (gfc_charlen_type_node
, label_len
);
122 label_tree
= gfc_build_wide_string_const (format
->ts
.kind
, label_len
+ 1,
123 format
->value
.character
.string
);
124 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
127 gfc_add_modify (&se
.pre
, len
, len_tree
);
128 gfc_add_modify (&se
.pre
, addr
, label_tree
);
130 return gfc_finish_block (&se
.pre
);
133 /* Translate a GOTO statement. */
136 gfc_trans_goto (gfc_code
* code
)
138 locus loc
= code
->loc
;
144 if (code
->label1
!= NULL
)
145 return build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
148 gfc_init_se (&se
, NULL
);
149 gfc_start_block (&se
.pre
);
150 gfc_conv_label_variable (&se
, code
->expr1
);
151 tmp
= GFC_DECL_STRING_LEN (se
.expr
);
152 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, tmp
,
153 build_int_cst (TREE_TYPE (tmp
), -1));
154 gfc_trans_runtime_check (true, false, tmp
, &se
.pre
, &loc
,
155 "Assigned label is not a target label");
157 assigned_goto
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
159 /* We're going to ignore a label list. It does not really change the
160 statement's semantics (because it is just a further restriction on
161 what's legal code); before, we were comparing label addresses here, but
162 that's a very fragile business and may break with optimization. So
165 target
= fold_build1_loc (input_location
, GOTO_EXPR
, void_type_node
,
167 gfc_add_expr_to_block (&se
.pre
, target
);
168 return gfc_finish_block (&se
.pre
);
172 /* Translate an ENTRY statement. Just adds a label for this entry point. */
174 gfc_trans_entry (gfc_code
* code
)
176 return build1_v (LABEL_EXPR
, code
->ext
.entry
->label
);
180 /* Replace a gfc_ss structure by another both in the gfc_se struct
181 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
182 to replace a variable ss by the corresponding temporary. */
185 replace_ss (gfc_se
*se
, gfc_ss
*old_ss
, gfc_ss
*new_ss
)
187 gfc_ss
**sess
, **loopss
;
189 /* The old_ss is a ss for a single variable. */
190 gcc_assert (old_ss
->info
->type
== GFC_SS_SECTION
);
192 for (sess
= &(se
->ss
); *sess
!= gfc_ss_terminator
; sess
= &((*sess
)->next
))
195 gcc_assert (*sess
!= gfc_ss_terminator
);
198 new_ss
->next
= old_ss
->next
;
201 for (loopss
= &(se
->loop
->ss
); *loopss
!= gfc_ss_terminator
;
202 loopss
= &((*loopss
)->loop_chain
))
203 if (*loopss
== old_ss
)
205 gcc_assert (*loopss
!= gfc_ss_terminator
);
208 new_ss
->loop_chain
= old_ss
->loop_chain
;
209 new_ss
->loop
= old_ss
->loop
;
211 gfc_free_ss (old_ss
);
215 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
216 elemental subroutines. Make temporaries for output arguments if any such
217 dependencies are found. Output arguments are chosen because internal_unpack
218 can be used, as is, to copy the result back to the variable. */
220 gfc_conv_elemental_dependencies (gfc_se
* se
, gfc_se
* loopse
,
221 gfc_symbol
* sym
, gfc_actual_arglist
* arg
,
222 gfc_dep_check check_variable
)
224 gfc_actual_arglist
*arg0
;
226 gfc_formal_arglist
*formal
;
234 if (loopse
->ss
== NULL
)
239 formal
= gfc_sym_get_dummy_args (sym
);
241 /* Loop over all the arguments testing for dependencies. */
242 for (; arg
!= NULL
; arg
= arg
->next
, formal
= formal
? formal
->next
: NULL
)
248 /* Obtain the info structure for the current argument. */
249 for (ss
= loopse
->ss
; ss
&& ss
!= gfc_ss_terminator
; ss
= ss
->next
)
250 if (ss
->info
->expr
== e
)
253 /* If there is a dependency, create a temporary and use it
254 instead of the variable. */
255 fsym
= formal
? formal
->sym
: NULL
;
256 if (e
->expr_type
== EXPR_VARIABLE
258 && fsym
->attr
.intent
!= INTENT_IN
259 && gfc_check_fncall_dependency (e
, fsym
->attr
.intent
,
260 sym
, arg0
, check_variable
))
262 tree initial
, temptype
;
263 stmtblock_t temp_post
;
266 tmp_ss
= gfc_get_array_ss (gfc_ss_terminator
, NULL
, ss
->dimen
,
268 gfc_mark_ss_chain_used (tmp_ss
, 1);
269 tmp_ss
->info
->expr
= ss
->info
->expr
;
270 replace_ss (loopse
, ss
, tmp_ss
);
272 /* Obtain the argument descriptor for unpacking. */
273 gfc_init_se (&parmse
, NULL
);
274 parmse
.want_pointer
= 1;
275 gfc_conv_expr_descriptor (&parmse
, e
);
276 gfc_add_block_to_block (&se
->pre
, &parmse
.pre
);
278 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
279 initialize the array temporary with a copy of the values. */
280 if (fsym
->attr
.intent
== INTENT_INOUT
281 || (fsym
->ts
.type
==BT_DERIVED
282 && fsym
->attr
.intent
== INTENT_OUT
))
283 initial
= parmse
.expr
;
284 /* For class expressions, we always initialize with the copy of
286 else if (e
->ts
.type
== BT_CLASS
)
287 initial
= parmse
.expr
;
291 if (e
->ts
.type
!= BT_CLASS
)
293 /* Find the type of the temporary to create; we don't use the type
294 of e itself as this breaks for subcomponent-references in e
295 (where the type of e is that of the final reference, but
296 parmse.expr's type corresponds to the full derived-type). */
297 /* TODO: Fix this somehow so we don't need a temporary of the whole
298 array but instead only the components referenced. */
299 temptype
= TREE_TYPE (parmse
.expr
); /* Pointer to descriptor. */
300 gcc_assert (TREE_CODE (temptype
) == POINTER_TYPE
);
301 temptype
= TREE_TYPE (temptype
);
302 temptype
= gfc_get_element_type (temptype
);
306 /* For class arrays signal that the size of the dynamic type has to
307 be obtained from the vtable, using the 'initial' expression. */
308 temptype
= NULL_TREE
;
310 /* Generate the temporary. Cleaning up the temporary should be the
311 very last thing done, so we add the code to a new block and add it
312 to se->post as last instructions. */
313 size
= gfc_create_var (gfc_array_index_type
, NULL
);
314 data
= gfc_create_var (pvoid_type_node
, NULL
);
315 gfc_init_block (&temp_post
);
316 tmp
= gfc_trans_create_temp_array (&se
->pre
, &temp_post
, tmp_ss
,
317 temptype
, initial
, false, true,
318 false, &arg
->expr
->where
);
319 gfc_add_modify (&se
->pre
, size
, tmp
);
320 tmp
= fold_convert (pvoid_type_node
, tmp_ss
->info
->data
.array
.data
);
321 gfc_add_modify (&se
->pre
, data
, tmp
);
323 /* Update other ss' delta. */
324 gfc_set_delta (loopse
->loop
);
326 /* Copy the result back using unpack..... */
327 if (e
->ts
.type
!= BT_CLASS
)
328 tmp
= build_call_expr_loc (input_location
,
329 gfor_fndecl_in_unpack
, 2, parmse
.expr
, data
);
332 /* ... except for class results where the copy is
334 tmp
= build_fold_indirect_ref_loc (input_location
, parmse
.expr
);
335 tmp
= gfc_conv_descriptor_data_get (tmp
);
336 tmp
= build_call_expr_loc (input_location
,
337 builtin_decl_explicit (BUILT_IN_MEMCPY
),
339 fold_convert (size_type_node
, size
));
341 gfc_add_expr_to_block (&se
->post
, tmp
);
343 /* parmse.pre is already added above. */
344 gfc_add_block_to_block (&se
->post
, &parmse
.post
);
345 gfc_add_block_to_block (&se
->post
, &temp_post
);
351 /* Get the interface symbol for the procedure corresponding to the given call.
352 We can't get the procedure symbol directly as we have to handle the case
353 of (deferred) type-bound procedures. */
356 get_proc_ifc_for_call (gfc_code
*c
)
360 gcc_assert (c
->op
== EXEC_ASSIGN_CALL
|| c
->op
== EXEC_CALL
);
362 sym
= gfc_get_proc_ifc_for_expr (c
->expr1
);
364 /* Fall back/last resort try. */
366 sym
= c
->resolved_sym
;
372 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
375 gfc_trans_call (gfc_code
* code
, bool dependency_check
,
376 tree mask
, tree count1
, bool invert
)
380 int has_alternate_specifier
;
381 gfc_dep_check check_variable
;
382 tree index
= NULL_TREE
;
383 tree maskexpr
= NULL_TREE
;
386 /* A CALL starts a new block because the actual arguments may have to
387 be evaluated first. */
388 gfc_init_se (&se
, NULL
);
389 gfc_start_block (&se
.pre
);
391 gcc_assert (code
->resolved_sym
);
393 ss
= gfc_ss_terminator
;
394 if (code
->resolved_sym
->attr
.elemental
)
395 ss
= gfc_walk_elemental_function_args (ss
, code
->ext
.actual
,
396 get_proc_ifc_for_call (code
),
399 /* Is not an elemental subroutine call with array valued arguments. */
400 if (ss
== gfc_ss_terminator
)
403 /* Translate the call. */
404 has_alternate_specifier
405 = gfc_conv_procedure_call (&se
, code
->resolved_sym
, code
->ext
.actual
,
408 /* A subroutine without side-effect, by definition, does nothing! */
409 TREE_SIDE_EFFECTS (se
.expr
) = 1;
411 /* Chain the pieces together and return the block. */
412 if (has_alternate_specifier
)
414 gfc_code
*select_code
;
416 select_code
= code
->next
;
417 gcc_assert(select_code
->op
== EXEC_SELECT
);
418 sym
= select_code
->expr1
->symtree
->n
.sym
;
419 se
.expr
= convert (gfc_typenode_for_spec (&sym
->ts
), se
.expr
);
420 if (sym
->backend_decl
== NULL
)
421 sym
->backend_decl
= gfc_get_symbol_decl (sym
);
422 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
425 gfc_add_expr_to_block (&se
.pre
, se
.expr
);
427 gfc_add_block_to_block (&se
.pre
, &se
.post
);
432 /* An elemental subroutine call with array valued arguments has
440 /* gfc_walk_elemental_function_args renders the ss chain in the
441 reverse order to the actual argument order. */
442 ss
= gfc_reverse_ss (ss
);
444 /* Initialize the loop. */
445 gfc_init_se (&loopse
, NULL
);
446 gfc_init_loopinfo (&loop
);
447 gfc_add_ss_to_loop (&loop
, ss
);
449 gfc_conv_ss_startstride (&loop
);
450 /* TODO: gfc_conv_loop_setup generates a temporary for vector
451 subscripts. This could be prevented in the elemental case
452 as temporaries are handled separatedly
453 (below in gfc_conv_elemental_dependencies). */
454 gfc_conv_loop_setup (&loop
, &code
->expr1
->where
);
455 gfc_mark_ss_chain_used (ss
, 1);
457 /* Convert the arguments, checking for dependencies. */
458 gfc_copy_loopinfo_to_se (&loopse
, &loop
);
461 /* For operator assignment, do dependency checking. */
462 if (dependency_check
)
463 check_variable
= ELEM_CHECK_VARIABLE
;
465 check_variable
= ELEM_DONT_CHECK_VARIABLE
;
467 gfc_init_se (&depse
, NULL
);
468 gfc_conv_elemental_dependencies (&depse
, &loopse
, code
->resolved_sym
,
469 code
->ext
.actual
, check_variable
);
471 gfc_add_block_to_block (&loop
.pre
, &depse
.pre
);
472 gfc_add_block_to_block (&loop
.post
, &depse
.post
);
474 /* Generate the loop body. */
475 gfc_start_scalarized_body (&loop
, &body
);
476 gfc_init_block (&block
);
480 /* Form the mask expression according to the mask. */
482 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
484 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
485 TREE_TYPE (maskexpr
), maskexpr
);
488 /* Add the subroutine call to the block. */
489 gfc_conv_procedure_call (&loopse
, code
->resolved_sym
,
490 code
->ext
.actual
, code
->expr1
,
495 tmp
= build3_v (COND_EXPR
, maskexpr
, loopse
.expr
,
496 build_empty_stmt (input_location
));
497 gfc_add_expr_to_block (&loopse
.pre
, tmp
);
498 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
499 gfc_array_index_type
,
500 count1
, gfc_index_one_node
);
501 gfc_add_modify (&loopse
.pre
, count1
, tmp
);
504 gfc_add_expr_to_block (&loopse
.pre
, loopse
.expr
);
506 gfc_add_block_to_block (&block
, &loopse
.pre
);
507 gfc_add_block_to_block (&block
, &loopse
.post
);
509 /* Finish up the loop block and the loop. */
510 gfc_add_expr_to_block (&body
, gfc_finish_block (&block
));
511 gfc_trans_scalarizing_loops (&loop
, &body
);
512 gfc_add_block_to_block (&se
.pre
, &loop
.pre
);
513 gfc_add_block_to_block (&se
.pre
, &loop
.post
);
514 gfc_add_block_to_block (&se
.pre
, &se
.post
);
515 gfc_cleanup_loop (&loop
);
518 return gfc_finish_block (&se
.pre
);
522 /* Translate the RETURN statement. */
525 gfc_trans_return (gfc_code
* code
)
533 /* If code->expr is not NULL, this return statement must appear
534 in a subroutine and current_fake_result_decl has already
537 result
= gfc_get_fake_result_decl (NULL
, 0);
540 gfc_warning ("An alternate return at %L without a * dummy argument",
541 &code
->expr1
->where
);
542 return gfc_generate_return ();
545 /* Start a new block for this statement. */
546 gfc_init_se (&se
, NULL
);
547 gfc_start_block (&se
.pre
);
549 gfc_conv_expr (&se
, code
->expr1
);
551 /* Note that the actually returned expression is a simple value and
552 does not depend on any pointers or such; thus we can clean-up with
553 se.post before returning. */
554 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, TREE_TYPE (result
),
555 result
, fold_convert (TREE_TYPE (result
),
557 gfc_add_expr_to_block (&se
.pre
, tmp
);
558 gfc_add_block_to_block (&se
.pre
, &se
.post
);
560 tmp
= gfc_generate_return ();
561 gfc_add_expr_to_block (&se
.pre
, tmp
);
562 return gfc_finish_block (&se
.pre
);
565 return gfc_generate_return ();
569 /* Translate the PAUSE statement. We have to translate this statement
570 to a runtime library call. */
573 gfc_trans_pause (gfc_code
* code
)
575 tree gfc_int4_type_node
= gfc_get_int_type (4);
579 /* Start a new block for this statement. */
580 gfc_init_se (&se
, NULL
);
581 gfc_start_block (&se
.pre
);
584 if (code
->expr1
== NULL
)
586 tmp
= build_int_cst (gfc_int4_type_node
, 0);
587 tmp
= build_call_expr_loc (input_location
,
588 gfor_fndecl_pause_string
, 2,
589 build_int_cst (pchar_type_node
, 0), tmp
);
591 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
593 gfc_conv_expr (&se
, code
->expr1
);
594 tmp
= build_call_expr_loc (input_location
,
595 gfor_fndecl_pause_numeric
, 1,
596 fold_convert (gfc_int4_type_node
, se
.expr
));
600 gfc_conv_expr_reference (&se
, code
->expr1
);
601 tmp
= build_call_expr_loc (input_location
,
602 gfor_fndecl_pause_string
, 2,
603 se
.expr
, se
.string_length
);
606 gfc_add_expr_to_block (&se
.pre
, tmp
);
608 gfc_add_block_to_block (&se
.pre
, &se
.post
);
610 return gfc_finish_block (&se
.pre
);
614 /* Translate the STOP statement. We have to translate this statement
615 to a runtime library call. */
618 gfc_trans_stop (gfc_code
*code
, bool error_stop
)
620 tree gfc_int4_type_node
= gfc_get_int_type (4);
624 /* Start a new block for this statement. */
625 gfc_init_se (&se
, NULL
);
626 gfc_start_block (&se
.pre
);
628 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
&& !error_stop
)
630 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
631 tmp
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
632 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
633 gfc_add_expr_to_block (&se
.pre
, tmp
);
635 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_finalize
, 0);
636 gfc_add_expr_to_block (&se
.pre
, tmp
);
639 if (code
->expr1
== NULL
)
641 tmp
= build_int_cst (gfc_int4_type_node
, 0);
642 tmp
= build_call_expr_loc (input_location
,
644 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
645 ? gfor_fndecl_caf_error_stop_str
646 : gfor_fndecl_error_stop_string
)
647 : gfor_fndecl_stop_string
,
648 2, build_int_cst (pchar_type_node
, 0), tmp
);
650 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
652 gfc_conv_expr (&se
, code
->expr1
);
653 tmp
= build_call_expr_loc (input_location
,
655 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
656 ? gfor_fndecl_caf_error_stop
657 : gfor_fndecl_error_stop_numeric
)
658 : gfor_fndecl_stop_numeric_f08
, 1,
659 fold_convert (gfc_int4_type_node
, se
.expr
));
663 gfc_conv_expr_reference (&se
, code
->expr1
);
664 tmp
= build_call_expr_loc (input_location
,
666 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
667 ? gfor_fndecl_caf_error_stop_str
668 : gfor_fndecl_error_stop_string
)
669 : gfor_fndecl_stop_string
,
670 2, se
.expr
, se
.string_length
);
673 gfc_add_expr_to_block (&se
.pre
, tmp
);
675 gfc_add_block_to_block (&se
.pre
, &se
.post
);
677 return gfc_finish_block (&se
.pre
);
682 gfc_trans_lock_unlock (gfc_code
*code
, gfc_exec_op type ATTRIBUTE_UNUSED
)
685 tree stat
= NULL_TREE
, lock_acquired
= NULL_TREE
;
687 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
688 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
689 if (!code
->expr2
&& !code
->expr4
&& gfc_option
.coarray
!= GFC_FCOARRAY_LIB
)
692 gfc_init_se (&se
, NULL
);
693 gfc_start_block (&se
.pre
);
697 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
698 gfc_init_se (&argse
, NULL
);
699 gfc_conv_expr_val (&argse
, code
->expr2
);
705 gcc_assert (code
->expr4
->expr_type
== EXPR_VARIABLE
);
706 gfc_init_se (&argse
, NULL
);
707 gfc_conv_expr_val (&argse
, code
->expr4
);
708 lock_acquired
= argse
.expr
;
711 if (stat
!= NULL_TREE
)
712 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
714 if (lock_acquired
!= NULL_TREE
)
715 gfc_add_modify (&se
.pre
, lock_acquired
,
716 fold_convert (TREE_TYPE (lock_acquired
),
719 return gfc_finish_block (&se
.pre
);
724 gfc_trans_sync (gfc_code
*code
, gfc_exec_op type
)
728 tree images
= NULL_TREE
, stat
= NULL_TREE
,
729 errmsg
= NULL_TREE
, errmsglen
= NULL_TREE
;
731 /* Short cut: For single images without bound checking or without STAT=,
732 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
733 if (!code
->expr2
&& !(gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
734 && gfc_option
.coarray
!= GFC_FCOARRAY_LIB
)
737 gfc_init_se (&se
, NULL
);
738 gfc_start_block (&se
.pre
);
740 if (code
->expr1
&& code
->expr1
->rank
== 0)
742 gfc_init_se (&argse
, NULL
);
743 gfc_conv_expr_val (&argse
, code
->expr1
);
749 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
750 gfc_init_se (&argse
, NULL
);
751 gfc_conv_expr_val (&argse
, code
->expr2
);
755 stat
= null_pointer_node
;
757 if (code
->expr3
&& gfc_option
.coarray
== GFC_FCOARRAY_LIB
758 && type
!= EXEC_SYNC_MEMORY
)
760 gcc_assert (code
->expr3
->expr_type
== EXPR_VARIABLE
);
761 gfc_init_se (&argse
, NULL
);
762 gfc_conv_expr (&argse
, code
->expr3
);
763 gfc_conv_string_parameter (&argse
);
764 errmsg
= gfc_build_addr_expr (NULL
, argse
.expr
);
765 errmsglen
= argse
.string_length
;
767 else if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
&& type
!= EXEC_SYNC_MEMORY
)
769 errmsg
= null_pointer_node
;
770 errmsglen
= build_int_cst (integer_type_node
, 0);
773 /* Check SYNC IMAGES(imageset) for valid image index.
774 FIXME: Add a check for image-set arrays. */
775 if (code
->expr1
&& (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
776 && code
->expr1
->rank
== 0)
779 if (gfc_option
.coarray
!= GFC_FCOARRAY_LIB
)
780 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
781 images
, build_int_cst (TREE_TYPE (images
), 1));
785 cond
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
786 images
, gfort_gvar_caf_num_images
);
787 cond2
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
789 build_int_cst (TREE_TYPE (images
), 1));
790 cond
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
791 boolean_type_node
, cond
, cond2
);
793 gfc_trans_runtime_check (true, false, cond
, &se
.pre
,
794 &code
->expr1
->where
, "Invalid image number "
796 fold_convert (integer_type_node
, images
));
799 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
800 image control statements SYNC IMAGES and SYNC ALL. */
801 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
803 tmp
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
804 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
805 gfc_add_expr_to_block (&se
.pre
, tmp
);
808 if (gfc_option
.coarray
!= GFC_FCOARRAY_LIB
|| type
== EXEC_SYNC_MEMORY
)
810 /* Set STAT to zero. */
812 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
814 else if (type
== EXEC_SYNC_ALL
)
816 /* SYNC ALL => stat == null_pointer_node
817 SYNC ALL(stat=s) => stat has an integer type
819 If "stat" has the wrong integer type, use a temp variable of
820 the right type and later cast the result back into "stat". */
821 if (stat
== null_pointer_node
|| TREE_TYPE (stat
) == integer_type_node
)
823 if (TREE_TYPE (stat
) == integer_type_node
)
824 stat
= gfc_build_addr_expr (NULL
, stat
);
826 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
827 3, stat
, errmsg
, errmsglen
);
828 gfc_add_expr_to_block (&se
.pre
, tmp
);
832 tree tmp_stat
= gfc_create_var (integer_type_node
, "stat");
834 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
835 3, gfc_build_addr_expr (NULL
, tmp_stat
),
837 gfc_add_expr_to_block (&se
.pre
, tmp
);
839 gfc_add_modify (&se
.pre
, stat
,
840 fold_convert (TREE_TYPE (stat
), tmp_stat
));
847 gcc_assert (type
== EXEC_SYNC_IMAGES
);
851 len
= build_int_cst (integer_type_node
, -1);
852 images
= null_pointer_node
;
854 else if (code
->expr1
->rank
== 0)
856 len
= build_int_cst (integer_type_node
, 1);
857 images
= gfc_build_addr_expr (NULL_TREE
, images
);
862 if (code
->expr1
->ts
.kind
!= gfc_c_int_kind
)
863 gfc_fatal_error ("Sorry, only support for integer kind %d "
864 "implemented for image-set at %L",
865 gfc_c_int_kind
, &code
->expr1
->where
);
867 gfc_conv_array_parameter (&se
, code
->expr1
, true, NULL
, NULL
, &len
);
870 tmp
= gfc_typenode_for_spec (&code
->expr1
->ts
);
871 if (GFC_ARRAY_TYPE_P (tmp
) || GFC_DESCRIPTOR_TYPE_P (tmp
))
872 tmp
= gfc_get_element_type (tmp
);
874 len
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
,
875 TREE_TYPE (len
), len
,
876 fold_convert (TREE_TYPE (len
),
877 TYPE_SIZE_UNIT (tmp
)));
878 len
= fold_convert (integer_type_node
, len
);
881 /* SYNC IMAGES(imgs) => stat == null_pointer_node
882 SYNC IMAGES(imgs,stat=s) => stat has an integer type
884 If "stat" has the wrong integer type, use a temp variable of
885 the right type and later cast the result back into "stat". */
886 if (stat
== null_pointer_node
|| TREE_TYPE (stat
) == integer_type_node
)
888 if (TREE_TYPE (stat
) == integer_type_node
)
889 stat
= gfc_build_addr_expr (NULL
, stat
);
891 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
,
892 5, fold_convert (integer_type_node
, len
),
893 images
, stat
, errmsg
, errmsglen
);
894 gfc_add_expr_to_block (&se
.pre
, tmp
);
898 tree tmp_stat
= gfc_create_var (integer_type_node
, "stat");
900 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
,
901 5, fold_convert (integer_type_node
, len
),
902 images
, gfc_build_addr_expr (NULL
, tmp_stat
),
904 gfc_add_expr_to_block (&se
.pre
, tmp
);
906 gfc_add_modify (&se
.pre
, stat
,
907 fold_convert (TREE_TYPE (stat
), tmp_stat
));
911 return gfc_finish_block (&se
.pre
);
915 /* Generate GENERIC for the IF construct. This function also deals with
916 the simple IF statement, because the front end translates the IF
917 statement into an IF construct.
949 where COND_S is the simplified version of the predicate. PRE_COND_S
950 are the pre side-effects produced by the translation of the
952 We need to build the chain recursively otherwise we run into
953 problems with folding incomplete statements. */
956 gfc_trans_if_1 (gfc_code
* code
)
963 /* Check for an unconditional ELSE clause. */
965 return gfc_trans_code (code
->next
);
967 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
968 gfc_init_se (&if_se
, NULL
);
969 gfc_start_block (&if_se
.pre
);
971 /* Calculate the IF condition expression. */
972 if (code
->expr1
->where
.lb
)
974 gfc_save_backend_locus (&saved_loc
);
975 gfc_set_backend_locus (&code
->expr1
->where
);
978 gfc_conv_expr_val (&if_se
, code
->expr1
);
980 if (code
->expr1
->where
.lb
)
981 gfc_restore_backend_locus (&saved_loc
);
983 /* Translate the THEN clause. */
984 stmt
= gfc_trans_code (code
->next
);
986 /* Translate the ELSE clause. */
988 elsestmt
= gfc_trans_if_1 (code
->block
);
990 elsestmt
= build_empty_stmt (input_location
);
992 /* Build the condition expression and add it to the condition block. */
993 loc
= code
->expr1
->where
.lb
? code
->expr1
->where
.lb
->location
: input_location
;
994 stmt
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, if_se
.expr
, stmt
,
997 gfc_add_expr_to_block (&if_se
.pre
, stmt
);
999 /* Finish off this statement. */
1000 return gfc_finish_block (&if_se
.pre
);
1004 gfc_trans_if (gfc_code
* code
)
1009 /* Create exit label so it is available for trans'ing the body code. */
1010 exit_label
= gfc_build_label_decl (NULL_TREE
);
1011 code
->exit_label
= exit_label
;
1013 /* Translate the actual code in code->block. */
1014 gfc_init_block (&body
);
1015 gfc_add_expr_to_block (&body
, gfc_trans_if_1 (code
->block
));
1017 /* Add exit label. */
1018 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
1020 return gfc_finish_block (&body
);
1024 /* Translate an arithmetic IF expression.
1026 IF (cond) label1, label2, label3 translates to
1038 An optimized version can be generated in case of equal labels.
1039 E.g., if label1 is equal to label2, we can translate it to
1048 gfc_trans_arithmetic_if (gfc_code
* code
)
1056 /* Start a new block. */
1057 gfc_init_se (&se
, NULL
);
1058 gfc_start_block (&se
.pre
);
1060 /* Pre-evaluate COND. */
1061 gfc_conv_expr_val (&se
, code
->expr1
);
1062 se
.expr
= gfc_evaluate_now (se
.expr
, &se
.pre
);
1064 /* Build something to compare with. */
1065 zero
= gfc_build_const (TREE_TYPE (se
.expr
), integer_zero_node
);
1067 if (code
->label1
->value
!= code
->label2
->value
)
1069 /* If (cond < 0) take branch1 else take branch2.
1070 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1071 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
1072 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label2
));
1074 if (code
->label1
->value
!= code
->label3
->value
)
1075 tmp
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
1078 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1081 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1082 tmp
, branch1
, branch2
);
1085 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
1087 if (code
->label1
->value
!= code
->label3
->value
1088 && code
->label2
->value
!= code
->label3
->value
)
1090 /* if (cond <= 0) take branch1 else take branch2. */
1091 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label3
));
1092 tmp
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
1094 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1095 tmp
, branch1
, branch2
);
1098 /* Append the COND_EXPR to the evaluation of COND, and return. */
1099 gfc_add_expr_to_block (&se
.pre
, branch1
);
1100 return gfc_finish_block (&se
.pre
);
1104 /* Translate a CRITICAL block. */
1106 gfc_trans_critical (gfc_code
*code
)
1111 gfc_start_block (&block
);
1113 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
1115 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_critical
, 0);
1116 gfc_add_expr_to_block (&block
, tmp
);
1119 tmp
= gfc_trans_code (code
->block
->next
);
1120 gfc_add_expr_to_block (&block
, tmp
);
1122 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
1124 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_end_critical
,
1126 gfc_add_expr_to_block (&block
, tmp
);
1130 return gfc_finish_block (&block
);
1134 /* Do proper initialization for ASSOCIATE names. */
1137 trans_associate_var (gfc_symbol
*sym
, gfc_wrapped_block
*block
)
1148 gcc_assert (sym
->assoc
);
1149 e
= sym
->assoc
->target
;
1151 class_target
= (e
->expr_type
== EXPR_VARIABLE
)
1152 && (gfc_is_class_scalar_expr (e
)
1153 || gfc_is_class_array_ref (e
, NULL
));
1155 unlimited
= UNLIMITED_POLY (e
);
1157 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1158 to array temporary) for arrays with either unknown shape or if associating
1160 if (sym
->attr
.dimension
&& !class_target
1161 && (sym
->as
->type
== AS_DEFERRED
|| sym
->assoc
->variable
))
1166 desc
= sym
->backend_decl
;
1168 /* If association is to an expression, evaluate it and create temporary.
1169 Otherwise, get descriptor of target for pointer assignment. */
1170 gfc_init_se (&se
, NULL
);
1171 if (sym
->assoc
->variable
)
1173 se
.direct_byref
= 1;
1176 gfc_conv_expr_descriptor (&se
, e
);
1178 /* If we didn't already do the pointer assignment, set associate-name
1179 descriptor to the one generated for the temporary. */
1180 if (!sym
->assoc
->variable
)
1184 gfc_add_modify (&se
.pre
, desc
, se
.expr
);
1186 /* The generated descriptor has lower bound zero (as array
1187 temporary), shift bounds so we get lower bounds of 1. */
1188 for (dim
= 0; dim
< e
->rank
; ++dim
)
1189 gfc_conv_shift_descriptor_lbound (&se
.pre
, desc
,
1190 dim
, gfc_index_one_node
);
1193 /* Done, register stuff as init / cleanup code. */
1194 gfc_add_init_cleanup (block
, gfc_finish_block (&se
.pre
),
1195 gfc_finish_block (&se
.post
));
1198 /* Temporaries, arising from TYPE IS, just need the descriptor of class
1199 arrays to be assigned directly. */
1200 else if (class_target
&& sym
->attr
.dimension
1201 && (sym
->ts
.type
== BT_DERIVED
|| unlimited
))
1205 gfc_init_se (&se
, NULL
);
1206 se
.descriptor_only
= 1;
1207 gfc_conv_expr (&se
, e
);
1209 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se
.expr
)));
1210 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym
->backend_decl
)));
1212 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
1216 /* Recover the dtype, which has been overwritten by the
1217 assignment from an unlimited polymorphic object. */
1218 tmp
= gfc_conv_descriptor_dtype (sym
->backend_decl
);
1219 gfc_add_modify (&se
.pre
, tmp
,
1220 gfc_get_dtype (TREE_TYPE (sym
->backend_decl
)));
1223 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1224 gfc_finish_block (&se
.post
));
1227 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1228 else if (gfc_is_associate_pointer (sym
))
1232 gcc_assert (!sym
->attr
.dimension
);
1234 gfc_init_se (&se
, NULL
);
1236 /* Class associate-names come this way because they are
1237 unconditionally associate pointers and the symbol is scalar. */
1238 if (sym
->ts
.type
== BT_CLASS
&& CLASS_DATA (sym
)->attr
.dimension
)
1240 /* For a class array we need a descriptor for the selector. */
1241 gfc_conv_expr_descriptor (&se
, e
);
1243 /* Obtain a temporary class container for the result. */
1244 gfc_conv_class_to_class (&se
, e
, sym
->ts
, false, true, false, false);
1245 se
.expr
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
1247 /* Set the offset. */
1248 desc
= gfc_class_data_get (se
.expr
);
1249 offset
= gfc_index_zero_node
;
1250 for (n
= 0; n
< e
->rank
; n
++)
1252 dim
= gfc_rank_cst
[n
];
1253 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
1254 gfc_array_index_type
,
1255 gfc_conv_descriptor_stride_get (desc
, dim
),
1256 gfc_conv_descriptor_lbound_get (desc
, dim
));
1257 offset
= fold_build2_loc (input_location
, MINUS_EXPR
,
1258 gfc_array_index_type
,
1261 gfc_conv_descriptor_offset_set (&se
.pre
, desc
, offset
);
1263 else if (sym
->ts
.type
== BT_CLASS
&& e
->ts
.type
== BT_CLASS
1264 && CLASS_DATA (e
)->attr
.dimension
)
1266 /* This is bound to be a class array element. */
1267 gfc_conv_expr_reference (&se
, e
);
1268 /* Get the _vptr component of the class object. */
1269 tmp
= gfc_get_vptr_from_expr (se
.expr
);
1270 /* Obtain a temporary class container for the result. */
1271 gfc_conv_derived_to_class (&se
, e
, sym
->ts
, tmp
, false, false);
1272 se
.expr
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
1275 gfc_conv_expr (&se
, e
);
1277 tmp
= TREE_TYPE (sym
->backend_decl
);
1278 tmp
= gfc_build_addr_expr (tmp
, se
.expr
);
1279 gfc_add_modify (&se
.pre
, sym
->backend_decl
, tmp
);
1281 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1282 gfc_finish_block (&se
.post
));
1285 /* Do a simple assignment. This is for scalar expressions, where we
1286 can simply use expression assignment. */
1291 lhs
= gfc_lval_expr_from_sym (sym
);
1292 tmp
= gfc_trans_assignment (lhs
, e
, false, true);
1293 gfc_add_init_cleanup (block
, tmp
, NULL_TREE
);
1296 /* Set the stringlength from the vtable size. */
1297 if (sym
->ts
.type
== BT_CHARACTER
&& sym
->attr
.select_type_temporary
)
1301 gfc_init_se (&se
, NULL
);
1302 gcc_assert (UNLIMITED_POLY (e
->symtree
->n
.sym
));
1303 tmp
= gfc_get_symbol_decl (e
->symtree
->n
.sym
);
1304 tmp
= gfc_vtable_size_get (tmp
);
1305 gfc_get_symbol_decl (sym
);
1306 charlen
= sym
->ts
.u
.cl
->backend_decl
;
1307 gfc_add_modify (&se
.pre
, charlen
,
1308 fold_convert (TREE_TYPE (charlen
), tmp
));
1309 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1310 gfc_finish_block (&se
.post
));
/* Translate a BLOCK construct.  This is basically what we would do for a
   procedure body.  */

tree
gfc_trans_block_construct (gfc_code* code)
{
  gfc_namespace* ns;
  gfc_symbol* sym;
  gfc_wrapped_block block;
  tree exit_label;
  stmtblock_t body;
  gfc_association_list *ass;

  /* The BLOCK carries its own namespace; its proc_name symbol stands in
     for the "procedure" the block body behaves like.  */
  ns = code->ext.block.ns;
  gcc_assert (ns);
  sym = ns->proc_name;
  gcc_assert (sym);

  /* Process local variables.  */
  gcc_assert (!sym->tlink);
  sym->tlink = sym;
  gfc_process_block_locals (ns);

  /* Generate code including exit-label.  The label is hung on the gfc_code
     so that EXIT statements inside the block can branch to it.  */
  gfc_init_block (&body);
  exit_label = gfc_build_label_decl (NULL_TREE);
  code->exit_label = exit_label;
  gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
  gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));

  /* Finish everything: wrap the body, emit init/cleanup for deferred
     variables, then handle ASSOCIATE names attached to this block.  */
  gfc_start_wrapped_block (&block, gfc_finish_block (&body));
  gfc_trans_deferred_vars (sym, &block);
  for (ass = code->ext.block.assoc; ass; ass = ass->next)
    trans_associate_var (ass->st->n.sym, &block);

  return gfc_finish_wrapped_block (&block);
}
/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect
   results.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [Evaluate loop bounds and step]
   dovar = from;
   if ((step > 0) ? (dovar <= to) : (dovar => to))
    {
      for (;;)
        {
	  body;
   cycle_label:
	  cond = (dovar == to);
	  dovar += step;
	  if (cond) goto end_label;
	}
      }
   end_label:

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */

static tree
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
		     tree from, tree to, tree step, tree exit_cond)
{
  stmtblock_t body;
  tree type;
  tree cond;
  tree tmp;
  tree saved_dovar = NULL;
  tree cycle_label;
  tree exit_label;
  location_t loc;

  type = TREE_TYPE (dovar);

  loc = code->ext.iterator->start->where.lb->location;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify_loc (loc, pblock, dovar,
		      fold_convert (TREE_TYPE (dovar), from));

  /* Save value for do-tinkering checking (-fcheck=do): a shadow copy lets
     us detect user code that modifies the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
    }

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
			     dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
			       "Loop variable has been modified");
    }

  /* Exit the loop if there is an I/O result condition or error.  */
  if (exit_cond)
    {
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			     exit_cond, tmp,
			     build_empty_stmt (loc));
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Evaluate the loop condition BEFORE incrementing, so that the final
     dovar += step cannot overflow past 'to' and wrap.  */
  cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
			  to);
  cond = gfc_evaluate_now_loc (loc, cond, &body);

  /* Increment the loop variable.  */
  tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
  gfc_add_modify_loc (loc, &body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify_loc (loc, &body, saved_dovar, dovar);

  /* The loop exit.  */
  tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			 cond, tmp, build_empty_stmt (loc));
  gfc_add_expr_to_block (&body, tmp);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);

  /* Only execute the loop if the number of iterations is positive.
     step is a compile-time constant here (+1 or -1), so pick the
     comparison direction statically.  */
  if (tree_int_cst_sgn (step) > 0)
    cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
			    to);
  else
    cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
			    to);
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
			 build_empty_stmt (loc));
  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
}
/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially
   so for Fortran.

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step
      body
   END DO

   to:

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;
   dovar = from;
   if (empty) goto exit_label;
   for (;;)
     {
       body;
cycle_label:
       dovar += step
       countm1t = countm1;
       countm1--;
       if (countm1t == 0) goto exit_label;
     }
exit_label:

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */

tree
gfc_trans_do (gfc_code * code, tree exit_cond)
{
  gfc_se se;
  tree dovar;
  tree saved_dovar = NULL;
  tree from;
  tree to;
  tree step;
  tree countm1;
  tree type;
  tree utype;
  tree cond;
  tree cycle_label;
  tree exit_label;
  tree tmp;
  stmtblock_t block;
  stmtblock_t body;
  location_t loc;

  gfc_start_block (&block);

  loc = code->ext.iterator->start->where.lb->location;

  /* Evaluate all the expressions in the iterator.  Bounds and step are
     evaluated once, up front, per the standard.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (&block, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (&block, &se.pre);
  from = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (&block, &se.pre);
  to = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (&block, &se.pre);
  step = gfc_evaluate_now (se.expr, &block);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
			     build_zero_cst (type));
      gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
			       "DO step value is zero");
    }

  /* Special case simple loops: integer induction variable with
     constant step +1 or -1 needs no separate iteration count.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (integer_onep (step)
	  || tree_int_cst_equal (step, integer_minus_one_node)))
    return gfc_trans_simple_do (code, &block, dovar, from, to, step,
				exit_cond);

  if (TREE_CODE (type) == INTEGER_TYPE)
    utype = unsigned_type_for (type);
  else
    utype = unsigned_type_for (gfc_array_index_type);
  countm1 = gfc_create_var (utype, "countm1");

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);
  TREE_USED (exit_label) = 1;

  /* Put these labels where they can be found later.  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (&block, dovar, from);

  /* Save value for do-tinkering checking (-fcheck=do).  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
    }

  /* Initialize loop count and jump to exit label if the loop is empty.
     This code is executed before we enter the loop body.  We generate:
     if (step > 0)
       {
	 if (to < from)
	   goto exit_label;
	 countm1 = (to - from) / step;
       }
     else
       {
	 if (to > from)
	   goto exit_label;
	 countm1 = (from - to) / -step;
       }  */

  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      tree pos, neg, tou, fromu, stepu, tmp2;

      /* The distance from FROM to TO cannot always be represented in a signed
         type, thus use unsigned arithmetic, also to avoid any undefined
	 overflow issues.  */
      tou = fold_convert (utype, to);
      fromu = fold_convert (utype, from);
      stepu = fold_convert (utype, step);

      /* For a positive step, when to < from, exit, otherwise compute
         countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step  */
      tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
      tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
			      fold_build2_loc (loc, MINUS_EXPR, utype,
					       tou, fromu),
			      stepu);
      pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
			     fold_build1_loc (loc, GOTO_EXPR, void_type_node,
					      exit_label),
			     fold_build2 (MODIFY_EXPR, void_type_node,
					  countm1, tmp2));

      /* For a negative step, when to > from, exit, otherwise compute
         countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step  */
      tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to, from);
      tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
			      fold_build2_loc (loc, MINUS_EXPR, utype,
					       fromu, tou),
			      fold_build1_loc (loc, NEGATE_EXPR, utype,
					       stepu));
      neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
			     fold_build1_loc (loc, GOTO_EXPR, void_type_node,
					      exit_label),
			     fold_build2 (MODIFY_EXPR, void_type_node,
					  countm1, tmp2));

      /* Select the positive- or negative-step variant at run time.  */
      tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
			     build_int_cst (TREE_TYPE (step), 0));
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, neg, pos);

      gfc_add_expr_to_block (&block, tmp);
    }
  else
    {
      tree pos_step;

      /* TODO: We could use the same width as the real type.
	 This would probably cause more problems that it solves
	 when we implement "long double" types.  */

      tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
      tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
      tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
      gfc_add_modify (&block, countm1, tmp);

      /* We need a special check for empty loops:
	 empty = (step > 0 ? to < from : to > from);  */
      pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
				  build_zero_cst (type));
      tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
			     fold_build2_loc (loc, LT_EXPR,
					      boolean_type_node, to, from),
			     fold_build2_loc (loc, GT_EXPR,
					      boolean_type_node, to, from));
      /* If the loop is empty, go directly to the exit label.  */
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
			     build1_v (GOTO_EXPR, exit_label),
			     build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
			     saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
			       "Loop variable has been modified");
    }

  /* Exit the loop if there is an I/O result condition or error.  */
  if (exit_cond)
    {
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			     exit_cond, tmp,
			     build_empty_stmt (input_location));
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Increment the loop variable.  */
  tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
  gfc_add_modify_loc (loc, &body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify_loc (loc, &body, saved_dovar, dovar);

  /* Initialize countm1t: a snapshot of countm1 taken before the decrement,
     so the exit test below sees the pre-decrement value.  */
  tree countm1t = gfc_create_var (utype, "countm1t");
  gfc_add_modify_loc (loc, &body, countm1t, countm1);

  /* Decrement the loop count.  */
  tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
			 build_int_cst (utype, 1));
  gfc_add_modify_loc (loc, &body, countm1, tmp);

  /* End with the loop condition.  Loop until countm1t == 0.  */
  cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1t,
			  build_int_cst (utype, 0));
  tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			 cond, tmp, build_empty_stmt (loc));
  gfc_add_expr_to_block (&body, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&body);

  /* The for loop itself.  */
  tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the DO WHILE construct.

   We translate

   DO WHILE (cond)
      body
   END DO

   to:

   for ( ; ; )
     {
       pre_cond;
       if (! cond) goto exit_label;
       body;
cycle_label:
     }
exit_label:

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */

tree
gfc_trans_do_while (gfc_code * code)
{
  gfc_se cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;
  stmtblock_t block;

  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Create a GIMPLE version of the exit condition.  The condition's
     side-effect code (cond.pre) must run on every iteration, so it is
     emitted inside the loop, before the test.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr1);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1_loc (code->expr1->where.lb->location,
			       TRUTH_NOT_EXPR, TREE_TYPE (cond.expr),
			       cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
			 void_type_node, cond.expr, tmp,
			 build_empty_stmt (code->expr1->where.lb->location));
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the loop.  */
  tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
			 void_type_node, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for INTEGER case expressions,
   without killing all potential optimizations.  The problem is that
   Fortran allows unbounded cases, but the back-end does not, so we
   need to intercept those before we enter the equivalent SWITCH_EXPR
   we can build.

   For example, we translate this,

   SELECT CASE (expr)
      CASE (:100,101,105:115)
	 block_1
      CASE (190:199,200:)
	 block_2
      CASE (300)
	 block_3
      CASE DEFAULT
	 block_4
   END SELECT

   to the GENERIC equivalent,

     switch (expr)
       {
	 case (minimum value for typeof(expr) ... 100:
	 case 101:
	 case 105 ... 114:
	   block1:
	   goto end_label;

	 case 200 ... (maximum value for typeof(expr):
	 case 190 ... 199:
	   block2;
	   goto end_label;

	 case 300:
	   block_3;
	   goto end_label;

	 default:
	   block_4;
	   goto end_label;
       }

     end_label:  */

static tree
gfc_trans_integer_select (gfc_code * code)
{
  gfc_code *c;
  gfc_case *cp;
  tree end_label;
  tree tmp;
  gfc_se se;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Calculate the switch expression.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.block.case_list; cp; cp = cp->next)
	{
	  tree low, high;
	  tree label;

	  /* Assume it's the default case.  */
	  low = high = NULL_TREE;

	  if (cp->low)
	    {
	      low = gfc_conv_mpz_to_tree (cp->low->value.integer,
					  cp->low->ts.kind);

	      /* If there's only a lower bound, set the high bound to the
		 maximum value of the case expression.  */
	      if (!cp->high)
		high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
	    }

	  if (cp->high)
	    {
	      /* Three cases are possible here:

		 1) There is no lower bound, e.g. CASE (:N).
		 2) There is a lower bound .NE. high bound, that is
		    a case range, e.g. CASE (N:M) where M>N (we make
		    sure that M>N during type resolution).
		 3) There is a lower bound, and it has the same value
		    as the high bound, e.g. CASE (N:N).  This is our
		    internal representation of CASE(N).

		 In the first and second case, we need to set a value for
		 high.  In the third case, we don't because the GCC middle
		 end represents a single case value by just letting high be
		 a NULL_TREE.  We can't do that because we need to be able
		 to represent unbounded cases.  */

	      if (!cp->low
		  || (cp->low
		      && mpz_cmp (cp->low->value.integer,
				  cp->high->value.integer) != 0))
		high = gfc_conv_mpz_to_tree (cp->high->value.integer,
					     cp->high->ts.kind);

	      /* Unbounded case.  */
	      if (!cp->low)
		low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
	    }

	  /* Build a label.  */
	  label = gfc_build_label_decl (NULL_TREE);

	  /* Add this case label.
	     Add parameter 'label', make it match GCC backend.  */
	  tmp = build_case_label (low, high, label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  tmp = gfc_finish_block (&body);
  tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
			 se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
/* Translate the SELECT CASE construct for LOGICAL case expressions.

   There are only two cases possible here, even though the standard
   does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
   .FALSE., and DEFAULT.

   We never generate more than two blocks here.  Instead, we always
   try to eliminate the DEFAULT case.  This way, we can translate this
   kind of SELECT construct to a simple

   if {} else {};

   expression in GENERIC.  */

static tree
gfc_trans_logical_select (gfc_code * code)
{
  gfc_code *c;
  gfc_code *t, *f, *d;
  gfc_case *cp;
  gfc_se se;
  stmtblock_t block;

  /* Assume we don't have any cases at all.  */
  t = f = d = NULL;

  /* Now see which ones we actually do have.  We can have at most two
     cases in a single case list: one for .TRUE. and one for .FALSE.
     The default case is always separate.  If the cases for .TRUE. and
     .FALSE. are in the same case list, the block for that case list
     always executed, and we don't generate code a COND_EXPR.  */
  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.block.case_list; cp; cp = cp->next)
	{
	  if (cp->low)
	    {
	      if (cp->low->value.logical == 0) /* .FALSE.  */
		f = c;
	      else /* if (cp->value.logical != 0), thus .TRUE.  */
		t = c;
	    }
	  else
	    d = c;
	}
    }

  /* Start a new block.  */
  gfc_start_block (&block);

  /* Calculate the switch expression.  We always need to do this
     because it may have side effects.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  if (t == f && t != NULL)
    {
      /* Cases for .TRUE. and .FALSE. are in the same block.  Just
         translate the code for these cases, append it to the current
         block.  */
      gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
    }
  else
    {
      tree true_tree, false_tree, stmt;

      true_tree = build_empty_stmt (input_location);
      false_tree = build_empty_stmt (input_location);

      /* If we have a case for .TRUE. and for .FALSE., discard the default case.
	 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
	 make the missing case the default case.  */
      if (t != NULL && f != NULL)
	d = NULL;
      else if (d != NULL)
	{
	  if (t == NULL)
	    t = d;
	  else
	    f = d;
	}

      /* Translate the code for each of these blocks, and append it to
	 the current block.  */
      if (t != NULL)
	true_tree = gfc_trans_code (t->next);

      if (f != NULL)
	false_tree = gfc_trans_code (f->next);

      stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			      se.expr, true_tree, false_tree);
      gfc_add_expr_to_block (&block, stmt);
    }

  return gfc_finish_block (&block);
}
/* The jump table types are stored in static variables to avoid
   constructing them from scratch every single time.  Index 0 is used
   for character kind 1 and index 1 for kind 4 (see
   gfc_trans_character_select).  GTY(()) registers the roots with GCC's
   garbage collector so the cached types survive collections.  */
static GTY(()) tree select_struct[2];
/* Translate the SELECT CASE construct for CHARACTER case expressions.
   Instead of generating compares and jumps, it is far simpler to
   generate a data structure describing the cases in order and call a
   library subroutine that locates the right case.
   This is particularly true because this is the only case where we
   might have to dispose of a temporary.
   The library subroutine returns a pointer to jump to or NULL if no
   branches are to be taken.  */

static tree
gfc_trans_character_select (gfc_code *code)
{
  tree init, end_label, tmp, type, case_num, label, fndecl;
  stmtblock_t block, body;
  gfc_case *cp, *d;
  gfc_code *c;
  gfc_se se, expr1se;
  int n, k;
  vec<constructor_elt, va_gc> *inits = NULL;

  tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);

  /* The jump table types are stored in static variables to avoid
     constructing them from scratch every single time.  */
  static tree ss_string1[2], ss_string1_len[2];
  static tree ss_string2[2], ss_string2_len[2];
  static tree ss_target[2];

  /* The case list is kept sorted in a binary tree; walk to its
     leftmost (smallest) element.  */
  cp = code->block->ext.block.case_list;
  while (cp->left != NULL)
    cp = cp->left;

  /* Generate the body */
  gfc_start_block (&block);
  gfc_init_se (&expr1se, NULL);
  gfc_conv_expr_reference (&expr1se, code->expr1);

  gfc_add_block_to_block (&block, &expr1se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  /* Attempt to optimize length 1 selects: when the selector has length 1
     and every case value is effectively a single significant character
     (trailing blanks only), we can switch on the first character instead
     of calling the library routine.  */
  if (integer_onep (expr1se.string_length))
    {
      /* Scan all cases; break out of the loop (leaving d non-NULL) as
	 soon as one is found that the single-character switch cannot
	 represent.  */
      for (d = cp; d; d = d->right)
	{
	  int i;
	  if (d->low)
	    {
	      gcc_assert (d->low->expr_type == EXPR_CONSTANT
			  && d->low->ts.type == BT_CHARACTER);
	      if (d->low->value.character.length > 1)
		{
		  for (i = 1; i < d->low->value.character.length; i++)
		    if (d->low->value.character.string[i] != ' ')
		      break;
		  if (i != d->low->value.character.length)
		    {
		      if (optimize && d->high && i == 1)
			{
			  gcc_assert (d->high->expr_type == EXPR_CONSTANT
				      && d->high->ts.type == BT_CHARACTER);
			  if (d->high->value.character.length > 1
			      && (d->low->value.character.string[0]
				  == d->high->value.character.string[0])
			      && d->high->value.character.string[1] != ' '
			      && ((d->low->value.character.string[1] < ' ')
				  == (d->high->value.character.string[1]
				      < ' ')))
			    continue;
			}
		      break;
		    }
		}
	    }
	  if (d->high)
	    {
	      gcc_assert (d->high->expr_type == EXPR_CONSTANT
			  && d->high->ts.type == BT_CHARACTER);
	      if (d->high->value.character.length > 1)
		{
		  for (i = 1; i < d->high->value.character.length; i++)
		    if (d->high->value.character.string[i] != ' ')
		      break;
		  if (i != d->high->value.character.length)
		    break;
		}
	    }
	}
      if (d == NULL)
	{
	  tree ctype = gfc_get_char_type (code->expr1->ts.kind);

	  for (c = code->block; c; c = c->block)
	    {
	      for (cp = c->ext.block.case_list; cp; cp = cp->next)
		{
		  tree low, high;
		  tree label;
		  gfc_char_t r;

		  /* Assume it's the default case.  */
		  low = high = NULL_TREE;

		  if (cp->low)
		    {
		      /* CASE ('ab') or CASE ('ab':'az') will never match
			 any length 1 character.  */
		      if (cp->low->value.character.length > 1
			  && cp->low->value.character.string[1] != ' ')
			continue;

		      if (cp->low->value.character.length > 0)
			r = cp->low->value.character.string[0];
		      else
			r = ' ';
		      low = build_int_cst (ctype, r);

		      /* If there's only a lower bound, set the high bound
			 to the maximum value of the case expression.  */
		      if (!cp->high)
			high = TYPE_MAX_VALUE (ctype);
		    }

		  if (cp->high)
		    {
		      if (!cp->low
			  || (cp->low->value.character.string[0]
			      != cp->high->value.character.string[0]))
			{
			  if (cp->high->value.character.length > 0)
			    r = cp->high->value.character.string[0];
			  else
			    r = ' ';
			  high = build_int_cst (ctype, r);
			}

		      /* Unbounded case.  */
		      if (!cp->low)
			low = TYPE_MIN_VALUE (ctype);
		    }

		  /* Build a label.  */
		  label = gfc_build_label_decl (NULL_TREE);

		  /* Add this case label.
		     Add parameter 'label', make it match GCC backend.  */
		  tmp = build_case_label (low, high, label);
		  gfc_add_expr_to_block (&body, tmp);
		}

	      /* Add the statements for this case.  */
	      tmp = gfc_trans_code (c->next);
	      gfc_add_expr_to_block (&body, tmp);

	      /* Break to the end of the construct.  */
	      tmp = build1_v (GOTO_EXPR, end_label);
	      gfc_add_expr_to_block (&body, tmp);
	    }

	  /* Switch on the selector's single character.  */
	  tmp = gfc_string_to_single_character (expr1se.string_length,
						expr1se.expr,
						code->expr1->ts.kind);
	  case_num = gfc_create_var (ctype, "case_num");
	  gfc_add_modify (&block, case_num, tmp);

	  gfc_add_block_to_block (&block, &expr1se.post);

	  tmp = gfc_finish_block (&body);
	  tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
				 case_num, tmp, NULL_TREE);
	  gfc_add_expr_to_block (&block, tmp);

	  tmp = build1_v (LABEL_EXPR, end_label);
	  gfc_add_expr_to_block (&block, tmp);

	  return gfc_finish_block (&block);
	}
    }

  /* General path: build a jump table and call the runtime's string
     select routine.  k indexes the per-kind cached types.  */
  if (code->expr1->ts.kind == 1)
    k = 0;
  else if (code->expr1->ts.kind == 4)
    k = 1;
  else
    gcc_unreachable ();

  if (select_struct[k] == NULL)
    {
      tree *chain = NULL;
      select_struct[k] = make_node (RECORD_TYPE);

      if (code->expr1->ts.kind == 1)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
      else if (code->expr1->ts.kind == 4)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
      else
	gcc_unreachable ();

#undef ADD_FIELD
#define ADD_FIELD(NAME, TYPE)						    \
  ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k],		    \
					  get_identifier (stringize(NAME)), \
					  TYPE,				    \
					  &chain)

      ADD_FIELD (string1, pchartype);
      ADD_FIELD (string1_len, gfc_charlen_type_node);

      ADD_FIELD (string2, pchartype);
      ADD_FIELD (string2_len, gfc_charlen_type_node);

      ADD_FIELD (target, integer_type_node);
#undef ADD_FIELD

      gfc_finish_type (select_struct[k]);
    }

  /* Number the cases in sorted order; d->n is both the switch value and
     the runtime routine's return value for each case.  */
  n = 0;
  for (d = cp; d; d = d->right)
    d->n = n++;

  for (c = code->block; c; c = c->block)
    {
      for (d = c->ext.block.case_list; d; d = d->next)
	{
	  label = gfc_build_label_decl (NULL_TREE);
	  tmp = build_case_label ((d->low == NULL && d->high == NULL)
				  ? NULL
				  : build_int_cst (integer_type_node, d->n),
				  NULL, label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Generate the structure describing the branches */
  for (d = cp; d; d = d->right)
    {
      vec<constructor_elt, va_gc> *node = NULL;

      gfc_init_se (&se, NULL);

      if (d->low == NULL)
	{
	  /* Unbounded below: NULL pointer / zero length sentinel.  */
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
	}
      else
	{
	  gfc_conv_expr_reference (&se, d->low);

	  CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
	}

      if (d->high == NULL)
	{
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
	}
      else
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_reference (&se, d->high);

	  CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
	}

      CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
			      build_int_cst (integer_type_node, d->n));

      tmp = build_constructor (select_struct[k], node);
      CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
    }

  type = build_array_type (select_struct[k],
			   build_index_type (size_int (n-1)));

  init = build_constructor (type, inits);
  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;
  /* Create a static variable to hold the jump table.  */
  tmp = gfc_create_var (type, "jumptable");
  TREE_CONSTANT (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;
  init = tmp;

  /* Build the library call */
  init = gfc_build_addr_expr (pvoid_type_node, init);

  if (code->expr1->ts.kind == 1)
    fndecl = gfor_fndecl_select_string;
  else if (code->expr1->ts.kind == 4)
    fndecl = gfor_fndecl_select_string_char4;
  else
    gcc_unreachable ();

  tmp = build_call_expr_loc (input_location,
			     fndecl, 4, init,
			     build_int_cst (gfc_charlen_type_node, n),
			     expr1se.expr, expr1se.string_length);
  case_num = gfc_create_var (integer_type_node, "case_num");
  gfc_add_modify (&block, case_num, tmp);

  gfc_add_block_to_block (&block, &expr1se.post);

  tmp = gfc_finish_block (&body);
  tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
			 case_num, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
2443 /* Translate the three variants of the SELECT CASE construct.
2445 SELECT CASEs with INTEGER case expressions can be translated to an
2446 equivalent GENERIC switch statement, and for LOGICAL case
2447 expressions we build one or two if-else compares.
2449 SELECT CASEs with CHARACTER case expressions are a whole different
2450 story, because they don't exist in GENERIC. So we sort them and
2451 do a binary search at runtime.
2453 Fortran has no BREAK statement, and it does not allow jumps from
2454 one case block to another. That makes things a lot easier for
2458 gfc_trans_select (gfc_code
* code
)
2464 gcc_assert (code
&& code
->expr1
);
2465 gfc_init_block (&block
);
2467 /* Build the exit label and hang it in. */
2468 exit_label
= gfc_build_label_decl (NULL_TREE
);
2469 code
->exit_label
= exit_label
;
2471 /* Empty SELECT constructs are legal. */
2472 if (code
->block
== NULL
)
2473 body
= build_empty_stmt (input_location
);
2475 /* Select the correct translation function. */
2477 switch (code
->expr1
->ts
.type
)
2480 body
= gfc_trans_logical_select (code
);
2484 body
= gfc_trans_integer_select (code
);
2488 body
= gfc_trans_character_select (code
);
2492 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2496 /* Build everything together. */
2497 gfc_add_expr_to_block (&block
, body
);
2498 gfc_add_expr_to_block (&block
, build1_v (LABEL_EXPR
, exit_label
));
2500 return gfc_finish_block (&block
);
2504 /* Traversal function to substitute a replacement symtree if the symbol
2505 in the expression is the same as that passed. f == 2 signals that
2506 that variable itself is not to be checked - only the references.
2507 This group of functions is used when the variable expression in a
2508 FORALL assignment has internal references. For example:
2509 FORALL (i = 1:4) p(p(i)) = i
2510 The only recourse here is to store a copy of 'p' for the index
/* Symtree of the temporary currently standing in for a FORALL lvalue
   (new_symtree) and the symtree it replaced (old_symtree); used by the
   forall_replace / forall_restore traversal callbacks below.  */
2513 static gfc_symtree
*new_symtree
;
2514 static gfc_symtree
*old_symtree
;
2517 forall_replace (gfc_expr
*expr
, gfc_symbol
*sym
, int *f
)
2519 if (expr
->expr_type
!= EXPR_VARIABLE
)
2524 else if (expr
->symtree
->n
.sym
== sym
)
2525 expr
->symtree
= new_symtree
;
2531 forall_replace_symtree (gfc_expr
*e
, gfc_symbol
*sym
, int f
)
2533 gfc_traverse_expr (e
, sym
, forall_replace
, f
);
2537 forall_restore (gfc_expr
*expr
,
2538 gfc_symbol
*sym ATTRIBUTE_UNUSED
,
2539 int *f ATTRIBUTE_UNUSED
)
2541 if (expr
->expr_type
!= EXPR_VARIABLE
)
2544 if (expr
->symtree
== new_symtree
)
2545 expr
->symtree
= old_symtree
;
2551 forall_restore_symtree (gfc_expr
*e
)
2553 gfc_traverse_expr (e
, NULL
, forall_restore
, 0);
2557 forall_make_variable_temp (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2562 gfc_symbol
*new_sym
;
2563 gfc_symbol
*old_sym
;
2567 /* Build a copy of the lvalue. */
2568 old_symtree
= c
->expr1
->symtree
;
2569 old_sym
= old_symtree
->n
.sym
;
2570 e
= gfc_lval_expr_from_sym (old_sym
);
2571 if (old_sym
->attr
.dimension
)
2573 gfc_init_se (&tse
, NULL
);
2574 gfc_conv_subref_array_arg (&tse
, e
, 0, INTENT_IN
, false);
2575 gfc_add_block_to_block (pre
, &tse
.pre
);
2576 gfc_add_block_to_block (post
, &tse
.post
);
2577 tse
.expr
= build_fold_indirect_ref_loc (input_location
, tse
.expr
);
2579 if (e
->ts
.type
!= BT_CHARACTER
)
2581 /* Use the variable offset for the temporary. */
2582 tmp
= gfc_conv_array_offset (old_sym
->backend_decl
);
2583 gfc_conv_descriptor_offset_set (pre
, tse
.expr
, tmp
);
2588 gfc_init_se (&tse
, NULL
);
2589 gfc_init_se (&rse
, NULL
);
2590 gfc_conv_expr (&rse
, e
);
2591 if (e
->ts
.type
== BT_CHARACTER
)
2593 tse
.string_length
= rse
.string_length
;
2594 tmp
= gfc_get_character_type_len (gfc_default_character_kind
,
2596 tse
.expr
= gfc_conv_string_tmp (&tse
, build_pointer_type (tmp
),
2598 gfc_add_block_to_block (pre
, &tse
.pre
);
2599 gfc_add_block_to_block (post
, &tse
.post
);
2603 tmp
= gfc_typenode_for_spec (&e
->ts
);
2604 tse
.expr
= gfc_create_var (tmp
, "temp");
2607 tmp
= gfc_trans_scalar_assign (&tse
, &rse
, e
->ts
, true,
2608 e
->expr_type
== EXPR_VARIABLE
, true);
2609 gfc_add_expr_to_block (pre
, tmp
);
2613 /* Create a new symbol to represent the lvalue. */
2614 new_sym
= gfc_new_symbol (old_sym
->name
, NULL
);
2615 new_sym
->ts
= old_sym
->ts
;
2616 new_sym
->attr
.referenced
= 1;
2617 new_sym
->attr
.temporary
= 1;
2618 new_sym
->attr
.dimension
= old_sym
->attr
.dimension
;
2619 new_sym
->attr
.flavor
= old_sym
->attr
.flavor
;
2621 /* Use the temporary as the backend_decl. */
2622 new_sym
->backend_decl
= tse
.expr
;
2624 /* Create a fake symtree for it. */
2626 new_symtree
= gfc_new_symtree (&root
, old_sym
->name
);
2627 new_symtree
->n
.sym
= new_sym
;
2628 gcc_assert (new_symtree
== root
);
2630 /* Go through the expression reference replacing the old_symtree
2632 forall_replace_symtree (c
->expr1
, old_sym
, 2);
2634 /* Now we have made this temporary, we might as well use it for
2635 the right hand side. */
2636 forall_replace_symtree (c
->expr2
, old_sym
, 1);
2640 /* Handles dependencies in forall assignments. */
2642 check_forall_dependencies (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2649 lsym
= c
->expr1
->symtree
->n
.sym
;
2650 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
2652 /* Now check for dependencies within the 'variable'
2653 expression itself. These are treated by making a complete
2654 copy of variable and changing all the references to it
2655 point to the copy instead. Note that the shallow copy of
2656 the variable will not suffice for derived types with
2657 pointer components. We therefore leave these to their
2659 if (lsym
->ts
.type
== BT_DERIVED
2660 && lsym
->ts
.u
.derived
->attr
.pointer_comp
)
2664 if (find_forall_index (c
->expr1
, lsym
, 2))
2666 forall_make_variable_temp (c
, pre
, post
);
2670 /* Substrings with dependencies are treated in the same
2672 if (c
->expr1
->ts
.type
== BT_CHARACTER
2674 && c
->expr2
->expr_type
== EXPR_VARIABLE
2675 && lsym
== c
->expr2
->symtree
->n
.sym
)
2677 for (lref
= c
->expr1
->ref
; lref
; lref
= lref
->next
)
2678 if (lref
->type
== REF_SUBSTRING
)
2680 for (rref
= c
->expr2
->ref
; rref
; rref
= rref
->next
)
2681 if (rref
->type
== REF_SUBSTRING
)
2685 && gfc_dep_compare_expr (rref
->u
.ss
.start
, lref
->u
.ss
.start
) < 0)
2687 forall_make_variable_temp (c
, pre
, post
);
2696 cleanup_forall_symtrees (gfc_code
*c
)
2698 forall_restore_symtree (c
->expr1
);
2699 forall_restore_symtree (c
->expr2
);
2700 free (new_symtree
->n
.sym
);
2705 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2706 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2707 indicates whether we should generate code to test the FORALLs mask
2708 array. OUTER is the loop header to be used for initializing mask
2711 The generated loop format is:
2712 count = (end - start + step) / step
2725 gfc_trans_forall_loop (forall_info
*forall_tmp
, tree body
,
2726 int mask_flag
, stmtblock_t
*outer
)
2734 tree var
, start
, end
, step
;
2737 /* Initialize the mask index outside the FORALL nest. */
2738 if (mask_flag
&& forall_tmp
->mask
)
2739 gfc_add_modify (outer
, forall_tmp
->maskindex
, gfc_index_zero_node
);
2741 iter
= forall_tmp
->this_loop
;
2742 nvar
= forall_tmp
->nvar
;
2743 for (n
= 0; n
< nvar
; n
++)
2746 start
= iter
->start
;
2750 exit_label
= gfc_build_label_decl (NULL_TREE
);
2751 TREE_USED (exit_label
) = 1;
2753 /* The loop counter. */
2754 count
= gfc_create_var (TREE_TYPE (var
), "count");
2756 /* The body of the loop. */
2757 gfc_init_block (&block
);
2759 /* The exit condition. */
2760 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
2761 count
, build_int_cst (TREE_TYPE (count
), 0));
2762 tmp
= build1_v (GOTO_EXPR
, exit_label
);
2763 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2764 cond
, tmp
, build_empty_stmt (input_location
));
2765 gfc_add_expr_to_block (&block
, tmp
);
2767 /* The main loop body. */
2768 gfc_add_expr_to_block (&block
, body
);
2770 /* Increment the loop variable. */
2771 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), var
,
2773 gfc_add_modify (&block
, var
, tmp
);
2775 /* Advance to the next mask element. Only do this for the
2777 if (n
== 0 && mask_flag
&& forall_tmp
->mask
)
2779 tree maskindex
= forall_tmp
->maskindex
;
2780 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2781 maskindex
, gfc_index_one_node
);
2782 gfc_add_modify (&block
, maskindex
, tmp
);
2785 /* Decrement the loop counter. */
2786 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), count
,
2787 build_int_cst (TREE_TYPE (var
), 1));
2788 gfc_add_modify (&block
, count
, tmp
);
2790 body
= gfc_finish_block (&block
);
2792 /* Loop var initialization. */
2793 gfc_init_block (&block
);
2794 gfc_add_modify (&block
, var
, start
);
2797 /* Initialize the loop counter. */
2798 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), step
,
2800 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), end
,
2802 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, TREE_TYPE (var
),
2804 gfc_add_modify (&block
, count
, tmp
);
2806 /* The loop expression. */
2807 tmp
= build1_v (LOOP_EXPR
, body
);
2808 gfc_add_expr_to_block (&block
, tmp
);
2810 /* The exit label. */
2811 tmp
= build1_v (LABEL_EXPR
, exit_label
);
2812 gfc_add_expr_to_block (&block
, tmp
);
2814 body
= gfc_finish_block (&block
);
2821 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2822 is nonzero, the body is controlled by all masks in the forall nest.
2823 Otherwise, the innermost loop is not controlled by it's mask. This
2824 is used for initializing that mask. */
2827 gfc_trans_nested_forall_loop (forall_info
* nested_forall_info
, tree body
,
2832 forall_info
*forall_tmp
;
2833 tree mask
, maskindex
;
2835 gfc_start_block (&header
);
2837 forall_tmp
= nested_forall_info
;
2838 while (forall_tmp
!= NULL
)
2840 /* Generate body with masks' control. */
2843 mask
= forall_tmp
->mask
;
2844 maskindex
= forall_tmp
->maskindex
;
2846 /* If a mask was specified make the assignment conditional. */
2849 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
2850 body
= build3_v (COND_EXPR
, tmp
, body
,
2851 build_empty_stmt (input_location
));
2854 body
= gfc_trans_forall_loop (forall_tmp
, body
, mask_flag
, &header
);
2855 forall_tmp
= forall_tmp
->prev_nest
;
2859 gfc_add_expr_to_block (&header
, body
);
2860 return gfc_finish_block (&header
);
2864 /* Allocate data for holding a temporary array. Returns either a local
2865 temporary array or a pointer variable. */
2868 gfc_do_allocate (tree bytesize
, tree size
, tree
* pdata
, stmtblock_t
* pblock
,
2875 if (INTEGER_CST_P (size
))
2876 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
2877 size
, gfc_index_one_node
);
2881 type
= build_range_type (gfc_array_index_type
, gfc_index_zero_node
, tmp
);
2882 type
= build_array_type (elem_type
, type
);
2883 if (gfc_can_put_var_on_stack (bytesize
))
2885 gcc_assert (INTEGER_CST_P (size
));
2886 tmpvar
= gfc_create_var (type
, "temp");
2891 tmpvar
= gfc_create_var (build_pointer_type (type
), "temp");
2892 *pdata
= convert (pvoid_type_node
, tmpvar
);
2894 tmp
= gfc_call_malloc (pblock
, TREE_TYPE (tmpvar
), bytesize
);
2895 gfc_add_modify (pblock
, tmpvar
, tmp
);
2901 /* Generate codes to copy the temporary to the actual lhs. */
2904 generate_loop_for_temp_to_lhs (gfc_expr
*expr
, tree tmp1
, tree count3
,
2905 tree count1
, tree wheremask
, bool invert
)
2909 stmtblock_t block
, body
;
2915 lss
= gfc_walk_expr (expr
);
2917 if (lss
== gfc_ss_terminator
)
2919 gfc_start_block (&block
);
2921 gfc_init_se (&lse
, NULL
);
2923 /* Translate the expression. */
2924 gfc_conv_expr (&lse
, expr
);
2926 /* Form the expression for the temporary. */
2927 tmp
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2929 /* Use the scalar assignment as is. */
2930 gfc_add_block_to_block (&block
, &lse
.pre
);
2931 gfc_add_modify (&block
, lse
.expr
, tmp
);
2932 gfc_add_block_to_block (&block
, &lse
.post
);
2934 /* Increment the count1. */
2935 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
2936 count1
, gfc_index_one_node
);
2937 gfc_add_modify (&block
, count1
, tmp
);
2939 tmp
= gfc_finish_block (&block
);
2943 gfc_start_block (&block
);
2945 gfc_init_loopinfo (&loop1
);
2946 gfc_init_se (&rse
, NULL
);
2947 gfc_init_se (&lse
, NULL
);
2949 /* Associate the lss with the loop. */
2950 gfc_add_ss_to_loop (&loop1
, lss
);
2952 /* Calculate the bounds of the scalarization. */
2953 gfc_conv_ss_startstride (&loop1
);
2954 /* Setup the scalarizing loops. */
2955 gfc_conv_loop_setup (&loop1
, &expr
->where
);
2957 gfc_mark_ss_chain_used (lss
, 1);
2959 /* Start the scalarized loop body. */
2960 gfc_start_scalarized_body (&loop1
, &body
);
2962 /* Setup the gfc_se structures. */
2963 gfc_copy_loopinfo_to_se (&lse
, &loop1
);
2966 /* Form the expression of the temporary. */
2967 if (lss
!= gfc_ss_terminator
)
2968 rse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2969 /* Translate expr. */
2970 gfc_conv_expr (&lse
, expr
);
2972 /* Use the scalar assignment. */
2973 rse
.string_length
= lse
.string_length
;
2974 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr
->ts
, false, true, true);
2976 /* Form the mask expression according to the mask tree list. */
2979 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
2981 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
2982 TREE_TYPE (wheremaskexpr
),
2984 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2986 build_empty_stmt (input_location
));
2989 gfc_add_expr_to_block (&body
, tmp
);
2991 /* Increment count1. */
2992 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2993 count1
, gfc_index_one_node
);
2994 gfc_add_modify (&body
, count1
, tmp
);
2996 /* Increment count3. */
2999 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3000 gfc_array_index_type
, count3
,
3001 gfc_index_one_node
);
3002 gfc_add_modify (&body
, count3
, tmp
);
3005 /* Generate the copying loops. */
3006 gfc_trans_scalarizing_loops (&loop1
, &body
);
3007 gfc_add_block_to_block (&block
, &loop1
.pre
);
3008 gfc_add_block_to_block (&block
, &loop1
.post
);
3009 gfc_cleanup_loop (&loop1
);
3011 tmp
= gfc_finish_block (&block
);
3017 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
3018 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
3019 and should not be freed. WHEREMASK is the conditional execution mask
3020 whose sense may be inverted by INVERT. */
3023 generate_loop_for_rhs_to_temp (gfc_expr
*expr2
, tree tmp1
, tree count3
,
3024 tree count1
, gfc_ss
*lss
, gfc_ss
*rss
,
3025 tree wheremask
, bool invert
)
3027 stmtblock_t block
, body1
;
3034 gfc_start_block (&block
);
3036 gfc_init_se (&rse
, NULL
);
3037 gfc_init_se (&lse
, NULL
);
3039 if (lss
== gfc_ss_terminator
)
3041 gfc_init_block (&body1
);
3042 gfc_conv_expr (&rse
, expr2
);
3043 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3047 /* Initialize the loop. */
3048 gfc_init_loopinfo (&loop
);
3050 /* We may need LSS to determine the shape of the expression. */
3051 gfc_add_ss_to_loop (&loop
, lss
);
3052 gfc_add_ss_to_loop (&loop
, rss
);
3054 gfc_conv_ss_startstride (&loop
);
3055 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3057 gfc_mark_ss_chain_used (rss
, 1);
3058 /* Start the loop body. */
3059 gfc_start_scalarized_body (&loop
, &body1
);
3061 /* Translate the expression. */
3062 gfc_copy_loopinfo_to_se (&rse
, &loop
);
3064 gfc_conv_expr (&rse
, expr2
);
3066 /* Form the expression of the temporary. */
3067 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3070 /* Use the scalar assignment. */
3071 lse
.string_length
= rse
.string_length
;
3072 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr2
->ts
, true,
3073 expr2
->expr_type
== EXPR_VARIABLE
, true);
3075 /* Form the mask expression according to the mask tree list. */
3078 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
3080 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3081 TREE_TYPE (wheremaskexpr
),
3083 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3085 build_empty_stmt (input_location
));
3088 gfc_add_expr_to_block (&body1
, tmp
);
3090 if (lss
== gfc_ss_terminator
)
3092 gfc_add_block_to_block (&block
, &body1
);
3094 /* Increment count1. */
3095 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
3096 count1
, gfc_index_one_node
);
3097 gfc_add_modify (&block
, count1
, tmp
);
3101 /* Increment count1. */
3102 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3103 count1
, gfc_index_one_node
);
3104 gfc_add_modify (&body1
, count1
, tmp
);
3106 /* Increment count3. */
3109 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3110 gfc_array_index_type
,
3111 count3
, gfc_index_one_node
);
3112 gfc_add_modify (&body1
, count3
, tmp
);
3115 /* Generate the copying loops. */
3116 gfc_trans_scalarizing_loops (&loop
, &body1
);
3118 gfc_add_block_to_block (&block
, &loop
.pre
);
3119 gfc_add_block_to_block (&block
, &loop
.post
);
3121 gfc_cleanup_loop (&loop
);
3122 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3123 as tree nodes in SS may not be valid in different scope. */
3126 tmp
= gfc_finish_block (&block
);
3131 /* Calculate the size of temporary needed in the assignment inside forall.
3132 LSS and RSS are filled in this function. */
3135 compute_inner_temp_size (gfc_expr
*expr1
, gfc_expr
*expr2
,
3136 stmtblock_t
* pblock
,
3137 gfc_ss
**lss
, gfc_ss
**rss
)
3145 *lss
= gfc_walk_expr (expr1
);
3148 size
= gfc_index_one_node
;
3149 if (*lss
!= gfc_ss_terminator
)
3151 gfc_init_loopinfo (&loop
);
3153 /* Walk the RHS of the expression. */
3154 *rss
= gfc_walk_expr (expr2
);
3155 if (*rss
== gfc_ss_terminator
)
3156 /* The rhs is scalar. Add a ss for the expression. */
3157 *rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
3159 /* Associate the SS with the loop. */
3160 gfc_add_ss_to_loop (&loop
, *lss
);
3161 /* We don't actually need to add the rhs at this point, but it might
3162 make guessing the loop bounds a bit easier. */
3163 gfc_add_ss_to_loop (&loop
, *rss
);
3165 /* We only want the shape of the expression, not rest of the junk
3166 generated by the scalarizer. */
3167 loop
.array_parameter
= 1;
3169 /* Calculate the bounds of the scalarization. */
3170 save_flag
= gfc_option
.rtcheck
;
3171 gfc_option
.rtcheck
&= ~GFC_RTCHECK_BOUNDS
;
3172 gfc_conv_ss_startstride (&loop
);
3173 gfc_option
.rtcheck
= save_flag
;
3174 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3176 /* Figure out how many elements we need. */
3177 for (i
= 0; i
< loop
.dimen
; i
++)
3179 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
3180 gfc_array_index_type
,
3181 gfc_index_one_node
, loop
.from
[i
]);
3182 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3183 gfc_array_index_type
, tmp
, loop
.to
[i
]);
3184 size
= fold_build2_loc (input_location
, MULT_EXPR
,
3185 gfc_array_index_type
, size
, tmp
);
3187 gfc_add_block_to_block (pblock
, &loop
.pre
);
3188 size
= gfc_evaluate_now (size
, pblock
);
3189 gfc_add_block_to_block (pblock
, &loop
.post
);
3191 /* TODO: write a function that cleans up a loopinfo without freeing
3192 the SS chains. Currently a NOP. */
3199 /* Calculate the overall iterator number of the nested forall construct.
3200 This routine actually calculates the number of times the body of the
3201 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3202 that by the expression INNER_SIZE. The BLOCK argument specifies the
3203 block in which to calculate the result, and the optional INNER_SIZE_BODY
3204 argument contains any statements that need to executed (inside the loop)
3205 to initialize or calculate INNER_SIZE. */
3208 compute_overall_iter_number (forall_info
*nested_forall_info
, tree inner_size
,
3209 stmtblock_t
*inner_size_body
, stmtblock_t
*block
)
3211 forall_info
*forall_tmp
= nested_forall_info
;
3215 /* We can eliminate the innermost unconditional loops with constant
3217 if (INTEGER_CST_P (inner_size
))
3220 && !forall_tmp
->mask
3221 && INTEGER_CST_P (forall_tmp
->size
))
3223 inner_size
= fold_build2_loc (input_location
, MULT_EXPR
,
3224 gfc_array_index_type
,
3225 inner_size
, forall_tmp
->size
);
3226 forall_tmp
= forall_tmp
->prev_nest
;
3229 /* If there are no loops left, we have our constant result. */
3234 /* Otherwise, create a temporary variable to compute the result. */
3235 number
= gfc_create_var (gfc_array_index_type
, "num");
3236 gfc_add_modify (block
, number
, gfc_index_zero_node
);
3238 gfc_start_block (&body
);
3239 if (inner_size_body
)
3240 gfc_add_block_to_block (&body
, inner_size_body
);
3242 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3243 gfc_array_index_type
, number
, inner_size
);
3246 gfc_add_modify (&body
, number
, tmp
);
3247 tmp
= gfc_finish_block (&body
);
3249 /* Generate loops. */
3250 if (forall_tmp
!= NULL
)
3251 tmp
= gfc_trans_nested_forall_loop (forall_tmp
, tmp
, 1);
3253 gfc_add_expr_to_block (block
, tmp
);
3259 /* Allocate temporary for forall construct. SIZE is the size of temporary
3260 needed. PTEMP1 is returned for space free. */
3263 allocate_temp_for_forall_nest_1 (tree type
, tree size
, stmtblock_t
* block
,
3270 unit
= fold_convert (gfc_array_index_type
, TYPE_SIZE_UNIT (type
));
3271 if (!integer_onep (unit
))
3272 bytesize
= fold_build2_loc (input_location
, MULT_EXPR
,
3273 gfc_array_index_type
, size
, unit
);
3278 tmp
= gfc_do_allocate (bytesize
, size
, ptemp1
, block
, type
);
3281 tmp
= build_fold_indirect_ref_loc (input_location
, tmp
);
3286 /* Allocate temporary for forall construct according to the information in
3287 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3288 assignment inside forall. PTEMP1 is returned for space free. */
3291 allocate_temp_for_forall_nest (forall_info
* nested_forall_info
, tree type
,
3292 tree inner_size
, stmtblock_t
* inner_size_body
,
3293 stmtblock_t
* block
, tree
* ptemp1
)
3297 /* Calculate the total size of temporary needed in forall construct. */
3298 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
3299 inner_size_body
, block
);
3301 return allocate_temp_for_forall_nest_1 (type
, size
, block
, ptemp1
);
3305 /* Handle assignments inside forall which need temporary.
3307 forall (i=start:end:stride; maskexpr)
3310 (where e,f<i> are arbitrary expressions possibly involving i
3311 and there is a dependency between e<i> and f<i>)
3313 masktmp(:) = maskexpr(:)
3318 for (i = start; i <= end; i += stride)
3322 for (i = start; i <= end; i += stride)
3324 if (masktmp[maskindex++])
3325 tmp[count1++] = f<i>
3329 for (i = start; i <= end; i += stride)
3331 if (masktmp[maskindex++])
3332 e<i> = tmp[count1++]
3337 gfc_trans_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3338 tree wheremask
, bool invert
,
3339 forall_info
* nested_forall_info
,
3340 stmtblock_t
* block
)
3348 stmtblock_t inner_size_body
;
3350 /* Create vars. count1 is the current iterator number of the nested
3352 count1
= gfc_create_var (gfc_array_index_type
, "count1");
3354 /* Count is the wheremask index. */
3357 count
= gfc_create_var (gfc_array_index_type
, "count");
3358 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3363 /* Initialize count1. */
3364 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3366 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3367 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3368 gfc_init_block (&inner_size_body
);
3369 inner_size
= compute_inner_temp_size (expr1
, expr2
, &inner_size_body
,
3372 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3373 if (expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.u
.cl
->length
)
3375 if (!expr1
->ts
.u
.cl
->backend_decl
)
3378 gfc_init_se (&tse
, NULL
);
3379 gfc_conv_expr (&tse
, expr1
->ts
.u
.cl
->length
);
3380 expr1
->ts
.u
.cl
->backend_decl
= tse
.expr
;
3382 type
= gfc_get_character_type_len (gfc_default_character_kind
,
3383 expr1
->ts
.u
.cl
->backend_decl
);
3386 type
= gfc_typenode_for_spec (&expr1
->ts
);
3388 /* Allocate temporary for nested forall construct according to the
3389 information in nested_forall_info and inner_size. */
3390 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
, inner_size
,
3391 &inner_size_body
, block
, &ptemp1
);
3393 /* Generate codes to copy rhs to the temporary . */
3394 tmp
= generate_loop_for_rhs_to_temp (expr2
, tmp1
, count
, count1
, lss
, rss
,
3397 /* Generate body and loops according to the information in
3398 nested_forall_info. */
3399 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3400 gfc_add_expr_to_block (block
, tmp
);
3403 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3407 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3409 /* Generate codes to copy the temporary to lhs. */
3410 tmp
= generate_loop_for_temp_to_lhs (expr1
, tmp1
, count
, count1
,
3413 /* Generate body and loops according to the information in
3414 nested_forall_info. */
3415 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3416 gfc_add_expr_to_block (block
, tmp
);
3420 /* Free the temporary. */
3421 tmp
= gfc_call_free (ptemp1
);
3422 gfc_add_expr_to_block (block
, tmp
);
3427 /* Translate pointer assignment inside FORALL which need temporary. */
3430 gfc_trans_pointer_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3431 forall_info
* nested_forall_info
,
3432 stmtblock_t
* block
)
3439 gfc_array_info
*info
;
3446 tree tmp
, tmp1
, ptemp1
;
3448 count
= gfc_create_var (gfc_array_index_type
, "count");
3449 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3451 inner_size
= gfc_index_one_node
;
3452 lss
= gfc_walk_expr (expr1
);
3453 rss
= gfc_walk_expr (expr2
);
3454 if (lss
== gfc_ss_terminator
)
3456 type
= gfc_typenode_for_spec (&expr1
->ts
);
3457 type
= build_pointer_type (type
);
3459 /* Allocate temporary for nested forall construct according to the
3460 information in nested_forall_info and inner_size. */
3461 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
,
3462 inner_size
, NULL
, block
, &ptemp1
);
3463 gfc_start_block (&body
);
3464 gfc_init_se (&lse
, NULL
);
3465 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3466 gfc_init_se (&rse
, NULL
);
3467 rse
.want_pointer
= 1;
3468 gfc_conv_expr (&rse
, expr2
);
3469 gfc_add_block_to_block (&body
, &rse
.pre
);
3470 gfc_add_modify (&body
, lse
.expr
,
3471 fold_convert (TREE_TYPE (lse
.expr
), rse
.expr
));
3472 gfc_add_block_to_block (&body
, &rse
.post
);
3474 /* Increment count. */
3475 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3476 count
, gfc_index_one_node
);
3477 gfc_add_modify (&body
, count
, tmp
);
3479 tmp
= gfc_finish_block (&body
);
3481 /* Generate body and loops according to the information in
3482 nested_forall_info. */
3483 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3484 gfc_add_expr_to_block (block
, tmp
);
3487 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3489 gfc_start_block (&body
);
3490 gfc_init_se (&lse
, NULL
);
3491 gfc_init_se (&rse
, NULL
);
3492 rse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3493 lse
.want_pointer
= 1;
3494 gfc_conv_expr (&lse
, expr1
);
3495 gfc_add_block_to_block (&body
, &lse
.pre
);
3496 gfc_add_modify (&body
, lse
.expr
, rse
.expr
);
3497 gfc_add_block_to_block (&body
, &lse
.post
);
3498 /* Increment count. */
3499 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3500 count
, gfc_index_one_node
);
3501 gfc_add_modify (&body
, count
, tmp
);
3502 tmp
= gfc_finish_block (&body
);
3504 /* Generate body and loops according to the information in
3505 nested_forall_info. */
3506 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3507 gfc_add_expr_to_block (block
, tmp
);
3511 gfc_init_loopinfo (&loop
);
3513 /* Associate the SS with the loop. */
3514 gfc_add_ss_to_loop (&loop
, rss
);
3516 /* Setup the scalarizing loops and bounds. */
3517 gfc_conv_ss_startstride (&loop
);
3519 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3521 info
= &rss
->info
->data
.array
;
3522 desc
= info
->descriptor
;
3524 /* Make a new descriptor. */
3525 parmtype
= gfc_get_element_type (TREE_TYPE (desc
));
3526 parmtype
= gfc_get_array_type_bounds (parmtype
, loop
.dimen
, 0,
3527 loop
.from
, loop
.to
, 1,
3528 GFC_ARRAY_UNKNOWN
, true);
3530 /* Allocate temporary for nested forall construct. */
3531 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, parmtype
,
3532 inner_size
, NULL
, block
, &ptemp1
);
3533 gfc_start_block (&body
);
3534 gfc_init_se (&lse
, NULL
);
3535 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3536 lse
.direct_byref
= 1;
3537 gfc_conv_expr_descriptor (&lse
, expr2
);
3539 gfc_add_block_to_block (&body
, &lse
.pre
);
3540 gfc_add_block_to_block (&body
, &lse
.post
);
3542 /* Increment count. */
3543 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3544 count
, gfc_index_one_node
);
3545 gfc_add_modify (&body
, count
, tmp
);
3547 tmp
= gfc_finish_block (&body
);
3549 /* Generate body and loops according to the information in
3550 nested_forall_info. */
3551 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3552 gfc_add_expr_to_block (block
, tmp
);
3555 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3557 parm
= gfc_build_array_ref (tmp1
, count
, NULL
);
3558 gfc_init_se (&lse
, NULL
);
3559 gfc_conv_expr_descriptor (&lse
, expr1
);
3560 gfc_add_modify (&lse
.pre
, lse
.expr
, parm
);
3561 gfc_start_block (&body
);
3562 gfc_add_block_to_block (&body
, &lse
.pre
);
3563 gfc_add_block_to_block (&body
, &lse
.post
);
3565 /* Increment count. */
3566 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3567 count
, gfc_index_one_node
);
3568 gfc_add_modify (&body
, count
, tmp
);
3570 tmp
= gfc_finish_block (&body
);
3572 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3573 gfc_add_expr_to_block (block
, tmp
);
3575 /* Free the temporary. */
3578 tmp
= gfc_call_free (ptemp1
);
3579 gfc_add_expr_to_block (block
, tmp
);
3584 /* FORALL and WHERE statements are really nasty, especially when you nest
3585 them. All the rhs of a forall assignment must be evaluated before the
3586 actual assignments are performed. Presumably this also applies to all the
3587 assignments in an inner where statement. */
3589 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3590 linear array, relying on the fact that we process in the same order in all
3593 forall (i=start:end:stride; maskexpr)
3597 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3599 count = ((end + 1 - start) / stride)
3600 masktmp(:) = maskexpr(:)
3603 for (i = start; i <= end; i += stride)
3605 if (masktmp[maskindex++])
3609 for (i = start; i <= end; i += stride)
3611 if (masktmp[maskindex++])
3615 Note that this code only works when there are no dependencies.
3616 Forall loop with array assignments and data dependencies are a real pain,
3617 because the size of the temporary cannot always be determined before the
3618 loop is executed. This problem is compounded by the presence of nested
3623 gfc_trans_forall_1 (gfc_code
* code
, forall_info
* nested_forall_info
)
3640 tree cycle_label
= NULL_TREE
;
3644 gfc_forall_iterator
*fa
;
3647 gfc_saved_var
*saved_vars
;
3648 iter_info
*this_forall
;
3652 /* Do nothing if the mask is false. */
3654 && code
->expr1
->expr_type
== EXPR_CONSTANT
3655 && !code
->expr1
->value
.logical
)
3656 return build_empty_stmt (input_location
);
3659 /* Count the FORALL index number. */
3660 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3664 /* Allocate the space for var, start, end, step, varexpr. */
3665 var
= XCNEWVEC (tree
, nvar
);
3666 start
= XCNEWVEC (tree
, nvar
);
3667 end
= XCNEWVEC (tree
, nvar
);
3668 step
= XCNEWVEC (tree
, nvar
);
3669 varexpr
= XCNEWVEC (gfc_expr
*, nvar
);
3670 saved_vars
= XCNEWVEC (gfc_saved_var
, nvar
);
3672 /* Allocate the space for info. */
3673 info
= XCNEW (forall_info
);
3675 gfc_start_block (&pre
);
3676 gfc_init_block (&post
);
3677 gfc_init_block (&block
);
3680 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3682 gfc_symbol
*sym
= fa
->var
->symtree
->n
.sym
;
3684 /* Allocate space for this_forall. */
3685 this_forall
= XCNEW (iter_info
);
3687 /* Create a temporary variable for the FORALL index. */
3688 tmp
= gfc_typenode_for_spec (&sym
->ts
);
3689 var
[n
] = gfc_create_var (tmp
, sym
->name
);
3690 gfc_shadow_sym (sym
, var
[n
], &saved_vars
[n
]);
3692 /* Record it in this_forall. */
3693 this_forall
->var
= var
[n
];
3695 /* Replace the index symbol's backend_decl with the temporary decl. */
3696 sym
->backend_decl
= var
[n
];
3698 /* Work out the start, end and stride for the loop. */
3699 gfc_init_se (&se
, NULL
);
3700 gfc_conv_expr_val (&se
, fa
->start
);
3701 /* Record it in this_forall. */
3702 this_forall
->start
= se
.expr
;
3703 gfc_add_block_to_block (&block
, &se
.pre
);
3706 gfc_init_se (&se
, NULL
);
3707 gfc_conv_expr_val (&se
, fa
->end
);
3708 /* Record it in this_forall. */
3709 this_forall
->end
= se
.expr
;
3710 gfc_make_safe_expr (&se
);
3711 gfc_add_block_to_block (&block
, &se
.pre
);
3714 gfc_init_se (&se
, NULL
);
3715 gfc_conv_expr_val (&se
, fa
->stride
);
3716 /* Record it in this_forall. */
3717 this_forall
->step
= se
.expr
;
3718 gfc_make_safe_expr (&se
);
3719 gfc_add_block_to_block (&block
, &se
.pre
);
3722 /* Set the NEXT field of this_forall to NULL. */
3723 this_forall
->next
= NULL
;
3724 /* Link this_forall to the info construct. */
3725 if (info
->this_loop
)
3727 iter_info
*iter_tmp
= info
->this_loop
;
3728 while (iter_tmp
->next
!= NULL
)
3729 iter_tmp
= iter_tmp
->next
;
3730 iter_tmp
->next
= this_forall
;
3733 info
->this_loop
= this_forall
;
3739 /* Calculate the size needed for the current forall level. */
3740 size
= gfc_index_one_node
;
3741 for (n
= 0; n
< nvar
; n
++)
3743 /* size = (end + step - start) / step. */
3744 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (start
[n
]),
3746 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (end
[n
]),
3748 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
, TREE_TYPE (tmp
),
3750 tmp
= convert (gfc_array_index_type
, tmp
);
3752 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
3756 /* Record the nvar and size of current forall level. */
3762 /* If the mask is .true., consider the FORALL unconditional. */
3763 if (code
->expr1
->expr_type
== EXPR_CONSTANT
3764 && code
->expr1
->value
.logical
)
3772 /* First we need to allocate the mask. */
3775 /* As the mask array can be very big, prefer compact boolean types. */
3776 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3777 mask
= allocate_temp_for_forall_nest (nested_forall_info
, mask_type
,
3778 size
, NULL
, &block
, &pmask
);
3779 maskindex
= gfc_create_var_np (gfc_array_index_type
, "mi");
3781 /* Record them in the info structure. */
3782 info
->maskindex
= maskindex
;
3787 /* No mask was specified. */
3788 maskindex
= NULL_TREE
;
3789 mask
= pmask
= NULL_TREE
;
3792 /* Link the current forall level to nested_forall_info. */
3793 info
->prev_nest
= nested_forall_info
;
3794 nested_forall_info
= info
;
3796 /* Copy the mask into a temporary variable if required.
3797 For now we assume a mask temporary is needed. */
3800 /* As the mask array can be very big, prefer compact boolean types. */
3801 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3803 gfc_add_modify (&block
, maskindex
, gfc_index_zero_node
);
3805 /* Start of mask assignment loop body. */
3806 gfc_start_block (&body
);
3808 /* Evaluate the mask expression. */
3809 gfc_init_se (&se
, NULL
);
3810 gfc_conv_expr_val (&se
, code
->expr1
);
3811 gfc_add_block_to_block (&body
, &se
.pre
);
3813 /* Store the mask. */
3814 se
.expr
= convert (mask_type
, se
.expr
);
3816 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
3817 gfc_add_modify (&body
, tmp
, se
.expr
);
3819 /* Advance to the next mask element. */
3820 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3821 maskindex
, gfc_index_one_node
);
3822 gfc_add_modify (&body
, maskindex
, tmp
);
3824 /* Generate the loops. */
3825 tmp
= gfc_finish_block (&body
);
3826 tmp
= gfc_trans_nested_forall_loop (info
, tmp
, 0);
3827 gfc_add_expr_to_block (&block
, tmp
);
3830 if (code
->op
== EXEC_DO_CONCURRENT
)
3832 gfc_init_block (&body
);
3833 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3834 code
->cycle_label
= cycle_label
;
3835 tmp
= gfc_trans_code (code
->block
->next
);
3836 gfc_add_expr_to_block (&body
, tmp
);
3838 if (TREE_USED (cycle_label
))
3840 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3841 gfc_add_expr_to_block (&body
, tmp
);
3844 tmp
= gfc_finish_block (&body
);
3845 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3846 gfc_add_expr_to_block (&block
, tmp
);
3850 c
= code
->block
->next
;
3852 /* TODO: loop merging in FORALL statements. */
3853 /* Now that we've got a copy of the mask, generate the assignment loops. */
3859 /* A scalar or array assignment. DO the simple check for
3860 lhs to rhs dependencies. These make a temporary for the
3861 rhs and form a second forall block to copy to variable. */
3862 need_temp
= check_forall_dependencies(c
, &pre
, &post
);
3864 /* Temporaries due to array assignment data dependencies introduce
3865 no end of problems. */
3867 gfc_trans_assign_need_temp (c
->expr1
, c
->expr2
, NULL
, false,
3868 nested_forall_info
, &block
);
3871 /* Use the normal assignment copying routines. */
3872 assign
= gfc_trans_assignment (c
->expr1
, c
->expr2
, false, true);
3874 /* Generate body and loops. */
3875 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3877 gfc_add_expr_to_block (&block
, tmp
);
3880 /* Cleanup any temporary symtrees that have been made to deal
3881 with dependencies. */
3883 cleanup_forall_symtrees (c
);
3888 /* Translate WHERE or WHERE construct nested in FORALL. */
3889 gfc_trans_where_2 (c
, NULL
, false, nested_forall_info
, &block
);
3892 /* Pointer assignment inside FORALL. */
3893 case EXEC_POINTER_ASSIGN
:
3894 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
3896 gfc_trans_pointer_assign_need_temp (c
->expr1
, c
->expr2
,
3897 nested_forall_info
, &block
);
3900 /* Use the normal assignment copying routines. */
3901 assign
= gfc_trans_pointer_assignment (c
->expr1
, c
->expr2
);
3903 /* Generate body and loops. */
3904 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3906 gfc_add_expr_to_block (&block
, tmp
);
3911 tmp
= gfc_trans_forall_1 (c
, nested_forall_info
);
3912 gfc_add_expr_to_block (&block
, tmp
);
3915 /* Explicit subroutine calls are prevented by the frontend but interface
3916 assignments can legitimately produce them. */
3917 case EXEC_ASSIGN_CALL
:
3918 assign
= gfc_trans_call (c
, true, NULL_TREE
, NULL_TREE
, false);
3919 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, assign
, 1);
3920 gfc_add_expr_to_block (&block
, tmp
);
3931 /* Restore the original index variables. */
3932 for (fa
= code
->ext
.forall_iterator
, n
= 0; fa
; fa
= fa
->next
, n
++)
3933 gfc_restore_sym (fa
->var
->symtree
->n
.sym
, &saved_vars
[n
]);
3935 /* Free the space for var, start, end, step, varexpr. */
3943 for (this_forall
= info
->this_loop
; this_forall
;)
3945 iter_info
*next
= this_forall
->next
;
3950 /* Free the space for this forall_info. */
3955 /* Free the temporary for the mask. */
3956 tmp
= gfc_call_free (pmask
);
3957 gfc_add_expr_to_block (&block
, tmp
);
3960 pushdecl (maskindex
);
3962 gfc_add_block_to_block (&pre
, &block
);
3963 gfc_add_block_to_block (&pre
, &post
);
3965 return gfc_finish_block (&pre
);
3969 /* Translate the FORALL statement or construct. */
3971 tree
gfc_trans_forall (gfc_code
* code
)
3973 return gfc_trans_forall_1 (code
, NULL
);
3977 /* Translate the DO CONCURRENT construct. */
3979 tree
gfc_trans_do_concurrent (gfc_code
* code
)
3981 return gfc_trans_forall_1 (code
, NULL
);
3985 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3986 If the WHERE construct is nested in FORALL, compute the overall temporary
3987 needed by the WHERE mask expression multiplied by the iterator number of
3989 ME is the WHERE mask expression.
3990 MASK is the current execution mask upon input, whose sense may or may
3991 not be inverted as specified by the INVERT argument.
3992 CMASK is the updated execution mask on output, or NULL if not required.
3993 PMASK is the pending execution mask on output, or NULL if not required.
3994 BLOCK is the block in which to place the condition evaluation loops. */
3997 gfc_evaluate_where_mask (gfc_expr
* me
, forall_info
* nested_forall_info
,
3998 tree mask
, bool invert
, tree cmask
, tree pmask
,
3999 tree mask_type
, stmtblock_t
* block
)
4004 stmtblock_t body
, body1
;
4005 tree count
, cond
, mtmp
;
4008 gfc_init_loopinfo (&loop
);
4010 lss
= gfc_walk_expr (me
);
4011 rss
= gfc_walk_expr (me
);
4013 /* Variable to index the temporary. */
4014 count
= gfc_create_var (gfc_array_index_type
, "count");
4015 /* Initialize count. */
4016 gfc_add_modify (block
, count
, gfc_index_zero_node
);
4018 gfc_start_block (&body
);
4020 gfc_init_se (&rse
, NULL
);
4021 gfc_init_se (&lse
, NULL
);
4023 if (lss
== gfc_ss_terminator
)
4025 gfc_init_block (&body1
);
4029 /* Initialize the loop. */
4030 gfc_init_loopinfo (&loop
);
4032 /* We may need LSS to determine the shape of the expression. */
4033 gfc_add_ss_to_loop (&loop
, lss
);
4034 gfc_add_ss_to_loop (&loop
, rss
);
4036 gfc_conv_ss_startstride (&loop
);
4037 gfc_conv_loop_setup (&loop
, &me
->where
);
4039 gfc_mark_ss_chain_used (rss
, 1);
4040 /* Start the loop body. */
4041 gfc_start_scalarized_body (&loop
, &body1
);
4043 /* Translate the expression. */
4044 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4046 gfc_conv_expr (&rse
, me
);
4049 /* Variable to evaluate mask condition. */
4050 cond
= gfc_create_var (mask_type
, "cond");
4051 if (mask
&& (cmask
|| pmask
))
4052 mtmp
= gfc_create_var (mask_type
, "mask");
4053 else mtmp
= NULL_TREE
;
4055 gfc_add_block_to_block (&body1
, &lse
.pre
);
4056 gfc_add_block_to_block (&body1
, &rse
.pre
);
4058 gfc_add_modify (&body1
, cond
, fold_convert (mask_type
, rse
.expr
));
4060 if (mask
&& (cmask
|| pmask
))
4062 tmp
= gfc_build_array_ref (mask
, count
, NULL
);
4064 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, tmp
);
4065 gfc_add_modify (&body1
, mtmp
, tmp
);
4070 tmp1
= gfc_build_array_ref (cmask
, count
, NULL
);
4073 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
,
4075 gfc_add_modify (&body1
, tmp1
, tmp
);
4080 tmp1
= gfc_build_array_ref (pmask
, count
, NULL
);
4081 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, cond
);
4083 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
, mtmp
,
4085 gfc_add_modify (&body1
, tmp1
, tmp
);
4088 gfc_add_block_to_block (&body1
, &lse
.post
);
4089 gfc_add_block_to_block (&body1
, &rse
.post
);
4091 if (lss
== gfc_ss_terminator
)
4093 gfc_add_block_to_block (&body
, &body1
);
4097 /* Increment count. */
4098 tmp1
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4099 count
, gfc_index_one_node
);
4100 gfc_add_modify (&body1
, count
, tmp1
);
4102 /* Generate the copying loops. */
4103 gfc_trans_scalarizing_loops (&loop
, &body1
);
4105 gfc_add_block_to_block (&body
, &loop
.pre
);
4106 gfc_add_block_to_block (&body
, &loop
.post
);
4108 gfc_cleanup_loop (&loop
);
4109 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
4110 as tree nodes in SS may not be valid in different scope. */
4113 tmp1
= gfc_finish_block (&body
);
4114 /* If the WHERE construct is inside FORALL, fill the full temporary. */
4115 if (nested_forall_info
!= NULL
)
4116 tmp1
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp1
, 1);
4118 gfc_add_expr_to_block (block
, tmp1
);
4122 /* Translate an assignment statement in a WHERE statement or construct
4123 statement. The MASK expression is used to control which elements
4124 of EXPR1 shall be assigned. The sense of MASK is specified by
4128 gfc_trans_where_assign (gfc_expr
*expr1
, gfc_expr
*expr2
,
4129 tree mask
, bool invert
,
4130 tree count1
, tree count2
,
4136 gfc_ss
*lss_section
;
4143 tree index
, maskexpr
;
4145 /* A defined assignment. */
4146 if (cnext
&& cnext
->resolved_sym
)
4147 return gfc_trans_call (cnext
, true, mask
, count1
, invert
);
4150 /* TODO: handle this special case.
4151 Special case a single function returning an array. */
4152 if (expr2
->expr_type
== EXPR_FUNCTION
&& expr2
->rank
> 0)
4154 tmp
= gfc_trans_arrayfunc_assign (expr1
, expr2
);
4160 /* Assignment of the form lhs = rhs. */
4161 gfc_start_block (&block
);
4163 gfc_init_se (&lse
, NULL
);
4164 gfc_init_se (&rse
, NULL
);
4167 lss
= gfc_walk_expr (expr1
);
4170 /* In each where-assign-stmt, the mask-expr and the variable being
4171 defined shall be arrays of the same shape. */
4172 gcc_assert (lss
!= gfc_ss_terminator
);
4174 /* The assignment needs scalarization. */
4177 /* Find a non-scalar SS from the lhs. */
4178 while (lss_section
!= gfc_ss_terminator
4179 && lss_section
->info
->type
!= GFC_SS_SECTION
)
4180 lss_section
= lss_section
->next
;
4182 gcc_assert (lss_section
!= gfc_ss_terminator
);
4184 /* Initialize the scalarizer. */
4185 gfc_init_loopinfo (&loop
);
4188 rss
= gfc_walk_expr (expr2
);
4189 if (rss
== gfc_ss_terminator
)
4191 /* The rhs is scalar. Add a ss for the expression. */
4192 rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
4193 rss
->info
->where
= 1;
4196 /* Associate the SS with the loop. */
4197 gfc_add_ss_to_loop (&loop
, lss
);
4198 gfc_add_ss_to_loop (&loop
, rss
);
4200 /* Calculate the bounds of the scalarization. */
4201 gfc_conv_ss_startstride (&loop
);
4203 /* Resolve any data dependencies in the statement. */
4204 gfc_conv_resolve_dependencies (&loop
, lss_section
, rss
);
4206 /* Setup the scalarizing loops. */
4207 gfc_conv_loop_setup (&loop
, &expr2
->where
);
4209 /* Setup the gfc_se structures. */
4210 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4211 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4214 gfc_mark_ss_chain_used (rss
, 1);
4215 if (loop
.temp_ss
== NULL
)
4218 gfc_mark_ss_chain_used (lss
, 1);
4222 lse
.ss
= loop
.temp_ss
;
4223 gfc_mark_ss_chain_used (lss
, 3);
4224 gfc_mark_ss_chain_used (loop
.temp_ss
, 3);
4227 /* Start the scalarized loop body. */
4228 gfc_start_scalarized_body (&loop
, &body
);
4230 /* Translate the expression. */
4231 gfc_conv_expr (&rse
, expr2
);
4232 if (lss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4233 gfc_conv_tmp_array_ref (&lse
);
4235 gfc_conv_expr (&lse
, expr1
);
4237 /* Form the mask expression according to the mask. */
4239 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4241 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4242 TREE_TYPE (maskexpr
), maskexpr
);
4244 /* Use the scalar assignment as is. */
4245 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
,
4246 loop
.temp_ss
!= NULL
, false, true);
4248 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
, build_empty_stmt (input_location
));
4250 gfc_add_expr_to_block (&body
, tmp
);
4252 if (lss
== gfc_ss_terminator
)
4254 /* Increment count1. */
4255 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4256 count1
, gfc_index_one_node
);
4257 gfc_add_modify (&body
, count1
, tmp
);
4259 /* Use the scalar assignment as is. */
4260 gfc_add_block_to_block (&block
, &body
);
4264 gcc_assert (lse
.ss
== gfc_ss_terminator
4265 && rse
.ss
== gfc_ss_terminator
);
4267 if (loop
.temp_ss
!= NULL
)
4269 /* Increment count1 before finish the main body of a scalarized
4271 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4272 gfc_array_index_type
, count1
, gfc_index_one_node
);
4273 gfc_add_modify (&body
, count1
, tmp
);
4274 gfc_trans_scalarized_loop_boundary (&loop
, &body
);
4276 /* We need to copy the temporary to the actual lhs. */
4277 gfc_init_se (&lse
, NULL
);
4278 gfc_init_se (&rse
, NULL
);
4279 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4280 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4282 rse
.ss
= loop
.temp_ss
;
4285 gfc_conv_tmp_array_ref (&rse
);
4286 gfc_conv_expr (&lse
, expr1
);
4288 gcc_assert (lse
.ss
== gfc_ss_terminator
4289 && rse
.ss
== gfc_ss_terminator
);
4291 /* Form the mask expression according to the mask tree list. */
4293 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4295 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4296 TREE_TYPE (maskexpr
), maskexpr
);
4298 /* Use the scalar assignment as is. */
4299 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
, false, false,
4301 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
,
4302 build_empty_stmt (input_location
));
4303 gfc_add_expr_to_block (&body
, tmp
);
4305 /* Increment count2. */
4306 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4307 gfc_array_index_type
, count2
,
4308 gfc_index_one_node
);
4309 gfc_add_modify (&body
, count2
, tmp
);
4313 /* Increment count1. */
4314 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4315 gfc_array_index_type
, count1
,
4316 gfc_index_one_node
);
4317 gfc_add_modify (&body
, count1
, tmp
);
4320 /* Generate the copying loops. */
4321 gfc_trans_scalarizing_loops (&loop
, &body
);
4323 /* Wrap the whole thing up. */
4324 gfc_add_block_to_block (&block
, &loop
.pre
);
4325 gfc_add_block_to_block (&block
, &loop
.post
);
4326 gfc_cleanup_loop (&loop
);
4329 return gfc_finish_block (&block
);
4333 /* Translate the WHERE construct or statement.
4334 This function can be called iteratively to translate the nested WHERE
4335 construct or statement.
4336 MASK is the control mask. */
4339 gfc_trans_where_2 (gfc_code
* code
, tree mask
, bool invert
,
4340 forall_info
* nested_forall_info
, stmtblock_t
* block
)
4342 stmtblock_t inner_size_body
;
4343 tree inner_size
, size
;
4352 tree count1
, count2
;
4356 tree pcmask
= NULL_TREE
;
4357 tree ppmask
= NULL_TREE
;
4358 tree cmask
= NULL_TREE
;
4359 tree pmask
= NULL_TREE
;
4360 gfc_actual_arglist
*arg
;
4362 /* the WHERE statement or the WHERE construct statement. */
4363 cblock
= code
->block
;
4365 /* As the mask array can be very big, prefer compact boolean types. */
4366 mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
4368 /* Determine which temporary masks are needed. */
4371 /* One clause: No ELSEWHEREs. */
4372 need_cmask
= (cblock
->next
!= 0);
4375 else if (cblock
->block
->block
)
4377 /* Three or more clauses: Conditional ELSEWHEREs. */
4381 else if (cblock
->next
)
4383 /* Two clauses, the first non-empty. */
4385 need_pmask
= (mask
!= NULL_TREE
4386 && cblock
->block
->next
!= 0);
4388 else if (!cblock
->block
->next
)
4390 /* Two clauses, both empty. */
4394 /* Two clauses, the first empty, the second non-empty. */
4397 need_cmask
= (cblock
->block
->expr1
!= 0);
4406 if (need_cmask
|| need_pmask
)
4408 /* Calculate the size of temporary needed by the mask-expr. */
4409 gfc_init_block (&inner_size_body
);
4410 inner_size
= compute_inner_temp_size (cblock
->expr1
, cblock
->expr1
,
4411 &inner_size_body
, &lss
, &rss
);
4413 gfc_free_ss_chain (lss
);
4414 gfc_free_ss_chain (rss
);
4416 /* Calculate the total size of temporary needed. */
4417 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
4418 &inner_size_body
, block
);
4420 /* Check whether the size is negative. */
4421 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
, size
,
4422 gfc_index_zero_node
);
4423 size
= fold_build3_loc (input_location
, COND_EXPR
, gfc_array_index_type
,
4424 cond
, gfc_index_zero_node
, size
);
4425 size
= gfc_evaluate_now (size
, block
);
4427 /* Allocate temporary for WHERE mask if needed. */
4429 cmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4432 /* Allocate temporary for !mask if needed. */
4434 pmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4440 /* Each time around this loop, the where clause is conditional
4441 on the value of mask and invert, which are updated at the
4442 bottom of the loop. */
4444 /* Has mask-expr. */
4447 /* Ensure that the WHERE mask will be evaluated exactly once.
4448 If there are no statements in this WHERE/ELSEWHERE clause,
4449 then we don't need to update the control mask (cmask).
4450 If this is the last clause of the WHERE construct, then
4451 we don't need to update the pending control mask (pmask). */
4453 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4455 cblock
->next
? cmask
: NULL_TREE
,
4456 cblock
->block
? pmask
: NULL_TREE
,
4459 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4461 (cblock
->next
|| cblock
->block
)
4462 ? cmask
: NULL_TREE
,
4463 NULL_TREE
, mask_type
, block
);
4467 /* It's a final elsewhere-stmt. No mask-expr is present. */
4471 /* The body of this where clause are controlled by cmask with
4472 sense specified by invert. */
4474 /* Get the assignment statement of a WHERE statement, or the first
4475 statement in where-body-construct of a WHERE construct. */
4476 cnext
= cblock
->next
;
4481 /* WHERE assignment statement. */
4482 case EXEC_ASSIGN_CALL
:
4484 arg
= cnext
->ext
.actual
;
4485 expr1
= expr2
= NULL
;
4486 for (; arg
; arg
= arg
->next
)
4498 expr1
= cnext
->expr1
;
4499 expr2
= cnext
->expr2
;
4501 if (nested_forall_info
!= NULL
)
4503 need_temp
= gfc_check_dependency (expr1
, expr2
, 0);
4504 if (need_temp
&& cnext
->op
!= EXEC_ASSIGN_CALL
)
4505 gfc_trans_assign_need_temp (expr1
, expr2
,
4507 nested_forall_info
, block
);
4510 /* Variables to control maskexpr. */
4511 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4512 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4513 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4514 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4516 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4521 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
4523 gfc_add_expr_to_block (block
, tmp
);
4528 /* Variables to control maskexpr. */
4529 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4530 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4531 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4532 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4534 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4538 gfc_add_expr_to_block (block
, tmp
);
4543 /* WHERE or WHERE construct is part of a where-body-construct. */
4545 gfc_trans_where_2 (cnext
, cmask
, invert
,
4546 nested_forall_info
, block
);
4553 /* The next statement within the same where-body-construct. */
4554 cnext
= cnext
->next
;
4556 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4557 cblock
= cblock
->block
;
4558 if (mask
== NULL_TREE
)
4560 /* If we're the initial WHERE, we can simply invert the sense
4561 of the current mask to obtain the "mask" for the remaining
4568 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4574 /* If we allocated a pending mask array, deallocate it now. */
4577 tmp
= gfc_call_free (ppmask
);
4578 gfc_add_expr_to_block (block
, tmp
);
4581 /* If we allocated a current mask array, deallocate it now. */
4584 tmp
= gfc_call_free (pcmask
);
4585 gfc_add_expr_to_block (block
, tmp
);
4589 /* Translate a simple WHERE construct or statement without dependencies.
4590 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4591 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4592 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
4595 gfc_trans_where_3 (gfc_code
* cblock
, gfc_code
* eblock
)
4597 stmtblock_t block
, body
;
4598 gfc_expr
*cond
, *tdst
, *tsrc
, *edst
, *esrc
;
4599 tree tmp
, cexpr
, tstmt
, estmt
;
4600 gfc_ss
*css
, *tdss
, *tsss
;
4601 gfc_se cse
, tdse
, tsse
, edse
, esse
;
4606 /* Allow the scalarizer to workshare simple where loops. */
4607 if (ompws_flags
& OMPWS_WORKSHARE_FLAG
)
4608 ompws_flags
|= OMPWS_SCALARIZER_WS
;
4610 cond
= cblock
->expr1
;
4611 tdst
= cblock
->next
->expr1
;
4612 tsrc
= cblock
->next
->expr2
;
4613 edst
= eblock
? eblock
->next
->expr1
: NULL
;
4614 esrc
= eblock
? eblock
->next
->expr2
: NULL
;
4616 gfc_start_block (&block
);
4617 gfc_init_loopinfo (&loop
);
4619 /* Handle the condition. */
4620 gfc_init_se (&cse
, NULL
);
4621 css
= gfc_walk_expr (cond
);
4622 gfc_add_ss_to_loop (&loop
, css
);
4624 /* Handle the then-clause. */
4625 gfc_init_se (&tdse
, NULL
);
4626 gfc_init_se (&tsse
, NULL
);
4627 tdss
= gfc_walk_expr (tdst
);
4628 tsss
= gfc_walk_expr (tsrc
);
4629 if (tsss
== gfc_ss_terminator
)
4631 tsss
= gfc_get_scalar_ss (gfc_ss_terminator
, tsrc
);
4632 tsss
->info
->where
= 1;
4634 gfc_add_ss_to_loop (&loop
, tdss
);
4635 gfc_add_ss_to_loop (&loop
, tsss
);
4639 /* Handle the else clause. */
4640 gfc_init_se (&edse
, NULL
);
4641 gfc_init_se (&esse
, NULL
);
4642 edss
= gfc_walk_expr (edst
);
4643 esss
= gfc_walk_expr (esrc
);
4644 if (esss
== gfc_ss_terminator
)
4646 esss
= gfc_get_scalar_ss (gfc_ss_terminator
, esrc
);
4647 esss
->info
->where
= 1;
4649 gfc_add_ss_to_loop (&loop
, edss
);
4650 gfc_add_ss_to_loop (&loop
, esss
);
4653 gfc_conv_ss_startstride (&loop
);
4654 gfc_conv_loop_setup (&loop
, &tdst
->where
);
4656 gfc_mark_ss_chain_used (css
, 1);
4657 gfc_mark_ss_chain_used (tdss
, 1);
4658 gfc_mark_ss_chain_used (tsss
, 1);
4661 gfc_mark_ss_chain_used (edss
, 1);
4662 gfc_mark_ss_chain_used (esss
, 1);
4665 gfc_start_scalarized_body (&loop
, &body
);
4667 gfc_copy_loopinfo_to_se (&cse
, &loop
);
4668 gfc_copy_loopinfo_to_se (&tdse
, &loop
);
4669 gfc_copy_loopinfo_to_se (&tsse
, &loop
);
4675 gfc_copy_loopinfo_to_se (&edse
, &loop
);
4676 gfc_copy_loopinfo_to_se (&esse
, &loop
);
4681 gfc_conv_expr (&cse
, cond
);
4682 gfc_add_block_to_block (&body
, &cse
.pre
);
4685 gfc_conv_expr (&tsse
, tsrc
);
4686 if (tdss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4687 gfc_conv_tmp_array_ref (&tdse
);
4689 gfc_conv_expr (&tdse
, tdst
);
4693 gfc_conv_expr (&esse
, esrc
);
4694 if (edss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4695 gfc_conv_tmp_array_ref (&edse
);
4697 gfc_conv_expr (&edse
, edst
);
4700 tstmt
= gfc_trans_scalar_assign (&tdse
, &tsse
, tdst
->ts
, false, false, true);
4701 estmt
= eblock
? gfc_trans_scalar_assign (&edse
, &esse
, edst
->ts
, false,
4703 : build_empty_stmt (input_location
);
4704 tmp
= build3_v (COND_EXPR
, cexpr
, tstmt
, estmt
);
4705 gfc_add_expr_to_block (&body
, tmp
);
4706 gfc_add_block_to_block (&body
, &cse
.post
);
4708 gfc_trans_scalarizing_loops (&loop
, &body
);
4709 gfc_add_block_to_block (&block
, &loop
.pre
);
4710 gfc_add_block_to_block (&block
, &loop
.post
);
4711 gfc_cleanup_loop (&loop
);
4713 return gfc_finish_block (&block
);
4716 /* As the WHERE or WHERE construct statement can be nested, we call
4717 gfc_trans_where_2 to do the translation, and pass the initial
4718 NULL values for both the control mask and the pending control mask. */
4721 gfc_trans_where (gfc_code
* code
)
4727 cblock
= code
->block
;
4729 && cblock
->next
->op
== EXEC_ASSIGN
4730 && !cblock
->next
->next
)
4732 eblock
= cblock
->block
;
4735 /* A simple "WHERE (cond) x = y" statement or block is
4736 dependence free if cond is not dependent upon writing x,
4737 and the source y is unaffected by the destination x. */
4738 if (!gfc_check_dependency (cblock
->next
->expr1
,
4740 && !gfc_check_dependency (cblock
->next
->expr1
,
4741 cblock
->next
->expr2
, 0))
4742 return gfc_trans_where_3 (cblock
, NULL
);
4744 else if (!eblock
->expr1
4747 && eblock
->next
->op
== EXEC_ASSIGN
4748 && !eblock
->next
->next
)
4750 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4751 block is dependence free if cond is not dependent on writes
4752 to x1 and x2, y1 is not dependent on writes to x2, and y2
4753 is not dependent on writes to x1, and both y's are not
4754 dependent upon their own x's. In addition to this, the
4755 final two dependency checks below exclude all but the same
4756 array reference if the where and elswhere destinations
4757 are the same. In short, this is VERY conservative and this
4758 is needed because the two loops, required by the standard
4759 are coalesced in gfc_trans_where_3. */
4760 if (!gfc_check_dependency (cblock
->next
->expr1
,
4762 && !gfc_check_dependency (eblock
->next
->expr1
,
4764 && !gfc_check_dependency (cblock
->next
->expr1
,
4765 eblock
->next
->expr2
, 1)
4766 && !gfc_check_dependency (eblock
->next
->expr1
,
4767 cblock
->next
->expr2
, 1)
4768 && !gfc_check_dependency (cblock
->next
->expr1
,
4769 cblock
->next
->expr2
, 1)
4770 && !gfc_check_dependency (eblock
->next
->expr1
,
4771 eblock
->next
->expr2
, 1)
4772 && !gfc_check_dependency (cblock
->next
->expr1
,
4773 eblock
->next
->expr1
, 0)
4774 && !gfc_check_dependency (eblock
->next
->expr1
,
4775 cblock
->next
->expr1
, 0))
4776 return gfc_trans_where_3 (cblock
, eblock
);
4780 gfc_start_block (&block
);
4782 gfc_trans_where_2 (code
, NULL
, false, NULL
, &block
);
4784 return gfc_finish_block (&block
);
4788 /* CYCLE a DO loop. The label decl has already been created by
4789 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4790 node at the head of the loop. We must mark the label as used. */
4793 gfc_trans_cycle (gfc_code
* code
)
4797 cycle_label
= code
->ext
.which_construct
->cycle_label
;
4798 gcc_assert (cycle_label
);
4800 TREE_USED (cycle_label
) = 1;
4801 return build1_v (GOTO_EXPR
, cycle_label
);
4805 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4806 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4810 gfc_trans_exit (gfc_code
* code
)
4814 exit_label
= code
->ext
.which_construct
->exit_label
;
4815 gcc_assert (exit_label
);
4817 TREE_USED (exit_label
) = 1;
4818 return build1_v (GOTO_EXPR
, exit_label
);
4822 /* Translate the ALLOCATE statement. */
/* NOTE(review): this is a line-mangled extraction of gfc_trans_allocate:
   statements are split across lines, the original source line numbers are
   fused into the text, and many interior lines are missing (the fused
   numbering jumps, e.g. 4825 -> 4847, 4884 -> 4890).  Do not edit in
   place; restore the function from upstream trans-stmt.c.  Comments
   below are hedged against the visible fragments only.  */
4825 gfc_trans_allocate (gfc_code
* code
)
4847 tree memsize
= NULL_TREE
;
4848 tree classexpr
= NULL_TREE
;
4850 if (!code
->ext
.alloc
.list
)
4853 stat
= tmp
= memsz
= NULL_TREE
;
4854 label_errmsg
= label_finish
= errmsg
= errlen
= NULL_TREE
;
4856 gfc_init_block (&block
);
4857 gfc_init_block (&post
);
4859 /* STAT= (and maybe ERRMSG=) is present. */
4863 tree gfc_int4_type_node
= gfc_get_int_type (4);
4864 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
4866 /* ERRMSG= only makes sense with STAT=. */
4869 gfc_init_se (&se
, NULL
);
4870 se
.want_pointer
= 1;
4871 gfc_conv_expr_lhs (&se
, code
->expr2
);
4873 errlen
= se
.string_length
;
4877 errmsg
= null_pointer_node
;
4878 errlen
= build_int_cst (gfc_charlen_type_node
, 0);
4881 /* GOTO destinations. */
4882 label_errmsg
= gfc_build_label_decl (NULL_TREE
);
4883 label_finish
= gfc_build_label_decl (NULL_TREE
);
4884 TREE_USED (label_finish
) = 0;
/* Translate each allocation in the ALLOCATE object list
   (code->ext.alloc.list).  */
4890 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
4892 expr
= gfc_copy_expr (al
->expr
);
4894 if (expr
->ts
.type
== BT_CLASS
)
4895 gfc_add_data_component (expr
);
4897 gfc_init_se (&se
, NULL
);
4899 se
.want_pointer
= 1;
4900 se
.descriptor_only
= 1;
4901 gfc_conv_expr (&se
, expr
);
4903 /* Evaluate expr3 just once if not a variable. */
4904 if (al
== code
->ext
.alloc
.list
4905 && al
->expr
->ts
.type
== BT_CLASS
4907 && code
->expr3
->ts
.type
== BT_CLASS
4908 && code
->expr3
->expr_type
!= EXPR_VARIABLE
)
4910 gfc_init_se (&se_sz
, NULL
);
4911 gfc_conv_expr_reference (&se_sz
, code
->expr3
);
4912 gfc_conv_class_to_class (&se_sz
, code
->expr3
,
4913 code
->expr3
->ts
, false, true, false, false);
4914 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4915 gfc_add_block_to_block (&se
.post
, &se_sz
.post
);
4916 classexpr
= build_fold_indirect_ref_loc (input_location
,
4918 classexpr
= gfc_evaluate_now (classexpr
, &se
.pre
);
4919 memsize
= gfc_vtable_size_get (classexpr
);
4920 memsize
= fold_convert (sizetype
, memsize
);
4924 class_expr
= classexpr
;
/* Array case handled by gfc_array_allocate; if it returns false,
   fall through to the scalar/derived-type path below.  */
4927 if (!gfc_array_allocate (&se
, expr
, stat
, errmsg
, errlen
, label_finish
,
4928 memsz
, &nelems
, code
->expr3
))
4930 bool unlimited_char
;
4932 unlimited_char
= UNLIMITED_POLY (al
->expr
)
4933 && ((code
->expr3
&& code
->expr3
->ts
.type
== BT_CHARACTER
)
4934 || (code
->ext
.alloc
.ts
.type
== BT_CHARACTER
4935 && code
->ext
.alloc
.ts
.u
.cl
4936 && code
->ext
.alloc
.ts
.u
.cl
->length
));
4938 /* A scalar or derived type. */
4940 /* Determine allocate size. */
4941 if (al
->expr
->ts
.type
== BT_CLASS
4944 && memsz
== NULL_TREE
)
4946 if (code
->expr3
->ts
.type
== BT_CLASS
)
4948 sz
= gfc_copy_expr (code
->expr3
);
4949 gfc_add_vptr_component (sz
);
4950 gfc_add_size_component (sz
);
4951 gfc_init_se (&se_sz
, NULL
);
4952 gfc_conv_expr (&se_sz
, sz
);
4957 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->expr3
->ts
));
4959 else if (((al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
4960 || unlimited_char
) && code
->expr3
)
4962 if (!code
->expr3
->ts
.u
.cl
->backend_decl
)
4964 /* Convert and use the length expression. */
4965 gfc_init_se (&se_sz
, NULL
);
4966 if (code
->expr3
->expr_type
== EXPR_VARIABLE
4967 || code
->expr3
->expr_type
== EXPR_CONSTANT
)
4969 gfc_conv_expr (&se_sz
, code
->expr3
);
4970 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4972 = gfc_evaluate_now (se_sz
.string_length
, &se
.pre
);
4973 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
4974 memsz
= se_sz
.string_length
;
4976 else if (code
->expr3
->mold
4977 && code
->expr3
->ts
.u
.cl
4978 && code
->expr3
->ts
.u
.cl
->length
)
4980 gfc_conv_expr (&se_sz
, code
->expr3
->ts
.u
.cl
->length
);
4981 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4982 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
4983 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
4988 /* This is would be inefficient and possibly could
4989 generate wrong code if the result were not stored
4991 if (slen3
== NULL_TREE
)
4993 gfc_conv_expr (&se_sz
, code
->expr3
);
4994 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4995 expr3
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
4996 gfc_add_block_to_block (&post
, &se_sz
.post
);
4997 slen3
= gfc_evaluate_now (se_sz
.string_length
,
5004 /* Otherwise use the stored string length. */
5005 memsz
= code
->expr3
->ts
.u
.cl
->backend_decl
;
5006 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
5008 /* Store the string length. */
5009 if (tmp
&& TREE_CODE (tmp
) == VAR_DECL
)
5010 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
5013 /* Convert to size in bytes, using the character KIND. */
5015 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->expr3
->ts
));
5017 tmp
= TREE_TYPE (gfc_typenode_for_spec (&al
->expr
->ts
));
5018 tmp
= TYPE_SIZE_UNIT (tmp
);
5019 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5020 TREE_TYPE (tmp
), tmp
,
5021 fold_convert (TREE_TYPE (tmp
), memsz
));
5023 else if ((al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
5026 gcc_assert (code
->ext
.alloc
.ts
.u
.cl
&& code
->ext
.alloc
.ts
.u
.cl
->length
);
5027 gfc_init_se (&se_sz
, NULL
);
5028 gfc_conv_expr (&se_sz
, code
->ext
.alloc
.ts
.u
.cl
->length
);
5029 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5030 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5031 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5032 /* Store the string length. */
5033 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
5034 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
5036 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5037 tmp
= TYPE_SIZE_UNIT (tmp
);
5038 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5039 TREE_TYPE (tmp
), tmp
,
5040 fold_convert (TREE_TYPE (se_sz
.expr
),
5043 else if (code
->ext
.alloc
.ts
.type
!= BT_UNKNOWN
)
5044 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5045 else if (memsz
== NULL_TREE
)
5046 memsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se
.expr
)));
5048 if (expr
->ts
.type
== BT_CHARACTER
&& memsz
== NULL_TREE
)
5050 memsz
= se
.string_length
;
5052 /* Convert to size in bytes, using the character KIND. */
5053 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5054 tmp
= TYPE_SIZE_UNIT (tmp
);
5055 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5056 TREE_TYPE (tmp
), tmp
,
5057 fold_convert (TREE_TYPE (tmp
), memsz
));
5060 /* Allocate - for non-pointers with re-alloc checking. */
5061 if (gfc_expr_attr (expr
).allocatable
)
5062 gfc_allocate_allocatable (&se
.pre
, se
.expr
, memsz
, NULL_TREE
,
5063 stat
, errmsg
, errlen
, label_finish
, expr
);
5065 gfc_allocate_using_malloc (&se
.pre
, se
.expr
, memsz
, stat
);
5067 if (al
->expr
->ts
.type
== BT_DERIVED
5068 && expr
->ts
.u
.derived
->attr
.alloc_comp
)
5070 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
5071 tmp
= gfc_nullify_alloc_comp (expr
->ts
.u
.derived
, tmp
, 0);
5072 gfc_add_expr_to_block (&se
.pre
, tmp
);
5074 else if (al
->expr
->ts
.type
== BT_CLASS
)
5076 /* With class objects, it is best to play safe and null the
5077 memory because we cannot know if dynamic types have allocatable
5078 components or not. */
5079 tmp
= build_call_expr_loc (input_location
,
5080 builtin_decl_explicit (BUILT_IN_MEMSET
),
5081 3, se
.expr
, integer_zero_node
, memsz
);
5082 gfc_add_expr_to_block (&se
.pre
, tmp
);
5086 gfc_add_block_to_block (&block
, &se
.pre
);
5088 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
5091 tmp
= build1_v (GOTO_EXPR
, label_errmsg
);
5092 parm
= fold_build2_loc (input_location
, NE_EXPR
,
5093 boolean_type_node
, stat
,
5094 build_int_cst (TREE_TYPE (stat
), 0));
5095 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5096 gfc_unlikely (parm
), tmp
,
5097 build_empty_stmt (input_location
));
5098 gfc_add_expr_to_block (&block
, tmp
);
5101 /* We need the vptr of CLASS objects to be initialized. */
5102 e
= gfc_copy_expr (al
->expr
);
5103 if (e
->ts
.type
== BT_CLASS
)
5105 gfc_expr
*lhs
, *rhs
;
5108 lhs
= gfc_expr_to_initialize (e
);
5109 gfc_add_vptr_component (lhs
);
5111 if (class_expr
!= NULL_TREE
)
5113 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5114 gfc_init_se (&lse
, NULL
);
5115 lse
.want_pointer
= 1;
5116 gfc_conv_expr (&lse
, lhs
);
5117 tmp
= gfc_class_vptr_get (class_expr
);
5118 gfc_add_modify (&block
, lse
.expr
,
5119 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
5121 else if (code
->expr3
&& code
->expr3
->ts
.type
== BT_CLASS
)
5123 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5124 rhs
= gfc_copy_expr (code
->expr3
);
5125 gfc_add_vptr_component (rhs
);
5126 tmp
= gfc_trans_pointer_assignment (lhs
, rhs
);
5127 gfc_add_expr_to_block (&block
, tmp
);
5128 gfc_free_expr (rhs
);
5129 rhs
= gfc_expr_to_initialize (e
);
5133 /* VPTR is fixed at compile time. */
5137 ts
= &code
->expr3
->ts
;
5138 else if (e
->ts
.type
== BT_DERIVED
)
5140 else if (code
->ext
.alloc
.ts
.type
== BT_DERIVED
|| UNLIMITED_POLY (al
->expr
))
5141 ts
= &code
->ext
.alloc
.ts
;
5142 else if (e
->ts
.type
== BT_CLASS
)
5143 ts
= &CLASS_DATA (e
)->ts
;
5147 if (ts
->type
== BT_DERIVED
|| UNLIMITED_POLY (e
))
5149 if (ts
->type
== BT_DERIVED
)
5150 vtab
= gfc_find_derived_vtab (ts
->u
.derived
);
5152 vtab
= gfc_find_intrinsic_vtab (ts
);
5154 gfc_init_se (&lse
, NULL
);
5155 lse
.want_pointer
= 1;
5156 gfc_conv_expr (&lse
, lhs
);
5157 tmp
= gfc_build_addr_expr (NULL_TREE
,
5158 gfc_get_symbol_decl (vtab
));
5159 gfc_add_modify (&block
, lse
.expr
,
5160 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
5163 gfc_free_expr (lhs
);
/* Initialization of the allocated object from SOURCE= / MOLD=.  */
5168 if (code
->expr3
&& !code
->expr3
->mold
)
5170 /* Initialization via SOURCE block
5171 (or static default initializer). */
5172 gfc_expr
*rhs
= gfc_copy_expr (code
->expr3
);
5173 if (class_expr
!= NULL_TREE
)
5176 to
= TREE_OPERAND (se
.expr
, 0);
5178 tmp
= gfc_copy_class_to_class (class_expr
, to
, nelems
);
5180 else if (al
->expr
->ts
.type
== BT_CLASS
)
5182 gfc_actual_arglist
*actual
;
5185 gfc_ref
*ref
, *dataref
;
5187 /* Do a polymorphic deep copy. */
5188 actual
= gfc_get_actual_arglist ();
5189 actual
->expr
= gfc_copy_expr (rhs
);
5190 if (rhs
->ts
.type
== BT_CLASS
)
5191 gfc_add_data_component (actual
->expr
);
5192 actual
->next
= gfc_get_actual_arglist ();
5193 actual
->next
->expr
= gfc_copy_expr (al
->expr
);
5194 actual
->next
->expr
->ts
.type
= BT_CLASS
;
5195 gfc_add_data_component (actual
->next
->expr
);
5198 /* Make sure we go up through the reference chain to
5199 the _data reference, where the arrayspec is found. */
5200 for (ref
= actual
->next
->expr
->ref
; ref
; ref
= ref
->next
)
5201 if (ref
->type
== REF_COMPONENT
5202 && strcmp (ref
->u
.c
.component
->name
, "_data") == 0)
5205 if (dataref
&& dataref
->u
.c
.component
->as
)
5209 gfc_ref
*ref
= dataref
->next
;
5210 ref
->u
.ar
.type
= AR_SECTION
;
5211 /* We have to set up the array reference to give ranges
5212 in all dimensions and ensure that the end and stride
5213 are set so that the copy can be scalarized. */
5215 for (; dim
< dataref
->u
.c
.component
->as
->rank
; dim
++)
5217 ref
->u
.ar
.dimen_type
[dim
] = DIMEN_RANGE
;
5218 if (ref
->u
.ar
.end
[dim
] == NULL
)
5220 ref
->u
.ar
.end
[dim
] = ref
->u
.ar
.start
[dim
];
5221 temp
= gfc_get_int_expr (gfc_default_integer_kind
,
5222 &al
->expr
->where
, 1);
5223 ref
->u
.ar
.start
[dim
] = temp
;
5225 temp
= gfc_subtract (gfc_copy_expr (ref
->u
.ar
.end
[dim
]),
5226 gfc_copy_expr (ref
->u
.ar
.start
[dim
]));
5227 temp
= gfc_add (gfc_get_int_expr (gfc_default_integer_kind
,
5228 &al
->expr
->where
, 1),
5232 if (rhs
->ts
.type
== BT_CLASS
)
5234 ppc
= gfc_copy_expr (rhs
);
5235 gfc_add_vptr_component (ppc
);
5237 else if (rhs
->ts
.type
== BT_DERIVED
)
5238 ppc
= gfc_lval_expr_from_sym
5239 (gfc_find_derived_vtab (rhs
->ts
.u
.derived
));
5241 ppc
= gfc_lval_expr_from_sym
5242 (gfc_find_intrinsic_vtab (&rhs
->ts
));
5243 gfc_add_component_ref (ppc
, "_copy");
5245 ppc_code
= gfc_get_code ();
5246 ppc_code
->resolved_sym
= ppc
->symtree
->n
.sym
;
5247 /* Although '_copy' is set to be elemental in class.c, it is
5248 not staying that way. Find out why, sometime.... */
5249 ppc_code
->resolved_sym
->attr
.elemental
= 1;
5250 ppc_code
->ext
.actual
= actual
;
5251 ppc_code
->expr1
= ppc
;
5252 ppc_code
->op
= EXEC_CALL
;
5253 /* Since '_copy' is elemental, the scalarizer will take care
5254 of arrays in gfc_trans_call. */
5255 tmp
= gfc_trans_call (ppc_code
, true, NULL
, NULL
, false);
5256 gfc_free_statements (ppc_code
);
5258 else if (expr3
!= NULL_TREE
)
5260 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
5261 gfc_trans_string_copy (&block
, slen3
, tmp
, code
->expr3
->ts
.kind
,
5262 slen3
, expr3
, code
->expr3
->ts
.kind
);
5267 /* Switch off automatic reallocation since we have just done
5269 int realloc_lhs
= gfc_option
.flag_realloc_lhs
;
5270 gfc_option
.flag_realloc_lhs
= 0;
5271 tmp
= gfc_trans_assignment (gfc_expr_to_initialize (expr
),
5273 gfc_option
.flag_realloc_lhs
= realloc_lhs
;
5275 gfc_free_expr (rhs
);
5276 gfc_add_expr_to_block (&block
, tmp
);
5278 else if (code
->expr3
&& code
->expr3
->mold
5279 && code
->expr3
->ts
.type
== BT_CLASS
)
5281 /* Since the _vptr has already been assigned to the allocate
5282 object, we can use gfc_copy_class_to_class in its
5283 initialization mode. */
5284 tmp
= TREE_OPERAND (se
.expr
, 0);
5285 tmp
= gfc_copy_class_to_class (NULL_TREE
, tmp
, nelems
);
5286 gfc_add_expr_to_block (&block
, tmp
);
5289 gfc_free_expr (expr
);
5295 tmp
= build1_v (LABEL_EXPR
, label_errmsg
);
5296 gfc_add_expr_to_block (&block
, tmp
);
5299 /* ERRMSG - only useful if STAT is present. */
5300 if (code
->expr1
&& code
->expr2
)
5302 const char *msg
= "Attempt to allocate an allocated object";
5303 tree slen
, dlen
, errmsg_str
;
5304 stmtblock_t errmsg_block
;
5306 gfc_init_block (&errmsg_block
);
5308 errmsg_str
= gfc_create_var (pchar_type_node
, "ERRMSG");
5309 gfc_add_modify (&errmsg_block
, errmsg_str
,
5310 gfc_build_addr_expr (pchar_type_node
,
5311 gfc_build_localized_cstring_const (msg
)));
5313 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5314 dlen
= gfc_get_expr_charlen (code
->expr2
);
5315 slen
= fold_build2_loc (input_location
, MIN_EXPR
, TREE_TYPE (slen
), dlen
,
5318 gfc_trans_string_copy (&errmsg_block
, dlen
, errmsg
, code
->expr2
->ts
.kind
,
5319 slen
, errmsg_str
, gfc_default_character_kind
);
5320 dlen
= gfc_finish_block (&errmsg_block
);
5322 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5323 build_int_cst (TREE_TYPE (stat
), 0));
5325 tmp
= build3_v (COND_EXPR
, tmp
, dlen
, build_empty_stmt (input_location
));
5327 gfc_add_expr_to_block (&block
, tmp
);
5333 if (TREE_USED (label_finish
))
5335 tmp
= build1_v (LABEL_EXPR
, label_finish
);
5336 gfc_add_expr_to_block (&block
, tmp
);
/* NOTE(review): the guard around the STAT= assignment below (presumably
   an `if (code->expr1)` — original lines 5337-5338) is among the missing
   lines in this extraction.  */
5339 gfc_init_se (&se
, NULL
);
5340 gfc_conv_expr_lhs (&se
, code
->expr1
);
5341 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
5342 gfc_add_modify (&block
, se
.expr
, tmp
);
5345 gfc_add_block_to_block (&block
, &se
.post
);
5346 gfc_add_block_to_block (&block
, &post
);
5348 return gfc_finish_block (&block
);
5352 /* Reset the vptr after deallocation. */
5355 reset_vptr (stmtblock_t
*block
, gfc_expr
*e
)
5357 gfc_expr
*rhs
, *lhs
= gfc_copy_expr (e
);
5361 if (UNLIMITED_POLY (e
))
5362 rhs
= gfc_get_null_expr (NULL
);
5365 vtab
= gfc_find_derived_vtab (e
->ts
.u
.derived
);
5366 rhs
= gfc_lval_expr_from_sym (vtab
);
5368 gfc_add_vptr_component (lhs
);
5369 tmp
= gfc_trans_pointer_assignment (lhs
, rhs
);
5370 gfc_add_expr_to_block (block
, tmp
);
5371 gfc_free_expr (lhs
);
5372 gfc_free_expr (rhs
);
5376 /* Translate a DEALLOCATE statement. */
/* NOTE(review): this is a line-mangled extraction of gfc_trans_deallocate:
   statements are split across lines, the original source line numbers are
   fused into the text, and interior lines are missing (the fused
   numbering jumps, e.g. 5384 -> 5387, 5405 -> 5408).  Do not edit in
   place; restore the function from upstream trans-stmt.c.  Comments
   below are hedged against the visible fragments only.  */
5379 gfc_trans_deallocate (gfc_code
*code
)
5383 tree apstat
, pstat
, stat
, errmsg
, errlen
, tmp
;
5384 tree label_finish
, label_errmsg
;
5387 pstat
= apstat
= stat
= errmsg
= errlen
= tmp
= NULL_TREE
;
5388 label_finish
= label_errmsg
= NULL_TREE
;
5390 gfc_start_block (&block
);
5392 /* Count the number of failed deallocations. If deallocate() was
5393 called with STAT= , then set STAT to the count. If deallocate
5394 was called with ERRMSG, then set ERRMG to a string. */
5397 tree gfc_int4_type_node
= gfc_get_int_type (4);
5399 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
5400 pstat
= gfc_build_addr_expr (NULL_TREE
, stat
);
5402 /* GOTO destinations. */
5403 label_errmsg
= gfc_build_label_decl (NULL_TREE
);
5404 label_finish
= gfc_build_label_decl (NULL_TREE
);
5405 TREE_USED (label_finish
) = 0;
5408 /* Set ERRMSG - only needed if STAT is available. */
5409 if (code
->expr1
&& code
->expr2
)
5411 gfc_init_se (&se
, NULL
);
5412 se
.want_pointer
= 1;
5413 gfc_conv_expr_lhs (&se
, code
->expr2
);
5415 errlen
= se
.string_length
;
/* Translate each object in the DEALLOCATE list
   (code->ext.alloc.list).  */
5418 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
5420 gfc_expr
*expr
= gfc_copy_expr (al
->expr
);
5421 gcc_assert (expr
->expr_type
== EXPR_VARIABLE
);
5423 if (expr
->ts
.type
== BT_CLASS
)
5424 gfc_add_data_component (expr
);
5426 gfc_init_se (&se
, NULL
);
5427 gfc_start_block (&se
.pre
);
5429 se
.want_pointer
= 1;
5430 se
.descriptor_only
= 1;
5431 gfc_conv_expr (&se
, expr
);
5433 if (expr
->rank
|| gfc_is_coarray (expr
))
5435 if (expr
->ts
.type
== BT_DERIVED
&& expr
->ts
.u
.derived
->attr
.alloc_comp
)
5438 gfc_ref
*last
= NULL
;
5439 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
5440 if (ref
->type
== REF_COMPONENT
)
5443 /* Do not deallocate the components of a derived type
5444 ultimate pointer component. */
5445 if (!(last
&& last
->u
.c
.component
->attr
.pointer
)
5446 && !(!last
&& expr
->symtree
->n
.sym
->attr
.pointer
))
5448 tmp
= gfc_deallocate_alloc_comp (expr
->ts
.u
.derived
, se
.expr
,
5450 gfc_add_expr_to_block (&se
.pre
, tmp
);
5453 tmp
= gfc_array_deallocate (se
.expr
, pstat
, errmsg
, errlen
,
5454 label_finish
, expr
);
5455 gfc_add_expr_to_block (&se
.pre
, tmp
);
5456 if (UNLIMITED_POLY (al
->expr
))
5457 reset_vptr (&se
.pre
, al
->expr
);
5461 tmp
= gfc_deallocate_scalar_with_status (se
.expr
, pstat
, false,
5462 al
->expr
, al
->expr
->ts
);
5463 gfc_add_expr_to_block (&se
.pre
, tmp
);
5465 /* Set to zero after deallocation. */
5466 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
5468 build_int_cst (TREE_TYPE (se
.expr
), 0));
5469 gfc_add_expr_to_block (&se
.pre
, tmp
);
5471 if (al
->expr
->ts
.type
== BT_CLASS
)
5472 reset_vptr (&se
.pre
, al
->expr
);
/* On failure (stat != 0) branch to the error-message label.  */
5479 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5480 build_int_cst (TREE_TYPE (stat
), 0));
5481 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5482 gfc_unlikely (cond
),
5483 build1_v (GOTO_EXPR
, label_errmsg
),
5484 build_empty_stmt (input_location
));
5485 gfc_add_expr_to_block (&se
.pre
, tmp
);
5488 tmp
= gfc_finish_block (&se
.pre
);
5489 gfc_add_expr_to_block (&block
, tmp
);
5490 gfc_free_expr (expr
);
5495 tmp
= build1_v (LABEL_EXPR
, label_errmsg
);
5496 gfc_add_expr_to_block (&block
, tmp
);
5499 /* Set ERRMSG - only needed if STAT is available. */
5500 if (code
->expr1
&& code
->expr2
)
5502 const char *msg
= "Attempt to deallocate an unallocated object";
5503 stmtblock_t errmsg_block
;
5504 tree errmsg_str
, slen
, dlen
, cond
;
5506 gfc_init_block (&errmsg_block
);
5508 errmsg_str
= gfc_create_var (pchar_type_node
, "ERRMSG");
5509 gfc_add_modify (&errmsg_block
, errmsg_str
,
5510 gfc_build_addr_expr (pchar_type_node
,
5511 gfc_build_localized_cstring_const (msg
)));
5512 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5513 dlen
= gfc_get_expr_charlen (code
->expr2
);
5515 gfc_trans_string_copy (&errmsg_block
, dlen
, errmsg
, code
->expr2
->ts
.kind
,
5516 slen
, errmsg_str
, gfc_default_character_kind
);
5517 tmp
= gfc_finish_block (&errmsg_block
);
5519 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5520 build_int_cst (TREE_TYPE (stat
), 0));
5521 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5522 gfc_unlikely (cond
), tmp
,
5523 build_empty_stmt (input_location
));
5525 gfc_add_expr_to_block (&block
, tmp
);
5528 if (code
->expr1
&& TREE_USED (label_finish
))
5530 tmp
= build1_v (LABEL_EXPR
, label_finish
);
5531 gfc_add_expr_to_block (&block
, tmp
);
/* NOTE(review): the guard around the STAT= assignment below (original
   lines 5532-5536) is among the missing lines in this extraction.  */
5537 gfc_init_se (&se
, NULL
);
5538 gfc_conv_expr_lhs (&se
, code
->expr1
);
5539 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
5540 gfc_add_modify (&block
, se
.expr
, tmp
);
5543 return gfc_finish_block (&block
);
5546 #include "gt-fortran-trans-stmt.h"