1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002-2015 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
29 #include "double-int.h"
37 #include "fold-const.h"
38 #include "stringpool.h"
42 #include "trans-stmt.h"
43 #include "trans-types.h"
44 #include "trans-array.h"
45 #include "trans-const.h"
47 #include "dependency.h"
/* Linked node describing one FORALL iteration variable; nodes are
   chained through 'next', one per index of a FORALL construct.
   NOTE(review): the extraction dropped this struct's data members
   (original lines between the tag and 'next'); confirm against the
   upstream file before relying on the layout.  */
50 typedef struct iter_info
56 struct iter_info
*next
;
/* Bookkeeping for one nesting level of a FORALL construct; levels are
   chained outward through 'prev_nest'.
   NOTE(review): the extraction dropped this struct's data members;
   only the 'prev_nest' link is visible here — verify upstream.  */
60 typedef struct forall_info
67 struct forall_info
*prev_nest
;
/* Forward declaration of the WHERE-construct translator defined later
   in this file: takes the WHERE code, a mask tree, an inversion flag,
   the enclosing FORALL nesting info, and the output statement block.  */
72 static void gfc_trans_where_2 (gfc_code
*, tree
, bool,
73 forall_info
*, stmtblock_t
*);
75 /* Translate a F95 label number to a LABEL_EXPR. */
78 gfc_trans_label_here (gfc_code
* code
)
80 return build1_v (LABEL_EXPR
, gfc_get_label_decl (code
->here
));
84 /* Given a variable expression which has been ASSIGNed to, find the decl
85 containing the auxiliary variables. For variables in common blocks this
89 gfc_conv_label_variable (gfc_se
* se
, gfc_expr
* expr
)
91 gcc_assert (expr
->symtree
->n
.sym
->attr
.assign
== 1);
92 gfc_conv_expr (se
, expr
);
93 /* Deals with variable in common block. Get the field declaration. */
94 if (TREE_CODE (se
->expr
) == COMPONENT_REF
)
95 se
->expr
= TREE_OPERAND (se
->expr
, 1);
96 /* Deals with dummy argument. Get the parameter declaration. */
97 else if (TREE_CODE (se
->expr
) == INDIRECT_REF
)
98 se
->expr
= TREE_OPERAND (se
->expr
, 0);
101 /* Translate a label assignment statement. */
104 gfc_trans_label_assign (gfc_code
* code
)
113 /* Start a new block. */
114 gfc_init_se (&se
, NULL
);
115 gfc_start_block (&se
.pre
);
116 gfc_conv_label_variable (&se
, code
->expr1
);
118 len
= GFC_DECL_STRING_LEN (se
.expr
);
119 addr
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
121 label_tree
= gfc_get_label_decl (code
->label1
);
123 if (code
->label1
->defined
== ST_LABEL_TARGET
124 || code
->label1
->defined
== ST_LABEL_DO_TARGET
)
126 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
127 len_tree
= integer_minus_one_node
;
131 gfc_expr
*format
= code
->label1
->format
;
133 label_len
= format
->value
.character
.length
;
134 len_tree
= build_int_cst (gfc_charlen_type_node
, label_len
);
135 label_tree
= gfc_build_wide_string_const (format
->ts
.kind
, label_len
+ 1,
136 format
->value
.character
.string
);
137 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
140 gfc_add_modify (&se
.pre
, len
, len_tree
);
141 gfc_add_modify (&se
.pre
, addr
, label_tree
);
143 return gfc_finish_block (&se
.pre
);
146 /* Translate a GOTO statement. */
149 gfc_trans_goto (gfc_code
* code
)
151 locus loc
= code
->loc
;
157 if (code
->label1
!= NULL
)
158 return build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
161 gfc_init_se (&se
, NULL
);
162 gfc_start_block (&se
.pre
);
163 gfc_conv_label_variable (&se
, code
->expr1
);
164 tmp
= GFC_DECL_STRING_LEN (se
.expr
);
165 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, tmp
,
166 build_int_cst (TREE_TYPE (tmp
), -1));
167 gfc_trans_runtime_check (true, false, tmp
, &se
.pre
, &loc
,
168 "Assigned label is not a target label");
170 assigned_goto
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
172 /* We're going to ignore a label list. It does not really change the
173 statement's semantics (because it is just a further restriction on
174 what's legal code); before, we were comparing label addresses here, but
175 that's a very fragile business and may break with optimization. So
178 target
= fold_build1_loc (input_location
, GOTO_EXPR
, void_type_node
,
180 gfc_add_expr_to_block (&se
.pre
, target
);
181 return gfc_finish_block (&se
.pre
);
185 /* Translate an ENTRY statement. Just adds a label for this entry point. */
187 gfc_trans_entry (gfc_code
* code
)
189 return build1_v (LABEL_EXPR
, code
->ext
.entry
->label
);
193 /* Replace a gfc_ss structure by another both in the gfc_se struct
194 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
195 to replace a variable ss by the corresponding temporary. */
198 replace_ss (gfc_se
*se
, gfc_ss
*old_ss
, gfc_ss
*new_ss
)
200 gfc_ss
**sess
, **loopss
;
202 /* The old_ss is a ss for a single variable. */
203 gcc_assert (old_ss
->info
->type
== GFC_SS_SECTION
);
205 for (sess
= &(se
->ss
); *sess
!= gfc_ss_terminator
; sess
= &((*sess
)->next
))
208 gcc_assert (*sess
!= gfc_ss_terminator
);
211 new_ss
->next
= old_ss
->next
;
214 for (loopss
= &(se
->loop
->ss
); *loopss
!= gfc_ss_terminator
;
215 loopss
= &((*loopss
)->loop_chain
))
216 if (*loopss
== old_ss
)
218 gcc_assert (*loopss
!= gfc_ss_terminator
);
221 new_ss
->loop_chain
= old_ss
->loop_chain
;
222 new_ss
->loop
= old_ss
->loop
;
224 gfc_free_ss (old_ss
);
228 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
229 elemental subroutines. Make temporaries for output arguments if any such
230 dependencies are found. Output arguments are chosen because internal_unpack
231 can be used, as is, to copy the result back to the variable. */
233 gfc_conv_elemental_dependencies (gfc_se
* se
, gfc_se
* loopse
,
234 gfc_symbol
* sym
, gfc_actual_arglist
* arg
,
235 gfc_dep_check check_variable
)
237 gfc_actual_arglist
*arg0
;
239 gfc_formal_arglist
*formal
;
247 if (loopse
->ss
== NULL
)
252 formal
= gfc_sym_get_dummy_args (sym
);
254 /* Loop over all the arguments testing for dependencies. */
255 for (; arg
!= NULL
; arg
= arg
->next
, formal
= formal
? formal
->next
: NULL
)
261 /* Obtain the info structure for the current argument. */
262 for (ss
= loopse
->ss
; ss
&& ss
!= gfc_ss_terminator
; ss
= ss
->next
)
263 if (ss
->info
->expr
== e
)
266 /* If there is a dependency, create a temporary and use it
267 instead of the variable. */
268 fsym
= formal
? formal
->sym
: NULL
;
269 if (e
->expr_type
== EXPR_VARIABLE
271 && fsym
->attr
.intent
!= INTENT_IN
272 && gfc_check_fncall_dependency (e
, fsym
->attr
.intent
,
273 sym
, arg0
, check_variable
))
275 tree initial
, temptype
;
276 stmtblock_t temp_post
;
279 tmp_ss
= gfc_get_array_ss (gfc_ss_terminator
, NULL
, ss
->dimen
,
281 gfc_mark_ss_chain_used (tmp_ss
, 1);
282 tmp_ss
->info
->expr
= ss
->info
->expr
;
283 replace_ss (loopse
, ss
, tmp_ss
);
285 /* Obtain the argument descriptor for unpacking. */
286 gfc_init_se (&parmse
, NULL
);
287 parmse
.want_pointer
= 1;
288 gfc_conv_expr_descriptor (&parmse
, e
);
289 gfc_add_block_to_block (&se
->pre
, &parmse
.pre
);
291 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
292 initialize the array temporary with a copy of the values. */
293 if (fsym
->attr
.intent
== INTENT_INOUT
294 || (fsym
->ts
.type
==BT_DERIVED
295 && fsym
->attr
.intent
== INTENT_OUT
))
296 initial
= parmse
.expr
;
297 /* For class expressions, we always initialize with the copy of
299 else if (e
->ts
.type
== BT_CLASS
)
300 initial
= parmse
.expr
;
304 if (e
->ts
.type
!= BT_CLASS
)
306 /* Find the type of the temporary to create; we don't use the type
307 of e itself as this breaks for subcomponent-references in e
308 (where the type of e is that of the final reference, but
309 parmse.expr's type corresponds to the full derived-type). */
310 /* TODO: Fix this somehow so we don't need a temporary of the whole
311 array but instead only the components referenced. */
312 temptype
= TREE_TYPE (parmse
.expr
); /* Pointer to descriptor. */
313 gcc_assert (TREE_CODE (temptype
) == POINTER_TYPE
);
314 temptype
= TREE_TYPE (temptype
);
315 temptype
= gfc_get_element_type (temptype
);
319 /* For class arrays signal that the size of the dynamic type has to
320 be obtained from the vtable, using the 'initial' expression. */
321 temptype
= NULL_TREE
;
323 /* Generate the temporary. Cleaning up the temporary should be the
324 very last thing done, so we add the code to a new block and add it
325 to se->post as last instructions. */
326 size
= gfc_create_var (gfc_array_index_type
, NULL
);
327 data
= gfc_create_var (pvoid_type_node
, NULL
);
328 gfc_init_block (&temp_post
);
329 tmp
= gfc_trans_create_temp_array (&se
->pre
, &temp_post
, tmp_ss
,
330 temptype
, initial
, false, true,
331 false, &arg
->expr
->where
);
332 gfc_add_modify (&se
->pre
, size
, tmp
);
333 tmp
= fold_convert (pvoid_type_node
, tmp_ss
->info
->data
.array
.data
);
334 gfc_add_modify (&se
->pre
, data
, tmp
);
336 /* Update other ss' delta. */
337 gfc_set_delta (loopse
->loop
);
339 /* Copy the result back using unpack..... */
340 if (e
->ts
.type
!= BT_CLASS
)
341 tmp
= build_call_expr_loc (input_location
,
342 gfor_fndecl_in_unpack
, 2, parmse
.expr
, data
);
345 /* ... except for class results where the copy is
347 tmp
= build_fold_indirect_ref_loc (input_location
, parmse
.expr
);
348 tmp
= gfc_conv_descriptor_data_get (tmp
);
349 tmp
= build_call_expr_loc (input_location
,
350 builtin_decl_explicit (BUILT_IN_MEMCPY
),
352 fold_convert (size_type_node
, size
));
354 gfc_add_expr_to_block (&se
->post
, tmp
);
356 /* parmse.pre is already added above. */
357 gfc_add_block_to_block (&se
->post
, &parmse
.post
);
358 gfc_add_block_to_block (&se
->post
, &temp_post
);
364 /* Get the interface symbol for the procedure corresponding to the given call.
365 We can't get the procedure symbol directly as we have to handle the case
366 of (deferred) type-bound procedures. */
369 get_proc_ifc_for_call (gfc_code
*c
)
373 gcc_assert (c
->op
== EXEC_ASSIGN_CALL
|| c
->op
== EXEC_CALL
);
375 sym
= gfc_get_proc_ifc_for_expr (c
->expr1
);
377 /* Fall back/last resort try. */
379 sym
= c
->resolved_sym
;
385 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
388 gfc_trans_call (gfc_code
* code
, bool dependency_check
,
389 tree mask
, tree count1
, bool invert
)
393 int has_alternate_specifier
;
394 gfc_dep_check check_variable
;
395 tree index
= NULL_TREE
;
396 tree maskexpr
= NULL_TREE
;
399 /* A CALL starts a new block because the actual arguments may have to
400 be evaluated first. */
401 gfc_init_se (&se
, NULL
);
402 gfc_start_block (&se
.pre
);
404 gcc_assert (code
->resolved_sym
);
406 ss
= gfc_ss_terminator
;
407 if (code
->resolved_sym
->attr
.elemental
)
408 ss
= gfc_walk_elemental_function_args (ss
, code
->ext
.actual
,
409 get_proc_ifc_for_call (code
),
412 /* Is not an elemental subroutine call with array valued arguments. */
413 if (ss
== gfc_ss_terminator
)
416 /* Translate the call. */
417 has_alternate_specifier
418 = gfc_conv_procedure_call (&se
, code
->resolved_sym
, code
->ext
.actual
,
421 /* A subroutine without side-effect, by definition, does nothing! */
422 TREE_SIDE_EFFECTS (se
.expr
) = 1;
424 /* Chain the pieces together and return the block. */
425 if (has_alternate_specifier
)
427 gfc_code
*select_code
;
429 select_code
= code
->next
;
430 gcc_assert(select_code
->op
== EXEC_SELECT
);
431 sym
= select_code
->expr1
->symtree
->n
.sym
;
432 se
.expr
= convert (gfc_typenode_for_spec (&sym
->ts
), se
.expr
);
433 if (sym
->backend_decl
== NULL
)
434 sym
->backend_decl
= gfc_get_symbol_decl (sym
);
435 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
438 gfc_add_expr_to_block (&se
.pre
, se
.expr
);
440 gfc_add_block_to_block (&se
.pre
, &se
.post
);
445 /* An elemental subroutine call with array valued arguments has
453 /* gfc_walk_elemental_function_args renders the ss chain in the
454 reverse order to the actual argument order. */
455 ss
= gfc_reverse_ss (ss
);
457 /* Initialize the loop. */
458 gfc_init_se (&loopse
, NULL
);
459 gfc_init_loopinfo (&loop
);
460 gfc_add_ss_to_loop (&loop
, ss
);
462 gfc_conv_ss_startstride (&loop
);
463 /* TODO: gfc_conv_loop_setup generates a temporary for vector
464 subscripts. This could be prevented in the elemental case
465 as temporaries are handled separatedly
466 (below in gfc_conv_elemental_dependencies). */
467 gfc_conv_loop_setup (&loop
, &code
->expr1
->where
);
468 gfc_mark_ss_chain_used (ss
, 1);
470 /* Convert the arguments, checking for dependencies. */
471 gfc_copy_loopinfo_to_se (&loopse
, &loop
);
474 /* For operator assignment, do dependency checking. */
475 if (dependency_check
)
476 check_variable
= ELEM_CHECK_VARIABLE
;
478 check_variable
= ELEM_DONT_CHECK_VARIABLE
;
480 gfc_init_se (&depse
, NULL
);
481 gfc_conv_elemental_dependencies (&depse
, &loopse
, code
->resolved_sym
,
482 code
->ext
.actual
, check_variable
);
484 gfc_add_block_to_block (&loop
.pre
, &depse
.pre
);
485 gfc_add_block_to_block (&loop
.post
, &depse
.post
);
487 /* Generate the loop body. */
488 gfc_start_scalarized_body (&loop
, &body
);
489 gfc_init_block (&block
);
493 /* Form the mask expression according to the mask. */
495 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
497 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
498 TREE_TYPE (maskexpr
), maskexpr
);
501 /* Add the subroutine call to the block. */
502 gfc_conv_procedure_call (&loopse
, code
->resolved_sym
,
503 code
->ext
.actual
, code
->expr1
,
508 tmp
= build3_v (COND_EXPR
, maskexpr
, loopse
.expr
,
509 build_empty_stmt (input_location
));
510 gfc_add_expr_to_block (&loopse
.pre
, tmp
);
511 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
512 gfc_array_index_type
,
513 count1
, gfc_index_one_node
);
514 gfc_add_modify (&loopse
.pre
, count1
, tmp
);
517 gfc_add_expr_to_block (&loopse
.pre
, loopse
.expr
);
519 gfc_add_block_to_block (&block
, &loopse
.pre
);
520 gfc_add_block_to_block (&block
, &loopse
.post
);
522 /* Finish up the loop block and the loop. */
523 gfc_add_expr_to_block (&body
, gfc_finish_block (&block
));
524 gfc_trans_scalarizing_loops (&loop
, &body
);
525 gfc_add_block_to_block (&se
.pre
, &loop
.pre
);
526 gfc_add_block_to_block (&se
.pre
, &loop
.post
);
527 gfc_add_block_to_block (&se
.pre
, &se
.post
);
528 gfc_cleanup_loop (&loop
);
531 return gfc_finish_block (&se
.pre
);
535 /* Translate the RETURN statement. */
538 gfc_trans_return (gfc_code
* code
)
546 /* If code->expr is not NULL, this return statement must appear
547 in a subroutine and current_fake_result_decl has already
550 result
= gfc_get_fake_result_decl (NULL
, 0);
553 gfc_warning ("An alternate return at %L without a * dummy argument",
554 &code
->expr1
->where
);
555 return gfc_generate_return ();
558 /* Start a new block for this statement. */
559 gfc_init_se (&se
, NULL
);
560 gfc_start_block (&se
.pre
);
562 gfc_conv_expr (&se
, code
->expr1
);
564 /* Note that the actually returned expression is a simple value and
565 does not depend on any pointers or such; thus we can clean-up with
566 se.post before returning. */
567 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, TREE_TYPE (result
),
568 result
, fold_convert (TREE_TYPE (result
),
570 gfc_add_expr_to_block (&se
.pre
, tmp
);
571 gfc_add_block_to_block (&se
.pre
, &se
.post
);
573 tmp
= gfc_generate_return ();
574 gfc_add_expr_to_block (&se
.pre
, tmp
);
575 return gfc_finish_block (&se
.pre
);
578 return gfc_generate_return ();
582 /* Translate the PAUSE statement. We have to translate this statement
583 to a runtime library call. */
586 gfc_trans_pause (gfc_code
* code
)
588 tree gfc_int4_type_node
= gfc_get_int_type (4);
592 /* Start a new block for this statement. */
593 gfc_init_se (&se
, NULL
);
594 gfc_start_block (&se
.pre
);
597 if (code
->expr1
== NULL
)
599 tmp
= build_int_cst (gfc_int4_type_node
, 0);
600 tmp
= build_call_expr_loc (input_location
,
601 gfor_fndecl_pause_string
, 2,
602 build_int_cst (pchar_type_node
, 0), tmp
);
604 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
606 gfc_conv_expr (&se
, code
->expr1
);
607 tmp
= build_call_expr_loc (input_location
,
608 gfor_fndecl_pause_numeric
, 1,
609 fold_convert (gfc_int4_type_node
, se
.expr
));
613 gfc_conv_expr_reference (&se
, code
->expr1
);
614 tmp
= build_call_expr_loc (input_location
,
615 gfor_fndecl_pause_string
, 2,
616 se
.expr
, se
.string_length
);
619 gfc_add_expr_to_block (&se
.pre
, tmp
);
621 gfc_add_block_to_block (&se
.pre
, &se
.post
);
623 return gfc_finish_block (&se
.pre
);
627 /* Translate the STOP statement. We have to translate this statement
628 to a runtime library call. */
631 gfc_trans_stop (gfc_code
*code
, bool error_stop
)
633 tree gfc_int4_type_node
= gfc_get_int_type (4);
637 /* Start a new block for this statement. */
638 gfc_init_se (&se
, NULL
);
639 gfc_start_block (&se
.pre
);
641 if (flag_coarray
== GFC_FCOARRAY_LIB
&& !error_stop
)
643 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
644 tmp
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
645 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
646 gfc_add_expr_to_block (&se
.pre
, tmp
);
648 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_finalize
, 0);
649 gfc_add_expr_to_block (&se
.pre
, tmp
);
652 if (code
->expr1
== NULL
)
654 tmp
= build_int_cst (gfc_int4_type_node
, 0);
655 tmp
= build_call_expr_loc (input_location
,
657 ? (flag_coarray
== GFC_FCOARRAY_LIB
658 ? gfor_fndecl_caf_error_stop_str
659 : gfor_fndecl_error_stop_string
)
660 : gfor_fndecl_stop_string
,
661 2, build_int_cst (pchar_type_node
, 0), tmp
);
663 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
665 gfc_conv_expr (&se
, code
->expr1
);
666 tmp
= build_call_expr_loc (input_location
,
668 ? (flag_coarray
== GFC_FCOARRAY_LIB
669 ? gfor_fndecl_caf_error_stop
670 : gfor_fndecl_error_stop_numeric
)
671 : gfor_fndecl_stop_numeric_f08
, 1,
672 fold_convert (gfc_int4_type_node
, se
.expr
));
676 gfc_conv_expr_reference (&se
, code
->expr1
);
677 tmp
= build_call_expr_loc (input_location
,
679 ? (flag_coarray
== GFC_FCOARRAY_LIB
680 ? gfor_fndecl_caf_error_stop_str
681 : gfor_fndecl_error_stop_string
)
682 : gfor_fndecl_stop_string
,
683 2, se
.expr
, se
.string_length
);
686 gfc_add_expr_to_block (&se
.pre
, tmp
);
688 gfc_add_block_to_block (&se
.pre
, &se
.post
);
690 return gfc_finish_block (&se
.pre
);
695 gfc_trans_lock_unlock (gfc_code
*code
, gfc_exec_op type ATTRIBUTE_UNUSED
)
698 tree stat
= NULL_TREE
, lock_acquired
= NULL_TREE
;
700 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
701 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
702 if (!code
->expr2
&& !code
->expr4
&& flag_coarray
!= GFC_FCOARRAY_LIB
)
705 gfc_init_se (&se
, NULL
);
706 gfc_start_block (&se
.pre
);
710 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
711 gfc_init_se (&argse
, NULL
);
712 gfc_conv_expr_val (&argse
, code
->expr2
);
718 gcc_assert (code
->expr4
->expr_type
== EXPR_VARIABLE
);
719 gfc_init_se (&argse
, NULL
);
720 gfc_conv_expr_val (&argse
, code
->expr4
);
721 lock_acquired
= argse
.expr
;
724 if (stat
!= NULL_TREE
)
725 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
727 if (lock_acquired
!= NULL_TREE
)
728 gfc_add_modify (&se
.pre
, lock_acquired
,
729 fold_convert (TREE_TYPE (lock_acquired
),
732 return gfc_finish_block (&se
.pre
);
737 gfc_trans_sync (gfc_code
*code
, gfc_exec_op type
)
741 tree images
= NULL_TREE
, stat
= NULL_TREE
,
742 errmsg
= NULL_TREE
, errmsglen
= NULL_TREE
;
744 /* Short cut: For single images without bound checking or without STAT=,
745 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
746 if (!code
->expr2
&& !(gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
747 && flag_coarray
!= GFC_FCOARRAY_LIB
)
750 gfc_init_se (&se
, NULL
);
751 gfc_start_block (&se
.pre
);
753 if (code
->expr1
&& code
->expr1
->rank
== 0)
755 gfc_init_se (&argse
, NULL
);
756 gfc_conv_expr_val (&argse
, code
->expr1
);
762 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
763 gfc_init_se (&argse
, NULL
);
764 gfc_conv_expr_val (&argse
, code
->expr2
);
768 stat
= null_pointer_node
;
770 if (code
->expr3
&& flag_coarray
== GFC_FCOARRAY_LIB
771 && type
!= EXEC_SYNC_MEMORY
)
773 gcc_assert (code
->expr3
->expr_type
== EXPR_VARIABLE
);
774 gfc_init_se (&argse
, NULL
);
775 gfc_conv_expr (&argse
, code
->expr3
);
776 gfc_conv_string_parameter (&argse
);
777 errmsg
= gfc_build_addr_expr (NULL
, argse
.expr
);
778 errmsglen
= argse
.string_length
;
780 else if (flag_coarray
== GFC_FCOARRAY_LIB
&& type
!= EXEC_SYNC_MEMORY
)
782 errmsg
= null_pointer_node
;
783 errmsglen
= build_int_cst (integer_type_node
, 0);
786 /* Check SYNC IMAGES(imageset) for valid image index.
787 FIXME: Add a check for image-set arrays. */
788 if (code
->expr1
&& (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
789 && code
->expr1
->rank
== 0)
792 if (flag_coarray
!= GFC_FCOARRAY_LIB
)
793 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
794 images
, build_int_cst (TREE_TYPE (images
), 1));
798 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_num_images
,
799 2, integer_zero_node
,
800 build_int_cst (integer_type_node
, -1));
801 cond
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
803 cond2
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
805 build_int_cst (TREE_TYPE (images
), 1));
806 cond
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
807 boolean_type_node
, cond
, cond2
);
809 gfc_trans_runtime_check (true, false, cond
, &se
.pre
,
810 &code
->expr1
->where
, "Invalid image number "
812 fold_convert (integer_type_node
, images
));
815 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
816 image control statements SYNC IMAGES and SYNC ALL. */
817 if (flag_coarray
== GFC_FCOARRAY_LIB
)
819 tmp
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
820 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
821 gfc_add_expr_to_block (&se
.pre
, tmp
);
824 if (flag_coarray
!= GFC_FCOARRAY_LIB
|| type
== EXEC_SYNC_MEMORY
)
826 /* Set STAT to zero. */
828 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
830 else if (type
== EXEC_SYNC_ALL
)
832 /* SYNC ALL => stat == null_pointer_node
833 SYNC ALL(stat=s) => stat has an integer type
835 If "stat" has the wrong integer type, use a temp variable of
836 the right type and later cast the result back into "stat". */
837 if (stat
== null_pointer_node
|| TREE_TYPE (stat
) == integer_type_node
)
839 if (TREE_TYPE (stat
) == integer_type_node
)
840 stat
= gfc_build_addr_expr (NULL
, stat
);
842 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
843 3, stat
, errmsg
, errmsglen
);
844 gfc_add_expr_to_block (&se
.pre
, tmp
);
848 tree tmp_stat
= gfc_create_var (integer_type_node
, "stat");
850 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
851 3, gfc_build_addr_expr (NULL
, tmp_stat
),
853 gfc_add_expr_to_block (&se
.pre
, tmp
);
855 gfc_add_modify (&se
.pre
, stat
,
856 fold_convert (TREE_TYPE (stat
), tmp_stat
));
863 gcc_assert (type
== EXEC_SYNC_IMAGES
);
867 len
= build_int_cst (integer_type_node
, -1);
868 images
= null_pointer_node
;
870 else if (code
->expr1
->rank
== 0)
872 len
= build_int_cst (integer_type_node
, 1);
873 images
= gfc_build_addr_expr (NULL_TREE
, images
);
878 if (code
->expr1
->ts
.kind
!= gfc_c_int_kind
)
879 gfc_fatal_error ("Sorry, only support for integer kind %d "
880 "implemented for image-set at %L",
881 gfc_c_int_kind
, &code
->expr1
->where
);
883 gfc_conv_array_parameter (&se
, code
->expr1
, true, NULL
, NULL
, &len
);
886 tmp
= gfc_typenode_for_spec (&code
->expr1
->ts
);
887 if (GFC_ARRAY_TYPE_P (tmp
) || GFC_DESCRIPTOR_TYPE_P (tmp
))
888 tmp
= gfc_get_element_type (tmp
);
890 len
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
,
891 TREE_TYPE (len
), len
,
892 fold_convert (TREE_TYPE (len
),
893 TYPE_SIZE_UNIT (tmp
)));
894 len
= fold_convert (integer_type_node
, len
);
897 /* SYNC IMAGES(imgs) => stat == null_pointer_node
898 SYNC IMAGES(imgs,stat=s) => stat has an integer type
900 If "stat" has the wrong integer type, use a temp variable of
901 the right type and later cast the result back into "stat". */
902 if (stat
== null_pointer_node
|| TREE_TYPE (stat
) == integer_type_node
)
904 if (TREE_TYPE (stat
) == integer_type_node
)
905 stat
= gfc_build_addr_expr (NULL
, stat
);
907 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
,
908 5, fold_convert (integer_type_node
, len
),
909 images
, stat
, errmsg
, errmsglen
);
910 gfc_add_expr_to_block (&se
.pre
, tmp
);
914 tree tmp_stat
= gfc_create_var (integer_type_node
, "stat");
916 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
,
917 5, fold_convert (integer_type_node
, len
),
918 images
, gfc_build_addr_expr (NULL
, tmp_stat
),
920 gfc_add_expr_to_block (&se
.pre
, tmp
);
922 gfc_add_modify (&se
.pre
, stat
,
923 fold_convert (TREE_TYPE (stat
), tmp_stat
));
927 return gfc_finish_block (&se
.pre
);
931 /* Generate GENERIC for the IF construct. This function also deals with
932 the simple IF statement, because the front end translates the IF
933 statement into an IF construct.
965 where COND_S is the simplified version of the predicate. PRE_COND_S
966 are the pre side-effects produced by the translation of the
968 We need to build the chain recursively otherwise we run into
969 problems with folding incomplete statements. */
972 gfc_trans_if_1 (gfc_code
* code
)
979 /* Check for an unconditional ELSE clause. */
981 return gfc_trans_code (code
->next
);
983 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
984 gfc_init_se (&if_se
, NULL
);
985 gfc_start_block (&if_se
.pre
);
987 /* Calculate the IF condition expression. */
988 if (code
->expr1
->where
.lb
)
990 gfc_save_backend_locus (&saved_loc
);
991 gfc_set_backend_locus (&code
->expr1
->where
);
994 gfc_conv_expr_val (&if_se
, code
->expr1
);
996 if (code
->expr1
->where
.lb
)
997 gfc_restore_backend_locus (&saved_loc
);
999 /* Translate the THEN clause. */
1000 stmt
= gfc_trans_code (code
->next
);
1002 /* Translate the ELSE clause. */
1004 elsestmt
= gfc_trans_if_1 (code
->block
);
1006 elsestmt
= build_empty_stmt (input_location
);
1008 /* Build the condition expression and add it to the condition block. */
1009 loc
= code
->expr1
->where
.lb
? code
->expr1
->where
.lb
->location
: input_location
;
1010 stmt
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, if_se
.expr
, stmt
,
1013 gfc_add_expr_to_block (&if_se
.pre
, stmt
);
1015 /* Finish off this statement. */
1016 return gfc_finish_block (&if_se
.pre
);
1020 gfc_trans_if (gfc_code
* code
)
1025 /* Create exit label so it is available for trans'ing the body code. */
1026 exit_label
= gfc_build_label_decl (NULL_TREE
);
1027 code
->exit_label
= exit_label
;
1029 /* Translate the actual code in code->block. */
1030 gfc_init_block (&body
);
1031 gfc_add_expr_to_block (&body
, gfc_trans_if_1 (code
->block
));
1033 /* Add exit label. */
1034 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
1036 return gfc_finish_block (&body
);
1040 /* Translate an arithmetic IF expression.
1042 IF (cond) label1, label2, label3 translates to
1054 An optimized version can be generated in case of equal labels.
1055 E.g., if label1 is equal to label2, we can translate it to
1064 gfc_trans_arithmetic_if (gfc_code
* code
)
1072 /* Start a new block. */
1073 gfc_init_se (&se
, NULL
);
1074 gfc_start_block (&se
.pre
);
1076 /* Pre-evaluate COND. */
1077 gfc_conv_expr_val (&se
, code
->expr1
);
1078 se
.expr
= gfc_evaluate_now (se
.expr
, &se
.pre
);
1080 /* Build something to compare with. */
1081 zero
= gfc_build_const (TREE_TYPE (se
.expr
), integer_zero_node
);
1083 if (code
->label1
->value
!= code
->label2
->value
)
1085 /* If (cond < 0) take branch1 else take branch2.
1086 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1087 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
1088 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label2
));
1090 if (code
->label1
->value
!= code
->label3
->value
)
1091 tmp
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
1094 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1097 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1098 tmp
, branch1
, branch2
);
1101 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
1103 if (code
->label1
->value
!= code
->label3
->value
1104 && code
->label2
->value
!= code
->label3
->value
)
1106 /* if (cond <= 0) take branch1 else take branch2. */
1107 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label3
));
1108 tmp
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
1110 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1111 tmp
, branch1
, branch2
);
1114 /* Append the COND_EXPR to the evaluation of COND, and return. */
1115 gfc_add_expr_to_block (&se
.pre
, branch1
);
1116 return gfc_finish_block (&se
.pre
);
1120 /* Translate a CRITICAL block. */
1122 gfc_trans_critical (gfc_code
*code
)
1125 tree tmp
, token
= NULL_TREE
;
1127 gfc_start_block (&block
);
1129 if (flag_coarray
== GFC_FCOARRAY_LIB
)
1131 token
= gfc_get_symbol_decl (code
->resolved_sym
);
1132 token
= GFC_TYPE_ARRAY_CAF_TOKEN (TREE_TYPE (token
));
1133 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_lock
, 7,
1134 token
, integer_zero_node
, integer_one_node
,
1135 null_pointer_node
, null_pointer_node
,
1136 null_pointer_node
, integer_zero_node
);
1137 gfc_add_expr_to_block (&block
, tmp
);
1140 tmp
= gfc_trans_code (code
->block
->next
);
1141 gfc_add_expr_to_block (&block
, tmp
);
1143 if (flag_coarray
== GFC_FCOARRAY_LIB
)
1145 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_unlock
, 6,
1146 token
, integer_zero_node
, integer_one_node
,
1147 null_pointer_node
, null_pointer_node
,
1149 gfc_add_expr_to_block (&block
, tmp
);
1153 return gfc_finish_block (&block
);
1157 /* Do proper initialization for ASSOCIATE names. */
1160 trans_associate_var (gfc_symbol
*sym
, gfc_wrapped_block
*block
)
1171 gcc_assert (sym
->assoc
);
1172 e
= sym
->assoc
->target
;
1174 class_target
= (e
->expr_type
== EXPR_VARIABLE
)
1175 && (gfc_is_class_scalar_expr (e
)
1176 || gfc_is_class_array_ref (e
, NULL
));
1178 unlimited
= UNLIMITED_POLY (e
);
1180 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1181 to array temporary) for arrays with either unknown shape or if associating
1183 if (sym
->attr
.dimension
&& !class_target
1184 && (sym
->as
->type
== AS_DEFERRED
|| sym
->assoc
->variable
))
1188 bool cst_array_ctor
;
1190 desc
= sym
->backend_decl
;
1191 cst_array_ctor
= e
->expr_type
== EXPR_ARRAY
1192 && gfc_constant_array_constructor_p (e
->value
.constructor
);
1194 /* If association is to an expression, evaluate it and create temporary.
1195 Otherwise, get descriptor of target for pointer assignment. */
1196 gfc_init_se (&se
, NULL
);
1197 if (sym
->assoc
->variable
|| cst_array_ctor
)
1199 se
.direct_byref
= 1;
1204 gfc_conv_expr_descriptor (&se
, e
);
1206 /* If we didn't already do the pointer assignment, set associate-name
1207 descriptor to the one generated for the temporary. */
1208 if (!sym
->assoc
->variable
&& !cst_array_ctor
)
1212 gfc_add_modify (&se
.pre
, desc
, se
.expr
);
1214 /* The generated descriptor has lower bound zero (as array
1215 temporary), shift bounds so we get lower bounds of 1. */
1216 for (dim
= 0; dim
< e
->rank
; ++dim
)
1217 gfc_conv_shift_descriptor_lbound (&se
.pre
, desc
,
1218 dim
, gfc_index_one_node
);
1221 /* If this is a subreference array pointer associate name use the
1222 associate variable element size for the value of 'span'. */
1223 if (sym
->attr
.subref_array_pointer
)
1225 gcc_assert (e
->expr_type
== EXPR_VARIABLE
);
1226 tmp
= e
->symtree
->n
.sym
->backend_decl
;
1227 tmp
= gfc_get_element_type (TREE_TYPE (tmp
));
1228 tmp
= fold_convert (gfc_array_index_type
, size_in_bytes (tmp
));
1229 gfc_add_modify (&se
.pre
, GFC_DECL_SPAN(desc
), tmp
);
1232 /* Done, register stuff as init / cleanup code. */
1233 gfc_add_init_cleanup (block
, gfc_finish_block (&se
.pre
),
1234 gfc_finish_block (&se
.post
));
1237 /* Temporaries, arising from TYPE IS, just need the descriptor of class
1238 arrays to be assigned directly. */
1239 else if (class_target
&& sym
->attr
.dimension
1240 && (sym
->ts
.type
== BT_DERIVED
|| unlimited
))
1244 gfc_init_se (&se
, NULL
);
1245 se
.descriptor_only
= 1;
1246 gfc_conv_expr (&se
, e
);
1248 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se
.expr
)));
1249 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym
->backend_decl
)));
1251 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
1255 /* Recover the dtype, which has been overwritten by the
1256 assignment from an unlimited polymorphic object. */
1257 tmp
= gfc_conv_descriptor_dtype (sym
->backend_decl
);
1258 gfc_add_modify (&se
.pre
, tmp
,
1259 gfc_get_dtype (TREE_TYPE (sym
->backend_decl
)));
1262 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1263 gfc_finish_block (&se
.post
));
1266 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1267 else if (gfc_is_associate_pointer (sym
))
1271 gcc_assert (!sym
->attr
.dimension
);
1273 gfc_init_se (&se
, NULL
);
1275 /* Class associate-names come this way because they are
1276 unconditionally associate pointers and the symbol is scalar. */
1277 if (sym
->ts
.type
== BT_CLASS
&& CLASS_DATA (sym
)->attr
.dimension
)
1279 /* For a class array we need a descriptor for the selector. */
1280 gfc_conv_expr_descriptor (&se
, e
);
1282 /* Obtain a temporary class container for the result. */
1283 gfc_conv_class_to_class (&se
, e
, sym
->ts
, false, true, false, false);
1284 se
.expr
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
1286 /* Set the offset. */
1287 desc
= gfc_class_data_get (se
.expr
);
1288 offset
= gfc_index_zero_node
;
1289 for (n
= 0; n
< e
->rank
; n
++)
1291 dim
= gfc_rank_cst
[n
];
1292 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
1293 gfc_array_index_type
,
1294 gfc_conv_descriptor_stride_get (desc
, dim
),
1295 gfc_conv_descriptor_lbound_get (desc
, dim
));
1296 offset
= fold_build2_loc (input_location
, MINUS_EXPR
,
1297 gfc_array_index_type
,
1300 gfc_conv_descriptor_offset_set (&se
.pre
, desc
, offset
);
1302 else if (sym
->ts
.type
== BT_CLASS
&& e
->ts
.type
== BT_CLASS
1303 && CLASS_DATA (e
)->attr
.dimension
)
1305 /* This is bound to be a class array element. */
1306 gfc_conv_expr_reference (&se
, e
);
1307 /* Get the _vptr component of the class object. */
1308 tmp
= gfc_get_vptr_from_expr (se
.expr
);
1309 /* Obtain a temporary class container for the result. */
1310 gfc_conv_derived_to_class (&se
, e
, sym
->ts
, tmp
, false, false);
1311 se
.expr
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
1314 gfc_conv_expr (&se
, e
);
1316 tmp
= TREE_TYPE (sym
->backend_decl
);
1317 tmp
= gfc_build_addr_expr (tmp
, se
.expr
);
1318 gfc_add_modify (&se
.pre
, sym
->backend_decl
, tmp
);
1320 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1321 gfc_finish_block (&se
.post
));
1324 /* Do a simple assignment. This is for scalar expressions, where we
1325 can simply use expression assignment. */
1330 lhs
= gfc_lval_expr_from_sym (sym
);
1331 tmp
= gfc_trans_assignment (lhs
, e
, false, true);
1332 gfc_add_init_cleanup (block
, tmp
, NULL_TREE
);
1335 /* Set the stringlength from the vtable size. */
1336 if (sym
->ts
.type
== BT_CHARACTER
&& sym
->attr
.select_type_temporary
)
1340 gfc_init_se (&se
, NULL
);
1341 gcc_assert (UNLIMITED_POLY (e
->symtree
->n
.sym
));
1342 tmp
= gfc_get_symbol_decl (e
->symtree
->n
.sym
);
1343 tmp
= gfc_vtable_size_get (tmp
);
1344 gfc_get_symbol_decl (sym
);
1345 charlen
= sym
->ts
.u
.cl
->backend_decl
;
1346 gfc_add_modify (&se
.pre
, charlen
,
1347 fold_convert (TREE_TYPE (charlen
), tmp
));
1348 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1349 gfc_finish_block (&se
.post
));
1354 /* Translate a BLOCK construct. This is basically what we would do for a
1358 gfc_trans_block_construct (gfc_code
* code
)
1362 gfc_wrapped_block block
;
1365 gfc_association_list
*ass
;
1367 ns
= code
->ext
.block
.ns
;
1369 sym
= ns
->proc_name
;
1372 /* Process local variables. */
1373 gcc_assert (!sym
->tlink
);
1375 gfc_process_block_locals (ns
);
1377 /* Generate code including exit-label. */
1378 gfc_init_block (&body
);
1379 exit_label
= gfc_build_label_decl (NULL_TREE
);
1380 code
->exit_label
= exit_label
;
1382 /* Generate !$ACC DECLARE directive. */
1383 if (ns
->oacc_declare_clauses
)
1385 tree tmp
= gfc_trans_oacc_declare (&body
, ns
);
1386 gfc_add_expr_to_block (&body
, tmp
);
1389 gfc_add_expr_to_block (&body
, gfc_trans_code (ns
->code
));
1390 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
1392 /* Finish everything. */
1393 gfc_start_wrapped_block (&block
, gfc_finish_block (&body
));
1394 gfc_trans_deferred_vars (sym
, &block
);
1395 for (ass
= code
->ext
.block
.assoc
; ass
; ass
= ass
->next
)
1396 trans_associate_var (ass
->st
->n
.sym
, &block
);
1398 return gfc_finish_wrapped_block (&block
);
1402 /* Translate the simple DO construct. This is where the loop variable has
1403 integer type and step +-1. We can't use this in the general case
1404 because integer overflow and floating point errors could give incorrect
1406 We translate a do loop from:
1408 DO dovar = from, to, step
1414 [Evaluate loop bounds and step]
1416 if ((step > 0) ? (dovar <= to) : (dovar => to))
1422 cond = (dovar == to);
1424 if (cond) goto end_label;
1429 This helps the optimizers by avoiding the extra induction variable
1430 used in the general case. */
1433 gfc_trans_simple_do (gfc_code
* code
, stmtblock_t
*pblock
, tree dovar
,
1434 tree from
, tree to
, tree step
, tree exit_cond
)
1440 tree saved_dovar
= NULL
;
1445 type
= TREE_TYPE (dovar
);
1447 loc
= code
->ext
.iterator
->start
->where
.lb
->location
;
1449 /* Initialize the DO variable: dovar = from. */
1450 gfc_add_modify_loc (loc
, pblock
, dovar
,
1451 fold_convert (TREE_TYPE(dovar
), from
));
1453 /* Save value for do-tinkering checking. */
1454 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1456 saved_dovar
= gfc_create_var (type
, ".saved_dovar");
1457 gfc_add_modify_loc (loc
, pblock
, saved_dovar
, dovar
);
1460 /* Cycle and exit statements are implemented with gotos. */
1461 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1462 exit_label
= gfc_build_label_decl (NULL_TREE
);
1464 /* Put the labels where they can be found later. See gfc_trans_do(). */
1465 code
->cycle_label
= cycle_label
;
1466 code
->exit_label
= exit_label
;
1469 gfc_start_block (&body
);
1471 /* Main loop body. */
1472 tmp
= gfc_trans_code_cond (code
->block
->next
, exit_cond
);
1473 gfc_add_expr_to_block (&body
, tmp
);
1475 /* Label for cycle statements (if needed). */
1476 if (TREE_USED (cycle_label
))
1478 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1479 gfc_add_expr_to_block (&body
, tmp
);
1482 /* Check whether someone has modified the loop variable. */
1483 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1485 tmp
= fold_build2_loc (loc
, NE_EXPR
, boolean_type_node
,
1486 dovar
, saved_dovar
);
1487 gfc_trans_runtime_check (true, false, tmp
, &body
, &code
->loc
,
1488 "Loop variable has been modified");
1491 /* Exit the loop if there is an I/O result condition or error. */
1494 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1495 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1497 build_empty_stmt (loc
));
1498 gfc_add_expr_to_block (&body
, tmp
);
1501 /* Evaluate the loop condition. */
1502 cond
= fold_build2_loc (loc
, EQ_EXPR
, boolean_type_node
, dovar
,
1504 cond
= gfc_evaluate_now_loc (loc
, cond
, &body
);
1506 /* Increment the loop variable. */
1507 tmp
= fold_build2_loc (loc
, PLUS_EXPR
, type
, dovar
, step
);
1508 gfc_add_modify_loc (loc
, &body
, dovar
, tmp
);
1510 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1511 gfc_add_modify_loc (loc
, &body
, saved_dovar
, dovar
);
1513 /* The loop exit. */
1514 tmp
= fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
, exit_label
);
1515 TREE_USED (exit_label
) = 1;
1516 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1517 cond
, tmp
, build_empty_stmt (loc
));
1518 gfc_add_expr_to_block (&body
, tmp
);
1520 /* Finish the loop body. */
1521 tmp
= gfc_finish_block (&body
);
1522 tmp
= fold_build1_loc (loc
, LOOP_EXPR
, void_type_node
, tmp
);
1524 /* Only execute the loop if the number of iterations is positive. */
1525 if (tree_int_cst_sgn (step
) > 0)
1526 cond
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
, dovar
,
1529 cond
= fold_build2_loc (loc
, GE_EXPR
, boolean_type_node
, dovar
,
1531 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
, tmp
,
1532 build_empty_stmt (loc
));
1533 gfc_add_expr_to_block (pblock
, tmp
);
1535 /* Add the exit label. */
1536 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1537 gfc_add_expr_to_block (pblock
, tmp
);
1539 return gfc_finish_block (pblock
);
1542 /* Translate the DO construct. This obviously is one of the most
1543 important ones to get right with any compiler, but especially
1546 We special case some loop forms as described in gfc_trans_simple_do.
1547 For other cases we implement them with a separate loop count,
1548 as described in the standard.
1550 We translate a do loop from:
1552 DO dovar = from, to, step
1558 [evaluate loop bounds and step]
1559 empty = (step > 0 ? to < from : to > from);
1560 countm1 = (to - from) / step;
1562 if (empty) goto exit_label;
1570 if (countm1t == 0) goto exit_label;
1574 countm1 is an unsigned integer. It is equal to the loop count minus one,
1575 because the loop count itself can overflow. */
1578 gfc_trans_do (gfc_code
* code
, tree exit_cond
)
1582 tree saved_dovar
= NULL
;
1597 gfc_start_block (&block
);
1599 loc
= code
->ext
.iterator
->start
->where
.lb
->location
;
1601 /* Evaluate all the expressions in the iterator. */
1602 gfc_init_se (&se
, NULL
);
1603 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
1604 gfc_add_block_to_block (&block
, &se
.pre
);
1606 type
= TREE_TYPE (dovar
);
1608 gfc_init_se (&se
, NULL
);
1609 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
1610 gfc_add_block_to_block (&block
, &se
.pre
);
1611 from
= gfc_evaluate_now (se
.expr
, &block
);
1613 gfc_init_se (&se
, NULL
);
1614 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
1615 gfc_add_block_to_block (&block
, &se
.pre
);
1616 to
= gfc_evaluate_now (se
.expr
, &block
);
1618 gfc_init_se (&se
, NULL
);
1619 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
1620 gfc_add_block_to_block (&block
, &se
.pre
);
1621 step
= gfc_evaluate_now (se
.expr
, &block
);
1623 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1625 tmp
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
, step
,
1626 build_zero_cst (type
));
1627 gfc_trans_runtime_check (true, false, tmp
, &block
, &code
->loc
,
1628 "DO step value is zero");
1631 /* Special case simple loops. */
1632 if (TREE_CODE (type
) == INTEGER_TYPE
1633 && (integer_onep (step
)
1634 || tree_int_cst_equal (step
, integer_minus_one_node
)))
1635 return gfc_trans_simple_do (code
, &block
, dovar
, from
, to
, step
, exit_cond
);
1638 if (TREE_CODE (type
) == INTEGER_TYPE
)
1639 utype
= unsigned_type_for (type
);
1641 utype
= unsigned_type_for (gfc_array_index_type
);
1642 countm1
= gfc_create_var (utype
, "countm1");
1644 /* Cycle and exit statements are implemented with gotos. */
1645 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1646 exit_label
= gfc_build_label_decl (NULL_TREE
);
1647 TREE_USED (exit_label
) = 1;
1649 /* Put these labels where they can be found later. */
1650 code
->cycle_label
= cycle_label
;
1651 code
->exit_label
= exit_label
;
1653 /* Initialize the DO variable: dovar = from. */
1654 gfc_add_modify (&block
, dovar
, from
);
1656 /* Save value for do-tinkering checking. */
1657 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1659 saved_dovar
= gfc_create_var (type
, ".saved_dovar");
1660 gfc_add_modify_loc (loc
, &block
, saved_dovar
, dovar
);
1663 /* Initialize loop count and jump to exit label if the loop is empty.
1664 This code is executed before we enter the loop body. We generate:
1667 countm1 = (to - from) / step;
1673 countm1 = (from - to) / -step;
1679 if (TREE_CODE (type
) == INTEGER_TYPE
)
1681 tree pos
, neg
, tou
, fromu
, stepu
, tmp2
;
1683 /* The distance from FROM to TO cannot always be represented in a signed
1684 type, thus use unsigned arithmetic, also to avoid any undefined
1686 tou
= fold_convert (utype
, to
);
1687 fromu
= fold_convert (utype
, from
);
1688 stepu
= fold_convert (utype
, step
);
1690 /* For a positive step, when to < from, exit, otherwise compute
1691 countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step */
1692 tmp
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, to
, from
);
1693 tmp2
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, utype
,
1694 fold_build2_loc (loc
, MINUS_EXPR
, utype
,
1697 pos
= build2 (COMPOUND_EXPR
, void_type_node
,
1698 fold_build2 (MODIFY_EXPR
, void_type_node
,
1700 build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1701 build1_loc (loc
, GOTO_EXPR
, void_type_node
,
1702 exit_label
), NULL_TREE
));
1704 /* For a negative step, when to > from, exit, otherwise compute
1705 countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step */
1706 tmp
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
, to
, from
);
1707 tmp2
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, utype
,
1708 fold_build2_loc (loc
, MINUS_EXPR
, utype
,
1710 fold_build1_loc (loc
, NEGATE_EXPR
, utype
, stepu
));
1711 neg
= build2 (COMPOUND_EXPR
, void_type_node
,
1712 fold_build2 (MODIFY_EXPR
, void_type_node
,
1714 build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1715 build1_loc (loc
, GOTO_EXPR
, void_type_node
,
1716 exit_label
), NULL_TREE
));
1718 tmp
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, step
,
1719 build_int_cst (TREE_TYPE (step
), 0));
1720 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
, neg
, pos
);
1722 gfc_add_expr_to_block (&block
, tmp
);
1728 /* TODO: We could use the same width as the real type.
1729 This would probably cause more problems that it solves
1730 when we implement "long double" types. */
1732 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, type
, to
, from
);
1733 tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, tmp
, step
);
1734 tmp
= fold_build1_loc (loc
, FIX_TRUNC_EXPR
, utype
, tmp
);
1735 gfc_add_modify (&block
, countm1
, tmp
);
1737 /* We need a special check for empty loops:
1738 empty = (step > 0 ? to < from : to > from); */
1739 pos_step
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
, step
,
1740 build_zero_cst (type
));
1741 tmp
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
, pos_step
,
1742 fold_build2_loc (loc
, LT_EXPR
,
1743 boolean_type_node
, to
, from
),
1744 fold_build2_loc (loc
, GT_EXPR
,
1745 boolean_type_node
, to
, from
));
1746 /* If the loop is empty, go directly to the exit label. */
1747 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1748 build1_v (GOTO_EXPR
, exit_label
),
1749 build_empty_stmt (input_location
));
1750 gfc_add_expr_to_block (&block
, tmp
);
1754 gfc_start_block (&body
);
1756 /* Main loop body. */
1757 tmp
= gfc_trans_code_cond (code
->block
->next
, exit_cond
);
1758 gfc_add_expr_to_block (&body
, tmp
);
1760 /* Label for cycle statements (if needed). */
1761 if (TREE_USED (cycle_label
))
1763 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1764 gfc_add_expr_to_block (&body
, tmp
);
1767 /* Check whether someone has modified the loop variable. */
1768 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1770 tmp
= fold_build2_loc (loc
, NE_EXPR
, boolean_type_node
, dovar
,
1772 gfc_trans_runtime_check (true, false, tmp
, &body
, &code
->loc
,
1773 "Loop variable has been modified");
1776 /* Exit the loop if there is an I/O result condition or error. */
1779 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1780 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1782 build_empty_stmt (input_location
));
1783 gfc_add_expr_to_block (&body
, tmp
);
1786 /* Increment the loop variable. */
1787 tmp
= fold_build2_loc (loc
, PLUS_EXPR
, type
, dovar
, step
);
1788 gfc_add_modify_loc (loc
, &body
, dovar
, tmp
);
1790 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1791 gfc_add_modify_loc (loc
, &body
, saved_dovar
, dovar
);
1793 /* Initialize countm1t. */
1794 tree countm1t
= gfc_create_var (utype
, "countm1t");
1795 gfc_add_modify_loc (loc
, &body
, countm1t
, countm1
);
1797 /* Decrement the loop count. */
1798 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, utype
, countm1
,
1799 build_int_cst (utype
, 1));
1800 gfc_add_modify_loc (loc
, &body
, countm1
, tmp
);
1802 /* End with the loop condition. Loop until countm1t == 0. */
1803 cond
= fold_build2_loc (loc
, EQ_EXPR
, boolean_type_node
, countm1t
,
1804 build_int_cst (utype
, 0));
1805 tmp
= fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
, exit_label
);
1806 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1807 cond
, tmp
, build_empty_stmt (loc
));
1808 gfc_add_expr_to_block (&body
, tmp
);
1810 /* End of loop body. */
1811 tmp
= gfc_finish_block (&body
);
1813 /* The for loop itself. */
1814 tmp
= fold_build1_loc (loc
, LOOP_EXPR
, void_type_node
, tmp
);
1815 gfc_add_expr_to_block (&block
, tmp
);
1817 /* Add the exit label. */
1818 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1819 gfc_add_expr_to_block (&block
, tmp
);
1821 return gfc_finish_block (&block
);
1825 /* Translate the DO WHILE construct.
1838 if (! cond) goto exit_label;
1844 Because the evaluation of the exit condition `cond' may have side
1845 effects, we can't do much for empty loop bodies. The backend optimizers
1846 should be smart enough to eliminate any dead loops. */
1849 gfc_trans_do_while (gfc_code
* code
)
1857 /* Everything we build here is part of the loop body. */
1858 gfc_start_block (&block
);
1860 /* Cycle and exit statements are implemented with gotos. */
1861 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1862 exit_label
= gfc_build_label_decl (NULL_TREE
);
1864 /* Put the labels where they can be found later. See gfc_trans_do(). */
1865 code
->cycle_label
= cycle_label
;
1866 code
->exit_label
= exit_label
;
1868 /* Create a GIMPLE version of the exit condition. */
1869 gfc_init_se (&cond
, NULL
);
1870 gfc_conv_expr_val (&cond
, code
->expr1
);
1871 gfc_add_block_to_block (&block
, &cond
.pre
);
1872 cond
.expr
= fold_build1_loc (code
->expr1
->where
.lb
->location
,
1873 TRUTH_NOT_EXPR
, TREE_TYPE (cond
.expr
), cond
.expr
);
1875 /* Build "IF (! cond) GOTO exit_label". */
1876 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1877 TREE_USED (exit_label
) = 1;
1878 tmp
= fold_build3_loc (code
->expr1
->where
.lb
->location
, COND_EXPR
,
1879 void_type_node
, cond
.expr
, tmp
,
1880 build_empty_stmt (code
->expr1
->where
.lb
->location
));
1881 gfc_add_expr_to_block (&block
, tmp
);
1883 /* The main body of the loop. */
1884 tmp
= gfc_trans_code (code
->block
->next
);
1885 gfc_add_expr_to_block (&block
, tmp
);
1887 /* Label for cycle statements (if needed). */
1888 if (TREE_USED (cycle_label
))
1890 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1891 gfc_add_expr_to_block (&block
, tmp
);
1894 /* End of loop body. */
1895 tmp
= gfc_finish_block (&block
);
1897 gfc_init_block (&block
);
1898 /* Build the loop. */
1899 tmp
= fold_build1_loc (code
->expr1
->where
.lb
->location
, LOOP_EXPR
,
1900 void_type_node
, tmp
);
1901 gfc_add_expr_to_block (&block
, tmp
);
1903 /* Add the exit label. */
1904 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1905 gfc_add_expr_to_block (&block
, tmp
);
1907 return gfc_finish_block (&block
);
1911 /* Translate the SELECT CASE construct for INTEGER case expressions,
1912 without killing all potential optimizations. The problem is that
1913 Fortran allows unbounded cases, but the back-end does not, so we
1914 need to intercept those before we enter the equivalent SWITCH_EXPR
1917 For example, we translate this,
1920 CASE (:100,101,105:115)
1930 to the GENERIC equivalent,
1934 case (minimum value for typeof(expr) ... 100:
1940 case 200 ... (maximum value for typeof(expr):
1957 gfc_trans_integer_select (gfc_code
* code
)
1967 gfc_start_block (&block
);
1969 /* Calculate the switch expression. */
1970 gfc_init_se (&se
, NULL
);
1971 gfc_conv_expr_val (&se
, code
->expr1
);
1972 gfc_add_block_to_block (&block
, &se
.pre
);
1974 end_label
= gfc_build_label_decl (NULL_TREE
);
1976 gfc_init_block (&body
);
1978 for (c
= code
->block
; c
; c
= c
->block
)
1980 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
1985 /* Assume it's the default case. */
1986 low
= high
= NULL_TREE
;
1990 low
= gfc_conv_mpz_to_tree (cp
->low
->value
.integer
,
1993 /* If there's only a lower bound, set the high bound to the
1994 maximum value of the case expression. */
1996 high
= TYPE_MAX_VALUE (TREE_TYPE (se
.expr
));
2001 /* Three cases are possible here:
2003 1) There is no lower bound, e.g. CASE (:N).
2004 2) There is a lower bound .NE. high bound, that is
2005 a case range, e.g. CASE (N:M) where M>N (we make
2006 sure that M>N during type resolution).
2007 3) There is a lower bound, and it has the same value
2008 as the high bound, e.g. CASE (N:N). This is our
2009 internal representation of CASE(N).
2011 In the first and second case, we need to set a value for
2012 high. In the third case, we don't because the GCC middle
2013 end represents a single case value by just letting high be
2014 a NULL_TREE. We can't do that because we need to be able
2015 to represent unbounded cases. */
2019 && mpz_cmp (cp
->low
->value
.integer
,
2020 cp
->high
->value
.integer
) != 0))
2021 high
= gfc_conv_mpz_to_tree (cp
->high
->value
.integer
,
2024 /* Unbounded case. */
2026 low
= TYPE_MIN_VALUE (TREE_TYPE (se
.expr
));
2029 /* Build a label. */
2030 label
= gfc_build_label_decl (NULL_TREE
);
2032 /* Add this case label.
2033 Add parameter 'label', make it match GCC backend. */
2034 tmp
= build_case_label (low
, high
, label
);
2035 gfc_add_expr_to_block (&body
, tmp
);
2038 /* Add the statements for this case. */
2039 tmp
= gfc_trans_code (c
->next
);
2040 gfc_add_expr_to_block (&body
, tmp
);
2042 /* Break to the end of the construct. */
2043 tmp
= build1_v (GOTO_EXPR
, end_label
);
2044 gfc_add_expr_to_block (&body
, tmp
);
2047 tmp
= gfc_finish_block (&body
);
2048 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2049 se
.expr
, tmp
, NULL_TREE
);
2050 gfc_add_expr_to_block (&block
, tmp
);
2052 tmp
= build1_v (LABEL_EXPR
, end_label
);
2053 gfc_add_expr_to_block (&block
, tmp
);
2055 return gfc_finish_block (&block
);
2059 /* Translate the SELECT CASE construct for LOGICAL case expressions.
2061 There are only two cases possible here, even though the standard
2062 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
2063 .FALSE., and DEFAULT.
2065 We never generate more than two blocks here. Instead, we always
2066 try to eliminate the DEFAULT case. This way, we can translate this
2067 kind of SELECT construct to a simple
2071 expression in GENERIC. */
2074 gfc_trans_logical_select (gfc_code
* code
)
2077 gfc_code
*t
, *f
, *d
;
2082 /* Assume we don't have any cases at all. */
2085 /* Now see which ones we actually do have. We can have at most two
2086 cases in a single case list: one for .TRUE. and one for .FALSE.
2087 The default case is always separate. If the cases for .TRUE. and
2088 .FALSE. are in the same case list, the block for that case list
2089 always executed, and we don't generate code a COND_EXPR. */
2090 for (c
= code
->block
; c
; c
= c
->block
)
2092 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2096 if (cp
->low
->value
.logical
== 0) /* .FALSE. */
2098 else /* if (cp->value.logical != 0), thus .TRUE. */
2106 /* Start a new block. */
2107 gfc_start_block (&block
);
2109 /* Calculate the switch expression. We always need to do this
2110 because it may have side effects. */
2111 gfc_init_se (&se
, NULL
);
2112 gfc_conv_expr_val (&se
, code
->expr1
);
2113 gfc_add_block_to_block (&block
, &se
.pre
);
2115 if (t
== f
&& t
!= NULL
)
2117 /* Cases for .TRUE. and .FALSE. are in the same block. Just
2118 translate the code for these cases, append it to the current
2120 gfc_add_expr_to_block (&block
, gfc_trans_code (t
->next
));
2124 tree true_tree
, false_tree
, stmt
;
2126 true_tree
= build_empty_stmt (input_location
);
2127 false_tree
= build_empty_stmt (input_location
);
2129 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
2130 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
2131 make the missing case the default case. */
2132 if (t
!= NULL
&& f
!= NULL
)
2142 /* Translate the code for each of these blocks, and append it to
2143 the current block. */
2145 true_tree
= gfc_trans_code (t
->next
);
2148 false_tree
= gfc_trans_code (f
->next
);
2150 stmt
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2151 se
.expr
, true_tree
, false_tree
);
2152 gfc_add_expr_to_block (&block
, stmt
);
2155 return gfc_finish_block (&block
);
2159 /* The jump table types are stored in static variables to avoid
2160 constructing them from scratch every single time. */
2161 static GTY(()) tree select_struct
[2];
2163 /* Translate the SELECT CASE construct for CHARACTER case expressions.
2164 Instead of generating compares and jumps, it is far simpler to
2165 generate a data structure describing the cases in order and call a
2166 library subroutine that locates the right case.
2167 This is particularly true because this is the only case where we
2168 might have to dispose of a temporary.
2169 The library subroutine returns a pointer to jump to or NULL if no
2170 branches are to be taken. */
2173 gfc_trans_character_select (gfc_code
*code
)
2175 tree init
, end_label
, tmp
, type
, case_num
, label
, fndecl
;
2176 stmtblock_t block
, body
;
2181 vec
<constructor_elt
, va_gc
> *inits
= NULL
;
2183 tree pchartype
= gfc_get_pchar_type (code
->expr1
->ts
.kind
);
2185 /* The jump table types are stored in static variables to avoid
2186 constructing them from scratch every single time. */
2187 static tree ss_string1
[2], ss_string1_len
[2];
2188 static tree ss_string2
[2], ss_string2_len
[2];
2189 static tree ss_target
[2];
2191 cp
= code
->block
->ext
.block
.case_list
;
2192 while (cp
->left
!= NULL
)
2195 /* Generate the body */
2196 gfc_start_block (&block
);
2197 gfc_init_se (&expr1se
, NULL
);
2198 gfc_conv_expr_reference (&expr1se
, code
->expr1
);
2200 gfc_add_block_to_block (&block
, &expr1se
.pre
);
2202 end_label
= gfc_build_label_decl (NULL_TREE
);
2204 gfc_init_block (&body
);
2206 /* Attempt to optimize length 1 selects. */
2207 if (integer_onep (expr1se
.string_length
))
2209 for (d
= cp
; d
; d
= d
->right
)
2214 gcc_assert (d
->low
->expr_type
== EXPR_CONSTANT
2215 && d
->low
->ts
.type
== BT_CHARACTER
);
2216 if (d
->low
->value
.character
.length
> 1)
2218 for (i
= 1; i
< d
->low
->value
.character
.length
; i
++)
2219 if (d
->low
->value
.character
.string
[i
] != ' ')
2221 if (i
!= d
->low
->value
.character
.length
)
2223 if (optimize
&& d
->high
&& i
== 1)
2225 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
2226 && d
->high
->ts
.type
== BT_CHARACTER
);
2227 if (d
->high
->value
.character
.length
> 1
2228 && (d
->low
->value
.character
.string
[0]
2229 == d
->high
->value
.character
.string
[0])
2230 && d
->high
->value
.character
.string
[1] != ' '
2231 && ((d
->low
->value
.character
.string
[1] < ' ')
2232 == (d
->high
->value
.character
.string
[1]
2242 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
2243 && d
->high
->ts
.type
== BT_CHARACTER
);
2244 if (d
->high
->value
.character
.length
> 1)
2246 for (i
= 1; i
< d
->high
->value
.character
.length
; i
++)
2247 if (d
->high
->value
.character
.string
[i
] != ' ')
2249 if (i
!= d
->high
->value
.character
.length
)
2256 tree ctype
= gfc_get_char_type (code
->expr1
->ts
.kind
);
2258 for (c
= code
->block
; c
; c
= c
->block
)
2260 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2266 /* Assume it's the default case. */
2267 low
= high
= NULL_TREE
;
2271 /* CASE ('ab') or CASE ('ab':'az') will never match
2272 any length 1 character. */
2273 if (cp
->low
->value
.character
.length
> 1
2274 && cp
->low
->value
.character
.string
[1] != ' ')
2277 if (cp
->low
->value
.character
.length
> 0)
2278 r
= cp
->low
->value
.character
.string
[0];
2281 low
= build_int_cst (ctype
, r
);
2283 /* If there's only a lower bound, set the high bound
2284 to the maximum value of the case expression. */
2286 high
= TYPE_MAX_VALUE (ctype
);
2292 || (cp
->low
->value
.character
.string
[0]
2293 != cp
->high
->value
.character
.string
[0]))
2295 if (cp
->high
->value
.character
.length
> 0)
2296 r
= cp
->high
->value
.character
.string
[0];
2299 high
= build_int_cst (ctype
, r
);
2302 /* Unbounded case. */
2304 low
= TYPE_MIN_VALUE (ctype
);
2307 /* Build a label. */
2308 label
= gfc_build_label_decl (NULL_TREE
);
2310 /* Add this case label.
2311 Add parameter 'label', make it match GCC backend. */
2312 tmp
= build_case_label (low
, high
, label
);
2313 gfc_add_expr_to_block (&body
, tmp
);
2316 /* Add the statements for this case. */
2317 tmp
= gfc_trans_code (c
->next
);
2318 gfc_add_expr_to_block (&body
, tmp
);
2320 /* Break to the end of the construct. */
2321 tmp
= build1_v (GOTO_EXPR
, end_label
);
2322 gfc_add_expr_to_block (&body
, tmp
);
2325 tmp
= gfc_string_to_single_character (expr1se
.string_length
,
2327 code
->expr1
->ts
.kind
);
2328 case_num
= gfc_create_var (ctype
, "case_num");
2329 gfc_add_modify (&block
, case_num
, tmp
);
2331 gfc_add_block_to_block (&block
, &expr1se
.post
);
2333 tmp
= gfc_finish_block (&body
);
2334 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2335 case_num
, tmp
, NULL_TREE
);
2336 gfc_add_expr_to_block (&block
, tmp
);
2338 tmp
= build1_v (LABEL_EXPR
, end_label
);
2339 gfc_add_expr_to_block (&block
, tmp
);
2341 return gfc_finish_block (&block
);
2345 if (code
->expr1
->ts
.kind
== 1)
2347 else if (code
->expr1
->ts
.kind
== 4)
2352 if (select_struct
[k
] == NULL
)
2355 select_struct
[k
] = make_node (RECORD_TYPE
);
2357 if (code
->expr1
->ts
.kind
== 1)
2358 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char1");
2359 else if (code
->expr1
->ts
.kind
== 4)
2360 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char4");
2365 #define ADD_FIELD(NAME, TYPE) \
2366 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2367 get_identifier (stringize(NAME)), \
2371 ADD_FIELD (string1
, pchartype
);
2372 ADD_FIELD (string1_len
, gfc_charlen_type_node
);
2374 ADD_FIELD (string2
, pchartype
);
2375 ADD_FIELD (string2_len
, gfc_charlen_type_node
);
2377 ADD_FIELD (target
, integer_type_node
);
2380 gfc_finish_type (select_struct
[k
]);
2384 for (d
= cp
; d
; d
= d
->right
)
2387 for (c
= code
->block
; c
; c
= c
->block
)
2389 for (d
= c
->ext
.block
.case_list
; d
; d
= d
->next
)
2391 label
= gfc_build_label_decl (NULL_TREE
);
2392 tmp
= build_case_label ((d
->low
== NULL
&& d
->high
== NULL
)
2394 : build_int_cst (integer_type_node
, d
->n
),
2396 gfc_add_expr_to_block (&body
, tmp
);
2399 tmp
= gfc_trans_code (c
->next
);
2400 gfc_add_expr_to_block (&body
, tmp
);
2402 tmp
= build1_v (GOTO_EXPR
, end_label
);
2403 gfc_add_expr_to_block (&body
, tmp
);
2406 /* Generate the structure describing the branches */
2407 for (d
= cp
; d
; d
= d
->right
)
2409 vec
<constructor_elt
, va_gc
> *node
= NULL
;
2411 gfc_init_se (&se
, NULL
);
2415 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], null_pointer_node
);
2416 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], integer_zero_node
);
2420 gfc_conv_expr_reference (&se
, d
->low
);
2422 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], se
.expr
);
2423 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], se
.string_length
);
2426 if (d
->high
== NULL
)
2428 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], null_pointer_node
);
2429 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], integer_zero_node
);
2433 gfc_init_se (&se
, NULL
);
2434 gfc_conv_expr_reference (&se
, d
->high
);
2436 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], se
.expr
);
2437 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], se
.string_length
);
2440 CONSTRUCTOR_APPEND_ELT (node
, ss_target
[k
],
2441 build_int_cst (integer_type_node
, d
->n
));
2443 tmp
= build_constructor (select_struct
[k
], node
);
2444 CONSTRUCTOR_APPEND_ELT (inits
, NULL_TREE
, tmp
);
2447 type
= build_array_type (select_struct
[k
],
2448 build_index_type (size_int (n
-1)));
2450 init
= build_constructor (type
, inits
);
2451 TREE_CONSTANT (init
) = 1;
2452 TREE_STATIC (init
) = 1;
2453 /* Create a static variable to hold the jump table. */
2454 tmp
= gfc_create_var (type
, "jumptable");
2455 TREE_CONSTANT (tmp
) = 1;
2456 TREE_STATIC (tmp
) = 1;
2457 TREE_READONLY (tmp
) = 1;
2458 DECL_INITIAL (tmp
) = init
;
2461 /* Build the library call */
2462 init
= gfc_build_addr_expr (pvoid_type_node
, init
);
2464 if (code
->expr1
->ts
.kind
== 1)
2465 fndecl
= gfor_fndecl_select_string
;
2466 else if (code
->expr1
->ts
.kind
== 4)
2467 fndecl
= gfor_fndecl_select_string_char4
;
2471 tmp
= build_call_expr_loc (input_location
,
2473 build_int_cst (gfc_charlen_type_node
, n
),
2474 expr1se
.expr
, expr1se
.string_length
);
2475 case_num
= gfc_create_var (integer_type_node
, "case_num");
2476 gfc_add_modify (&block
, case_num
, tmp
);
2478 gfc_add_block_to_block (&block
, &expr1se
.post
);
2480 tmp
= gfc_finish_block (&body
);
2481 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2482 case_num
, tmp
, NULL_TREE
);
2483 gfc_add_expr_to_block (&block
, tmp
);
2485 tmp
= build1_v (LABEL_EXPR
, end_label
);
2486 gfc_add_expr_to_block (&block
, tmp
);
2488 return gfc_finish_block (&block
);
2492 /* Translate the three variants of the SELECT CASE construct.
2494 SELECT CASEs with INTEGER case expressions can be translated to an
2495 equivalent GENERIC switch statement, and for LOGICAL case
2496 expressions we build one or two if-else compares.
2498 SELECT CASEs with CHARACTER case expressions are a whole different
2499 story, because they don't exist in GENERIC. So we sort them and
2500 do a binary search at runtime.
2502 Fortran has no BREAK statement, and it does not allow jumps from
2503 one case block to another. That makes things a lot easier for
2507 gfc_trans_select (gfc_code
* code
)
2513 gcc_assert (code
&& code
->expr1
);
2514 gfc_init_block (&block
);
2516 /* Build the exit label and hang it in. */
2517 exit_label
= gfc_build_label_decl (NULL_TREE
);
2518 code
->exit_label
= exit_label
;
2520 /* Empty SELECT constructs are legal. */
2521 if (code
->block
== NULL
)
2522 body
= build_empty_stmt (input_location
);
2524 /* Select the correct translation function. */
2526 switch (code
->expr1
->ts
.type
)
2529 body
= gfc_trans_logical_select (code
);
2533 body
= gfc_trans_integer_select (code
);
2537 body
= gfc_trans_character_select (code
);
2541 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2545 /* Build everything together. */
2546 gfc_add_expr_to_block (&block
, body
);
2547 gfc_add_expr_to_block (&block
, build1_v (LABEL_EXPR
, exit_label
));
2549 return gfc_finish_block (&block
);
2553 /* Traversal function to substitute a replacement symtree if the symbol
2554 in the expression is the same as that passed. f == 2 signals that
2555 that variable itself is not to be checked - only the references.
2556 This group of functions is used when the variable expression in a
2557 FORALL assignment has internal references. For example:
2558 FORALL (i = 1:4) p(p(i)) = i
2559 The only recourse here is to store a copy of 'p' for the index
2562 static gfc_symtree
*new_symtree
;
2563 static gfc_symtree
*old_symtree
;
2566 forall_replace (gfc_expr
*expr
, gfc_symbol
*sym
, int *f
)
2568 if (expr
->expr_type
!= EXPR_VARIABLE
)
2573 else if (expr
->symtree
->n
.sym
== sym
)
2574 expr
->symtree
= new_symtree
;
2580 forall_replace_symtree (gfc_expr
*e
, gfc_symbol
*sym
, int f
)
2582 gfc_traverse_expr (e
, sym
, forall_replace
, f
);
2586 forall_restore (gfc_expr
*expr
,
2587 gfc_symbol
*sym ATTRIBUTE_UNUSED
,
2588 int *f ATTRIBUTE_UNUSED
)
2590 if (expr
->expr_type
!= EXPR_VARIABLE
)
2593 if (expr
->symtree
== new_symtree
)
2594 expr
->symtree
= old_symtree
;
2600 forall_restore_symtree (gfc_expr
*e
)
2602 gfc_traverse_expr (e
, NULL
, forall_restore
, 0);
2606 forall_make_variable_temp (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2611 gfc_symbol
*new_sym
;
2612 gfc_symbol
*old_sym
;
2616 /* Build a copy of the lvalue. */
2617 old_symtree
= c
->expr1
->symtree
;
2618 old_sym
= old_symtree
->n
.sym
;
2619 e
= gfc_lval_expr_from_sym (old_sym
);
2620 if (old_sym
->attr
.dimension
)
2622 gfc_init_se (&tse
, NULL
);
2623 gfc_conv_subref_array_arg (&tse
, e
, 0, INTENT_IN
, false);
2624 gfc_add_block_to_block (pre
, &tse
.pre
);
2625 gfc_add_block_to_block (post
, &tse
.post
);
2626 tse
.expr
= build_fold_indirect_ref_loc (input_location
, tse
.expr
);
2628 if (e
->ts
.type
!= BT_CHARACTER
)
2630 /* Use the variable offset for the temporary. */
2631 tmp
= gfc_conv_array_offset (old_sym
->backend_decl
);
2632 gfc_conv_descriptor_offset_set (pre
, tse
.expr
, tmp
);
2637 gfc_init_se (&tse
, NULL
);
2638 gfc_init_se (&rse
, NULL
);
2639 gfc_conv_expr (&rse
, e
);
2640 if (e
->ts
.type
== BT_CHARACTER
)
2642 tse
.string_length
= rse
.string_length
;
2643 tmp
= gfc_get_character_type_len (gfc_default_character_kind
,
2645 tse
.expr
= gfc_conv_string_tmp (&tse
, build_pointer_type (tmp
),
2647 gfc_add_block_to_block (pre
, &tse
.pre
);
2648 gfc_add_block_to_block (post
, &tse
.post
);
2652 tmp
= gfc_typenode_for_spec (&e
->ts
);
2653 tse
.expr
= gfc_create_var (tmp
, "temp");
2656 tmp
= gfc_trans_scalar_assign (&tse
, &rse
, e
->ts
, true,
2657 e
->expr_type
== EXPR_VARIABLE
, true);
2658 gfc_add_expr_to_block (pre
, tmp
);
2662 /* Create a new symbol to represent the lvalue. */
2663 new_sym
= gfc_new_symbol (old_sym
->name
, NULL
);
2664 new_sym
->ts
= old_sym
->ts
;
2665 new_sym
->attr
.referenced
= 1;
2666 new_sym
->attr
.temporary
= 1;
2667 new_sym
->attr
.dimension
= old_sym
->attr
.dimension
;
2668 new_sym
->attr
.flavor
= old_sym
->attr
.flavor
;
2670 /* Use the temporary as the backend_decl. */
2671 new_sym
->backend_decl
= tse
.expr
;
2673 /* Create a fake symtree for it. */
2675 new_symtree
= gfc_new_symtree (&root
, old_sym
->name
);
2676 new_symtree
->n
.sym
= new_sym
;
2677 gcc_assert (new_symtree
== root
);
2679 /* Go through the expression reference replacing the old_symtree
2681 forall_replace_symtree (c
->expr1
, old_sym
, 2);
2683 /* Now we have made this temporary, we might as well use it for
2684 the right hand side. */
2685 forall_replace_symtree (c
->expr2
, old_sym
, 1);
2689 /* Handles dependencies in forall assignments. */
2691 check_forall_dependencies (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2698 lsym
= c
->expr1
->symtree
->n
.sym
;
2699 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
2701 /* Now check for dependencies within the 'variable'
2702 expression itself. These are treated by making a complete
2703 copy of variable and changing all the references to it
2704 point to the copy instead. Note that the shallow copy of
2705 the variable will not suffice for derived types with
2706 pointer components. We therefore leave these to their
2708 if (lsym
->ts
.type
== BT_DERIVED
2709 && lsym
->ts
.u
.derived
->attr
.pointer_comp
)
2713 if (find_forall_index (c
->expr1
, lsym
, 2))
2715 forall_make_variable_temp (c
, pre
, post
);
2719 /* Substrings with dependencies are treated in the same
2721 if (c
->expr1
->ts
.type
== BT_CHARACTER
2723 && c
->expr2
->expr_type
== EXPR_VARIABLE
2724 && lsym
== c
->expr2
->symtree
->n
.sym
)
2726 for (lref
= c
->expr1
->ref
; lref
; lref
= lref
->next
)
2727 if (lref
->type
== REF_SUBSTRING
)
2729 for (rref
= c
->expr2
->ref
; rref
; rref
= rref
->next
)
2730 if (rref
->type
== REF_SUBSTRING
)
2734 && gfc_dep_compare_expr (rref
->u
.ss
.start
, lref
->u
.ss
.start
) < 0)
2736 forall_make_variable_temp (c
, pre
, post
);
2745 cleanup_forall_symtrees (gfc_code
*c
)
2747 forall_restore_symtree (c
->expr1
);
2748 forall_restore_symtree (c
->expr2
);
2749 free (new_symtree
->n
.sym
);
2754 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2755 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2756 indicates whether we should generate code to test the FORALLs mask
2757 array. OUTER is the loop header to be used for initializing mask
2760 The generated loop format is:
2761 count = (end - start + step) / step
2774 gfc_trans_forall_loop (forall_info
*forall_tmp
, tree body
,
2775 int mask_flag
, stmtblock_t
*outer
)
2783 tree var
, start
, end
, step
;
2786 /* Initialize the mask index outside the FORALL nest. */
2787 if (mask_flag
&& forall_tmp
->mask
)
2788 gfc_add_modify (outer
, forall_tmp
->maskindex
, gfc_index_zero_node
);
2790 iter
= forall_tmp
->this_loop
;
2791 nvar
= forall_tmp
->nvar
;
2792 for (n
= 0; n
< nvar
; n
++)
2795 start
= iter
->start
;
2799 exit_label
= gfc_build_label_decl (NULL_TREE
);
2800 TREE_USED (exit_label
) = 1;
2802 /* The loop counter. */
2803 count
= gfc_create_var (TREE_TYPE (var
), "count");
2805 /* The body of the loop. */
2806 gfc_init_block (&block
);
2808 /* The exit condition. */
2809 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
2810 count
, build_int_cst (TREE_TYPE (count
), 0));
2811 if (forall_tmp
->do_concurrent
)
2812 cond
= build2 (ANNOTATE_EXPR
, TREE_TYPE (cond
), cond
,
2813 build_int_cst (integer_type_node
,
2814 annot_expr_ivdep_kind
));
2816 tmp
= build1_v (GOTO_EXPR
, exit_label
);
2817 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2818 cond
, tmp
, build_empty_stmt (input_location
));
2819 gfc_add_expr_to_block (&block
, tmp
);
2821 /* The main loop body. */
2822 gfc_add_expr_to_block (&block
, body
);
2824 /* Increment the loop variable. */
2825 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), var
,
2827 gfc_add_modify (&block
, var
, tmp
);
2829 /* Advance to the next mask element. Only do this for the
2831 if (n
== 0 && mask_flag
&& forall_tmp
->mask
)
2833 tree maskindex
= forall_tmp
->maskindex
;
2834 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2835 maskindex
, gfc_index_one_node
);
2836 gfc_add_modify (&block
, maskindex
, tmp
);
2839 /* Decrement the loop counter. */
2840 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), count
,
2841 build_int_cst (TREE_TYPE (var
), 1));
2842 gfc_add_modify (&block
, count
, tmp
);
2844 body
= gfc_finish_block (&block
);
2846 /* Loop var initialization. */
2847 gfc_init_block (&block
);
2848 gfc_add_modify (&block
, var
, start
);
2851 /* Initialize the loop counter. */
2852 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), step
,
2854 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), end
,
2856 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, TREE_TYPE (var
),
2858 gfc_add_modify (&block
, count
, tmp
);
2860 /* The loop expression. */
2861 tmp
= build1_v (LOOP_EXPR
, body
);
2862 gfc_add_expr_to_block (&block
, tmp
);
2864 /* The exit label. */
2865 tmp
= build1_v (LABEL_EXPR
, exit_label
);
2866 gfc_add_expr_to_block (&block
, tmp
);
2868 body
= gfc_finish_block (&block
);
2875 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2876 is nonzero, the body is controlled by all masks in the forall nest.
2877 Otherwise, the innermost loop is not controlled by it's mask. This
2878 is used for initializing that mask. */
2881 gfc_trans_nested_forall_loop (forall_info
* nested_forall_info
, tree body
,
2886 forall_info
*forall_tmp
;
2887 tree mask
, maskindex
;
2889 gfc_start_block (&header
);
2891 forall_tmp
= nested_forall_info
;
2892 while (forall_tmp
!= NULL
)
2894 /* Generate body with masks' control. */
2897 mask
= forall_tmp
->mask
;
2898 maskindex
= forall_tmp
->maskindex
;
2900 /* If a mask was specified make the assignment conditional. */
2903 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
2904 body
= build3_v (COND_EXPR
, tmp
, body
,
2905 build_empty_stmt (input_location
));
2908 body
= gfc_trans_forall_loop (forall_tmp
, body
, mask_flag
, &header
);
2909 forall_tmp
= forall_tmp
->prev_nest
;
2913 gfc_add_expr_to_block (&header
, body
);
2914 return gfc_finish_block (&header
);
2918 /* Allocate data for holding a temporary array. Returns either a local
2919 temporary array or a pointer variable. */
2922 gfc_do_allocate (tree bytesize
, tree size
, tree
* pdata
, stmtblock_t
* pblock
,
2929 if (INTEGER_CST_P (size
))
2930 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
2931 size
, gfc_index_one_node
);
2935 type
= build_range_type (gfc_array_index_type
, gfc_index_zero_node
, tmp
);
2936 type
= build_array_type (elem_type
, type
);
2937 if (gfc_can_put_var_on_stack (bytesize
))
2939 gcc_assert (INTEGER_CST_P (size
));
2940 tmpvar
= gfc_create_var (type
, "temp");
2945 tmpvar
= gfc_create_var (build_pointer_type (type
), "temp");
2946 *pdata
= convert (pvoid_type_node
, tmpvar
);
2948 tmp
= gfc_call_malloc (pblock
, TREE_TYPE (tmpvar
), bytesize
);
2949 gfc_add_modify (pblock
, tmpvar
, tmp
);
2955 /* Generate codes to copy the temporary to the actual lhs. */
2958 generate_loop_for_temp_to_lhs (gfc_expr
*expr
, tree tmp1
, tree count3
,
2959 tree count1
, tree wheremask
, bool invert
)
2963 stmtblock_t block
, body
;
2969 lss
= gfc_walk_expr (expr
);
2971 if (lss
== gfc_ss_terminator
)
2973 gfc_start_block (&block
);
2975 gfc_init_se (&lse
, NULL
);
2977 /* Translate the expression. */
2978 gfc_conv_expr (&lse
, expr
);
2980 /* Form the expression for the temporary. */
2981 tmp
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2983 /* Use the scalar assignment as is. */
2984 gfc_add_block_to_block (&block
, &lse
.pre
);
2985 gfc_add_modify (&block
, lse
.expr
, tmp
);
2986 gfc_add_block_to_block (&block
, &lse
.post
);
2988 /* Increment the count1. */
2989 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
2990 count1
, gfc_index_one_node
);
2991 gfc_add_modify (&block
, count1
, tmp
);
2993 tmp
= gfc_finish_block (&block
);
2997 gfc_start_block (&block
);
2999 gfc_init_loopinfo (&loop1
);
3000 gfc_init_se (&rse
, NULL
);
3001 gfc_init_se (&lse
, NULL
);
3003 /* Associate the lss with the loop. */
3004 gfc_add_ss_to_loop (&loop1
, lss
);
3006 /* Calculate the bounds of the scalarization. */
3007 gfc_conv_ss_startstride (&loop1
);
3008 /* Setup the scalarizing loops. */
3009 gfc_conv_loop_setup (&loop1
, &expr
->where
);
3011 gfc_mark_ss_chain_used (lss
, 1);
3013 /* Start the scalarized loop body. */
3014 gfc_start_scalarized_body (&loop1
, &body
);
3016 /* Setup the gfc_se structures. */
3017 gfc_copy_loopinfo_to_se (&lse
, &loop1
);
3020 /* Form the expression of the temporary. */
3021 if (lss
!= gfc_ss_terminator
)
3022 rse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3023 /* Translate expr. */
3024 gfc_conv_expr (&lse
, expr
);
3026 /* Use the scalar assignment. */
3027 rse
.string_length
= lse
.string_length
;
3028 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr
->ts
, false, true, true);
3030 /* Form the mask expression according to the mask tree list. */
3033 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
3035 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3036 TREE_TYPE (wheremaskexpr
),
3038 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3040 build_empty_stmt (input_location
));
3043 gfc_add_expr_to_block (&body
, tmp
);
3045 /* Increment count1. */
3046 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3047 count1
, gfc_index_one_node
);
3048 gfc_add_modify (&body
, count1
, tmp
);
3050 /* Increment count3. */
3053 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3054 gfc_array_index_type
, count3
,
3055 gfc_index_one_node
);
3056 gfc_add_modify (&body
, count3
, tmp
);
3059 /* Generate the copying loops. */
3060 gfc_trans_scalarizing_loops (&loop1
, &body
);
3061 gfc_add_block_to_block (&block
, &loop1
.pre
);
3062 gfc_add_block_to_block (&block
, &loop1
.post
);
3063 gfc_cleanup_loop (&loop1
);
3065 tmp
= gfc_finish_block (&block
);
3071 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
3072 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
3073 and should not be freed. WHEREMASK is the conditional execution mask
3074 whose sense may be inverted by INVERT. */
3077 generate_loop_for_rhs_to_temp (gfc_expr
*expr2
, tree tmp1
, tree count3
,
3078 tree count1
, gfc_ss
*lss
, gfc_ss
*rss
,
3079 tree wheremask
, bool invert
)
3081 stmtblock_t block
, body1
;
3088 gfc_start_block (&block
);
3090 gfc_init_se (&rse
, NULL
);
3091 gfc_init_se (&lse
, NULL
);
3093 if (lss
== gfc_ss_terminator
)
3095 gfc_init_block (&body1
);
3096 gfc_conv_expr (&rse
, expr2
);
3097 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3101 /* Initialize the loop. */
3102 gfc_init_loopinfo (&loop
);
3104 /* We may need LSS to determine the shape of the expression. */
3105 gfc_add_ss_to_loop (&loop
, lss
);
3106 gfc_add_ss_to_loop (&loop
, rss
);
3108 gfc_conv_ss_startstride (&loop
);
3109 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3111 gfc_mark_ss_chain_used (rss
, 1);
3112 /* Start the loop body. */
3113 gfc_start_scalarized_body (&loop
, &body1
);
3115 /* Translate the expression. */
3116 gfc_copy_loopinfo_to_se (&rse
, &loop
);
3118 gfc_conv_expr (&rse
, expr2
);
3120 /* Form the expression of the temporary. */
3121 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3124 /* Use the scalar assignment. */
3125 lse
.string_length
= rse
.string_length
;
3126 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr2
->ts
, true,
3127 expr2
->expr_type
== EXPR_VARIABLE
, true);
3129 /* Form the mask expression according to the mask tree list. */
3132 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
3134 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3135 TREE_TYPE (wheremaskexpr
),
3137 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3139 build_empty_stmt (input_location
));
3142 gfc_add_expr_to_block (&body1
, tmp
);
3144 if (lss
== gfc_ss_terminator
)
3146 gfc_add_block_to_block (&block
, &body1
);
3148 /* Increment count1. */
3149 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
3150 count1
, gfc_index_one_node
);
3151 gfc_add_modify (&block
, count1
, tmp
);
3155 /* Increment count1. */
3156 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3157 count1
, gfc_index_one_node
);
3158 gfc_add_modify (&body1
, count1
, tmp
);
3160 /* Increment count3. */
3163 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3164 gfc_array_index_type
,
3165 count3
, gfc_index_one_node
);
3166 gfc_add_modify (&body1
, count3
, tmp
);
3169 /* Generate the copying loops. */
3170 gfc_trans_scalarizing_loops (&loop
, &body1
);
3172 gfc_add_block_to_block (&block
, &loop
.pre
);
3173 gfc_add_block_to_block (&block
, &loop
.post
);
3175 gfc_cleanup_loop (&loop
);
3176 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3177 as tree nodes in SS may not be valid in different scope. */
3180 tmp
= gfc_finish_block (&block
);
3185 /* Calculate the size of temporary needed in the assignment inside forall.
3186 LSS and RSS are filled in this function. */
3189 compute_inner_temp_size (gfc_expr
*expr1
, gfc_expr
*expr2
,
3190 stmtblock_t
* pblock
,
3191 gfc_ss
**lss
, gfc_ss
**rss
)
3199 *lss
= gfc_walk_expr (expr1
);
3202 size
= gfc_index_one_node
;
3203 if (*lss
!= gfc_ss_terminator
)
3205 gfc_init_loopinfo (&loop
);
3207 /* Walk the RHS of the expression. */
3208 *rss
= gfc_walk_expr (expr2
);
3209 if (*rss
== gfc_ss_terminator
)
3210 /* The rhs is scalar. Add a ss for the expression. */
3211 *rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
3213 /* Associate the SS with the loop. */
3214 gfc_add_ss_to_loop (&loop
, *lss
);
3215 /* We don't actually need to add the rhs at this point, but it might
3216 make guessing the loop bounds a bit easier. */
3217 gfc_add_ss_to_loop (&loop
, *rss
);
3219 /* We only want the shape of the expression, not rest of the junk
3220 generated by the scalarizer. */
3221 loop
.array_parameter
= 1;
3223 /* Calculate the bounds of the scalarization. */
3224 save_flag
= gfc_option
.rtcheck
;
3225 gfc_option
.rtcheck
&= ~GFC_RTCHECK_BOUNDS
;
3226 gfc_conv_ss_startstride (&loop
);
3227 gfc_option
.rtcheck
= save_flag
;
3228 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3230 /* Figure out how many elements we need. */
3231 for (i
= 0; i
< loop
.dimen
; i
++)
3233 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
3234 gfc_array_index_type
,
3235 gfc_index_one_node
, loop
.from
[i
]);
3236 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3237 gfc_array_index_type
, tmp
, loop
.to
[i
]);
3238 size
= fold_build2_loc (input_location
, MULT_EXPR
,
3239 gfc_array_index_type
, size
, tmp
);
3241 gfc_add_block_to_block (pblock
, &loop
.pre
);
3242 size
= gfc_evaluate_now (size
, pblock
);
3243 gfc_add_block_to_block (pblock
, &loop
.post
);
3245 /* TODO: write a function that cleans up a loopinfo without freeing
3246 the SS chains. Currently a NOP. */
3253 /* Calculate the overall iterator number of the nested forall construct.
3254 This routine actually calculates the number of times the body of the
3255 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3256 that by the expression INNER_SIZE. The BLOCK argument specifies the
3257 block in which to calculate the result, and the optional INNER_SIZE_BODY
3258 argument contains any statements that need to executed (inside the loop)
3259 to initialize or calculate INNER_SIZE. */
3262 compute_overall_iter_number (forall_info
*nested_forall_info
, tree inner_size
,
3263 stmtblock_t
*inner_size_body
, stmtblock_t
*block
)
3265 forall_info
*forall_tmp
= nested_forall_info
;
3269 /* We can eliminate the innermost unconditional loops with constant
3271 if (INTEGER_CST_P (inner_size
))
3274 && !forall_tmp
->mask
3275 && INTEGER_CST_P (forall_tmp
->size
))
3277 inner_size
= fold_build2_loc (input_location
, MULT_EXPR
,
3278 gfc_array_index_type
,
3279 inner_size
, forall_tmp
->size
);
3280 forall_tmp
= forall_tmp
->prev_nest
;
3283 /* If there are no loops left, we have our constant result. */
3288 /* Otherwise, create a temporary variable to compute the result. */
3289 number
= gfc_create_var (gfc_array_index_type
, "num");
3290 gfc_add_modify (block
, number
, gfc_index_zero_node
);
3292 gfc_start_block (&body
);
3293 if (inner_size_body
)
3294 gfc_add_block_to_block (&body
, inner_size_body
);
3296 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3297 gfc_array_index_type
, number
, inner_size
);
3300 gfc_add_modify (&body
, number
, tmp
);
3301 tmp
= gfc_finish_block (&body
);
3303 /* Generate loops. */
3304 if (forall_tmp
!= NULL
)
3305 tmp
= gfc_trans_nested_forall_loop (forall_tmp
, tmp
, 1);
3307 gfc_add_expr_to_block (block
, tmp
);
3313 /* Allocate temporary for forall construct. SIZE is the size of temporary
3314 needed. PTEMP1 is returned for space free. */
3317 allocate_temp_for_forall_nest_1 (tree type
, tree size
, stmtblock_t
* block
,
3324 unit
= fold_convert (gfc_array_index_type
, TYPE_SIZE_UNIT (type
));
3325 if (!integer_onep (unit
))
3326 bytesize
= fold_build2_loc (input_location
, MULT_EXPR
,
3327 gfc_array_index_type
, size
, unit
);
3332 tmp
= gfc_do_allocate (bytesize
, size
, ptemp1
, block
, type
);
3335 tmp
= build_fold_indirect_ref_loc (input_location
, tmp
);
3340 /* Allocate temporary for forall construct according to the information in
3341 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3342 assignment inside forall. PTEMP1 is returned for space free. */
3345 allocate_temp_for_forall_nest (forall_info
* nested_forall_info
, tree type
,
3346 tree inner_size
, stmtblock_t
* inner_size_body
,
3347 stmtblock_t
* block
, tree
* ptemp1
)
3351 /* Calculate the total size of temporary needed in forall construct. */
3352 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
3353 inner_size_body
, block
);
3355 return allocate_temp_for_forall_nest_1 (type
, size
, block
, ptemp1
);
3359 /* Handle assignments inside forall which need temporary.
3361 forall (i=start:end:stride; maskexpr)
3364 (where e,f<i> are arbitrary expressions possibly involving i
3365 and there is a dependency between e<i> and f<i>)
3367 masktmp(:) = maskexpr(:)
3372 for (i = start; i <= end; i += stride)
3376 for (i = start; i <= end; i += stride)
3378 if (masktmp[maskindex++])
3379 tmp[count1++] = f<i>
3383 for (i = start; i <= end; i += stride)
3385 if (masktmp[maskindex++])
3386 e<i> = tmp[count1++]
3391 gfc_trans_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3392 tree wheremask
, bool invert
,
3393 forall_info
* nested_forall_info
,
3394 stmtblock_t
* block
)
3402 stmtblock_t inner_size_body
;
3404 /* Create vars. count1 is the current iterator number of the nested
3406 count1
= gfc_create_var (gfc_array_index_type
, "count1");
3408 /* Count is the wheremask index. */
3411 count
= gfc_create_var (gfc_array_index_type
, "count");
3412 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3417 /* Initialize count1. */
3418 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3420 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3421 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3422 gfc_init_block (&inner_size_body
);
3423 inner_size
= compute_inner_temp_size (expr1
, expr2
, &inner_size_body
,
3426 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3427 if (expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.u
.cl
->length
)
3429 if (!expr1
->ts
.u
.cl
->backend_decl
)
3432 gfc_init_se (&tse
, NULL
);
3433 gfc_conv_expr (&tse
, expr1
->ts
.u
.cl
->length
);
3434 expr1
->ts
.u
.cl
->backend_decl
= tse
.expr
;
3436 type
= gfc_get_character_type_len (gfc_default_character_kind
,
3437 expr1
->ts
.u
.cl
->backend_decl
);
3440 type
= gfc_typenode_for_spec (&expr1
->ts
);
3442 /* Allocate temporary for nested forall construct according to the
3443 information in nested_forall_info and inner_size. */
3444 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
, inner_size
,
3445 &inner_size_body
, block
, &ptemp1
);
3447 /* Generate codes to copy rhs to the temporary . */
3448 tmp
= generate_loop_for_rhs_to_temp (expr2
, tmp1
, count
, count1
, lss
, rss
,
3451 /* Generate body and loops according to the information in
3452 nested_forall_info. */
3453 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3454 gfc_add_expr_to_block (block
, tmp
);
3457 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3461 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3463 /* Generate codes to copy the temporary to lhs. */
3464 tmp
= generate_loop_for_temp_to_lhs (expr1
, tmp1
, count
, count1
,
3467 /* Generate body and loops according to the information in
3468 nested_forall_info. */
3469 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3470 gfc_add_expr_to_block (block
, tmp
);
3474 /* Free the temporary. */
3475 tmp
= gfc_call_free (ptemp1
);
3476 gfc_add_expr_to_block (block
, tmp
);
3481 /* Translate pointer assignment inside FORALL which need temporary. */
3484 gfc_trans_pointer_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3485 forall_info
* nested_forall_info
,
3486 stmtblock_t
* block
)
3493 gfc_array_info
*info
;
3500 tree tmp
, tmp1
, ptemp1
;
3502 count
= gfc_create_var (gfc_array_index_type
, "count");
3503 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3505 inner_size
= gfc_index_one_node
;
3506 lss
= gfc_walk_expr (expr1
);
3507 rss
= gfc_walk_expr (expr2
);
3508 if (lss
== gfc_ss_terminator
)
3510 type
= gfc_typenode_for_spec (&expr1
->ts
);
3511 type
= build_pointer_type (type
);
3513 /* Allocate temporary for nested forall construct according to the
3514 information in nested_forall_info and inner_size. */
3515 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
,
3516 inner_size
, NULL
, block
, &ptemp1
);
3517 gfc_start_block (&body
);
3518 gfc_init_se (&lse
, NULL
);
3519 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3520 gfc_init_se (&rse
, NULL
);
3521 rse
.want_pointer
= 1;
3522 gfc_conv_expr (&rse
, expr2
);
3523 gfc_add_block_to_block (&body
, &rse
.pre
);
3524 gfc_add_modify (&body
, lse
.expr
,
3525 fold_convert (TREE_TYPE (lse
.expr
), rse
.expr
));
3526 gfc_add_block_to_block (&body
, &rse
.post
);
3528 /* Increment count. */
3529 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3530 count
, gfc_index_one_node
);
3531 gfc_add_modify (&body
, count
, tmp
);
3533 tmp
= gfc_finish_block (&body
);
3535 /* Generate body and loops according to the information in
3536 nested_forall_info. */
3537 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3538 gfc_add_expr_to_block (block
, tmp
);
3541 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3543 gfc_start_block (&body
);
3544 gfc_init_se (&lse
, NULL
);
3545 gfc_init_se (&rse
, NULL
);
3546 rse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3547 lse
.want_pointer
= 1;
3548 gfc_conv_expr (&lse
, expr1
);
3549 gfc_add_block_to_block (&body
, &lse
.pre
);
3550 gfc_add_modify (&body
, lse
.expr
, rse
.expr
);
3551 gfc_add_block_to_block (&body
, &lse
.post
);
3552 /* Increment count. */
3553 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3554 count
, gfc_index_one_node
);
3555 gfc_add_modify (&body
, count
, tmp
);
3556 tmp
= gfc_finish_block (&body
);
3558 /* Generate body and loops according to the information in
3559 nested_forall_info. */
3560 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3561 gfc_add_expr_to_block (block
, tmp
);
3565 gfc_init_loopinfo (&loop
);
3567 /* Associate the SS with the loop. */
3568 gfc_add_ss_to_loop (&loop
, rss
);
3570 /* Setup the scalarizing loops and bounds. */
3571 gfc_conv_ss_startstride (&loop
);
3573 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3575 info
= &rss
->info
->data
.array
;
3576 desc
= info
->descriptor
;
3578 /* Make a new descriptor. */
3579 parmtype
= gfc_get_element_type (TREE_TYPE (desc
));
3580 parmtype
= gfc_get_array_type_bounds (parmtype
, loop
.dimen
, 0,
3581 loop
.from
, loop
.to
, 1,
3582 GFC_ARRAY_UNKNOWN
, true);
3584 /* Allocate temporary for nested forall construct. */
3585 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, parmtype
,
3586 inner_size
, NULL
, block
, &ptemp1
);
3587 gfc_start_block (&body
);
3588 gfc_init_se (&lse
, NULL
);
3589 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3590 lse
.direct_byref
= 1;
3591 gfc_conv_expr_descriptor (&lse
, expr2
);
3593 gfc_add_block_to_block (&body
, &lse
.pre
);
3594 gfc_add_block_to_block (&body
, &lse
.post
);
3596 /* Increment count. */
3597 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3598 count
, gfc_index_one_node
);
3599 gfc_add_modify (&body
, count
, tmp
);
3601 tmp
= gfc_finish_block (&body
);
3603 /* Generate body and loops according to the information in
3604 nested_forall_info. */
3605 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3606 gfc_add_expr_to_block (block
, tmp
);
3609 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3611 parm
= gfc_build_array_ref (tmp1
, count
, NULL
);
3612 gfc_init_se (&lse
, NULL
);
3613 gfc_conv_expr_descriptor (&lse
, expr1
);
3614 gfc_add_modify (&lse
.pre
, lse
.expr
, parm
);
3615 gfc_start_block (&body
);
3616 gfc_add_block_to_block (&body
, &lse
.pre
);
3617 gfc_add_block_to_block (&body
, &lse
.post
);
3619 /* Increment count. */
3620 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3621 count
, gfc_index_one_node
);
3622 gfc_add_modify (&body
, count
, tmp
);
3624 tmp
= gfc_finish_block (&body
);
3626 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3627 gfc_add_expr_to_block (block
, tmp
);
3629 /* Free the temporary. */
3632 tmp
= gfc_call_free (ptemp1
);
3633 gfc_add_expr_to_block (block
, tmp
);
3638 /* FORALL and WHERE statements are really nasty, especially when you nest
3639 them. All the rhs of a forall assignment must be evaluated before the
3640 actual assignments are performed. Presumably this also applies to all the
3641 assignments in an inner where statement. */
3643 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3644 linear array, relying on the fact that we process in the same order in all
3647 forall (i=start:end:stride; maskexpr)
3651 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3653 count = ((end + 1 - start) / stride)
3654 masktmp(:) = maskexpr(:)
3657 for (i = start; i <= end; i += stride)
3659 if (masktmp[maskindex++])
3663 for (i = start; i <= end; i += stride)
3665 if (masktmp[maskindex++])
3669 Note that this code only works when there are no dependencies.
3670 Forall loop with array assignments and data dependencies are a real pain,
3671 because the size of the temporary cannot always be determined before the
3672 loop is executed. This problem is compounded by the presence of nested
3677 gfc_trans_forall_1 (gfc_code
* code
, forall_info
* nested_forall_info
)
3694 tree cycle_label
= NULL_TREE
;
3698 gfc_forall_iterator
*fa
;
3701 gfc_saved_var
*saved_vars
;
3702 iter_info
*this_forall
;
3706 /* Do nothing if the mask is false. */
3708 && code
->expr1
->expr_type
== EXPR_CONSTANT
3709 && !code
->expr1
->value
.logical
)
3710 return build_empty_stmt (input_location
);
3713 /* Count the FORALL index number. */
3714 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3718 /* Allocate the space for var, start, end, step, varexpr. */
3719 var
= XCNEWVEC (tree
, nvar
);
3720 start
= XCNEWVEC (tree
, nvar
);
3721 end
= XCNEWVEC (tree
, nvar
);
3722 step
= XCNEWVEC (tree
, nvar
);
3723 varexpr
= XCNEWVEC (gfc_expr
*, nvar
);
3724 saved_vars
= XCNEWVEC (gfc_saved_var
, nvar
);
3726 /* Allocate the space for info. */
3727 info
= XCNEW (forall_info
);
3729 gfc_start_block (&pre
);
3730 gfc_init_block (&post
);
3731 gfc_init_block (&block
);
3734 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3736 gfc_symbol
*sym
= fa
->var
->symtree
->n
.sym
;
3738 /* Allocate space for this_forall. */
3739 this_forall
= XCNEW (iter_info
);
3741 /* Create a temporary variable for the FORALL index. */
3742 tmp
= gfc_typenode_for_spec (&sym
->ts
);
3743 var
[n
] = gfc_create_var (tmp
, sym
->name
);
3744 gfc_shadow_sym (sym
, var
[n
], &saved_vars
[n
]);
3746 /* Record it in this_forall. */
3747 this_forall
->var
= var
[n
];
3749 /* Replace the index symbol's backend_decl with the temporary decl. */
3750 sym
->backend_decl
= var
[n
];
3752 /* Work out the start, end and stride for the loop. */
3753 gfc_init_se (&se
, NULL
);
3754 gfc_conv_expr_val (&se
, fa
->start
);
3755 /* Record it in this_forall. */
3756 this_forall
->start
= se
.expr
;
3757 gfc_add_block_to_block (&block
, &se
.pre
);
3760 gfc_init_se (&se
, NULL
);
3761 gfc_conv_expr_val (&se
, fa
->end
);
3762 /* Record it in this_forall. */
3763 this_forall
->end
= se
.expr
;
3764 gfc_make_safe_expr (&se
);
3765 gfc_add_block_to_block (&block
, &se
.pre
);
3768 gfc_init_se (&se
, NULL
);
3769 gfc_conv_expr_val (&se
, fa
->stride
);
3770 /* Record it in this_forall. */
3771 this_forall
->step
= se
.expr
;
3772 gfc_make_safe_expr (&se
);
3773 gfc_add_block_to_block (&block
, &se
.pre
);
3776 /* Set the NEXT field of this_forall to NULL. */
3777 this_forall
->next
= NULL
;
3778 /* Link this_forall to the info construct. */
3779 if (info
->this_loop
)
3781 iter_info
*iter_tmp
= info
->this_loop
;
3782 while (iter_tmp
->next
!= NULL
)
3783 iter_tmp
= iter_tmp
->next
;
3784 iter_tmp
->next
= this_forall
;
3787 info
->this_loop
= this_forall
;
3793 /* Calculate the size needed for the current forall level. */
3794 size
= gfc_index_one_node
;
3795 for (n
= 0; n
< nvar
; n
++)
3797 /* size = (end + step - start) / step. */
3798 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (start
[n
]),
3800 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (end
[n
]),
3802 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
, TREE_TYPE (tmp
),
3804 tmp
= convert (gfc_array_index_type
, tmp
);
3806 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
3810 /* Record the nvar and size of current forall level. */
3816 /* If the mask is .true., consider the FORALL unconditional. */
3817 if (code
->expr1
->expr_type
== EXPR_CONSTANT
3818 && code
->expr1
->value
.logical
)
3826 /* First we need to allocate the mask. */
3829 /* As the mask array can be very big, prefer compact boolean types. */
3830 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3831 mask
= allocate_temp_for_forall_nest (nested_forall_info
, mask_type
,
3832 size
, NULL
, &block
, &pmask
);
3833 maskindex
= gfc_create_var_np (gfc_array_index_type
, "mi");
3835 /* Record them in the info structure. */
3836 info
->maskindex
= maskindex
;
3841 /* No mask was specified. */
3842 maskindex
= NULL_TREE
;
3843 mask
= pmask
= NULL_TREE
;
3846 /* Link the current forall level to nested_forall_info. */
3847 info
->prev_nest
= nested_forall_info
;
3848 nested_forall_info
= info
;
3850 /* Copy the mask into a temporary variable if required.
3851 For now we assume a mask temporary is needed. */
3854 /* As the mask array can be very big, prefer compact boolean types. */
3855 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3857 gfc_add_modify (&block
, maskindex
, gfc_index_zero_node
);
3859 /* Start of mask assignment loop body. */
3860 gfc_start_block (&body
);
3862 /* Evaluate the mask expression. */
3863 gfc_init_se (&se
, NULL
);
3864 gfc_conv_expr_val (&se
, code
->expr1
);
3865 gfc_add_block_to_block (&body
, &se
.pre
);
3867 /* Store the mask. */
3868 se
.expr
= convert (mask_type
, se
.expr
);
3870 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
3871 gfc_add_modify (&body
, tmp
, se
.expr
);
3873 /* Advance to the next mask element. */
3874 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3875 maskindex
, gfc_index_one_node
);
3876 gfc_add_modify (&body
, maskindex
, tmp
);
3878 /* Generate the loops. */
3879 tmp
= gfc_finish_block (&body
);
3880 tmp
= gfc_trans_nested_forall_loop (info
, tmp
, 0);
3881 gfc_add_expr_to_block (&block
, tmp
);
3884 if (code
->op
== EXEC_DO_CONCURRENT
)
3886 gfc_init_block (&body
);
3887 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3888 code
->cycle_label
= cycle_label
;
3889 tmp
= gfc_trans_code (code
->block
->next
);
3890 gfc_add_expr_to_block (&body
, tmp
);
3892 if (TREE_USED (cycle_label
))
3894 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3895 gfc_add_expr_to_block (&body
, tmp
);
3898 tmp
= gfc_finish_block (&body
);
3899 nested_forall_info
->do_concurrent
= true;
3900 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3901 gfc_add_expr_to_block (&block
, tmp
);
3905 c
= code
->block
->next
;
3907 /* TODO: loop merging in FORALL statements. */
3908 /* Now that we've got a copy of the mask, generate the assignment loops. */
3914 /* A scalar or array assignment. DO the simple check for
3915 lhs to rhs dependencies. These make a temporary for the
3916 rhs and form a second forall block to copy to variable. */
3917 need_temp
= check_forall_dependencies(c
, &pre
, &post
);
3919 /* Temporaries due to array assignment data dependencies introduce
3920 no end of problems. */
3922 gfc_trans_assign_need_temp (c
->expr1
, c
->expr2
, NULL
, false,
3923 nested_forall_info
, &block
);
3926 /* Use the normal assignment copying routines. */
3927 assign
= gfc_trans_assignment (c
->expr1
, c
->expr2
, false, true);
3929 /* Generate body and loops. */
3930 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3932 gfc_add_expr_to_block (&block
, tmp
);
3935 /* Cleanup any temporary symtrees that have been made to deal
3936 with dependencies. */
3938 cleanup_forall_symtrees (c
);
3943 /* Translate WHERE or WHERE construct nested in FORALL. */
3944 gfc_trans_where_2 (c
, NULL
, false, nested_forall_info
, &block
);
3947 /* Pointer assignment inside FORALL. */
3948 case EXEC_POINTER_ASSIGN
:
3949 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
3951 gfc_trans_pointer_assign_need_temp (c
->expr1
, c
->expr2
,
3952 nested_forall_info
, &block
);
3955 /* Use the normal assignment copying routines. */
3956 assign
= gfc_trans_pointer_assignment (c
->expr1
, c
->expr2
);
3958 /* Generate body and loops. */
3959 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3961 gfc_add_expr_to_block (&block
, tmp
);
3966 tmp
= gfc_trans_forall_1 (c
, nested_forall_info
);
3967 gfc_add_expr_to_block (&block
, tmp
);
3970 /* Explicit subroutine calls are prevented by the frontend but interface
3971 assignments can legitimately produce them. */
3972 case EXEC_ASSIGN_CALL
:
3973 assign
= gfc_trans_call (c
, true, NULL_TREE
, NULL_TREE
, false);
3974 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, assign
, 1);
3975 gfc_add_expr_to_block (&block
, tmp
);
3986 /* Restore the original index variables. */
3987 for (fa
= code
->ext
.forall_iterator
, n
= 0; fa
; fa
= fa
->next
, n
++)
3988 gfc_restore_sym (fa
->var
->symtree
->n
.sym
, &saved_vars
[n
]);
3990 /* Free the space for var, start, end, step, varexpr. */
3998 for (this_forall
= info
->this_loop
; this_forall
;)
4000 iter_info
*next
= this_forall
->next
;
4005 /* Free the space for this forall_info. */
4010 /* Free the temporary for the mask. */
4011 tmp
= gfc_call_free (pmask
);
4012 gfc_add_expr_to_block (&block
, tmp
);
4015 pushdecl (maskindex
);
4017 gfc_add_block_to_block (&pre
, &block
);
4018 gfc_add_block_to_block (&pre
, &post
);
4020 return gfc_finish_block (&pre
);
4024 /* Translate the FORALL statement or construct. */
4026 tree
gfc_trans_forall (gfc_code
* code
)
4028 return gfc_trans_forall_1 (code
, NULL
);
4032 /* Translate the DO CONCURRENT construct. */
4034 tree
gfc_trans_do_concurrent (gfc_code
* code
)
4036 return gfc_trans_forall_1 (code
, NULL
);
4040 /* Evaluate the WHERE mask expression, copy its value to a temporary.
4041 If the WHERE construct is nested in FORALL, compute the overall temporary
4042 needed by the WHERE mask expression multiplied by the iterator number of
4044 ME is the WHERE mask expression.
4045 MASK is the current execution mask upon input, whose sense may or may
4046 not be inverted as specified by the INVERT argument.
4047 CMASK is the updated execution mask on output, or NULL if not required.
4048 PMASK is the pending execution mask on output, or NULL if not required.
4049 BLOCK is the block in which to place the condition evaluation loops. */
4052 gfc_evaluate_where_mask (gfc_expr
* me
, forall_info
* nested_forall_info
,
4053 tree mask
, bool invert
, tree cmask
, tree pmask
,
4054 tree mask_type
, stmtblock_t
* block
)
4059 stmtblock_t body
, body1
;
4060 tree count
, cond
, mtmp
;
4063 gfc_init_loopinfo (&loop
);
4065 lss
= gfc_walk_expr (me
);
4066 rss
= gfc_walk_expr (me
);
4068 /* Variable to index the temporary. */
4069 count
= gfc_create_var (gfc_array_index_type
, "count");
4070 /* Initialize count. */
4071 gfc_add_modify (block
, count
, gfc_index_zero_node
);
4073 gfc_start_block (&body
);
4075 gfc_init_se (&rse
, NULL
);
4076 gfc_init_se (&lse
, NULL
);
4078 if (lss
== gfc_ss_terminator
)
4080 gfc_init_block (&body1
);
4084 /* Initialize the loop. */
4085 gfc_init_loopinfo (&loop
);
4087 /* We may need LSS to determine the shape of the expression. */
4088 gfc_add_ss_to_loop (&loop
, lss
);
4089 gfc_add_ss_to_loop (&loop
, rss
);
4091 gfc_conv_ss_startstride (&loop
);
4092 gfc_conv_loop_setup (&loop
, &me
->where
);
4094 gfc_mark_ss_chain_used (rss
, 1);
4095 /* Start the loop body. */
4096 gfc_start_scalarized_body (&loop
, &body1
);
4098 /* Translate the expression. */
4099 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4101 gfc_conv_expr (&rse
, me
);
4104 /* Variable to evaluate mask condition. */
4105 cond
= gfc_create_var (mask_type
, "cond");
4106 if (mask
&& (cmask
|| pmask
))
4107 mtmp
= gfc_create_var (mask_type
, "mask");
4108 else mtmp
= NULL_TREE
;
4110 gfc_add_block_to_block (&body1
, &lse
.pre
);
4111 gfc_add_block_to_block (&body1
, &rse
.pre
);
4113 gfc_add_modify (&body1
, cond
, fold_convert (mask_type
, rse
.expr
));
4115 if (mask
&& (cmask
|| pmask
))
4117 tmp
= gfc_build_array_ref (mask
, count
, NULL
);
4119 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, tmp
);
4120 gfc_add_modify (&body1
, mtmp
, tmp
);
4125 tmp1
= gfc_build_array_ref (cmask
, count
, NULL
);
4128 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
,
4130 gfc_add_modify (&body1
, tmp1
, tmp
);
4135 tmp1
= gfc_build_array_ref (pmask
, count
, NULL
);
4136 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, cond
);
4138 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
, mtmp
,
4140 gfc_add_modify (&body1
, tmp1
, tmp
);
4143 gfc_add_block_to_block (&body1
, &lse
.post
);
4144 gfc_add_block_to_block (&body1
, &rse
.post
);
4146 if (lss
== gfc_ss_terminator
)
4148 gfc_add_block_to_block (&body
, &body1
);
4152 /* Increment count. */
4153 tmp1
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4154 count
, gfc_index_one_node
);
4155 gfc_add_modify (&body1
, count
, tmp1
);
4157 /* Generate the copying loops. */
4158 gfc_trans_scalarizing_loops (&loop
, &body1
);
4160 gfc_add_block_to_block (&body
, &loop
.pre
);
4161 gfc_add_block_to_block (&body
, &loop
.post
);
4163 gfc_cleanup_loop (&loop
);
4164 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
4165 as tree nodes in SS may not be valid in different scope. */
4168 tmp1
= gfc_finish_block (&body
);
4169 /* If the WHERE construct is inside FORALL, fill the full temporary. */
4170 if (nested_forall_info
!= NULL
)
4171 tmp1
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp1
, 1);
4173 gfc_add_expr_to_block (block
, tmp1
);
4177 /* Translate an assignment statement in a WHERE statement or construct
4178 statement. The MASK expression is used to control which elements
4179 of EXPR1 shall be assigned. The sense of MASK is specified by
4183 gfc_trans_where_assign (gfc_expr
*expr1
, gfc_expr
*expr2
,
4184 tree mask
, bool invert
,
4185 tree count1
, tree count2
,
4191 gfc_ss
*lss_section
;
4198 tree index
, maskexpr
;
4200 /* A defined assignment. */
4201 if (cnext
&& cnext
->resolved_sym
)
4202 return gfc_trans_call (cnext
, true, mask
, count1
, invert
);
4205 /* TODO: handle this special case.
4206 Special case a single function returning an array. */
4207 if (expr2
->expr_type
== EXPR_FUNCTION
&& expr2
->rank
> 0)
4209 tmp
= gfc_trans_arrayfunc_assign (expr1
, expr2
);
4215 /* Assignment of the form lhs = rhs. */
4216 gfc_start_block (&block
);
4218 gfc_init_se (&lse
, NULL
);
4219 gfc_init_se (&rse
, NULL
);
4222 lss
= gfc_walk_expr (expr1
);
4225 /* In each where-assign-stmt, the mask-expr and the variable being
4226 defined shall be arrays of the same shape. */
4227 gcc_assert (lss
!= gfc_ss_terminator
);
4229 /* The assignment needs scalarization. */
4232 /* Find a non-scalar SS from the lhs. */
4233 while (lss_section
!= gfc_ss_terminator
4234 && lss_section
->info
->type
!= GFC_SS_SECTION
)
4235 lss_section
= lss_section
->next
;
4237 gcc_assert (lss_section
!= gfc_ss_terminator
);
4239 /* Initialize the scalarizer. */
4240 gfc_init_loopinfo (&loop
);
4243 rss
= gfc_walk_expr (expr2
);
4244 if (rss
== gfc_ss_terminator
)
4246 /* The rhs is scalar. Add a ss for the expression. */
4247 rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
4248 rss
->info
->where
= 1;
4251 /* Associate the SS with the loop. */
4252 gfc_add_ss_to_loop (&loop
, lss
);
4253 gfc_add_ss_to_loop (&loop
, rss
);
4255 /* Calculate the bounds of the scalarization. */
4256 gfc_conv_ss_startstride (&loop
);
4258 /* Resolve any data dependencies in the statement. */
4259 gfc_conv_resolve_dependencies (&loop
, lss_section
, rss
);
4261 /* Setup the scalarizing loops. */
4262 gfc_conv_loop_setup (&loop
, &expr2
->where
);
4264 /* Setup the gfc_se structures. */
4265 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4266 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4269 gfc_mark_ss_chain_used (rss
, 1);
4270 if (loop
.temp_ss
== NULL
)
4273 gfc_mark_ss_chain_used (lss
, 1);
4277 lse
.ss
= loop
.temp_ss
;
4278 gfc_mark_ss_chain_used (lss
, 3);
4279 gfc_mark_ss_chain_used (loop
.temp_ss
, 3);
4282 /* Start the scalarized loop body. */
4283 gfc_start_scalarized_body (&loop
, &body
);
4285 /* Translate the expression. */
4286 gfc_conv_expr (&rse
, expr2
);
4287 if (lss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4288 gfc_conv_tmp_array_ref (&lse
);
4290 gfc_conv_expr (&lse
, expr1
);
4292 /* Form the mask expression according to the mask. */
4294 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4296 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4297 TREE_TYPE (maskexpr
), maskexpr
);
4299 /* Use the scalar assignment as is. */
4300 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
,
4301 loop
.temp_ss
!= NULL
, false, true);
4303 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
, build_empty_stmt (input_location
));
4305 gfc_add_expr_to_block (&body
, tmp
);
4307 if (lss
== gfc_ss_terminator
)
4309 /* Increment count1. */
4310 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4311 count1
, gfc_index_one_node
);
4312 gfc_add_modify (&body
, count1
, tmp
);
4314 /* Use the scalar assignment as is. */
4315 gfc_add_block_to_block (&block
, &body
);
4319 gcc_assert (lse
.ss
== gfc_ss_terminator
4320 && rse
.ss
== gfc_ss_terminator
);
4322 if (loop
.temp_ss
!= NULL
)
4324 /* Increment count1 before finish the main body of a scalarized
4326 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4327 gfc_array_index_type
, count1
, gfc_index_one_node
);
4328 gfc_add_modify (&body
, count1
, tmp
);
4329 gfc_trans_scalarized_loop_boundary (&loop
, &body
);
4331 /* We need to copy the temporary to the actual lhs. */
4332 gfc_init_se (&lse
, NULL
);
4333 gfc_init_se (&rse
, NULL
);
4334 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4335 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4337 rse
.ss
= loop
.temp_ss
;
4340 gfc_conv_tmp_array_ref (&rse
);
4341 gfc_conv_expr (&lse
, expr1
);
4343 gcc_assert (lse
.ss
== gfc_ss_terminator
4344 && rse
.ss
== gfc_ss_terminator
);
4346 /* Form the mask expression according to the mask tree list. */
4348 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4350 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4351 TREE_TYPE (maskexpr
), maskexpr
);
4353 /* Use the scalar assignment as is. */
4354 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
, false, false,
4356 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
,
4357 build_empty_stmt (input_location
));
4358 gfc_add_expr_to_block (&body
, tmp
);
4360 /* Increment count2. */
4361 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4362 gfc_array_index_type
, count2
,
4363 gfc_index_one_node
);
4364 gfc_add_modify (&body
, count2
, tmp
);
4368 /* Increment count1. */
4369 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4370 gfc_array_index_type
, count1
,
4371 gfc_index_one_node
);
4372 gfc_add_modify (&body
, count1
, tmp
);
4375 /* Generate the copying loops. */
4376 gfc_trans_scalarizing_loops (&loop
, &body
);
4378 /* Wrap the whole thing up. */
4379 gfc_add_block_to_block (&block
, &loop
.pre
);
4380 gfc_add_block_to_block (&block
, &loop
.post
);
4381 gfc_cleanup_loop (&loop
);
4384 return gfc_finish_block (&block
);
4388 /* Translate the WHERE construct or statement.
4389 This function can be called iteratively to translate the nested WHERE
4390 construct or statement.
4391 MASK is the control mask. */
4394 gfc_trans_where_2 (gfc_code
* code
, tree mask
, bool invert
,
4395 forall_info
* nested_forall_info
, stmtblock_t
* block
)
4397 stmtblock_t inner_size_body
;
4398 tree inner_size
, size
;
4407 tree count1
, count2
;
4411 tree pcmask
= NULL_TREE
;
4412 tree ppmask
= NULL_TREE
;
4413 tree cmask
= NULL_TREE
;
4414 tree pmask
= NULL_TREE
;
4415 gfc_actual_arglist
*arg
;
4417 /* the WHERE statement or the WHERE construct statement. */
4418 cblock
= code
->block
;
4420 /* As the mask array can be very big, prefer compact boolean types. */
4421 mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
4423 /* Determine which temporary masks are needed. */
4426 /* One clause: No ELSEWHEREs. */
4427 need_cmask
= (cblock
->next
!= 0);
4430 else if (cblock
->block
->block
)
4432 /* Three or more clauses: Conditional ELSEWHEREs. */
4436 else if (cblock
->next
)
4438 /* Two clauses, the first non-empty. */
4440 need_pmask
= (mask
!= NULL_TREE
4441 && cblock
->block
->next
!= 0);
4443 else if (!cblock
->block
->next
)
4445 /* Two clauses, both empty. */
4449 /* Two clauses, the first empty, the second non-empty. */
4452 need_cmask
= (cblock
->block
->expr1
!= 0);
4461 if (need_cmask
|| need_pmask
)
4463 /* Calculate the size of temporary needed by the mask-expr. */
4464 gfc_init_block (&inner_size_body
);
4465 inner_size
= compute_inner_temp_size (cblock
->expr1
, cblock
->expr1
,
4466 &inner_size_body
, &lss
, &rss
);
4468 gfc_free_ss_chain (lss
);
4469 gfc_free_ss_chain (rss
);
4471 /* Calculate the total size of temporary needed. */
4472 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
4473 &inner_size_body
, block
);
4475 /* Check whether the size is negative. */
4476 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
, size
,
4477 gfc_index_zero_node
);
4478 size
= fold_build3_loc (input_location
, COND_EXPR
, gfc_array_index_type
,
4479 cond
, gfc_index_zero_node
, size
);
4480 size
= gfc_evaluate_now (size
, block
);
4482 /* Allocate temporary for WHERE mask if needed. */
4484 cmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4487 /* Allocate temporary for !mask if needed. */
4489 pmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4495 /* Each time around this loop, the where clause is conditional
4496 on the value of mask and invert, which are updated at the
4497 bottom of the loop. */
4499 /* Has mask-expr. */
4502 /* Ensure that the WHERE mask will be evaluated exactly once.
4503 If there are no statements in this WHERE/ELSEWHERE clause,
4504 then we don't need to update the control mask (cmask).
4505 If this is the last clause of the WHERE construct, then
4506 we don't need to update the pending control mask (pmask). */
4508 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4510 cblock
->next
? cmask
: NULL_TREE
,
4511 cblock
->block
? pmask
: NULL_TREE
,
4514 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4516 (cblock
->next
|| cblock
->block
)
4517 ? cmask
: NULL_TREE
,
4518 NULL_TREE
, mask_type
, block
);
4522 /* It's a final elsewhere-stmt. No mask-expr is present. */
4526 /* The body of this where clause are controlled by cmask with
4527 sense specified by invert. */
4529 /* Get the assignment statement of a WHERE statement, or the first
4530 statement in where-body-construct of a WHERE construct. */
4531 cnext
= cblock
->next
;
4536 /* WHERE assignment statement. */
4537 case EXEC_ASSIGN_CALL
:
4539 arg
= cnext
->ext
.actual
;
4540 expr1
= expr2
= NULL
;
4541 for (; arg
; arg
= arg
->next
)
4553 expr1
= cnext
->expr1
;
4554 expr2
= cnext
->expr2
;
4556 if (nested_forall_info
!= NULL
)
4558 need_temp
= gfc_check_dependency (expr1
, expr2
, 0);
4559 if (need_temp
&& cnext
->op
!= EXEC_ASSIGN_CALL
)
4560 gfc_trans_assign_need_temp (expr1
, expr2
,
4562 nested_forall_info
, block
);
4565 /* Variables to control maskexpr. */
4566 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4567 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4568 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4569 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4571 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4576 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
4578 gfc_add_expr_to_block (block
, tmp
);
4583 /* Variables to control maskexpr. */
4584 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4585 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4586 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4587 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4589 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4593 gfc_add_expr_to_block (block
, tmp
);
4598 /* WHERE or WHERE construct is part of a where-body-construct. */
4600 gfc_trans_where_2 (cnext
, cmask
, invert
,
4601 nested_forall_info
, block
);
4608 /* The next statement within the same where-body-construct. */
4609 cnext
= cnext
->next
;
4611 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4612 cblock
= cblock
->block
;
4613 if (mask
== NULL_TREE
)
4615 /* If we're the initial WHERE, we can simply invert the sense
4616 of the current mask to obtain the "mask" for the remaining
4623 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4629 /* If we allocated a pending mask array, deallocate it now. */
4632 tmp
= gfc_call_free (ppmask
);
4633 gfc_add_expr_to_block (block
, tmp
);
4636 /* If we allocated a current mask array, deallocate it now. */
4639 tmp
= gfc_call_free (pcmask
);
4640 gfc_add_expr_to_block (block
, tmp
);
4644 /* Translate a simple WHERE construct or statement without dependencies.
4645 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4646 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4647 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
4650 gfc_trans_where_3 (gfc_code
* cblock
, gfc_code
* eblock
)
4652 stmtblock_t block
, body
;
4653 gfc_expr
*cond
, *tdst
, *tsrc
, *edst
, *esrc
;
4654 tree tmp
, cexpr
, tstmt
, estmt
;
4655 gfc_ss
*css
, *tdss
, *tsss
;
4656 gfc_se cse
, tdse
, tsse
, edse
, esse
;
4661 /* Allow the scalarizer to workshare simple where loops. */
4662 if (ompws_flags
& OMPWS_WORKSHARE_FLAG
)
4663 ompws_flags
|= OMPWS_SCALARIZER_WS
;
4665 cond
= cblock
->expr1
;
4666 tdst
= cblock
->next
->expr1
;
4667 tsrc
= cblock
->next
->expr2
;
4668 edst
= eblock
? eblock
->next
->expr1
: NULL
;
4669 esrc
= eblock
? eblock
->next
->expr2
: NULL
;
4671 gfc_start_block (&block
);
4672 gfc_init_loopinfo (&loop
);
4674 /* Handle the condition. */
4675 gfc_init_se (&cse
, NULL
);
4676 css
= gfc_walk_expr (cond
);
4677 gfc_add_ss_to_loop (&loop
, css
);
4679 /* Handle the then-clause. */
4680 gfc_init_se (&tdse
, NULL
);
4681 gfc_init_se (&tsse
, NULL
);
4682 tdss
= gfc_walk_expr (tdst
);
4683 tsss
= gfc_walk_expr (tsrc
);
4684 if (tsss
== gfc_ss_terminator
)
4686 tsss
= gfc_get_scalar_ss (gfc_ss_terminator
, tsrc
);
4687 tsss
->info
->where
= 1;
4689 gfc_add_ss_to_loop (&loop
, tdss
);
4690 gfc_add_ss_to_loop (&loop
, tsss
);
4694 /* Handle the else clause. */
4695 gfc_init_se (&edse
, NULL
);
4696 gfc_init_se (&esse
, NULL
);
4697 edss
= gfc_walk_expr (edst
);
4698 esss
= gfc_walk_expr (esrc
);
4699 if (esss
== gfc_ss_terminator
)
4701 esss
= gfc_get_scalar_ss (gfc_ss_terminator
, esrc
);
4702 esss
->info
->where
= 1;
4704 gfc_add_ss_to_loop (&loop
, edss
);
4705 gfc_add_ss_to_loop (&loop
, esss
);
4708 gfc_conv_ss_startstride (&loop
);
4709 gfc_conv_loop_setup (&loop
, &tdst
->where
);
4711 gfc_mark_ss_chain_used (css
, 1);
4712 gfc_mark_ss_chain_used (tdss
, 1);
4713 gfc_mark_ss_chain_used (tsss
, 1);
4716 gfc_mark_ss_chain_used (edss
, 1);
4717 gfc_mark_ss_chain_used (esss
, 1);
4720 gfc_start_scalarized_body (&loop
, &body
);
4722 gfc_copy_loopinfo_to_se (&cse
, &loop
);
4723 gfc_copy_loopinfo_to_se (&tdse
, &loop
);
4724 gfc_copy_loopinfo_to_se (&tsse
, &loop
);
4730 gfc_copy_loopinfo_to_se (&edse
, &loop
);
4731 gfc_copy_loopinfo_to_se (&esse
, &loop
);
4736 gfc_conv_expr (&cse
, cond
);
4737 gfc_add_block_to_block (&body
, &cse
.pre
);
4740 gfc_conv_expr (&tsse
, tsrc
);
4741 if (tdss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4742 gfc_conv_tmp_array_ref (&tdse
);
4744 gfc_conv_expr (&tdse
, tdst
);
4748 gfc_conv_expr (&esse
, esrc
);
4749 if (edss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4750 gfc_conv_tmp_array_ref (&edse
);
4752 gfc_conv_expr (&edse
, edst
);
4755 tstmt
= gfc_trans_scalar_assign (&tdse
, &tsse
, tdst
->ts
, false, false, true);
4756 estmt
= eblock
? gfc_trans_scalar_assign (&edse
, &esse
, edst
->ts
, false,
4758 : build_empty_stmt (input_location
);
4759 tmp
= build3_v (COND_EXPR
, cexpr
, tstmt
, estmt
);
4760 gfc_add_expr_to_block (&body
, tmp
);
4761 gfc_add_block_to_block (&body
, &cse
.post
);
4763 gfc_trans_scalarizing_loops (&loop
, &body
);
4764 gfc_add_block_to_block (&block
, &loop
.pre
);
4765 gfc_add_block_to_block (&block
, &loop
.post
);
4766 gfc_cleanup_loop (&loop
);
4768 return gfc_finish_block (&block
);
4771 /* As the WHERE or WHERE construct statement can be nested, we call
4772 gfc_trans_where_2 to do the translation, and pass the initial
4773 NULL values for both the control mask and the pending control mask. */
4776 gfc_trans_where (gfc_code
* code
)
4782 cblock
= code
->block
;
4784 && cblock
->next
->op
== EXEC_ASSIGN
4785 && !cblock
->next
->next
)
4787 eblock
= cblock
->block
;
4790 /* A simple "WHERE (cond) x = y" statement or block is
4791 dependence free if cond is not dependent upon writing x,
4792 and the source y is unaffected by the destination x. */
4793 if (!gfc_check_dependency (cblock
->next
->expr1
,
4795 && !gfc_check_dependency (cblock
->next
->expr1
,
4796 cblock
->next
->expr2
, 0))
4797 return gfc_trans_where_3 (cblock
, NULL
);
4799 else if (!eblock
->expr1
4802 && eblock
->next
->op
== EXEC_ASSIGN
4803 && !eblock
->next
->next
)
4805 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4806 block is dependence free if cond is not dependent on writes
4807 to x1 and x2, y1 is not dependent on writes to x2, and y2
4808 is not dependent on writes to x1, and both y's are not
4809 dependent upon their own x's. In addition to this, the
4810 final two dependency checks below exclude all but the same
4811 array reference if the where and elswhere destinations
4812 are the same. In short, this is VERY conservative and this
4813 is needed because the two loops, required by the standard
4814 are coalesced in gfc_trans_where_3. */
4815 if (!gfc_check_dependency (cblock
->next
->expr1
,
4817 && !gfc_check_dependency (eblock
->next
->expr1
,
4819 && !gfc_check_dependency (cblock
->next
->expr1
,
4820 eblock
->next
->expr2
, 1)
4821 && !gfc_check_dependency (eblock
->next
->expr1
,
4822 cblock
->next
->expr2
, 1)
4823 && !gfc_check_dependency (cblock
->next
->expr1
,
4824 cblock
->next
->expr2
, 1)
4825 && !gfc_check_dependency (eblock
->next
->expr1
,
4826 eblock
->next
->expr2
, 1)
4827 && !gfc_check_dependency (cblock
->next
->expr1
,
4828 eblock
->next
->expr1
, 0)
4829 && !gfc_check_dependency (eblock
->next
->expr1
,
4830 cblock
->next
->expr1
, 0))
4831 return gfc_trans_where_3 (cblock
, eblock
);
4835 gfc_start_block (&block
);
4837 gfc_trans_where_2 (code
, NULL
, false, NULL
, &block
);
4839 return gfc_finish_block (&block
);
4843 /* CYCLE a DO loop. The label decl has already been created by
4844 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4845 node at the head of the loop. We must mark the label as used. */
4848 gfc_trans_cycle (gfc_code
* code
)
4852 cycle_label
= code
->ext
.which_construct
->cycle_label
;
4853 gcc_assert (cycle_label
);
4855 TREE_USED (cycle_label
) = 1;
4856 return build1_v (GOTO_EXPR
, cycle_label
);
4860 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4861 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4865 gfc_trans_exit (gfc_code
* code
)
4869 exit_label
= code
->ext
.which_construct
->exit_label
;
4870 gcc_assert (exit_label
);
4872 TREE_USED (exit_label
) = 1;
4873 return build1_v (GOTO_EXPR
, exit_label
);
4877 /* Translate the ALLOCATE statement. */
/* Translate ALLOCATE into GENERIC.  NOTE(review): this region was
   damaged during extraction -- the embedded original line numbers are
   non-contiguous (e.g. 4880 -> 4902, 5007 -> 5012), so declarations,
   braces and whole statements are missing.  Do NOT attempt to compile
   this text; recover the full function body from the upstream GCC
   fortran/trans-stmt.c before building.  */
4880 gfc_trans_allocate (gfc_code
* code
)
4902 tree memsize
= NULL_TREE
;
4903 tree classexpr
= NULL_TREE
;
4905 if (!code
->ext
.alloc
.list
)
4908 stat
= tmp
= memsz
= NULL_TREE
;
4909 label_errmsg
= label_finish
= errmsg
= errlen
= NULL_TREE
;
4911 gfc_init_block (&block
);
4912 gfc_init_block (&post
);
4914 /* STAT= (and maybe ERRMSG=) is present. */
4918 tree gfc_int4_type_node
= gfc_get_int_type (4);
4919 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
4921 /* ERRMSG= only makes sense with STAT=. */
4924 gfc_init_se (&se
, NULL
);
4925 se
.want_pointer
= 1;
4926 gfc_conv_expr_lhs (&se
, code
->expr2
);
4928 errlen
= se
.string_length
;
4932 errmsg
= null_pointer_node
;
4933 errlen
= build_int_cst (gfc_charlen_type_node
, 0);
4936 /* GOTO destinations. */
4937 label_errmsg
= gfc_build_label_decl (NULL_TREE
);
4938 label_finish
= gfc_build_label_decl (NULL_TREE
);
4939 TREE_USED (label_finish
) = 0;
/* Translate each object in the ALLOCATE list (code->ext.alloc.list).  */
4945 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
4947 expr
= gfc_copy_expr (al
->expr
);
4949 if (expr
->ts
.type
== BT_CLASS
)
4950 gfc_add_data_component (expr
);
4952 gfc_init_se (&se
, NULL
);
4954 se
.want_pointer
= 1;
4955 se
.descriptor_only
= 1;
4956 gfc_conv_expr (&se
, expr
);
4958 /* Evaluate expr3 just once if not a variable. */
4959 if (al
== code
->ext
.alloc
.list
4960 && al
->expr
->ts
.type
== BT_CLASS
4962 && code
->expr3
->ts
.type
== BT_CLASS
4963 && code
->expr3
->expr_type
!= EXPR_VARIABLE
)
4965 gfc_init_se (&se_sz
, NULL
);
4966 gfc_conv_expr_reference (&se_sz
, code
->expr3
);
4967 gfc_conv_class_to_class (&se_sz
, code
->expr3
,
4968 code
->expr3
->ts
, false, true, false, false);
4969 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4970 gfc_add_block_to_block (&se
.post
, &se_sz
.post
);
4971 classexpr
= build_fold_indirect_ref_loc (input_location
,
4973 classexpr
= gfc_evaluate_now (classexpr
, &se
.pre
);
4974 memsize
= gfc_vtable_size_get (classexpr
);
4975 memsize
= fold_convert (sizetype
, memsize
);
4979 class_expr
= classexpr
;
4982 if (!gfc_array_allocate (&se
, expr
, stat
, errmsg
, errlen
, label_finish
,
4983 memsz
, &nelems
, code
->expr3
, &code
->ext
.alloc
.ts
))
4985 bool unlimited_char
;
4987 unlimited_char
= UNLIMITED_POLY (al
->expr
)
4988 && ((code
->expr3
&& code
->expr3
->ts
.type
== BT_CHARACTER
)
4989 || (code
->ext
.alloc
.ts
.type
== BT_CHARACTER
4990 && code
->ext
.alloc
.ts
.u
.cl
4991 && code
->ext
.alloc
.ts
.u
.cl
->length
));
4993 /* A scalar or derived type. */
4995 /* Determine allocate size. */
4996 if (al
->expr
->ts
.type
== BT_CLASS
4999 && memsz
== NULL_TREE
)
5001 if (code
->expr3
->ts
.type
== BT_CLASS
)
5003 sz
= gfc_copy_expr (code
->expr3
);
5004 gfc_add_vptr_component (sz
);
5005 gfc_add_size_component (sz
);
5006 gfc_init_se (&se_sz
, NULL
);
5007 gfc_conv_expr (&se_sz
, sz
);
5012 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->expr3
->ts
));
5014 else if (((al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
5015 || unlimited_char
) && code
->expr3
)
5017 if (!code
->expr3
->ts
.u
.cl
->backend_decl
)
5019 /* Convert and use the length expression. */
5020 gfc_init_se (&se_sz
, NULL
);
5021 if (code
->expr3
->expr_type
== EXPR_VARIABLE
5022 || code
->expr3
->expr_type
== EXPR_CONSTANT
)
5024 gfc_conv_expr (&se_sz
, code
->expr3
);
5025 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5027 = gfc_evaluate_now (se_sz
.string_length
, &se
.pre
);
5028 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5029 memsz
= se_sz
.string_length
;
5031 else if (code
->expr3
->mold
5032 && code
->expr3
->ts
.u
.cl
5033 && code
->expr3
->ts
.u
.cl
->length
)
5035 gfc_conv_expr (&se_sz
, code
->expr3
->ts
.u
.cl
->length
);
5036 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5037 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5038 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5043 /* This is would be inefficient and possibly could
5044 generate wrong code if the result were not stored
5046 if (slen3
== NULL_TREE
)
5048 gfc_conv_expr (&se_sz
, code
->expr3
);
5049 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5050 expr3
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5051 gfc_add_block_to_block (&post
, &se_sz
.post
);
5052 slen3
= gfc_evaluate_now (se_sz
.string_length
,
5059 /* Otherwise use the stored string length. */
5060 memsz
= code
->expr3
->ts
.u
.cl
->backend_decl
;
5061 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
5063 /* Store the string length. */
5064 if (tmp
&& TREE_CODE (tmp
) == VAR_DECL
)
5065 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
5067 else if (al
->expr
->ts
.type
== BT_CHARACTER
5068 && al
->expr
->ts
.deferred
&& se
.string_length
)
5069 gfc_add_modify (&se
.pre
, se
.string_length
,
5070 fold_convert (TREE_TYPE (se
.string_length
),
5073 /* Convert to size in bytes, using the character KIND. */
5075 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->expr3
->ts
));
5077 tmp
= TREE_TYPE (gfc_typenode_for_spec (&al
->expr
->ts
));
5078 tmp
= TYPE_SIZE_UNIT (tmp
);
5079 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5080 TREE_TYPE (tmp
), tmp
,
5081 fold_convert (TREE_TYPE (tmp
), memsz
));
5083 else if ((al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
5086 gcc_assert (code
->ext
.alloc
.ts
.u
.cl
&& code
->ext
.alloc
.ts
.u
.cl
->length
);
5087 gfc_init_se (&se_sz
, NULL
);
5088 gfc_conv_expr (&se_sz
, code
->ext
.alloc
.ts
.u
.cl
->length
);
5089 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5090 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5091 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5092 /* Store the string length. */
5093 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
5094 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
5096 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5097 tmp
= TYPE_SIZE_UNIT (tmp
);
5098 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5099 TREE_TYPE (tmp
), tmp
,
5100 fold_convert (TREE_TYPE (se_sz
.expr
),
5103 else if (code
->ext
.alloc
.ts
.type
!= BT_UNKNOWN
)
5104 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5105 else if (memsz
== NULL_TREE
)
5106 memsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se
.expr
)));
5108 if (expr
->ts
.type
== BT_CHARACTER
&& memsz
== NULL_TREE
)
5110 memsz
= se
.string_length
;
5112 /* Convert to size in bytes, using the character KIND. */
5113 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5114 tmp
= TYPE_SIZE_UNIT (tmp
);
5115 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5116 TREE_TYPE (tmp
), tmp
,
5117 fold_convert (TREE_TYPE (tmp
), memsz
));
5120 /* Allocate - for non-pointers with re-alloc checking. */
5121 if (gfc_expr_attr (expr
).allocatable
)
5122 gfc_allocate_allocatable (&se
.pre
, se
.expr
, memsz
, NULL_TREE
,
5123 stat
, errmsg
, errlen
, label_finish
, expr
);
5125 gfc_allocate_using_malloc (&se
.pre
, se
.expr
, memsz
, stat
);
5127 if (al
->expr
->ts
.type
== BT_DERIVED
5128 && expr
->ts
.u
.derived
->attr
.alloc_comp
)
5130 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
5131 tmp
= gfc_nullify_alloc_comp (expr
->ts
.u
.derived
, tmp
, 0);
5132 gfc_add_expr_to_block (&se
.pre
, tmp
);
5136 gfc_add_block_to_block (&block
, &se
.pre
);
5138 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
5141 tmp
= build1_v (GOTO_EXPR
, label_errmsg
);
5142 parm
= fold_build2_loc (input_location
, NE_EXPR
,
5143 boolean_type_node
, stat
,
5144 build_int_cst (TREE_TYPE (stat
), 0));
5145 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5146 gfc_unlikely (parm
, PRED_FORTRAN_FAIL_ALLOC
),
5147 tmp
, build_empty_stmt (input_location
));
5148 gfc_add_expr_to_block (&block
, tmp
);
5151 /* We need the vptr of CLASS objects to be initialized. */
5152 e
= gfc_copy_expr (al
->expr
);
5153 if (e
->ts
.type
== BT_CLASS
)
5155 gfc_expr
*lhs
, *rhs
;
5157 gfc_ref
*ref
, *class_ref
, *tail
;
5159 /* Find the last class reference. */
5161 for (ref
= e
->ref
; ref
; ref
= ref
->next
)
5163 if (ref
->type
== REF_COMPONENT
5164 && ref
->u
.c
.component
->ts
.type
== BT_CLASS
)
5167 if (ref
->next
== NULL
)
5171 /* Remove and store all subsequent references after the
5175 tail
= class_ref
->next
;
5176 class_ref
->next
= NULL
;
5184 lhs
= gfc_expr_to_initialize (e
);
5185 gfc_add_vptr_component (lhs
);
5187 /* Remove the _vptr component and restore the original tail
5191 gfc_free_ref_list (class_ref
->next
);
5192 class_ref
->next
= tail
;
5196 gfc_free_ref_list (e
->ref
);
5200 if (class_expr
!= NULL_TREE
)
5202 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5203 gfc_init_se (&lse
, NULL
);
5204 lse
.want_pointer
= 1;
5205 gfc_conv_expr (&lse
, lhs
);
5206 tmp
= gfc_class_vptr_get (class_expr
);
5207 gfc_add_modify (&block
, lse
.expr
,
5208 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
5210 else if (code
->expr3
&& code
->expr3
->ts
.type
== BT_CLASS
)
5212 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5213 rhs
= gfc_copy_expr (code
->expr3
);
5214 gfc_add_vptr_component (rhs
);
5215 tmp
= gfc_trans_pointer_assignment (lhs
, rhs
);
5216 gfc_add_expr_to_block (&block
, tmp
);
5217 gfc_free_expr (rhs
);
5218 rhs
= gfc_expr_to_initialize (e
);
5222 /* VPTR is fixed at compile time. */
5226 ts
= &code
->expr3
->ts
;
5227 else if (e
->ts
.type
== BT_DERIVED
)
5229 else if (code
->ext
.alloc
.ts
.type
== BT_DERIVED
|| UNLIMITED_POLY (al
->expr
))
5230 ts
= &code
->ext
.alloc
.ts
;
5231 else if (e
->ts
.type
== BT_CLASS
)
5232 ts
= &CLASS_DATA (e
)->ts
;
5236 if (ts
->type
== BT_DERIVED
|| UNLIMITED_POLY (e
))
5238 vtab
= gfc_find_vtab (ts
);
5240 gfc_init_se (&lse
, NULL
);
5241 lse
.want_pointer
= 1;
5242 gfc_conv_expr (&lse
, lhs
);
5243 tmp
= gfc_build_addr_expr (NULL_TREE
,
5244 gfc_get_symbol_decl (vtab
));
5245 gfc_add_modify (&block
, lse
.expr
,
5246 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
5249 gfc_free_expr (lhs
);
/* NOTE(review): SOURCE=/MOLD= initialization follows; the exact
   surrounding control flow is partly missing in this extraction.  */
5254 if (code
->expr3
&& !code
->expr3
->mold
)
5256 /* Initialization via SOURCE block
5257 (or static default initializer). */
5258 gfc_expr
*rhs
= gfc_copy_expr (code
->expr3
);
5259 if (class_expr
!= NULL_TREE
)
5262 to
= TREE_OPERAND (se
.expr
, 0);
5264 tmp
= gfc_copy_class_to_class (class_expr
, to
, nelems
);
5266 else if (al
->expr
->ts
.type
== BT_CLASS
)
5268 gfc_actual_arglist
*actual
;
5271 gfc_ref
*ref
, *dataref
;
5273 /* Do a polymorphic deep copy. */
5274 actual
= gfc_get_actual_arglist ();
5275 actual
->expr
= gfc_copy_expr (rhs
);
5276 if (rhs
->ts
.type
== BT_CLASS
)
5277 gfc_add_data_component (actual
->expr
);
5278 actual
->next
= gfc_get_actual_arglist ();
5279 actual
->next
->expr
= gfc_copy_expr (al
->expr
);
5280 actual
->next
->expr
->ts
.type
= BT_CLASS
;
5281 gfc_add_data_component (actual
->next
->expr
);
5284 /* Make sure we go up through the reference chain to
5285 the _data reference, where the arrayspec is found. */
5286 for (ref
= actual
->next
->expr
->ref
; ref
; ref
= ref
->next
)
5287 if (ref
->type
== REF_COMPONENT
5288 && strcmp (ref
->u
.c
.component
->name
, "_data") == 0)
5291 if (dataref
&& dataref
->u
.c
.component
->as
)
5295 gfc_ref
*ref
= dataref
->next
;
5296 ref
->u
.ar
.type
= AR_SECTION
;
5297 /* We have to set up the array reference to give ranges
5298 in all dimensions and ensure that the end and stride
5299 are set so that the copy can be scalarized. */
5301 for (; dim
< dataref
->u
.c
.component
->as
->rank
; dim
++)
5303 ref
->u
.ar
.dimen_type
[dim
] = DIMEN_RANGE
;
5304 if (ref
->u
.ar
.end
[dim
] == NULL
)
5306 ref
->u
.ar
.end
[dim
] = ref
->u
.ar
.start
[dim
];
5307 temp
= gfc_get_int_expr (gfc_default_integer_kind
,
5308 &al
->expr
->where
, 1);
5309 ref
->u
.ar
.start
[dim
] = temp
;
5311 temp
= gfc_subtract (gfc_copy_expr (ref
->u
.ar
.end
[dim
]),
5312 gfc_copy_expr (ref
->u
.ar
.start
[dim
]));
5313 temp
= gfc_add (gfc_get_int_expr (gfc_default_integer_kind
,
5314 &al
->expr
->where
, 1),
5318 if (rhs
->ts
.type
== BT_CLASS
)
5320 ppc
= gfc_copy_expr (rhs
);
5321 gfc_add_vptr_component (ppc
);
5324 ppc
= gfc_lval_expr_from_sym (gfc_find_vtab (&rhs
->ts
));
5325 gfc_add_component_ref (ppc
, "_copy");
5327 ppc_code
= gfc_get_code (EXEC_CALL
);
5328 ppc_code
->resolved_sym
= ppc
->symtree
->n
.sym
;
5329 /* Although '_copy' is set to be elemental in class.c, it is
5330 not staying that way. Find out why, sometime.... */
5331 ppc_code
->resolved_sym
->attr
.elemental
= 1;
5332 ppc_code
->ext
.actual
= actual
;
5333 ppc_code
->expr1
= ppc
;
5334 /* Since '_copy' is elemental, the scalarizer will take care
5335 of arrays in gfc_trans_call. */
5336 tmp
= gfc_trans_call (ppc_code
, true, NULL
, NULL
, false);
5337 gfc_free_statements (ppc_code
);
5339 else if (expr3
!= NULL_TREE
)
5341 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
5342 gfc_trans_string_copy (&block
, slen3
, tmp
, code
->expr3
->ts
.kind
,
5343 slen3
, expr3
, code
->expr3
->ts
.kind
);
5348 /* Switch off automatic reallocation since we have just done
5350 int realloc_lhs
= flag_realloc_lhs
;
5351 flag_realloc_lhs
= 0;
5352 tmp
= gfc_trans_assignment (gfc_expr_to_initialize (expr
),
5354 flag_realloc_lhs
= realloc_lhs
;
5356 gfc_free_expr (rhs
);
5357 gfc_add_expr_to_block (&block
, tmp
);
5359 else if (code
->expr3
&& code
->expr3
->mold
5360 && code
->expr3
->ts
.type
== BT_CLASS
)
5362 /* Since the _vptr has already been assigned to the allocate
5363 object, we can use gfc_copy_class_to_class in its
5364 initialization mode. */
5365 tmp
= TREE_OPERAND (se
.expr
, 0);
5366 tmp
= gfc_copy_class_to_class (NULL_TREE
, tmp
, nelems
);
5367 gfc_add_expr_to_block (&block
, tmp
);
5370 gfc_free_expr (expr
);
5376 tmp
= build1_v (LABEL_EXPR
, label_errmsg
);
5377 gfc_add_expr_to_block (&block
, tmp
);
5380 /* ERRMSG - only useful if STAT is present. */
5381 if (code
->expr1
&& code
->expr2
)
5383 const char *msg
= "Attempt to allocate an allocated object";
5384 tree slen
, dlen
, errmsg_str
;
5385 stmtblock_t errmsg_block
;
5387 gfc_init_block (&errmsg_block
);
5389 errmsg_str
= gfc_create_var (pchar_type_node
, "ERRMSG");
5390 gfc_add_modify (&errmsg_block
, errmsg_str
,
5391 gfc_build_addr_expr (pchar_type_node
,
5392 gfc_build_localized_cstring_const (msg
)));
5394 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5395 dlen
= gfc_get_expr_charlen (code
->expr2
);
5396 slen
= fold_build2_loc (input_location
, MIN_EXPR
, TREE_TYPE (slen
), dlen
,
5399 gfc_trans_string_copy (&errmsg_block
, dlen
, errmsg
, code
->expr2
->ts
.kind
,
5400 slen
, errmsg_str
, gfc_default_character_kind
);
5401 dlen
= gfc_finish_block (&errmsg_block
);
5403 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5404 build_int_cst (TREE_TYPE (stat
), 0));
5406 tmp
= build3_v (COND_EXPR
, tmp
, dlen
, build_empty_stmt (input_location
));
5408 gfc_add_expr_to_block (&block
, tmp
);
5414 if (TREE_USED (label_finish
))
5416 tmp
= build1_v (LABEL_EXPR
, label_finish
);
5417 gfc_add_expr_to_block (&block
, tmp
);
5420 gfc_init_se (&se
, NULL
);
5421 gfc_conv_expr_lhs (&se
, code
->expr1
);
5422 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
5423 gfc_add_modify (&block
, se
.expr
, tmp
);
5426 gfc_add_block_to_block (&block
, &se
.post
);
5427 gfc_add_block_to_block (&block
, &post
);
5429 return gfc_finish_block (&block
);
5433 /* Translate a DEALLOCATE statement. */
/* Translate DEALLOCATE into GENERIC.  NOTE(review): extraction dropped
   interior lines here too (the embedded original numbering is
   non-contiguous, e.g. 5441 -> 5444); restore the complete body from
   upstream GCC fortran/trans-stmt.c before building.  */
5436 gfc_trans_deallocate (gfc_code
*code
)
5440 tree apstat
, pstat
, stat
, errmsg
, errlen
, tmp
;
5441 tree label_finish
, label_errmsg
;
5444 pstat
= apstat
= stat
= errmsg
= errlen
= tmp
= NULL_TREE
;
5445 label_finish
= label_errmsg
= NULL_TREE
;
5447 gfc_start_block (&block
);
5449 /* Count the number of failed deallocations. If deallocate() was
5450 called with STAT= , then set STAT to the count. If deallocate
5451 was called with ERRMSG, then set ERRMG to a string. */
5454 tree gfc_int4_type_node
= gfc_get_int_type (4);
5456 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
5457 pstat
= gfc_build_addr_expr (NULL_TREE
, stat
);
5459 /* GOTO destinations. */
5460 label_errmsg
= gfc_build_label_decl (NULL_TREE
);
5461 label_finish
= gfc_build_label_decl (NULL_TREE
);
5462 TREE_USED (label_finish
) = 0;
5465 /* Set ERRMSG - only needed if STAT is available. */
5466 if (code
->expr1
&& code
->expr2
)
5468 gfc_init_se (&se
, NULL
);
5469 se
.want_pointer
= 1;
5470 gfc_conv_expr_lhs (&se
, code
->expr2
);
5472 errlen
= se
.string_length
;
/* Translate each object in the DEALLOCATE list.  */
5475 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
5477 gfc_expr
*expr
= gfc_copy_expr (al
->expr
);
5478 gcc_assert (expr
->expr_type
== EXPR_VARIABLE
);
5480 if (expr
->ts
.type
== BT_CLASS
)
5481 gfc_add_data_component (expr
);
5483 gfc_init_se (&se
, NULL
);
5484 gfc_start_block (&se
.pre
);
5486 se
.want_pointer
= 1;
5487 se
.descriptor_only
= 1;
5488 gfc_conv_expr (&se
, expr
);
5490 if (expr
->rank
|| gfc_is_coarray (expr
))
5492 if (expr
->ts
.type
== BT_DERIVED
&& expr
->ts
.u
.derived
->attr
.alloc_comp
5493 && !gfc_is_finalizable (expr
->ts
.u
.derived
, NULL
))
5496 gfc_ref
*last
= NULL
;
5497 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
5498 if (ref
->type
== REF_COMPONENT
)
5501 /* Do not deallocate the components of a derived type
5502 ultimate pointer component. */
5503 if (!(last
&& last
->u
.c
.component
->attr
.pointer
)
5504 && !(!last
&& expr
->symtree
->n
.sym
->attr
.pointer
))
5506 tmp
= gfc_deallocate_alloc_comp (expr
->ts
.u
.derived
, se
.expr
,
5508 gfc_add_expr_to_block (&se
.pre
, tmp
);
5511 tmp
= gfc_array_deallocate (se
.expr
, pstat
, errmsg
, errlen
,
5512 label_finish
, expr
);
5513 gfc_add_expr_to_block (&se
.pre
, tmp
);
5514 if (al
->expr
->ts
.type
== BT_CLASS
)
5515 gfc_reset_vptr (&se
.pre
, al
->expr
);
5519 tmp
= gfc_deallocate_scalar_with_status (se
.expr
, pstat
, false,
5520 al
->expr
, al
->expr
->ts
);
5521 gfc_add_expr_to_block (&se
.pre
, tmp
);
5523 /* Set to zero after deallocation. */
5524 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
5526 build_int_cst (TREE_TYPE (se
.expr
), 0));
5527 gfc_add_expr_to_block (&se
.pre
, tmp
);
5529 if (al
->expr
->ts
.type
== BT_CLASS
)
5530 gfc_reset_vptr (&se
.pre
, al
->expr
);
5537 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5538 build_int_cst (TREE_TYPE (stat
), 0));
5539 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5540 gfc_unlikely (cond
, PRED_FORTRAN_FAIL_ALLOC
),
5541 build1_v (GOTO_EXPR
, label_errmsg
),
5542 build_empty_stmt (input_location
));
5543 gfc_add_expr_to_block (&se
.pre
, tmp
);
5546 tmp
= gfc_finish_block (&se
.pre
);
5547 gfc_add_expr_to_block (&block
, tmp
);
5548 gfc_free_expr (expr
);
5553 tmp
= build1_v (LABEL_EXPR
, label_errmsg
);
5554 gfc_add_expr_to_block (&block
, tmp
);
5557 /* Set ERRMSG - only needed if STAT is available. */
5558 if (code
->expr1
&& code
->expr2
)
5560 const char *msg
= "Attempt to deallocate an unallocated object";
5561 stmtblock_t errmsg_block
;
5562 tree errmsg_str
, slen
, dlen
, cond
;
5564 gfc_init_block (&errmsg_block
);
5566 errmsg_str
= gfc_create_var (pchar_type_node
, "ERRMSG");
5567 gfc_add_modify (&errmsg_block
, errmsg_str
,
5568 gfc_build_addr_expr (pchar_type_node
,
5569 gfc_build_localized_cstring_const (msg
)));
5570 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5571 dlen
= gfc_get_expr_charlen (code
->expr2
);
5573 gfc_trans_string_copy (&errmsg_block
, dlen
, errmsg
, code
->expr2
->ts
.kind
,
5574 slen
, errmsg_str
, gfc_default_character_kind
);
5575 tmp
= gfc_finish_block (&errmsg_block
);
5577 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5578 build_int_cst (TREE_TYPE (stat
), 0));
5579 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5580 gfc_unlikely (cond
, PRED_FORTRAN_FAIL_ALLOC
), tmp
,
5581 build_empty_stmt (input_location
));
5583 gfc_add_expr_to_block (&block
, tmp
);
5586 if (code
->expr1
&& TREE_USED (label_finish
))
5588 tmp
= build1_v (LABEL_EXPR
, label_finish
);
5589 gfc_add_expr_to_block (&block
, tmp
);
5595 gfc_init_se (&se
, NULL
);
5596 gfc_conv_expr_lhs (&se
, code
->expr1
);
5597 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
5598 gfc_add_modify (&block
, se
.expr
, tmp
);
5601 return gfc_finish_block (&block
);
5604 #include "gt-fortran-trans-stmt.h"