/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "stringpool.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "dependency.h"
typedef struct iter_info
  struct iter_info *next;

typedef struct forall_info
  struct forall_info *prev_nest;

static void gfc_trans_where_2 (gfc_code *, tree, bool,
			       forall_info *, stmtblock_t *);
/* Translate a F95 label number to a LABEL_EXPR.  */

gfc_trans_label_here (gfc_code * code)
  return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
/* Given a variable expression which has been ASSIGNed to, find the decl
   containing the auxiliary variables.  For variables in common blocks this  */

gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
  gcc_assert (expr->symtree->n.sym->attr.assign == 1);
  gfc_conv_expr (se, expr);
  /* Deals with variable in common block.  Get the field declaration.  */
  if (TREE_CODE (se->expr) == COMPONENT_REF)
    se->expr = TREE_OPERAND (se->expr, 1);
  /* Deals with dummy argument.  Get the parameter declaration.  */
  else if (TREE_CODE (se->expr) == INDIRECT_REF)
    se->expr = TREE_OPERAND (se->expr, 0);
/* Translate a label assignment statement.  */

gfc_trans_label_assign (gfc_code * code)
  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr1);

  len = GFC_DECL_STRING_LEN (se.expr);
  addr = GFC_DECL_ASSIGN_ADDR (se.expr);

  label_tree = gfc_get_label_decl (code->label1);

  if (code->label1->defined == ST_LABEL_TARGET
      || code->label1->defined == ST_LABEL_DO_TARGET)
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
      len_tree = integer_minus_one_node;

      gfc_expr *format = code->label1->format;

      label_len = format->value.character.length;
      len_tree = build_int_cst (gfc_charlen_type_node, label_len);
      label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
						format->value.character.string);
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);

  gfc_add_modify (&se.pre, len, len_tree);
  gfc_add_modify (&se.pre, addr, label_tree);

  return gfc_finish_block (&se.pre);
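
/* Illustrative sketch (assumed example, not from the original source):
   for

     ASSIGN 100 TO ivar

   where label 100 is a branch target, the code above stores the label's
   address in the auxiliary ASSIGN_ADDR variable and sets the associated
   string length to -1; for a FORMAT label it instead stores the format
   string and its real length, so a later assigned GOTO or I/O statement
   can tell the two cases apart.  */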
/* Translate a GOTO statement.  */

gfc_trans_goto (gfc_code * code)
  locus loc = code->loc;

  if (code->label1 != NULL)
    return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));

  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr1);
  tmp = GFC_DECL_STRING_LEN (se.expr);
  tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
			 build_int_cst (TREE_TYPE (tmp), -1));
  gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
			   "Assigned label is not a target label");

  assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);

  /* We're going to ignore a label list.  It does not really change the
     statement's semantics (because it is just a further restriction on
     what's legal code); before, we were comparing label addresses here, but
     that's a very fragile business and may break with optimization.  So  */

  target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
  gfc_add_expr_to_block (&se.pre, target);
  return gfc_finish_block (&se.pre);
/* Translate an ENTRY statement.  Just adds a label for this entry point.  */

gfc_trans_entry (gfc_code * code)
  return build1_v (LABEL_EXPR, code->ext.entry->label);
/* Replace a gfc_ss structure by another both in the gfc_se struct
   and the gfc_loopinfo struct.  This is used in gfc_conv_elemental_dependencies
   to replace a variable ss by the corresponding temporary.  */

replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
  gfc_ss **sess, **loopss;

  /* The old_ss is a ss for a single variable.  */
  gcc_assert (old_ss->info->type == GFC_SS_SECTION);

  for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
  gcc_assert (*sess != gfc_ss_terminator);

  new_ss->next = old_ss->next;

  for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
       loopss = &((*loopss)->loop_chain))
    if (*loopss == old_ss)
  gcc_assert (*loopss != gfc_ss_terminator);

  new_ss->loop_chain = old_ss->loop_chain;
  new_ss->loop = old_ss->loop;

  gfc_free_ss (old_ss);
/* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
   elemental subroutines.  Make temporaries for output arguments if any such
   dependencies are found.  Output arguments are chosen because internal_unpack
   can be used, as is, to copy the result back to the variable.  */
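
/* Illustrative sketch (assumed example, not from the original source):
   a dependency that forces a temporary would be

     CALL elem_sub (a(1:n), a(n:1:-1))

   where an INTENT(OUT) (or INOUT) actual argument overlaps an INTENT(IN)
   one; the temporary receives the elemental result and internal_unpack
   copies it back into the variable afterwards.  */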
gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
				 gfc_symbol * sym, gfc_actual_arglist * arg,
				 gfc_dep_check check_variable)
  gfc_actual_arglist *arg0;
  gfc_formal_arglist *formal;

  if (loopse->ss == NULL)

  formal = gfc_sym_get_dummy_args (sym);

  /* Loop over all the arguments testing for dependencies.  */
  for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
      /* Obtain the info structure for the current argument.  */
      for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
	if (ss->info->expr == e)

      /* If there is a dependency, create a temporary and use it
	 instead of the variable.  */
      fsym = formal ? formal->sym : NULL;
      if (e->expr_type == EXPR_VARIABLE
	  && fsym->attr.intent != INTENT_IN
	  && gfc_check_fncall_dependency (e, fsym->attr.intent,
					  sym, arg0, check_variable))
	  tree initial, temptype;
	  stmtblock_t temp_post;

	  tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
	  gfc_mark_ss_chain_used (tmp_ss, 1);
	  tmp_ss->info->expr = ss->info->expr;
	  replace_ss (loopse, ss, tmp_ss);

	  /* Obtain the argument descriptor for unpacking.  */
	  gfc_init_se (&parmse, NULL);
	  parmse.want_pointer = 1;
	  gfc_conv_expr_descriptor (&parmse, e);
	  gfc_add_block_to_block (&se->pre, &parmse.pre);

	  /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
	     initialize the array temporary with a copy of the values.  */
	  if (fsym->attr.intent == INTENT_INOUT
	      || (fsym->ts.type == BT_DERIVED
		  && fsym->attr.intent == INTENT_OUT))
	    initial = parmse.expr;
	  /* For class expressions, we always initialize with the copy of  */
	  else if (e->ts.type == BT_CLASS)
	    initial = parmse.expr;

	  if (e->ts.type != BT_CLASS)
	      /* Find the type of the temporary to create; we don't use the type
		 of e itself as this breaks for subcomponent-references in e
		 (where the type of e is that of the final reference, but
		 parmse.expr's type corresponds to the full derived-type).  */
	      /* TODO: Fix this somehow so we don't need a temporary of the whole
		 array but instead only the components referenced.  */
	      temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor.  */
	      gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
	      temptype = TREE_TYPE (temptype);
	      temptype = gfc_get_element_type (temptype);

	    /* For class arrays signal that the size of the dynamic type has to
	       be obtained from the vtable, using the 'initial' expression.  */
	    temptype = NULL_TREE;

	  /* Generate the temporary.  Cleaning up the temporary should be the
	     very last thing done, so we add the code to a new block and add it
	     to se->post as last instructions.  */
	  size = gfc_create_var (gfc_array_index_type, NULL);
	  data = gfc_create_var (pvoid_type_node, NULL);
	  gfc_init_block (&temp_post);
	  tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
					     temptype, initial, false, true,
					     false, &arg->expr->where);
	  gfc_add_modify (&se->pre, size, tmp);
	  tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
	  gfc_add_modify (&se->pre, data, tmp);

	  /* Update other ss' delta.  */
	  gfc_set_delta (loopse->loop);

	  /* Copy the result back using unpack.....  */
	  if (e->ts.type != BT_CLASS)
	    tmp = build_call_expr_loc (input_location,
				       gfor_fndecl_in_unpack, 2, parmse.expr, data);
	  /* ... except for class results where the copy is  */
	      tmp = build_fold_indirect_ref_loc (input_location, parmse.expr);
	      tmp = gfc_conv_descriptor_data_get (tmp);
	      tmp = build_call_expr_loc (input_location,
					 builtin_decl_explicit (BUILT_IN_MEMCPY),
					 fold_convert (size_type_node, size));
	  gfc_add_expr_to_block (&se->post, tmp);

	  /* parmse.pre is already added above.  */
	  gfc_add_block_to_block (&se->post, &parmse.post);
	  gfc_add_block_to_block (&se->post, &temp_post);
/* Get the interface symbol for the procedure corresponding to the given call.
   We can't get the procedure symbol directly as we have to handle the case
   of (deferred) type-bound procedures.  */

get_proc_ifc_for_call (gfc_code *c)
  gcc_assert (c->op == EXEC_ASSIGN_CALL || c->op == EXEC_CALL);

  sym = gfc_get_proc_ifc_for_expr (c->expr1);

  /* Fall back/last resort try.  */
    sym = c->resolved_sym;
/* Translate the CALL statement.  Builds a call to an F95 subroutine.  */

gfc_trans_call (gfc_code * code, bool dependency_check,
		tree mask, tree count1, bool invert)
  int has_alternate_specifier;
  gfc_dep_check check_variable;
  tree index = NULL_TREE;
  tree maskexpr = NULL_TREE;

  /* A CALL starts a new block because the actual arguments may have to
     be evaluated first.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  gcc_assert (code->resolved_sym);

  ss = gfc_ss_terminator;
  if (code->resolved_sym->attr.elemental)
    ss = gfc_walk_elemental_function_args (ss, code->ext.actual,
					   get_proc_ifc_for_call (code),

  /* Is not an elemental subroutine call with array valued arguments.  */
  if (ss == gfc_ss_terminator)
      /* Translate the call.  */
      has_alternate_specifier
	= gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,

      /* A subroutine without side-effect, by definition, does nothing!  */
      TREE_SIDE_EFFECTS (se.expr) = 1;

      /* Chain the pieces together and return the block.  */
      if (has_alternate_specifier)
	  gfc_code *select_code;

	  select_code = code->next;
	  gcc_assert (select_code->op == EXEC_SELECT);
	  sym = select_code->expr1->symtree->n.sym;
	  se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
	  if (sym->backend_decl == NULL)
	    sym->backend_decl = gfc_get_symbol_decl (sym);
	  gfc_add_modify (&se.pre, sym->backend_decl, se.expr);

	gfc_add_expr_to_block (&se.pre, se.expr);

      gfc_add_block_to_block (&se.pre, &se.post);

  /* An elemental subroutine call with array valued arguments has  */

      /* gfc_walk_elemental_function_args renders the ss chain in the
	 reverse order to the actual argument order.  */
      ss = gfc_reverse_ss (ss);

      /* Initialize the loop.  */
      gfc_init_se (&loopse, NULL);
      gfc_init_loopinfo (&loop);
      gfc_add_ss_to_loop (&loop, ss);

      gfc_conv_ss_startstride (&loop);
      /* TODO: gfc_conv_loop_setup generates a temporary for vector
	 subscripts.  This could be prevented in the elemental case
	 as temporaries are handled separately
	 (below in gfc_conv_elemental_dependencies).  */
      gfc_conv_loop_setup (&loop, &code->expr1->where);
      gfc_mark_ss_chain_used (ss, 1);

      /* Convert the arguments, checking for dependencies.  */
      gfc_copy_loopinfo_to_se (&loopse, &loop);

      /* For operator assignment, do dependency checking.  */
      if (dependency_check)
	check_variable = ELEM_CHECK_VARIABLE;
	check_variable = ELEM_DONT_CHECK_VARIABLE;

      gfc_init_se (&depse, NULL);
      gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
				       code->ext.actual, check_variable);

      gfc_add_block_to_block (&loop.pre, &depse.pre);
      gfc_add_block_to_block (&loop.post, &depse.post);

      /* Generate the loop body.  */
      gfc_start_scalarized_body (&loop, &body);
      gfc_init_block (&block);

	  /* Form the mask expression according to the mask.  */
	  maskexpr = gfc_build_array_ref (mask, index, NULL);
	    maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
					TREE_TYPE (maskexpr), maskexpr);

      /* Add the subroutine call to the block.  */
      gfc_conv_procedure_call (&loopse, code->resolved_sym,
			       code->ext.actual, code->expr1,

	  tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
			  build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&loopse.pre, tmp);
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type,
				 count1, gfc_index_one_node);
	  gfc_add_modify (&loopse.pre, count1, tmp);

	gfc_add_expr_to_block (&loopse.pre, loopse.expr);

      gfc_add_block_to_block (&block, &loopse.pre);
      gfc_add_block_to_block (&block, &loopse.post);

      /* Finish up the loop block and the loop.  */
      gfc_add_expr_to_block (&body, gfc_finish_block (&block));
      gfc_trans_scalarizing_loops (&loop, &body);
      gfc_add_block_to_block (&se.pre, &loop.pre);
      gfc_add_block_to_block (&se.pre, &loop.post);
      gfc_add_block_to_block (&se.pre, &se.post);
      gfc_cleanup_loop (&loop);

  return gfc_finish_block (&se.pre);
/* Translate the RETURN statement.  */

gfc_trans_return (gfc_code * code)
      /* If code->expr is not NULL, this return statement must appear
	 in a subroutine and current_fake_result_decl has already  */
      result = gfc_get_fake_result_decl (NULL, 0);
	  gfc_warning ("An alternate return at %L without a * dummy argument",
		       &code->expr1->where);
	  return gfc_generate_return ();

      /* Start a new block for this statement.  */
      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      gfc_conv_expr (&se, code->expr1);

      /* Note that the actually returned expression is a simple value and
	 does not depend on any pointers or such; thus we can clean-up with
	 se.post before returning.  */
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
			     result, fold_convert (TREE_TYPE (result),
      gfc_add_expr_to_block (&se.pre, tmp);
      gfc_add_block_to_block (&se.pre, &se.post);

      tmp = gfc_generate_return ();
      gfc_add_expr_to_block (&se.pre, tmp);
      return gfc_finish_block (&se.pre);

    return gfc_generate_return ();
/* Translate the PAUSE statement.  We have to translate this statement
   to a runtime library call.  */

gfc_trans_pause (gfc_code * code)
  tree gfc_int4_type_node = gfc_get_int_type (4);

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr1 == NULL)
      tmp = build_int_cst (gfc_int4_type_node, 0);
      tmp = build_call_expr_loc (input_location,
				 gfor_fndecl_pause_string, 2,
				 build_int_cst (pchar_type_node, 0), tmp);
  else if (code->expr1->ts.type == BT_INTEGER)
      gfc_conv_expr (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
				 gfor_fndecl_pause_numeric, 1,
				 fold_convert (gfc_int4_type_node, se.expr));

      gfc_conv_expr_reference (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
				 gfor_fndecl_pause_string, 2,
				 se.expr, se.string_length);

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
/* Translate the STOP statement.  We have to translate this statement
   to a runtime library call.  */

gfc_trans_stop (gfc_code *code, bool error_stop)
  tree gfc_int4_type_node = gfc_get_int_type (4);

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (flag_coarray == GFC_FCOARRAY_LIB && !error_stop)
      /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY.  */
      tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
      tmp = build_call_expr_loc (input_location, tmp, 0);
      gfc_add_expr_to_block (&se.pre, tmp);

      tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
      gfc_add_expr_to_block (&se.pre, tmp);

  if (code->expr1 == NULL)
      tmp = build_int_cst (gfc_int4_type_node, 0);
      tmp = build_call_expr_loc (input_location,
				 ? (flag_coarray == GFC_FCOARRAY_LIB
				    ? gfor_fndecl_caf_error_stop_str
				    : gfor_fndecl_error_stop_string)
				 : gfor_fndecl_stop_string,
				 2, build_int_cst (pchar_type_node, 0), tmp);
  else if (code->expr1->ts.type == BT_INTEGER)
      gfc_conv_expr (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
				 ? (flag_coarray == GFC_FCOARRAY_LIB
				    ? gfor_fndecl_caf_error_stop
				    : gfor_fndecl_error_stop_numeric)
				 : gfor_fndecl_stop_numeric_f08, 1,
				 fold_convert (gfc_int4_type_node, se.expr));

      gfc_conv_expr_reference (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
				 ? (flag_coarray == GFC_FCOARRAY_LIB
				    ? gfor_fndecl_caf_error_stop_str
				    : gfor_fndecl_error_stop_string)
				 : gfor_fndecl_stop_string,
				 2, se.expr, se.string_length);

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
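
/* Illustrative sketch (assumed example, not from the original source):

     STOP 'done'

   is routed through gfor_fndecl_stop_string with the message and its
   length as the two arguments, while an integer stop code goes through
   the numeric variant (gfor_fndecl_stop_numeric_f08); with -fcoarray=lib
   an ERROR STOP is dispatched to the corresponding
   gfor_fndecl_caf_error_stop* entry points instead.  */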
gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED)
  tree stat = NULL_TREE, lock_acquired = NULL_TREE;

  /* Short cut: For single images without STAT= or LOCK_ACQUIRED
     return early.  (ERRMSG= is always untouched for -fcoarray=single.)  */
  if (!code->expr2 && !code->expr4 && flag_coarray != GFC_FCOARRAY_LIB)

  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

      gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (&argse, code->expr2);

      gcc_assert (code->expr4->expr_type == EXPR_VARIABLE);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (&argse, code->expr4);
      lock_acquired = argse.expr;

  if (stat != NULL_TREE)
    gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));

  if (lock_acquired != NULL_TREE)
    gfc_add_modify (&se.pre, lock_acquired,
		    fold_convert (TREE_TYPE (lock_acquired),

  return gfc_finish_block (&se.pre);
gfc_trans_sync (gfc_code *code, gfc_exec_op type)
  tree images = NULL_TREE, stat = NULL_TREE,
       errmsg = NULL_TREE, errmsglen = NULL_TREE;

  /* Short cut: For single images without bound checking or without STAT=,
     return early.  (ERRMSG= is always untouched for -fcoarray=single.)  */
  if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && flag_coarray != GFC_FCOARRAY_LIB)

  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr1 && code->expr1->rank == 0)
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (&argse, code->expr1);

      gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (&argse, code->expr2);

    stat = null_pointer_node;

  if (code->expr3 && flag_coarray == GFC_FCOARRAY_LIB
      && type != EXEC_SYNC_MEMORY)
      gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr (&argse, code->expr3);
      gfc_conv_string_parameter (&argse);
      errmsg = gfc_build_addr_expr (NULL, argse.expr);
      errmsglen = argse.string_length;
  else if (flag_coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
      errmsg = null_pointer_node;
      errmsglen = build_int_cst (integer_type_node, 0);

  /* Check SYNC IMAGES(imageset) for valid image index.
     FIXME: Add a check for image-set arrays.  */
  if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && code->expr1->rank == 0)
      if (flag_coarray != GFC_FCOARRAY_LIB)
	cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				images, build_int_cst (TREE_TYPE (images), 1));

	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_num_images,
				     2, integer_zero_node,
				     build_int_cst (integer_type_node, -1));
	  cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
	  cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
				   build_int_cst (TREE_TYPE (images), 1));
	  cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
				  boolean_type_node, cond, cond2);

      gfc_trans_runtime_check (true, false, cond, &se.pre,
			       &code->expr1->where, "Invalid image number "
			       fold_convert (integer_type_node, images));

  /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
     image control statements SYNC IMAGES and SYNC ALL.  */
  if (flag_coarray == GFC_FCOARRAY_LIB)
      tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
      tmp = build_call_expr_loc (input_location, tmp, 0);
      gfc_add_expr_to_block (&se.pre, tmp);

  if (flag_coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
      /* Set STAT to zero.  */
	gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
  else if (type == EXEC_SYNC_ALL)
      /* SYNC ALL           =>   stat == null_pointer_node
	 SYNC ALL(stat=s)   =>   stat has an integer type

	 If "stat" has the wrong integer type, use a temp variable of
	 the right type and later cast the result back into "stat".  */
      if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
	  if (TREE_TYPE (stat) == integer_type_node)
	    stat = gfc_build_addr_expr (NULL, stat);

	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
				     3, stat, errmsg, errmsglen);
	  gfc_add_expr_to_block (&se.pre, tmp);

	  tree tmp_stat = gfc_create_var (integer_type_node, "stat");

	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
				     3, gfc_build_addr_expr (NULL, tmp_stat),
	  gfc_add_expr_to_block (&se.pre, tmp);

	  gfc_add_modify (&se.pre, stat,
			  fold_convert (TREE_TYPE (stat), tmp_stat));

      gcc_assert (type == EXEC_SYNC_IMAGES);

	  len = build_int_cst (integer_type_node, -1);
	  images = null_pointer_node;
      else if (code->expr1->rank == 0)
	  len = build_int_cst (integer_type_node, 1);
	  images = gfc_build_addr_expr (NULL_TREE, images);

	  if (code->expr1->ts.kind != gfc_c_int_kind)
	    gfc_fatal_error ("Sorry, only support for integer kind %d "
			     "implemented for image-set at %L",
			     gfc_c_int_kind, &code->expr1->where);

	  gfc_conv_array_parameter (&se, code->expr1, true, NULL, NULL, &len);

	  tmp = gfc_typenode_for_spec (&code->expr1->ts);
	  if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
	    tmp = gfc_get_element_type (tmp);

	  len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
				 TREE_TYPE (len), len,
				 fold_convert (TREE_TYPE (len),
					       TYPE_SIZE_UNIT (tmp)));
	  len = fold_convert (integer_type_node, len);

      /* SYNC IMAGES(imgs)        => stat == null_pointer_node
	 SYNC IMAGES(imgs,stat=s) => stat has an integer type

	 If "stat" has the wrong integer type, use a temp variable of
	 the right type and later cast the result back into "stat".  */
      if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
	  if (TREE_TYPE (stat) == integer_type_node)
	    stat = gfc_build_addr_expr (NULL, stat);

	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
				     5, fold_convert (integer_type_node, len),
				     images, stat, errmsg, errmsglen);
	  gfc_add_expr_to_block (&se.pre, tmp);

	  tree tmp_stat = gfc_create_var (integer_type_node, "stat");

	  tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
				     5, fold_convert (integer_type_node, len),
				     images, gfc_build_addr_expr (NULL, tmp_stat),
	  gfc_add_expr_to_block (&se.pre, tmp);

	  gfc_add_modify (&se.pre, stat,
			  fold_convert (TREE_TYPE (stat), tmp_stat));

  return gfc_finish_block (&se.pre);
/* Generate GENERIC for the IF construct.  This function also deals with
   the simple IF statement, because the front end translates the IF
   statement into an IF construct.

   where COND_S is the simplified version of the predicate.  PRE_COND_S
   are the pre side-effects produced by the translation of the

   We need to build the chain recursively otherwise we run into
   problems with folding incomplete statements.  */
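
/* Illustrative sketch (assumed example, not from the original source):

     IF (a > 0) THEN
       x = 1
     ELSE IF (b > 0) THEN
       x = 2
     ELSE
       x = 3
     END IF

   is built recursively by gfc_trans_if_1 into a chain of COND_EXPRs:

     if (a > 0) { x = 1; } else { if (b > 0) { x = 2; } else { x = 3; } }  */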
gfc_trans_if_1 (gfc_code * code)
  /* Check for an unconditional ELSE clause.  */
    return gfc_trans_code (code->next);

  /* Initialize a statement builder for each block.  Puts in NULL_TREEs.  */
  gfc_init_se (&if_se, NULL);
  gfc_start_block (&if_se.pre);

  /* Calculate the IF condition expression.  */
  if (code->expr1->where.lb)
      gfc_save_backend_locus (&saved_loc);
      gfc_set_backend_locus (&code->expr1->where);

  gfc_conv_expr_val (&if_se, code->expr1);

  if (code->expr1->where.lb)
    gfc_restore_backend_locus (&saved_loc);

  /* Translate the THEN clause.  */
  stmt = gfc_trans_code (code->next);

  /* Translate the ELSE clause.  */
    elsestmt = gfc_trans_if_1 (code->block);
    elsestmt = build_empty_stmt (input_location);

  /* Build the condition expression and add it to the condition block.  */
  loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
  stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,

  gfc_add_expr_to_block (&if_se.pre, stmt);

  /* Finish off this statement.  */
  return gfc_finish_block (&if_se.pre);
gfc_trans_if (gfc_code * code)
  /* Create exit label so it is available for trans'ing the body code.  */
  exit_label = gfc_build_label_decl (NULL_TREE);
  code->exit_label = exit_label;

  /* Translate the actual code in code->block.  */
  gfc_init_block (&body);
  gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));

  /* Add exit label.  */
  gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));

  return gfc_finish_block (&body);
/* Translate an arithmetic IF expression.

   IF (cond) label1, label2, label3 translates to

   An optimized version can be generated in case of equal labels.
   E.g., if label1 is equal to label2, we can translate it to  */
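
/* Illustrative sketch (assumed example, not from the original source):
   for three distinct labels,

     IF (cond) 10, 20, 30

   is emitted as

     if (cond <= 0)
       {
	 if (cond < 0)
	   goto L10;
	 else
	   goto L20;
       }
     else
       goto L30;

   and when two of the labels coincide one of the comparisons is dropped.  */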
gfc_trans_arithmetic_if (gfc_code * code)
  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  /* Pre-evaluate COND.  */
  gfc_conv_expr_val (&se, code->expr1);
  se.expr = gfc_evaluate_now (se.expr, &se.pre);

  /* Build something to compare with.  */
  zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);

  if (code->label1->value != code->label2->value)
      /* If (cond < 0) take branch1 else take branch2.
	 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases.  */
      branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));

      if (code->label1->value != code->label3->value)
	tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
	tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,

      branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
				 tmp, branch1, branch2);

    branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));

  if (code->label1->value != code->label3->value
      && code->label2->value != code->label3->value)
      /* if (cond <= 0) take branch1 else take branch2.  */
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
      tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
      branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
				 tmp, branch1, branch2);

  /* Append the COND_EXPR to the evaluation of COND, and return.  */
  gfc_add_expr_to_block (&se.pre, branch1);
  return gfc_finish_block (&se.pre);
/* Translate a CRITICAL block.  */

gfc_trans_critical (gfc_code *code)
  tree tmp, token = NULL_TREE;

  gfc_start_block (&block);

  if (flag_coarray == GFC_FCOARRAY_LIB)
      token = gfc_get_symbol_decl (code->resolved_sym);
      token = GFC_TYPE_ARRAY_CAF_TOKEN (TREE_TYPE (token));
      tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_lock, 7,
				 token, integer_zero_node, integer_one_node,
				 null_pointer_node, null_pointer_node,
				 null_pointer_node, integer_zero_node);
      gfc_add_expr_to_block (&block, tmp);

  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  if (flag_coarray == GFC_FCOARRAY_LIB)
      tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_unlock, 6,
				 token, integer_zero_node, integer_one_node,
				 null_pointer_node, null_pointer_node,
      gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
/* Do proper initialization for ASSOCIATE names.  */

trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
  gcc_assert (sym->assoc);
  e = sym->assoc->target;

  class_target = (e->expr_type == EXPR_VARIABLE)
		  && (gfc_is_class_scalar_expr (e)
		      || gfc_is_class_array_ref (e, NULL));

  unlimited = UNLIMITED_POLY (e);

  /* Do a `pointer assignment' with updated descriptor (or assign descriptor
     to array temporary) for arrays with either unknown shape or if associating  */
  if (sym->attr.dimension && !class_target
      && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
      bool cst_array_ctor;

      desc = sym->backend_decl;
      cst_array_ctor = e->expr_type == EXPR_ARRAY
	      && gfc_constant_array_constructor_p (e->value.constructor);

      /* If association is to an expression, evaluate it and create temporary.
	 Otherwise, get descriptor of target for pointer assignment.  */
      gfc_init_se (&se, NULL);
      if (sym->assoc->variable || cst_array_ctor)
	  se.direct_byref = 1;

      gfc_conv_expr_descriptor (&se, e);

      /* If we didn't already do the pointer assignment, set associate-name
	 descriptor to the one generated for the temporary.  */
      if (!sym->assoc->variable && !cst_array_ctor)
	  gfc_add_modify (&se.pre, desc, se.expr);

	  /* The generated descriptor has lower bound zero (as array
	     temporary), shift bounds so we get lower bounds of 1.  */
	  for (dim = 0; dim < e->rank; ++dim)
	    gfc_conv_shift_descriptor_lbound (&se.pre, desc,
					      dim, gfc_index_one_node);

      /* If this is a subreference array pointer associate name use the
	 associate variable element size for the value of 'span'.  */
      if (sym->attr.subref_array_pointer)
	  gcc_assert (e->expr_type == EXPR_VARIABLE);
	  tmp = e->symtree->n.sym->backend_decl;
	  tmp = gfc_get_element_type (TREE_TYPE (tmp));
	  tmp = fold_convert (gfc_array_index_type, size_in_bytes (tmp));
	  gfc_add_modify (&se.pre, GFC_DECL_SPAN(desc), tmp);

      /* Done, register stuff as init / cleanup code.  */
      gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
			    gfc_finish_block (&se.post));

  /* Temporaries, arising from TYPE IS, just need the descriptor of class
     arrays to be assigned directly.  */
  else if (class_target && sym->attr.dimension
	   && (sym->ts.type == BT_DERIVED || unlimited))
      gfc_init_se (&se, NULL);
      se.descriptor_only = 1;
      gfc_conv_expr (&se, e);

      gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)));
      gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym->backend_decl)));

      gfc_add_modify (&se.pre, sym->backend_decl, se.expr);

	  /* Recover the dtype, which has been overwritten by the
	     assignment from an unlimited polymorphic object.  */
	  tmp = gfc_conv_descriptor_dtype (sym->backend_decl);
	  gfc_add_modify (&se.pre, tmp,
			  gfc_get_dtype (TREE_TYPE (sym->backend_decl)));

      gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
			    gfc_finish_block (&se.post));

  /* Do a scalar pointer assignment; this is for scalar variable targets.  */
  else if (gfc_is_associate_pointer (sym))
      gcc_assert (!sym->attr.dimension);

      gfc_init_se (&se, NULL);

      /* Class associate-names come this way because they are
	 unconditionally associate pointers and the symbol is scalar.  */
      if (sym->ts.type == BT_CLASS && CLASS_DATA (sym)->attr.dimension)
	  /* For a class array we need a descriptor for the selector.  */
	  gfc_conv_expr_descriptor (&se, e);

	  /* Obtain a temporary class container for the result.  */
	  gfc_conv_class_to_class (&se, e, sym->ts, false, true, false, false);
	  se.expr = build_fold_indirect_ref_loc (input_location, se.expr);

	  /* Set the offset.  */
	  desc = gfc_class_data_get (se.expr);
	  offset = gfc_index_zero_node;
	  for (n = 0; n < e->rank; n++)
	      dim = gfc_rank_cst[n];
	      tmp = fold_build2_loc (input_location, MULT_EXPR,
				     gfc_array_index_type,
				     gfc_conv_descriptor_stride_get (desc, dim),
				     gfc_conv_descriptor_lbound_get (desc, dim));
	      offset = fold_build2_loc (input_location, MINUS_EXPR,
					gfc_array_index_type,
	  gfc_conv_descriptor_offset_set (&se.pre, desc, offset);
      else if (sym->ts.type == BT_CLASS && e->ts.type == BT_CLASS
	       && CLASS_DATA (e)->attr.dimension)
	  /* This is bound to be a class array element.  */
	  gfc_conv_expr_reference (&se, e);
	  /* Get the _vptr component of the class object.  */
	  tmp = gfc_get_vptr_from_expr (se.expr);
	  /* Obtain a temporary class container for the result.  */
	  gfc_conv_derived_to_class (&se, e, sym->ts, tmp, false, false);
	  se.expr = build_fold_indirect_ref_loc (input_location, se.expr);

	gfc_conv_expr (&se, e);

      tmp = TREE_TYPE (sym->backend_decl);
      tmp = gfc_build_addr_expr (tmp, se.expr);
      gfc_add_modify (&se.pre, sym->backend_decl, tmp);

      gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
			    gfc_finish_block (&se.post));

  /* Do a simple assignment.  This is for scalar expressions, where we
     can simply use expression assignment.  */
      lhs = gfc_lval_expr_from_sym (sym);
      tmp = gfc_trans_assignment (lhs, e, false, true);
      gfc_add_init_cleanup (block, tmp, NULL_TREE);

  /* Set the stringlength from the vtable size.  */
  if (sym->ts.type == BT_CHARACTER && sym->attr.select_type_temporary)
      gfc_init_se (&se, NULL);
      gcc_assert (UNLIMITED_POLY (e->symtree->n.sym));
      tmp = gfc_get_symbol_decl (e->symtree->n.sym);
      tmp = gfc_vtable_size_get (tmp);
      gfc_get_symbol_decl (sym);
      charlen = sym->ts.u.cl->backend_decl;
      gfc_add_modify (&se.pre, charlen,
		      fold_convert (TREE_TYPE (charlen), tmp));
      gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
			    gfc_finish_block (&se.post));
/* Translate a BLOCK construct.  This is basically what we would do for a  */

gfc_trans_block_construct (gfc_code * code)
  gfc_wrapped_block block;
  gfc_association_list *ass;

  ns = code->ext.block.ns;
  sym = ns->proc_name;

  /* Process local variables.  */
  gcc_assert (!sym->tlink);
  gfc_process_block_locals (ns);

  /* Generate code including exit-label.  */
  gfc_init_block (&body);
  exit_label = gfc_build_label_decl (NULL_TREE);
  code->exit_label = exit_label;
  gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
  gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));

  /* Finish everything.  */
  gfc_start_wrapped_block (&block, gfc_finish_block (&body));
  gfc_trans_deferred_vars (sym, &block);
  for (ass = code->ext.block.assoc; ass; ass = ass->next)
    trans_associate_var (ass->st->n.sym, &block);

  return gfc_finish_wrapped_block (&block);
/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect

   We translate a do loop from:

   DO dovar = from, to, step

   [Evaluate loop bounds and step]

   if ((step > 0) ? (dovar <= to) : (dovar >= to))

   cond = (dovar == to);

   if (cond) goto end_label;

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */
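
/* Illustrative sketch (assumed, completing the scheme described above):

     [evaluate loop bounds and step]
     dovar = from;
     if ((step > 0) ? (dovar <= to) : (dovar >= to))
       {
	 for (;;)
	   {
	     body;
     cycle_label:
	     cond = (dovar == to);
	     dovar = dovar + step;
	     if (cond) goto end_label;
	   }
       }
     end_label:  */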
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
		     tree from, tree to, tree step, tree exit_cond)
  tree saved_dovar = NULL;

  type = TREE_TYPE (dovar);

  loc = code->ext.iterator->start->where.lb->location;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify_loc (loc, pblock, dovar,
		      fold_convert (TREE_TYPE (dovar), from));

  /* Save value for do-tinkering checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
      tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
			     dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
			       "Loop variable has been modified");

  /* Exit the loop if there is an I/O result condition or error.  */
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			     build_empty_stmt (loc));
      gfc_add_expr_to_block (&body, tmp);

  /* Evaluate the loop condition.  */
  cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
  cond = gfc_evaluate_now_loc (loc, cond, &body);

  /* Increment the loop variable.  */
  tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
  gfc_add_modify_loc (loc, &body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify_loc (loc, &body, saved_dovar, dovar);

  /* The loop exit.  */
  tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			 cond, tmp, build_empty_stmt (loc));
  gfc_add_expr_to_block (&body, tmp);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);

  /* Only execute the loop if the number of iterations is positive.  */
  if (tree_int_cst_sgn (step) > 0)
    cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
    cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
			 build_empty_stmt (loc));
  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;

   if (empty) goto exit_label;

   if (countm1t == 0) goto exit_label;

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */
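
/* Illustrative sketch (assumed, completing the scheme described above):

     [evaluate loop bounds and step]
     empty = (step > 0 ? to < from : to > from);
     countm1 = (to - from) / step;
     dovar = from;
     if (empty) goto exit_label;
     for (;;)
       {
	 body;
     cycle_label:
	 dovar = dovar + step;
	 countm1t = countm1;
	 countm1--;
	 if (countm1t == 0) goto exit_label;
       }
     exit_label:  */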
gfc_trans_do (gfc_code * code, tree exit_cond)
  tree saved_dovar = NULL;

  gfc_start_block (&block);

  loc = code->ext.iterator->start->where.lb->location;

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (&block, &se.pre);

  type = TREE_TYPE (dovar);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (&block, &se.pre);
  from = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (&block, &se.pre);
  to = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (&block, &se.pre);
  step = gfc_evaluate_now (se.expr, &block);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
      tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
			     build_zero_cst (type));
      gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
			       "DO step value is zero");

  /* Special case simple loops.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (integer_onep (step)
	  || tree_int_cst_equal (step, integer_minus_one_node)))
    return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);

  if (TREE_CODE (type) == INTEGER_TYPE)
    utype = unsigned_type_for (type);
    utype = unsigned_type_for (gfc_array_index_type);
  countm1 = gfc_create_var (utype, "countm1");

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);
  TREE_USED (exit_label) = 1;

  /* Put these labels where they can be found later.  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (&block, dovar, from);

  /* Save value for do-tinkering checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify_loc (loc, &block, saved_dovar, dovar);

  /* Initialize loop count and jump to exit label if the loop is empty.
     This code is executed before we enter the loop body.  We generate:

     countm1 = (to - from) / step;

     countm1 = (from - to) / -step;  */

  if (TREE_CODE (type) == INTEGER_TYPE)
      tree pos, neg, tou, fromu, stepu, tmp2;

      /* The distance from FROM to TO cannot always be represented in a signed
	 type, thus use unsigned arithmetic, also to avoid any undefined  */
      tou = fold_convert (utype, to);
      fromu = fold_convert (utype, from);
      stepu = fold_convert (utype, step);

      /* For a positive step, when to < from, exit, otherwise compute
	 countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step  */
      tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
      tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
			      fold_build2_loc (loc, MINUS_EXPR, utype,
      pos = build2 (COMPOUND_EXPR, void_type_node,
		    fold_build2 (MODIFY_EXPR, void_type_node,
		    build3_loc (loc, COND_EXPR, void_type_node, tmp,
				build1_loc (loc, GOTO_EXPR, void_type_node,
					    exit_label), NULL_TREE));

      /* For a negative step, when to > from, exit, otherwise compute
	 countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step  */
      tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to, from);
      tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
			      fold_build2_loc (loc, MINUS_EXPR, utype,
			      fold_build1_loc (loc, NEGATE_EXPR, utype, stepu));
      neg = build2 (COMPOUND_EXPR, void_type_node,
		    fold_build2 (MODIFY_EXPR, void_type_node,
		    build3_loc (loc, COND_EXPR, void_type_node, tmp,
				build1_loc (loc, GOTO_EXPR, void_type_node,
					    exit_label), NULL_TREE));

      tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
			     build_int_cst (TREE_TYPE (step), 0));
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, neg, pos);

      gfc_add_expr_to_block (&block, tmp);

      /* TODO: We could use the same width as the real type.
	 This would probably cause more problems than it solves
	 when we implement "long double" types.  */

      tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
      tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
      tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
      gfc_add_modify (&block, countm1, tmp);

      /* We need a special check for empty loops:
	 empty = (step > 0 ? to < from : to > from);  */
      pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
				  build_zero_cst (type));
      tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
			     fold_build2_loc (loc, LT_EXPR,
					      boolean_type_node, to, from),
			     fold_build2_loc (loc, GT_EXPR,
					      boolean_type_node, to, from));
      /* If the loop is empty, go directly to the exit label.  */
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
			     build1_v (GOTO_EXPR, exit_label),
			     build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);

  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
      tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
			       "Loop variable has been modified");

  /* Exit the loop if there is an I/O result condition or error.  */
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			     build_empty_stmt (input_location));
      gfc_add_expr_to_block (&body, tmp);

  /* Increment the loop variable.  */
  tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
  gfc_add_modify_loc (loc, &body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify_loc (loc, &body, saved_dovar, dovar);

  /* Initialize countm1t.  */
  tree countm1t = gfc_create_var (utype, "countm1t");
  gfc_add_modify_loc (loc, &body, countm1t, countm1);

  /* Decrement the loop count.  */
  tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
			 build_int_cst (utype, 1));
  gfc_add_modify_loc (loc, &body, countm1, tmp);

  /* End with the loop condition.  Loop until countm1t == 0.  */
  cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1t,
			  build_int_cst (utype, 0));
  tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
			 cond, tmp, build_empty_stmt (loc));
  gfc_add_expr_to_block (&body, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&body);

  /* The for loop itself.  */
  tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
/* Translate the DO WHILE construct.

   if (! cond) goto exit_label;

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */
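
/* Illustrative sketch (assumed, completing the scheme described above):

     DO WHILE (cond)  ...  END DO

   becomes

     for (;;)
       {
	 if (! cond) goto exit_label;
	 body;
     cycle_label:
       }
     exit_label:  */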
gfc_trans_do_while (gfc_code * code)
  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Create a GIMPLE version of the exit condition.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr1);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1_loc (code->expr1->where.lb->location,
			       TRUTH_NOT_EXPR, TREE_TYPE (cond.expr), cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
			 void_type_node, cond.expr, tmp,
			 build_empty_stmt (code->expr1->where.lb->location));
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the loop.  */
  tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
			 void_type_node, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
/* Translate the SELECT CASE construct for INTEGER case expressions,
   without killing all potential optimizations.  The problem is that
   Fortran allows unbounded cases, but the back-end does not, so we
   need to intercept those before we enter the equivalent SWITCH_EXPR

   For example, we translate this,

   CASE (:100,101,105:115)

   to the GENERIC equivalent,

   case (minimum value for typeof(expr) ... 100:

   case 200 ... (maximum value for typeof(expr):  */
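
/* Illustrative sketch (assumed, completing the example above): the
   Fortran case list

     CASE (:100,101,105:115)
     CASE DEFAULT
     CASE (200:)

   maps onto GENERIC case labels such as

     case (minimum value for typeof(expr)) ... 100:
     case 101:
     case 105 ... 115:
     default:
     case 200 ... (maximum value for typeof(expr)):

   i.e. the unbounded Fortran ranges are clamped to the extreme values of
   the case expression's type before entering the SWITCH_EXPR.  */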
gfc_trans_integer_select (gfc_code * code)
  gfc_start_block (&block);

  /* Calculate the switch expression.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
      for (cp = c->ext.block.case_list; cp; cp = cp->next)
	  /* Assume it's the default case.  */
	  low = high = NULL_TREE;

	      low = gfc_conv_mpz_to_tree (cp->low->value.integer,

	      /* If there's only a lower bound, set the high bound to the
		 maximum value of the case expression.  */
		high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));

	      /* Three cases are possible here:

		 1) There is no lower bound, e.g. CASE (:N).
		 2) There is a lower bound .NE. high bound, that is
		    a case range, e.g. CASE (N:M) where M>N (we make
		    sure that M>N during type resolution).
		 3) There is a lower bound, and it has the same value
		    as the high bound, e.g. CASE (N:N).  This is our
		    internal representation of CASE(N).

		 In the first and second case, we need to set a value for
		 high.  In the third case, we don't because the GCC middle
		 end represents a single case value by just letting high be
		 a NULL_TREE.  We can't do that because we need to be able
		 to represent unbounded cases.  */

		  && mpz_cmp (cp->low->value.integer,
			      cp->high->value.integer) != 0))
		high = gfc_conv_mpz_to_tree (cp->high->value.integer,

	      /* Unbounded case.  */
		low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));

	  /* Build a label.  */
	  label = gfc_build_label_decl (NULL_TREE);

	  /* Add this case label.
	     Add parameter 'label', make it match GCC backend.  */
	  tmp = build_case_label (low, high, label);
	  gfc_add_expr_to_block (&body, tmp);

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);

  tmp = gfc_finish_block (&body);
  tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
			 se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
/* Translate the SELECT CASE construct for LOGICAL case expressions.

   There are only two cases possible here, even though the standard
   does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
   .FALSE., and DEFAULT.

   We never generate more than two blocks here.  Instead, we always
   try to eliminate the DEFAULT case.  This way, we can translate this
   kind of SELECT construct to a simple

   expression in GENERIC.  */
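
/* Illustrative sketch (assumed example, not from the original source):

     SELECT CASE (flag)
     CASE (.TRUE.)
       x = 1
     CASE (.FALSE.)
       x = 2
     END SELECT

   is emitted as the single conditional

     if (flag) { x = 1; } else { x = 2; }  */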
gfc_trans_logical_select (gfc_code * code)
  gfc_code *t, *f, *d;

  /* Assume we don't have any cases at all.  */

  /* Now see which ones we actually do have.  We can have at most two
     cases in a single case list: one for .TRUE. and one for .FALSE.
     The default case is always separate.  If the cases for .TRUE. and
     .FALSE. are in the same case list, the block for that case list is
     always executed, and we don't generate a COND_EXPR.  */
  for (c = code->block; c; c = c->block)
      for (cp = c->ext.block.case_list; cp; cp = cp->next)
	      if (cp->low->value.logical == 0) /* .FALSE.  */
	      else /* if (cp->value.logical != 0), thus .TRUE.  */

  /* Start a new block.  */
  gfc_start_block (&block);

  /* Calculate the switch expression.  We always need to do this
     because it may have side effects.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  if (t == f && t != NULL)
    /* Cases for .TRUE. and .FALSE. are in the same block.  Just
       translate the code for these cases, append it to the current  */
    gfc_add_expr_to_block (&block, gfc_trans_code (t->next));

      tree true_tree, false_tree, stmt;

      true_tree = build_empty_stmt (input_location);
      false_tree = build_empty_stmt (input_location);

      /* If we have a case for .TRUE. and for .FALSE., discard the default case.
	 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
	 make the missing case the default case.  */
      if (t != NULL && f != NULL)

      /* Translate the code for each of these blocks, and append it to
	 the current block.  */
	true_tree = gfc_trans_code (t->next);

	false_tree = gfc_trans_code (f->next);

      stmt = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			      se.expr, true_tree, false_tree);
      gfc_add_expr_to_block (&block, stmt);

  return gfc_finish_block (&block);
2151 /* The jump table types are stored in static variables to avoid
2152 constructing them from scratch every single time. */
2153 static GTY(()) tree select_struct
[2];
2155 /* Translate the SELECT CASE construct for CHARACTER case expressions.
2156 Instead of generating compares and jumps, it is far simpler to
2157 generate a data structure describing the cases in order and call a
2158 library subroutine that locates the right case.
2159 This is particularly true because this is the only case where we
2160 might have to dispose of a temporary.
2161 The library subroutine returns a pointer to jump to or NULL if no
2162 branches are to be taken. */
2165 gfc_trans_character_select (gfc_code
*code
)
2167 tree init
, end_label
, tmp
, type
, case_num
, label
, fndecl
;
2168 stmtblock_t block
, body
;
2173 vec
<constructor_elt
, va_gc
> *inits
= NULL
;
2175 tree pchartype
= gfc_get_pchar_type (code
->expr1
->ts
.kind
);
2177 /* The jump table types are stored in static variables to avoid
2178 constructing them from scratch every single time. */
2179 static tree ss_string1
[2], ss_string1_len
[2];
2180 static tree ss_string2
[2], ss_string2_len
[2];
2181 static tree ss_target
[2];
2183 cp
= code
->block
->ext
.block
.case_list
;
2184 while (cp
->left
!= NULL
)
2187 /* Generate the body */
2188 gfc_start_block (&block
);
2189 gfc_init_se (&expr1se
, NULL
);
2190 gfc_conv_expr_reference (&expr1se
, code
->expr1
);
2192 gfc_add_block_to_block (&block
, &expr1se
.pre
);
2194 end_label
= gfc_build_label_decl (NULL_TREE
);
2196 gfc_init_block (&body
);
2198 /* Attempt to optimize length 1 selects. */
2199 if (integer_onep (expr1se
.string_length
))
2201 for (d
= cp
; d
; d
= d
->right
)
2206 gcc_assert (d
->low
->expr_type
== EXPR_CONSTANT
2207 && d
->low
->ts
.type
== BT_CHARACTER
);
2208 if (d
->low
->value
.character
.length
> 1)
2210 for (i
= 1; i
< d
->low
->value
.character
.length
; i
++)
2211 if (d
->low
->value
.character
.string
[i
] != ' ')
2213 if (i
!= d
->low
->value
.character
.length
)
2215 if (optimize
&& d
->high
&& i
== 1)
2217 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
2218 && d
->high
->ts
.type
== BT_CHARACTER
);
2219 if (d
->high
->value
.character
.length
> 1
2220 && (d
->low
->value
.character
.string
[0]
2221 == d
->high
->value
.character
.string
[0])
2222 && d
->high
->value
.character
.string
[1] != ' '
2223 && ((d
->low
->value
.character
.string
[1] < ' ')
2224 == (d
->high
->value
.character
.string
[1]
2234 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
2235 && d
->high
->ts
.type
== BT_CHARACTER
);
2236 if (d
->high
->value
.character
.length
> 1)
2238 for (i
= 1; i
< d
->high
->value
.character
.length
; i
++)
2239 if (d
->high
->value
.character
.string
[i
] != ' ')
2241 if (i
!= d
->high
->value
.character
.length
)
2248 tree ctype
= gfc_get_char_type (code
->expr1
->ts
.kind
);
2250 for (c
= code
->block
; c
; c
= c
->block
)
2252 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2258 /* Assume it's the default case. */
2259 low
= high
= NULL_TREE
;
2263 /* CASE ('ab') or CASE ('ab':'az') will never match
2264 any length 1 character. */
2265 if (cp
->low
->value
.character
.length
> 1
2266 && cp
->low
->value
.character
.string
[1] != ' ')
2269 if (cp
->low
->value
.character
.length
> 0)
2270 r
= cp
->low
->value
.character
.string
[0];
2273 low
= build_int_cst (ctype
, r
);
2275 /* If there's only a lower bound, set the high bound
2276 to the maximum value of the case expression. */
2278 high
= TYPE_MAX_VALUE (ctype
);
2284 || (cp
->low
->value
.character
.string
[0]
2285 != cp
->high
->value
.character
.string
[0]))
2287 if (cp
->high
->value
.character
.length
> 0)
2288 r
= cp
->high
->value
.character
.string
[0];
2291 high
= build_int_cst (ctype
, r
);
2294 /* Unbounded case. */
2296 low
= TYPE_MIN_VALUE (ctype
);
2299 /* Build a label. */
2300 label
= gfc_build_label_decl (NULL_TREE
);
2302 /* Add this case label.
2303 Add parameter 'label', make it match GCC backend. */
2304 tmp
= build_case_label (low
, high
, label
);
2305 gfc_add_expr_to_block (&body
, tmp
);
2308 /* Add the statements for this case. */
2309 tmp
= gfc_trans_code (c
->next
);
2310 gfc_add_expr_to_block (&body
, tmp
);
2312 /* Break to the end of the construct. */
2313 tmp
= build1_v (GOTO_EXPR
, end_label
);
2314 gfc_add_expr_to_block (&body
, tmp
);
2317 tmp
= gfc_string_to_single_character (expr1se
.string_length
,
2319 code
->expr1
->ts
.kind
);
2320 case_num
= gfc_create_var (ctype
, "case_num");
2321 gfc_add_modify (&block
, case_num
, tmp
);
2323 gfc_add_block_to_block (&block
, &expr1se
.post
);
2325 tmp
= gfc_finish_block (&body
);
2326 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2327 case_num
, tmp
, NULL_TREE
);
2328 gfc_add_expr_to_block (&block
, tmp
);
2330 tmp
= build1_v (LABEL_EXPR
, end_label
);
2331 gfc_add_expr_to_block (&block
, tmp
);
2333 return gfc_finish_block (&block
);
2337 if (code
->expr1
->ts
.kind
== 1)
2339 else if (code
->expr1
->ts
.kind
== 4)
2344 if (select_struct
[k
] == NULL
)
2347 select_struct
[k
] = make_node (RECORD_TYPE
);
2349 if (code
->expr1
->ts
.kind
== 1)
2350 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char1");
2351 else if (code
->expr1
->ts
.kind
== 4)
2352 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char4");
2357 #define ADD_FIELD(NAME, TYPE) \
2358 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2359 get_identifier (stringize(NAME)), \
2363 ADD_FIELD (string1
, pchartype
);
2364 ADD_FIELD (string1_len
, gfc_charlen_type_node
);
2366 ADD_FIELD (string2
, pchartype
);
2367 ADD_FIELD (string2_len
, gfc_charlen_type_node
);
2369 ADD_FIELD (target
, integer_type_node
);
2372 gfc_finish_type (select_struct
[k
]);
2376 for (d
= cp
; d
; d
= d
->right
)
2379 for (c
= code
->block
; c
; c
= c
->block
)
2381 for (d
= c
->ext
.block
.case_list
; d
; d
= d
->next
)
2383 label
= gfc_build_label_decl (NULL_TREE
);
2384 tmp
= build_case_label ((d
->low
== NULL
&& d
->high
== NULL
)
2386 : build_int_cst (integer_type_node
, d
->n
),
2388 gfc_add_expr_to_block (&body
, tmp
);
2391 tmp
= gfc_trans_code (c
->next
);
2392 gfc_add_expr_to_block (&body
, tmp
);
2394 tmp
= build1_v (GOTO_EXPR
, end_label
);
2395 gfc_add_expr_to_block (&body
, tmp
);
2398 /* Generate the structure describing the branches */
2399 for (d
= cp
; d
; d
= d
->right
)
2401 vec
<constructor_elt
, va_gc
> *node
= NULL
;
2403 gfc_init_se (&se
, NULL
);
2407 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], null_pointer_node
);
2408 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], integer_zero_node
);
2412 gfc_conv_expr_reference (&se
, d
->low
);
2414 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], se
.expr
);
2415 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], se
.string_length
);
2418 if (d
->high
== NULL
)
2420 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], null_pointer_node
);
2421 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], integer_zero_node
);
2425 gfc_init_se (&se
, NULL
);
2426 gfc_conv_expr_reference (&se
, d
->high
);
2428 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], se
.expr
);
2429 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], se
.string_length
);
2432 CONSTRUCTOR_APPEND_ELT (node
, ss_target
[k
],
2433 build_int_cst (integer_type_node
, d
->n
));
2435 tmp
= build_constructor (select_struct
[k
], node
);
2436 CONSTRUCTOR_APPEND_ELT (inits
, NULL_TREE
, tmp
);
2439 type
= build_array_type (select_struct
[k
],
2440 build_index_type (size_int (n
-1)));
2442 init
= build_constructor (type
, inits
);
2443 TREE_CONSTANT (init
) = 1;
2444 TREE_STATIC (init
) = 1;
2445 /* Create a static variable to hold the jump table. */
2446 tmp
= gfc_create_var (type
, "jumptable");
2447 TREE_CONSTANT (tmp
) = 1;
2448 TREE_STATIC (tmp
) = 1;
2449 TREE_READONLY (tmp
) = 1;
2450 DECL_INITIAL (tmp
) = init
;
2453 /* Build the library call */
2454 init
= gfc_build_addr_expr (pvoid_type_node
, init
);
2456 if (code
->expr1
->ts
.kind
== 1)
2457 fndecl
= gfor_fndecl_select_string
;
2458 else if (code
->expr1
->ts
.kind
== 4)
2459 fndecl
= gfor_fndecl_select_string_char4
;
2463 tmp
= build_call_expr_loc (input_location
,
2465 build_int_cst (gfc_charlen_type_node
, n
),
2466 expr1se
.expr
, expr1se
.string_length
);
2467 case_num
= gfc_create_var (integer_type_node
, "case_num");
2468 gfc_add_modify (&block
, case_num
, tmp
);
2470 gfc_add_block_to_block (&block
, &expr1se
.post
);
2472 tmp
= gfc_finish_block (&body
);
2473 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2474 case_num
, tmp
, NULL_TREE
);
2475 gfc_add_expr_to_block (&block
, tmp
);
2477 tmp
= build1_v (LABEL_EXPR
, end_label
);
2478 gfc_add_expr_to_block (&block
, tmp
);
2480 return gfc_finish_block (&block
);
2484 /* Translate the three variants of the SELECT CASE construct.
2486 SELECT CASEs with INTEGER case expressions can be translated to an
2487 equivalent GENERIC switch statement, and for LOGICAL case
2488 expressions we build one or two if-else compares.
2490 SELECT CASEs with CHARACTER case expressions are a whole different
2491 story, because they don't exist in GENERIC. So we sort them and
2492 do a binary search at runtime.
2494 Fortran has no BREAK statement, and it does not allow jumps from
2495 one case block to another. That makes things a lot easier for
2499 gfc_trans_select (gfc_code
* code
)
2505 gcc_assert (code
&& code
->expr1
);
2506 gfc_init_block (&block
);
2508 /* Build the exit label and hang it in. */
2509 exit_label
= gfc_build_label_decl (NULL_TREE
);
2510 code
->exit_label
= exit_label
;
2512 /* Empty SELECT constructs are legal. */
2513 if (code
->block
== NULL
)
2514 body
= build_empty_stmt (input_location
);
2516 /* Select the correct translation function. */
2518 switch (code
->expr1
->ts
.type
)
2521 body
= gfc_trans_logical_select (code
);
2525 body
= gfc_trans_integer_select (code
);
2529 body
= gfc_trans_character_select (code
);
2533 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2537 /* Build everything together. */
2538 gfc_add_expr_to_block (&block
, body
);
2539 gfc_add_expr_to_block (&block
, build1_v (LABEL_EXPR
, exit_label
));
2541 return gfc_finish_block (&block
);
2545 /* Traversal function to substitute a replacement symtree if the symbol
2546 in the expression is the same as that passed. f == 2 signals that
2547 that variable itself is not to be checked - only the references.
2548 This group of functions is used when the variable expression in a
2549 FORALL assignment has internal references. For example:
2550 FORALL (i = 1:4) p(p(i)) = i
2551 The only recourse here is to store a copy of 'p' for the index
2554 static gfc_symtree
*new_symtree
;
2555 static gfc_symtree
*old_symtree
;
2558 forall_replace (gfc_expr
*expr
, gfc_symbol
*sym
, int *f
)
2560 if (expr
->expr_type
!= EXPR_VARIABLE
)
2565 else if (expr
->symtree
->n
.sym
== sym
)
2566 expr
->symtree
= new_symtree
;
2572 forall_replace_symtree (gfc_expr
*e
, gfc_symbol
*sym
, int f
)
2574 gfc_traverse_expr (e
, sym
, forall_replace
, f
);
2578 forall_restore (gfc_expr
*expr
,
2579 gfc_symbol
*sym ATTRIBUTE_UNUSED
,
2580 int *f ATTRIBUTE_UNUSED
)
2582 if (expr
->expr_type
!= EXPR_VARIABLE
)
2585 if (expr
->symtree
== new_symtree
)
2586 expr
->symtree
= old_symtree
;
2592 forall_restore_symtree (gfc_expr
*e
)
2594 gfc_traverse_expr (e
, NULL
, forall_restore
, 0);
2598 forall_make_variable_temp (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2603 gfc_symbol
*new_sym
;
2604 gfc_symbol
*old_sym
;
2608 /* Build a copy of the lvalue. */
2609 old_symtree
= c
->expr1
->symtree
;
2610 old_sym
= old_symtree
->n
.sym
;
2611 e
= gfc_lval_expr_from_sym (old_sym
);
2612 if (old_sym
->attr
.dimension
)
2614 gfc_init_se (&tse
, NULL
);
2615 gfc_conv_subref_array_arg (&tse
, e
, 0, INTENT_IN
, false);
2616 gfc_add_block_to_block (pre
, &tse
.pre
);
2617 gfc_add_block_to_block (post
, &tse
.post
);
2618 tse
.expr
= build_fold_indirect_ref_loc (input_location
, tse
.expr
);
2620 if (e
->ts
.type
!= BT_CHARACTER
)
2622 /* Use the variable offset for the temporary. */
2623 tmp
= gfc_conv_array_offset (old_sym
->backend_decl
);
2624 gfc_conv_descriptor_offset_set (pre
, tse
.expr
, tmp
);
2629 gfc_init_se (&tse
, NULL
);
2630 gfc_init_se (&rse
, NULL
);
2631 gfc_conv_expr (&rse
, e
);
2632 if (e
->ts
.type
== BT_CHARACTER
)
2634 tse
.string_length
= rse
.string_length
;
2635 tmp
= gfc_get_character_type_len (gfc_default_character_kind
,
2637 tse
.expr
= gfc_conv_string_tmp (&tse
, build_pointer_type (tmp
),
2639 gfc_add_block_to_block (pre
, &tse
.pre
);
2640 gfc_add_block_to_block (post
, &tse
.post
);
2644 tmp
= gfc_typenode_for_spec (&e
->ts
);
2645 tse
.expr
= gfc_create_var (tmp
, "temp");
2648 tmp
= gfc_trans_scalar_assign (&tse
, &rse
, e
->ts
, true,
2649 e
->expr_type
== EXPR_VARIABLE
, true);
2650 gfc_add_expr_to_block (pre
, tmp
);
2654 /* Create a new symbol to represent the lvalue. */
2655 new_sym
= gfc_new_symbol (old_sym
->name
, NULL
);
2656 new_sym
->ts
= old_sym
->ts
;
2657 new_sym
->attr
.referenced
= 1;
2658 new_sym
->attr
.temporary
= 1;
2659 new_sym
->attr
.dimension
= old_sym
->attr
.dimension
;
2660 new_sym
->attr
.flavor
= old_sym
->attr
.flavor
;
2662 /* Use the temporary as the backend_decl. */
2663 new_sym
->backend_decl
= tse
.expr
;
2665 /* Create a fake symtree for it. */
2667 new_symtree
= gfc_new_symtree (&root
, old_sym
->name
);
2668 new_symtree
->n
.sym
= new_sym
;
2669 gcc_assert (new_symtree
== root
);
2671 /* Go through the expression reference replacing the old_symtree
2673 forall_replace_symtree (c
->expr1
, old_sym
, 2);
2675 /* Now we have made this temporary, we might as well use it for
2676 the right hand side. */
2677 forall_replace_symtree (c
->expr2
, old_sym
, 1);
2681 /* Handles dependencies in forall assignments. */
2683 check_forall_dependencies (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2690 lsym
= c
->expr1
->symtree
->n
.sym
;
2691 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
2693 /* Now check for dependencies within the 'variable'
2694 expression itself. These are treated by making a complete
2695 copy of variable and changing all the references to it
2696 point to the copy instead. Note that the shallow copy of
2697 the variable will not suffice for derived types with
2698 pointer components. We therefore leave these to their
2700 if (lsym
->ts
.type
== BT_DERIVED
2701 && lsym
->ts
.u
.derived
->attr
.pointer_comp
)
2705 if (find_forall_index (c
->expr1
, lsym
, 2))
2707 forall_make_variable_temp (c
, pre
, post
);
2711 /* Substrings with dependencies are treated in the same
2713 if (c
->expr1
->ts
.type
== BT_CHARACTER
2715 && c
->expr2
->expr_type
== EXPR_VARIABLE
2716 && lsym
== c
->expr2
->symtree
->n
.sym
)
2718 for (lref
= c
->expr1
->ref
; lref
; lref
= lref
->next
)
2719 if (lref
->type
== REF_SUBSTRING
)
2721 for (rref
= c
->expr2
->ref
; rref
; rref
= rref
->next
)
2722 if (rref
->type
== REF_SUBSTRING
)
2726 && gfc_dep_compare_expr (rref
->u
.ss
.start
, lref
->u
.ss
.start
) < 0)
2728 forall_make_variable_temp (c
, pre
, post
);
2737 cleanup_forall_symtrees (gfc_code
*c
)
2739 forall_restore_symtree (c
->expr1
);
2740 forall_restore_symtree (c
->expr2
);
2741 free (new_symtree
->n
.sym
);
2746 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2747 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2748 indicates whether we should generate code to test the FORALLs mask
2749 array. OUTER is the loop header to be used for initializing mask
2752 The generated loop format is:
2753 count = (end - start + step) / step
2766 gfc_trans_forall_loop (forall_info
*forall_tmp
, tree body
,
2767 int mask_flag
, stmtblock_t
*outer
)
2775 tree var
, start
, end
, step
;
2778 /* Initialize the mask index outside the FORALL nest. */
2779 if (mask_flag
&& forall_tmp
->mask
)
2780 gfc_add_modify (outer
, forall_tmp
->maskindex
, gfc_index_zero_node
);
2782 iter
= forall_tmp
->this_loop
;
2783 nvar
= forall_tmp
->nvar
;
2784 for (n
= 0; n
< nvar
; n
++)
2787 start
= iter
->start
;
2791 exit_label
= gfc_build_label_decl (NULL_TREE
);
2792 TREE_USED (exit_label
) = 1;
2794 /* The loop counter. */
2795 count
= gfc_create_var (TREE_TYPE (var
), "count");
2797 /* The body of the loop. */
2798 gfc_init_block (&block
);
2800 /* The exit condition. */
2801 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
2802 count
, build_int_cst (TREE_TYPE (count
), 0));
2803 if (forall_tmp
->do_concurrent
)
2804 cond
= build2 (ANNOTATE_EXPR
, TREE_TYPE (cond
), cond
,
2805 build_int_cst (integer_type_node
,
2806 annot_expr_ivdep_kind
));
2808 tmp
= build1_v (GOTO_EXPR
, exit_label
);
2809 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2810 cond
, tmp
, build_empty_stmt (input_location
));
2811 gfc_add_expr_to_block (&block
, tmp
);
2813 /* The main loop body. */
2814 gfc_add_expr_to_block (&block
, body
);
2816 /* Increment the loop variable. */
2817 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), var
,
2819 gfc_add_modify (&block
, var
, tmp
);
2821 /* Advance to the next mask element. Only do this for the
2823 if (n
== 0 && mask_flag
&& forall_tmp
->mask
)
2825 tree maskindex
= forall_tmp
->maskindex
;
2826 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2827 maskindex
, gfc_index_one_node
);
2828 gfc_add_modify (&block
, maskindex
, tmp
);
2831 /* Decrement the loop counter. */
2832 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), count
,
2833 build_int_cst (TREE_TYPE (var
), 1));
2834 gfc_add_modify (&block
, count
, tmp
);
2836 body
= gfc_finish_block (&block
);
2838 /* Loop var initialization. */
2839 gfc_init_block (&block
);
2840 gfc_add_modify (&block
, var
, start
);
2843 /* Initialize the loop counter. */
2844 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), step
,
2846 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), end
,
2848 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, TREE_TYPE (var
),
2850 gfc_add_modify (&block
, count
, tmp
);
2852 /* The loop expression. */
2853 tmp
= build1_v (LOOP_EXPR
, body
);
2854 gfc_add_expr_to_block (&block
, tmp
);
2856 /* The exit label. */
2857 tmp
= build1_v (LABEL_EXPR
, exit_label
);
2858 gfc_add_expr_to_block (&block
, tmp
);
2860 body
= gfc_finish_block (&block
);
2867 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2868 is nonzero, the body is controlled by all masks in the forall nest.
2869 Otherwise, the innermost loop is not controlled by it's mask. This
2870 is used for initializing that mask. */
2873 gfc_trans_nested_forall_loop (forall_info
* nested_forall_info
, tree body
,
2878 forall_info
*forall_tmp
;
2879 tree mask
, maskindex
;
2881 gfc_start_block (&header
);
2883 forall_tmp
= nested_forall_info
;
2884 while (forall_tmp
!= NULL
)
2886 /* Generate body with masks' control. */
2889 mask
= forall_tmp
->mask
;
2890 maskindex
= forall_tmp
->maskindex
;
2892 /* If a mask was specified make the assignment conditional. */
2895 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
2896 body
= build3_v (COND_EXPR
, tmp
, body
,
2897 build_empty_stmt (input_location
));
2900 body
= gfc_trans_forall_loop (forall_tmp
, body
, mask_flag
, &header
);
2901 forall_tmp
= forall_tmp
->prev_nest
;
2905 gfc_add_expr_to_block (&header
, body
);
2906 return gfc_finish_block (&header
);
2910 /* Allocate data for holding a temporary array. Returns either a local
2911 temporary array or a pointer variable. */
2914 gfc_do_allocate (tree bytesize
, tree size
, tree
* pdata
, stmtblock_t
* pblock
,
2921 if (INTEGER_CST_P (size
))
2922 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
2923 size
, gfc_index_one_node
);
2927 type
= build_range_type (gfc_array_index_type
, gfc_index_zero_node
, tmp
);
2928 type
= build_array_type (elem_type
, type
);
2929 if (gfc_can_put_var_on_stack (bytesize
))
2931 gcc_assert (INTEGER_CST_P (size
));
2932 tmpvar
= gfc_create_var (type
, "temp");
2937 tmpvar
= gfc_create_var (build_pointer_type (type
), "temp");
2938 *pdata
= convert (pvoid_type_node
, tmpvar
);
2940 tmp
= gfc_call_malloc (pblock
, TREE_TYPE (tmpvar
), bytesize
);
2941 gfc_add_modify (pblock
, tmpvar
, tmp
);
2947 /* Generate codes to copy the temporary to the actual lhs. */
2950 generate_loop_for_temp_to_lhs (gfc_expr
*expr
, tree tmp1
, tree count3
,
2951 tree count1
, tree wheremask
, bool invert
)
2955 stmtblock_t block
, body
;
2961 lss
= gfc_walk_expr (expr
);
2963 if (lss
== gfc_ss_terminator
)
2965 gfc_start_block (&block
);
2967 gfc_init_se (&lse
, NULL
);
2969 /* Translate the expression. */
2970 gfc_conv_expr (&lse
, expr
);
2972 /* Form the expression for the temporary. */
2973 tmp
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2975 /* Use the scalar assignment as is. */
2976 gfc_add_block_to_block (&block
, &lse
.pre
);
2977 gfc_add_modify (&block
, lse
.expr
, tmp
);
2978 gfc_add_block_to_block (&block
, &lse
.post
);
2980 /* Increment the count1. */
2981 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
2982 count1
, gfc_index_one_node
);
2983 gfc_add_modify (&block
, count1
, tmp
);
2985 tmp
= gfc_finish_block (&block
);
2989 gfc_start_block (&block
);
2991 gfc_init_loopinfo (&loop1
);
2992 gfc_init_se (&rse
, NULL
);
2993 gfc_init_se (&lse
, NULL
);
2995 /* Associate the lss with the loop. */
2996 gfc_add_ss_to_loop (&loop1
, lss
);
2998 /* Calculate the bounds of the scalarization. */
2999 gfc_conv_ss_startstride (&loop1
);
3000 /* Setup the scalarizing loops. */
3001 gfc_conv_loop_setup (&loop1
, &expr
->where
);
3003 gfc_mark_ss_chain_used (lss
, 1);
3005 /* Start the scalarized loop body. */
3006 gfc_start_scalarized_body (&loop1
, &body
);
3008 /* Setup the gfc_se structures. */
3009 gfc_copy_loopinfo_to_se (&lse
, &loop1
);
3012 /* Form the expression of the temporary. */
3013 if (lss
!= gfc_ss_terminator
)
3014 rse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3015 /* Translate expr. */
3016 gfc_conv_expr (&lse
, expr
);
3018 /* Use the scalar assignment. */
3019 rse
.string_length
= lse
.string_length
;
3020 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr
->ts
, false, true, true);
3022 /* Form the mask expression according to the mask tree list. */
3025 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
3027 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3028 TREE_TYPE (wheremaskexpr
),
3030 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3032 build_empty_stmt (input_location
));
3035 gfc_add_expr_to_block (&body
, tmp
);
3037 /* Increment count1. */
3038 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3039 count1
, gfc_index_one_node
);
3040 gfc_add_modify (&body
, count1
, tmp
);
3042 /* Increment count3. */
3045 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3046 gfc_array_index_type
, count3
,
3047 gfc_index_one_node
);
3048 gfc_add_modify (&body
, count3
, tmp
);
3051 /* Generate the copying loops. */
3052 gfc_trans_scalarizing_loops (&loop1
, &body
);
3053 gfc_add_block_to_block (&block
, &loop1
.pre
);
3054 gfc_add_block_to_block (&block
, &loop1
.post
);
3055 gfc_cleanup_loop (&loop1
);
3057 tmp
= gfc_finish_block (&block
);
3063 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
3064 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
3065 and should not be freed. WHEREMASK is the conditional execution mask
3066 whose sense may be inverted by INVERT. */
3069 generate_loop_for_rhs_to_temp (gfc_expr
*expr2
, tree tmp1
, tree count3
,
3070 tree count1
, gfc_ss
*lss
, gfc_ss
*rss
,
3071 tree wheremask
, bool invert
)
3073 stmtblock_t block
, body1
;
3080 gfc_start_block (&block
);
3082 gfc_init_se (&rse
, NULL
);
3083 gfc_init_se (&lse
, NULL
);
3085 if (lss
== gfc_ss_terminator
)
3087 gfc_init_block (&body1
);
3088 gfc_conv_expr (&rse
, expr2
);
3089 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3093 /* Initialize the loop. */
3094 gfc_init_loopinfo (&loop
);
3096 /* We may need LSS to determine the shape of the expression. */
3097 gfc_add_ss_to_loop (&loop
, lss
);
3098 gfc_add_ss_to_loop (&loop
, rss
);
3100 gfc_conv_ss_startstride (&loop
);
3101 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3103 gfc_mark_ss_chain_used (rss
, 1);
3104 /* Start the loop body. */
3105 gfc_start_scalarized_body (&loop
, &body1
);
3107 /* Translate the expression. */
3108 gfc_copy_loopinfo_to_se (&rse
, &loop
);
3110 gfc_conv_expr (&rse
, expr2
);
3112 /* Form the expression of the temporary. */
3113 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3116 /* Use the scalar assignment. */
3117 lse
.string_length
= rse
.string_length
;
3118 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr2
->ts
, true,
3119 expr2
->expr_type
== EXPR_VARIABLE
, true);
3121 /* Form the mask expression according to the mask tree list. */
3124 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
3126 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3127 TREE_TYPE (wheremaskexpr
),
3129 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3131 build_empty_stmt (input_location
));
3134 gfc_add_expr_to_block (&body1
, tmp
);
3136 if (lss
== gfc_ss_terminator
)
3138 gfc_add_block_to_block (&block
, &body1
);
3140 /* Increment count1. */
3141 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
3142 count1
, gfc_index_one_node
);
3143 gfc_add_modify (&block
, count1
, tmp
);
3147 /* Increment count1. */
3148 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3149 count1
, gfc_index_one_node
);
3150 gfc_add_modify (&body1
, count1
, tmp
);
3152 /* Increment count3. */
3155 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3156 gfc_array_index_type
,
3157 count3
, gfc_index_one_node
);
3158 gfc_add_modify (&body1
, count3
, tmp
);
3161 /* Generate the copying loops. */
3162 gfc_trans_scalarizing_loops (&loop
, &body1
);
3164 gfc_add_block_to_block (&block
, &loop
.pre
);
3165 gfc_add_block_to_block (&block
, &loop
.post
);
3167 gfc_cleanup_loop (&loop
);
3168 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3169 as tree nodes in SS may not be valid in different scope. */
3172 tmp
= gfc_finish_block (&block
);
3177 /* Calculate the size of temporary needed in the assignment inside forall.
3178 LSS and RSS are filled in this function. */
3181 compute_inner_temp_size (gfc_expr
*expr1
, gfc_expr
*expr2
,
3182 stmtblock_t
* pblock
,
3183 gfc_ss
**lss
, gfc_ss
**rss
)
3191 *lss
= gfc_walk_expr (expr1
);
3194 size
= gfc_index_one_node
;
3195 if (*lss
!= gfc_ss_terminator
)
3197 gfc_init_loopinfo (&loop
);
3199 /* Walk the RHS of the expression. */
3200 *rss
= gfc_walk_expr (expr2
);
3201 if (*rss
== gfc_ss_terminator
)
3202 /* The rhs is scalar. Add a ss for the expression. */
3203 *rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
3205 /* Associate the SS with the loop. */
3206 gfc_add_ss_to_loop (&loop
, *lss
);
3207 /* We don't actually need to add the rhs at this point, but it might
3208 make guessing the loop bounds a bit easier. */
3209 gfc_add_ss_to_loop (&loop
, *rss
);
3211 /* We only want the shape of the expression, not rest of the junk
3212 generated by the scalarizer. */
3213 loop
.array_parameter
= 1;
3215 /* Calculate the bounds of the scalarization. */
3216 save_flag
= gfc_option
.rtcheck
;
3217 gfc_option
.rtcheck
&= ~GFC_RTCHECK_BOUNDS
;
3218 gfc_conv_ss_startstride (&loop
);
3219 gfc_option
.rtcheck
= save_flag
;
3220 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3222 /* Figure out how many elements we need. */
3223 for (i
= 0; i
< loop
.dimen
; i
++)
3225 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
3226 gfc_array_index_type
,
3227 gfc_index_one_node
, loop
.from
[i
]);
3228 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3229 gfc_array_index_type
, tmp
, loop
.to
[i
]);
3230 size
= fold_build2_loc (input_location
, MULT_EXPR
,
3231 gfc_array_index_type
, size
, tmp
);
3233 gfc_add_block_to_block (pblock
, &loop
.pre
);
3234 size
= gfc_evaluate_now (size
, pblock
);
3235 gfc_add_block_to_block (pblock
, &loop
.post
);
3237 /* TODO: write a function that cleans up a loopinfo without freeing
3238 the SS chains. Currently a NOP. */
3245 /* Calculate the overall iterator number of the nested forall construct.
3246 This routine actually calculates the number of times the body of the
3247 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3248 that by the expression INNER_SIZE. The BLOCK argument specifies the
3249 block in which to calculate the result, and the optional INNER_SIZE_BODY
3250 argument contains any statements that need to executed (inside the loop)
3251 to initialize or calculate INNER_SIZE. */
3254 compute_overall_iter_number (forall_info
*nested_forall_info
, tree inner_size
,
3255 stmtblock_t
*inner_size_body
, stmtblock_t
*block
)
3257 forall_info
*forall_tmp
= nested_forall_info
;
3261 /* We can eliminate the innermost unconditional loops with constant
3263 if (INTEGER_CST_P (inner_size
))
3266 && !forall_tmp
->mask
3267 && INTEGER_CST_P (forall_tmp
->size
))
3269 inner_size
= fold_build2_loc (input_location
, MULT_EXPR
,
3270 gfc_array_index_type
,
3271 inner_size
, forall_tmp
->size
);
3272 forall_tmp
= forall_tmp
->prev_nest
;
3275 /* If there are no loops left, we have our constant result. */
3280 /* Otherwise, create a temporary variable to compute the result. */
3281 number
= gfc_create_var (gfc_array_index_type
, "num");
3282 gfc_add_modify (block
, number
, gfc_index_zero_node
);
3284 gfc_start_block (&body
);
3285 if (inner_size_body
)
3286 gfc_add_block_to_block (&body
, inner_size_body
);
3288 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3289 gfc_array_index_type
, number
, inner_size
);
3292 gfc_add_modify (&body
, number
, tmp
);
3293 tmp
= gfc_finish_block (&body
);
3295 /* Generate loops. */
3296 if (forall_tmp
!= NULL
)
3297 tmp
= gfc_trans_nested_forall_loop (forall_tmp
, tmp
, 1);
3299 gfc_add_expr_to_block (block
, tmp
);
3305 /* Allocate temporary for forall construct. SIZE is the size of temporary
3306 needed. PTEMP1 is returned for space free. */
3309 allocate_temp_for_forall_nest_1 (tree type
, tree size
, stmtblock_t
* block
,
3316 unit
= fold_convert (gfc_array_index_type
, TYPE_SIZE_UNIT (type
));
3317 if (!integer_onep (unit
))
3318 bytesize
= fold_build2_loc (input_location
, MULT_EXPR
,
3319 gfc_array_index_type
, size
, unit
);
3324 tmp
= gfc_do_allocate (bytesize
, size
, ptemp1
, block
, type
);
3327 tmp
= build_fold_indirect_ref_loc (input_location
, tmp
);
3332 /* Allocate temporary for forall construct according to the information in
3333 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3334 assignment inside forall. PTEMP1 is returned for space free. */
3337 allocate_temp_for_forall_nest (forall_info
* nested_forall_info
, tree type
,
3338 tree inner_size
, stmtblock_t
* inner_size_body
,
3339 stmtblock_t
* block
, tree
* ptemp1
)
3343 /* Calculate the total size of temporary needed in forall construct. */
3344 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
3345 inner_size_body
, block
);
3347 return allocate_temp_for_forall_nest_1 (type
, size
, block
, ptemp1
);
3351 /* Handle assignments inside forall which need temporary.
3353 forall (i=start:end:stride; maskexpr)
3356 (where e,f<i> are arbitrary expressions possibly involving i
3357 and there is a dependency between e<i> and f<i>)
3359 masktmp(:) = maskexpr(:)
3364 for (i = start; i <= end; i += stride)
3368 for (i = start; i <= end; i += stride)
3370 if (masktmp[maskindex++])
3371 tmp[count1++] = f<i>
3375 for (i = start; i <= end; i += stride)
3377 if (masktmp[maskindex++])
3378 e<i> = tmp[count1++]
3383 gfc_trans_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3384 tree wheremask
, bool invert
,
3385 forall_info
* nested_forall_info
,
3386 stmtblock_t
* block
)
3394 stmtblock_t inner_size_body
;
3396 /* Create vars. count1 is the current iterator number of the nested
3398 count1
= gfc_create_var (gfc_array_index_type
, "count1");
3400 /* Count is the wheremask index. */
3403 count
= gfc_create_var (gfc_array_index_type
, "count");
3404 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3409 /* Initialize count1. */
3410 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3412 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3413 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3414 gfc_init_block (&inner_size_body
);
3415 inner_size
= compute_inner_temp_size (expr1
, expr2
, &inner_size_body
,
3418 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3419 if (expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.u
.cl
->length
)
3421 if (!expr1
->ts
.u
.cl
->backend_decl
)
3424 gfc_init_se (&tse
, NULL
);
3425 gfc_conv_expr (&tse
, expr1
->ts
.u
.cl
->length
);
3426 expr1
->ts
.u
.cl
->backend_decl
= tse
.expr
;
3428 type
= gfc_get_character_type_len (gfc_default_character_kind
,
3429 expr1
->ts
.u
.cl
->backend_decl
);
3432 type
= gfc_typenode_for_spec (&expr1
->ts
);
3434 /* Allocate temporary for nested forall construct according to the
3435 information in nested_forall_info and inner_size. */
3436 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
, inner_size
,
3437 &inner_size_body
, block
, &ptemp1
);
3439 /* Generate codes to copy rhs to the temporary . */
3440 tmp
= generate_loop_for_rhs_to_temp (expr2
, tmp1
, count
, count1
, lss
, rss
,
3443 /* Generate body and loops according to the information in
3444 nested_forall_info. */
3445 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3446 gfc_add_expr_to_block (block
, tmp
);
3449 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3453 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3455 /* Generate codes to copy the temporary to lhs. */
3456 tmp
= generate_loop_for_temp_to_lhs (expr1
, tmp1
, count
, count1
,
3459 /* Generate body and loops according to the information in
3460 nested_forall_info. */
3461 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3462 gfc_add_expr_to_block (block
, tmp
);
3466 /* Free the temporary. */
3467 tmp
= gfc_call_free (ptemp1
);
3468 gfc_add_expr_to_block (block
, tmp
);
3473 /* Translate pointer assignment inside FORALL which need temporary. */
3476 gfc_trans_pointer_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3477 forall_info
* nested_forall_info
,
3478 stmtblock_t
* block
)
3485 gfc_array_info
*info
;
3492 tree tmp
, tmp1
, ptemp1
;
3494 count
= gfc_create_var (gfc_array_index_type
, "count");
3495 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3497 inner_size
= gfc_index_one_node
;
3498 lss
= gfc_walk_expr (expr1
);
3499 rss
= gfc_walk_expr (expr2
);
3500 if (lss
== gfc_ss_terminator
)
3502 type
= gfc_typenode_for_spec (&expr1
->ts
);
3503 type
= build_pointer_type (type
);
3505 /* Allocate temporary for nested forall construct according to the
3506 information in nested_forall_info and inner_size. */
3507 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
,
3508 inner_size
, NULL
, block
, &ptemp1
);
3509 gfc_start_block (&body
);
3510 gfc_init_se (&lse
, NULL
);
3511 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3512 gfc_init_se (&rse
, NULL
);
3513 rse
.want_pointer
= 1;
3514 gfc_conv_expr (&rse
, expr2
);
3515 gfc_add_block_to_block (&body
, &rse
.pre
);
3516 gfc_add_modify (&body
, lse
.expr
,
3517 fold_convert (TREE_TYPE (lse
.expr
), rse
.expr
));
3518 gfc_add_block_to_block (&body
, &rse
.post
);
3520 /* Increment count. */
3521 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3522 count
, gfc_index_one_node
);
3523 gfc_add_modify (&body
, count
, tmp
);
3525 tmp
= gfc_finish_block (&body
);
3527 /* Generate body and loops according to the information in
3528 nested_forall_info. */
3529 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3530 gfc_add_expr_to_block (block
, tmp
);
3533 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3535 gfc_start_block (&body
);
3536 gfc_init_se (&lse
, NULL
);
3537 gfc_init_se (&rse
, NULL
);
3538 rse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3539 lse
.want_pointer
= 1;
3540 gfc_conv_expr (&lse
, expr1
);
3541 gfc_add_block_to_block (&body
, &lse
.pre
);
3542 gfc_add_modify (&body
, lse
.expr
, rse
.expr
);
3543 gfc_add_block_to_block (&body
, &lse
.post
);
3544 /* Increment count. */
3545 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3546 count
, gfc_index_one_node
);
3547 gfc_add_modify (&body
, count
, tmp
);
3548 tmp
= gfc_finish_block (&body
);
3550 /* Generate body and loops according to the information in
3551 nested_forall_info. */
3552 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3553 gfc_add_expr_to_block (block
, tmp
);
3557 gfc_init_loopinfo (&loop
);
3559 /* Associate the SS with the loop. */
3560 gfc_add_ss_to_loop (&loop
, rss
);
3562 /* Setup the scalarizing loops and bounds. */
3563 gfc_conv_ss_startstride (&loop
);
3565 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3567 info
= &rss
->info
->data
.array
;
3568 desc
= info
->descriptor
;
3570 /* Make a new descriptor. */
3571 parmtype
= gfc_get_element_type (TREE_TYPE (desc
));
3572 parmtype
= gfc_get_array_type_bounds (parmtype
, loop
.dimen
, 0,
3573 loop
.from
, loop
.to
, 1,
3574 GFC_ARRAY_UNKNOWN
, true);
3576 /* Allocate temporary for nested forall construct. */
3577 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, parmtype
,
3578 inner_size
, NULL
, block
, &ptemp1
);
3579 gfc_start_block (&body
);
3580 gfc_init_se (&lse
, NULL
);
3581 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3582 lse
.direct_byref
= 1;
3583 gfc_conv_expr_descriptor (&lse
, expr2
);
3585 gfc_add_block_to_block (&body
, &lse
.pre
);
3586 gfc_add_block_to_block (&body
, &lse
.post
);
3588 /* Increment count. */
3589 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3590 count
, gfc_index_one_node
);
3591 gfc_add_modify (&body
, count
, tmp
);
3593 tmp
= gfc_finish_block (&body
);
3595 /* Generate body and loops according to the information in
3596 nested_forall_info. */
3597 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3598 gfc_add_expr_to_block (block
, tmp
);
3601 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3603 parm
= gfc_build_array_ref (tmp1
, count
, NULL
);
3604 gfc_init_se (&lse
, NULL
);
3605 gfc_conv_expr_descriptor (&lse
, expr1
);
3606 gfc_add_modify (&lse
.pre
, lse
.expr
, parm
);
3607 gfc_start_block (&body
);
3608 gfc_add_block_to_block (&body
, &lse
.pre
);
3609 gfc_add_block_to_block (&body
, &lse
.post
);
3611 /* Increment count. */
3612 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3613 count
, gfc_index_one_node
);
3614 gfc_add_modify (&body
, count
, tmp
);
3616 tmp
= gfc_finish_block (&body
);
3618 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3619 gfc_add_expr_to_block (block
, tmp
);
3621 /* Free the temporary. */
3624 tmp
= gfc_call_free (ptemp1
);
3625 gfc_add_expr_to_block (block
, tmp
);
3630 /* FORALL and WHERE statements are really nasty, especially when you nest
3631 them. All the rhs of a forall assignment must be evaluated before the
3632 actual assignments are performed. Presumably this also applies to all the
3633 assignments in an inner where statement. */
3635 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3636 linear array, relying on the fact that we process in the same order in all
3639 forall (i=start:end:stride; maskexpr)
3643 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3645 count = ((end + 1 - start) / stride)
3646 masktmp(:) = maskexpr(:)
3649 for (i = start; i <= end; i += stride)
3651 if (masktmp[maskindex++])
3655 for (i = start; i <= end; i += stride)
3657 if (masktmp[maskindex++])
3661 Note that this code only works when there are no dependencies.
3662 Forall loop with array assignments and data dependencies are a real pain,
3663 because the size of the temporary cannot always be determined before the
3664 loop is executed. This problem is compounded by the presence of nested
3669 gfc_trans_forall_1 (gfc_code
* code
, forall_info
* nested_forall_info
)
3686 tree cycle_label
= NULL_TREE
;
3690 gfc_forall_iterator
*fa
;
3693 gfc_saved_var
*saved_vars
;
3694 iter_info
*this_forall
;
3698 /* Do nothing if the mask is false. */
3700 && code
->expr1
->expr_type
== EXPR_CONSTANT
3701 && !code
->expr1
->value
.logical
)
3702 return build_empty_stmt (input_location
);
3705 /* Count the FORALL index number. */
3706 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3710 /* Allocate the space for var, start, end, step, varexpr. */
3711 var
= XCNEWVEC (tree
, nvar
);
3712 start
= XCNEWVEC (tree
, nvar
);
3713 end
= XCNEWVEC (tree
, nvar
);
3714 step
= XCNEWVEC (tree
, nvar
);
3715 varexpr
= XCNEWVEC (gfc_expr
*, nvar
);
3716 saved_vars
= XCNEWVEC (gfc_saved_var
, nvar
);
3718 /* Allocate the space for info. */
3719 info
= XCNEW (forall_info
);
3721 gfc_start_block (&pre
);
3722 gfc_init_block (&post
);
3723 gfc_init_block (&block
);
3726 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3728 gfc_symbol
*sym
= fa
->var
->symtree
->n
.sym
;
3730 /* Allocate space for this_forall. */
3731 this_forall
= XCNEW (iter_info
);
3733 /* Create a temporary variable for the FORALL index. */
3734 tmp
= gfc_typenode_for_spec (&sym
->ts
);
3735 var
[n
] = gfc_create_var (tmp
, sym
->name
);
3736 gfc_shadow_sym (sym
, var
[n
], &saved_vars
[n
]);
3738 /* Record it in this_forall. */
3739 this_forall
->var
= var
[n
];
3741 /* Replace the index symbol's backend_decl with the temporary decl. */
3742 sym
->backend_decl
= var
[n
];
3744 /* Work out the start, end and stride for the loop. */
3745 gfc_init_se (&se
, NULL
);
3746 gfc_conv_expr_val (&se
, fa
->start
);
3747 /* Record it in this_forall. */
3748 this_forall
->start
= se
.expr
;
3749 gfc_add_block_to_block (&block
, &se
.pre
);
3752 gfc_init_se (&se
, NULL
);
3753 gfc_conv_expr_val (&se
, fa
->end
);
3754 /* Record it in this_forall. */
3755 this_forall
->end
= se
.expr
;
3756 gfc_make_safe_expr (&se
);
3757 gfc_add_block_to_block (&block
, &se
.pre
);
3760 gfc_init_se (&se
, NULL
);
3761 gfc_conv_expr_val (&se
, fa
->stride
);
3762 /* Record it in this_forall. */
3763 this_forall
->step
= se
.expr
;
3764 gfc_make_safe_expr (&se
);
3765 gfc_add_block_to_block (&block
, &se
.pre
);
3768 /* Set the NEXT field of this_forall to NULL. */
3769 this_forall
->next
= NULL
;
3770 /* Link this_forall to the info construct. */
3771 if (info
->this_loop
)
3773 iter_info
*iter_tmp
= info
->this_loop
;
3774 while (iter_tmp
->next
!= NULL
)
3775 iter_tmp
= iter_tmp
->next
;
3776 iter_tmp
->next
= this_forall
;
3779 info
->this_loop
= this_forall
;
3785 /* Calculate the size needed for the current forall level. */
3786 size
= gfc_index_one_node
;
3787 for (n
= 0; n
< nvar
; n
++)
3789 /* size = (end + step - start) / step. */
3790 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (start
[n
]),
3792 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (end
[n
]),
3794 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
, TREE_TYPE (tmp
),
3796 tmp
= convert (gfc_array_index_type
, tmp
);
3798 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
3802 /* Record the nvar and size of current forall level. */
3808 /* If the mask is .true., consider the FORALL unconditional. */
3809 if (code
->expr1
->expr_type
== EXPR_CONSTANT
3810 && code
->expr1
->value
.logical
)
3818 /* First we need to allocate the mask. */
3821 /* As the mask array can be very big, prefer compact boolean types. */
3822 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3823 mask
= allocate_temp_for_forall_nest (nested_forall_info
, mask_type
,
3824 size
, NULL
, &block
, &pmask
);
3825 maskindex
= gfc_create_var_np (gfc_array_index_type
, "mi");
3827 /* Record them in the info structure. */
3828 info
->maskindex
= maskindex
;
3833 /* No mask was specified. */
3834 maskindex
= NULL_TREE
;
3835 mask
= pmask
= NULL_TREE
;
3838 /* Link the current forall level to nested_forall_info. */
3839 info
->prev_nest
= nested_forall_info
;
3840 nested_forall_info
= info
;
3842 /* Copy the mask into a temporary variable if required.
3843 For now we assume a mask temporary is needed. */
3846 /* As the mask array can be very big, prefer compact boolean types. */
3847 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3849 gfc_add_modify (&block
, maskindex
, gfc_index_zero_node
);
3851 /* Start of mask assignment loop body. */
3852 gfc_start_block (&body
);
3854 /* Evaluate the mask expression. */
3855 gfc_init_se (&se
, NULL
);
3856 gfc_conv_expr_val (&se
, code
->expr1
);
3857 gfc_add_block_to_block (&body
, &se
.pre
);
3859 /* Store the mask. */
3860 se
.expr
= convert (mask_type
, se
.expr
);
3862 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
3863 gfc_add_modify (&body
, tmp
, se
.expr
);
3865 /* Advance to the next mask element. */
3866 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3867 maskindex
, gfc_index_one_node
);
3868 gfc_add_modify (&body
, maskindex
, tmp
);
3870 /* Generate the loops. */
3871 tmp
= gfc_finish_block (&body
);
3872 tmp
= gfc_trans_nested_forall_loop (info
, tmp
, 0);
3873 gfc_add_expr_to_block (&block
, tmp
);
3876 if (code
->op
== EXEC_DO_CONCURRENT
)
3878 gfc_init_block (&body
);
3879 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3880 code
->cycle_label
= cycle_label
;
3881 tmp
= gfc_trans_code (code
->block
->next
);
3882 gfc_add_expr_to_block (&body
, tmp
);
3884 if (TREE_USED (cycle_label
))
3886 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3887 gfc_add_expr_to_block (&body
, tmp
);
3890 tmp
= gfc_finish_block (&body
);
3891 nested_forall_info
->do_concurrent
= true;
3892 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3893 gfc_add_expr_to_block (&block
, tmp
);
3897 c
= code
->block
->next
;
3899 /* TODO: loop merging in FORALL statements. */
3900 /* Now that we've got a copy of the mask, generate the assignment loops. */
3906 /* A scalar or array assignment. DO the simple check for
3907 lhs to rhs dependencies. These make a temporary for the
3908 rhs and form a second forall block to copy to variable. */
3909 need_temp
= check_forall_dependencies(c
, &pre
, &post
);
3911 /* Temporaries due to array assignment data dependencies introduce
3912 no end of problems. */
3914 gfc_trans_assign_need_temp (c
->expr1
, c
->expr2
, NULL
, false,
3915 nested_forall_info
, &block
);
3918 /* Use the normal assignment copying routines. */
3919 assign
= gfc_trans_assignment (c
->expr1
, c
->expr2
, false, true);
3921 /* Generate body and loops. */
3922 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3924 gfc_add_expr_to_block (&block
, tmp
);
3927 /* Cleanup any temporary symtrees that have been made to deal
3928 with dependencies. */
3930 cleanup_forall_symtrees (c
);
3935 /* Translate WHERE or WHERE construct nested in FORALL. */
3936 gfc_trans_where_2 (c
, NULL
, false, nested_forall_info
, &block
);
3939 /* Pointer assignment inside FORALL. */
3940 case EXEC_POINTER_ASSIGN
:
3941 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
3943 gfc_trans_pointer_assign_need_temp (c
->expr1
, c
->expr2
,
3944 nested_forall_info
, &block
);
3947 /* Use the normal assignment copying routines. */
3948 assign
= gfc_trans_pointer_assignment (c
->expr1
, c
->expr2
);
3950 /* Generate body and loops. */
3951 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3953 gfc_add_expr_to_block (&block
, tmp
);
3958 tmp
= gfc_trans_forall_1 (c
, nested_forall_info
);
3959 gfc_add_expr_to_block (&block
, tmp
);
3962 /* Explicit subroutine calls are prevented by the frontend but interface
3963 assignments can legitimately produce them. */
3964 case EXEC_ASSIGN_CALL
:
3965 assign
= gfc_trans_call (c
, true, NULL_TREE
, NULL_TREE
, false);
3966 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, assign
, 1);
3967 gfc_add_expr_to_block (&block
, tmp
);
3978 /* Restore the original index variables. */
3979 for (fa
= code
->ext
.forall_iterator
, n
= 0; fa
; fa
= fa
->next
, n
++)
3980 gfc_restore_sym (fa
->var
->symtree
->n
.sym
, &saved_vars
[n
]);
3982 /* Free the space for var, start, end, step, varexpr. */
3990 for (this_forall
= info
->this_loop
; this_forall
;)
3992 iter_info
*next
= this_forall
->next
;
3997 /* Free the space for this forall_info. */
4002 /* Free the temporary for the mask. */
4003 tmp
= gfc_call_free (pmask
);
4004 gfc_add_expr_to_block (&block
, tmp
);
4007 pushdecl (maskindex
);
4009 gfc_add_block_to_block (&pre
, &block
);
4010 gfc_add_block_to_block (&pre
, &post
);
4012 return gfc_finish_block (&pre
);
4016 /* Translate the FORALL statement or construct. */
4018 tree
gfc_trans_forall (gfc_code
* code
)
4020 return gfc_trans_forall_1 (code
, NULL
);
4024 /* Translate the DO CONCURRENT construct. */
4026 tree
gfc_trans_do_concurrent (gfc_code
* code
)
4028 return gfc_trans_forall_1 (code
, NULL
);
4032 /* Evaluate the WHERE mask expression, copy its value to a temporary.
4033 If the WHERE construct is nested in FORALL, compute the overall temporary
4034 needed by the WHERE mask expression multiplied by the iterator number of
4036 ME is the WHERE mask expression.
4037 MASK is the current execution mask upon input, whose sense may or may
4038 not be inverted as specified by the INVERT argument.
4039 CMASK is the updated execution mask on output, or NULL if not required.
4040 PMASK is the pending execution mask on output, or NULL if not required.
4041 BLOCK is the block in which to place the condition evaluation loops. */
4044 gfc_evaluate_where_mask (gfc_expr
* me
, forall_info
* nested_forall_info
,
4045 tree mask
, bool invert
, tree cmask
, tree pmask
,
4046 tree mask_type
, stmtblock_t
* block
)
4051 stmtblock_t body
, body1
;
4052 tree count
, cond
, mtmp
;
4055 gfc_init_loopinfo (&loop
);
4057 lss
= gfc_walk_expr (me
);
4058 rss
= gfc_walk_expr (me
);
4060 /* Variable to index the temporary. */
4061 count
= gfc_create_var (gfc_array_index_type
, "count");
4062 /* Initialize count. */
4063 gfc_add_modify (block
, count
, gfc_index_zero_node
);
4065 gfc_start_block (&body
);
4067 gfc_init_se (&rse
, NULL
);
4068 gfc_init_se (&lse
, NULL
);
4070 if (lss
== gfc_ss_terminator
)
4072 gfc_init_block (&body1
);
4076 /* Initialize the loop. */
4077 gfc_init_loopinfo (&loop
);
4079 /* We may need LSS to determine the shape of the expression. */
4080 gfc_add_ss_to_loop (&loop
, lss
);
4081 gfc_add_ss_to_loop (&loop
, rss
);
4083 gfc_conv_ss_startstride (&loop
);
4084 gfc_conv_loop_setup (&loop
, &me
->where
);
4086 gfc_mark_ss_chain_used (rss
, 1);
4087 /* Start the loop body. */
4088 gfc_start_scalarized_body (&loop
, &body1
);
4090 /* Translate the expression. */
4091 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4093 gfc_conv_expr (&rse
, me
);
4096 /* Variable to evaluate mask condition. */
4097 cond
= gfc_create_var (mask_type
, "cond");
4098 if (mask
&& (cmask
|| pmask
))
4099 mtmp
= gfc_create_var (mask_type
, "mask");
4100 else mtmp
= NULL_TREE
;
4102 gfc_add_block_to_block (&body1
, &lse
.pre
);
4103 gfc_add_block_to_block (&body1
, &rse
.pre
);
4105 gfc_add_modify (&body1
, cond
, fold_convert (mask_type
, rse
.expr
));
4107 if (mask
&& (cmask
|| pmask
))
4109 tmp
= gfc_build_array_ref (mask
, count
, NULL
);
4111 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, tmp
);
4112 gfc_add_modify (&body1
, mtmp
, tmp
);
4117 tmp1
= gfc_build_array_ref (cmask
, count
, NULL
);
4120 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
,
4122 gfc_add_modify (&body1
, tmp1
, tmp
);
4127 tmp1
= gfc_build_array_ref (pmask
, count
, NULL
);
4128 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, cond
);
4130 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
, mtmp
,
4132 gfc_add_modify (&body1
, tmp1
, tmp
);
4135 gfc_add_block_to_block (&body1
, &lse
.post
);
4136 gfc_add_block_to_block (&body1
, &rse
.post
);
4138 if (lss
== gfc_ss_terminator
)
4140 gfc_add_block_to_block (&body
, &body1
);
4144 /* Increment count. */
4145 tmp1
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4146 count
, gfc_index_one_node
);
4147 gfc_add_modify (&body1
, count
, tmp1
);
4149 /* Generate the copying loops. */
4150 gfc_trans_scalarizing_loops (&loop
, &body1
);
4152 gfc_add_block_to_block (&body
, &loop
.pre
);
4153 gfc_add_block_to_block (&body
, &loop
.post
);
4155 gfc_cleanup_loop (&loop
);
4156 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
4157 as tree nodes in SS may not be valid in different scope. */
4160 tmp1
= gfc_finish_block (&body
);
4161 /* If the WHERE construct is inside FORALL, fill the full temporary. */
4162 if (nested_forall_info
!= NULL
)
4163 tmp1
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp1
, 1);
4165 gfc_add_expr_to_block (block
, tmp1
);
4169 /* Translate an assignment statement in a WHERE statement or construct
4170 statement. The MASK expression is used to control which elements
4171 of EXPR1 shall be assigned. The sense of MASK is specified by
4175 gfc_trans_where_assign (gfc_expr
*expr1
, gfc_expr
*expr2
,
4176 tree mask
, bool invert
,
4177 tree count1
, tree count2
,
4183 gfc_ss
*lss_section
;
4190 tree index
, maskexpr
;
4192 /* A defined assignment. */
4193 if (cnext
&& cnext
->resolved_sym
)
4194 return gfc_trans_call (cnext
, true, mask
, count1
, invert
);
4197 /* TODO: handle this special case.
4198 Special case a single function returning an array. */
4199 if (expr2
->expr_type
== EXPR_FUNCTION
&& expr2
->rank
> 0)
4201 tmp
= gfc_trans_arrayfunc_assign (expr1
, expr2
);
4207 /* Assignment of the form lhs = rhs. */
4208 gfc_start_block (&block
);
4210 gfc_init_se (&lse
, NULL
);
4211 gfc_init_se (&rse
, NULL
);
4214 lss
= gfc_walk_expr (expr1
);
4217 /* In each where-assign-stmt, the mask-expr and the variable being
4218 defined shall be arrays of the same shape. */
4219 gcc_assert (lss
!= gfc_ss_terminator
);
4221 /* The assignment needs scalarization. */
4224 /* Find a non-scalar SS from the lhs. */
4225 while (lss_section
!= gfc_ss_terminator
4226 && lss_section
->info
->type
!= GFC_SS_SECTION
)
4227 lss_section
= lss_section
->next
;
4229 gcc_assert (lss_section
!= gfc_ss_terminator
);
4231 /* Initialize the scalarizer. */
4232 gfc_init_loopinfo (&loop
);
4235 rss
= gfc_walk_expr (expr2
);
4236 if (rss
== gfc_ss_terminator
)
4238 /* The rhs is scalar. Add a ss for the expression. */
4239 rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
4240 rss
->info
->where
= 1;
4243 /* Associate the SS with the loop. */
4244 gfc_add_ss_to_loop (&loop
, lss
);
4245 gfc_add_ss_to_loop (&loop
, rss
);
4247 /* Calculate the bounds of the scalarization. */
4248 gfc_conv_ss_startstride (&loop
);
4250 /* Resolve any data dependencies in the statement. */
4251 gfc_conv_resolve_dependencies (&loop
, lss_section
, rss
);
4253 /* Setup the scalarizing loops. */
4254 gfc_conv_loop_setup (&loop
, &expr2
->where
);
4256 /* Setup the gfc_se structures. */
4257 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4258 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4261 gfc_mark_ss_chain_used (rss
, 1);
4262 if (loop
.temp_ss
== NULL
)
4265 gfc_mark_ss_chain_used (lss
, 1);
4269 lse
.ss
= loop
.temp_ss
;
4270 gfc_mark_ss_chain_used (lss
, 3);
4271 gfc_mark_ss_chain_used (loop
.temp_ss
, 3);
4274 /* Start the scalarized loop body. */
4275 gfc_start_scalarized_body (&loop
, &body
);
4277 /* Translate the expression. */
4278 gfc_conv_expr (&rse
, expr2
);
4279 if (lss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4280 gfc_conv_tmp_array_ref (&lse
);
4282 gfc_conv_expr (&lse
, expr1
);
  /* Form the mask expression according to the mask.  */
  index = count1;
  maskexpr = gfc_build_array_ref (mask, index, NULL);
  if (invert)
    maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
				TREE_TYPE (maskexpr), maskexpr);

  /* Use the scalar assignment as is.  */
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
				 loop.temp_ss != NULL, false, true);

  tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));

  gfc_add_expr_to_block (&body, tmp);

  if (lss == gfc_ss_terminator)
    {
      /* Increment count1.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     count1, gfc_index_one_node);
      gfc_add_modify (&body, count1, tmp);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &body);
    }
  else
    {
      gcc_assert (lse.ss == gfc_ss_terminator
		  && rse.ss == gfc_ss_terminator);

      if (loop.temp_ss != NULL)
	{
	  /* Increment count1 before finishing the main body of a
	     scalarized expression.  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, count1,
				 gfc_index_one_node);
	  gfc_add_modify (&body, count1, tmp);
	  gfc_trans_scalarized_loop_boundary (&loop, &body);

	  /* We need to copy the temporary to the actual lhs.  */
	  gfc_init_se (&lse, NULL);
	  gfc_init_se (&rse, NULL);
	  gfc_copy_loopinfo_to_se (&lse, &loop);
	  gfc_copy_loopinfo_to_se (&rse, &loop);

	  rse.ss = loop.temp_ss;
	  lse.ss = lss;

	  gfc_conv_tmp_array_ref (&rse);
	  gfc_conv_expr (&lse, expr1);

	  gcc_assert (lse.ss == gfc_ss_terminator
		      && rse.ss == gfc_ss_terminator);

	  /* Form the mask expression according to the mask tree list.  */
	  index = count2;
	  maskexpr = gfc_build_array_ref (mask, index, NULL);
	  if (invert)
	    maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
					TREE_TYPE (maskexpr), maskexpr);

	  /* Use the scalar assignment as is.  */
	  tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
					 true);
	  tmp = build3_v (COND_EXPR, maskexpr, tmp,
			  build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&body, tmp);

	  /* Increment count2.  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, count2,
				 gfc_index_one_node);
	  gfc_add_modify (&body, count2, tmp);
	}
      else
	{
	  /* Increment count1.  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, count1,
				 gfc_index_one_node);
	  gfc_add_modify (&body, count1, tmp);
	}

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body);

      /* Wrap the whole thing up.  */
      gfc_add_block_to_block (&block, &loop.pre);
      gfc_add_block_to_block (&block, &loop.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&block);
}
/* Translate the WHERE construct or statement.
   This function can be called iteratively to translate the nested WHERE
   construct or statement.
   MASK is the control mask.  */
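/* Illustrative sketch, not from the original source: for a construct such as

       WHERE (a > 0)
         b = 1
       ELSEWHERE (a < 0)
         b = -1
       ELSEWHERE
         b = 0
       END WHERE

   the current control mask (CMASK) selects the elements assigned by the
   active clause, while the pending control mask (PMASK) records the
   elements still eligible for the remaining ELSEWHERE clauses.  */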
static void
gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
		   forall_info * nested_forall_info, stmtblock_t * block)
{
  stmtblock_t inner_size_body;
  tree inner_size, size;
  gfc_ss *lss, *rss;
  tree mask_type;
  gfc_expr *expr1, *expr2;
  gfc_code *cblock, *cnext;
  tree tmp, cond;
  tree count1, count2;
  bool need_cmask;
  bool need_pmask;
  int need_temp;
  tree pcmask = NULL_TREE;
  tree ppmask = NULL_TREE;
  tree cmask = NULL_TREE;
  tree pmask = NULL_TREE;
  gfc_actual_arglist *arg;

  /* The WHERE statement or the WHERE construct statement.  */
  cblock = code->block;

  /* As the mask array can be very big, prefer compact boolean types.  */
  mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);

  /* Determine which temporary masks are needed.  */
  if (!cblock->block)
    {
      /* One clause: No ELSEWHEREs.  */
      need_cmask = (cblock->next != 0);
      need_pmask = false;
    }
  else if (cblock->block->block)
    {
      /* Three or more clauses: Conditional ELSEWHEREs.  */
      need_cmask = true;
      need_pmask = true;
    }
  else if (cblock->next)
    {
      /* Two clauses, the first non-empty.  */
      need_cmask = true;
      need_pmask = (mask != NULL_TREE
		    && cblock->block->next != 0);
    }
  else if (!cblock->block->next)
    {
      /* Two clauses, both empty.  */
      need_cmask = false;
      need_pmask = false;
    }
  /* Two clauses, the first empty, the second non-empty.  */
  else
    {
      need_cmask = (cblock->block->expr1 != 0);
    }

  if (need_cmask || need_pmask)
    {
      /* Calculate the size of temporary needed by the mask-expr.  */
      gfc_init_block (&inner_size_body);
      inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
					    &inner_size_body, &lss, &rss);

      gfc_free_ss_chain (lss);
      gfc_free_ss_chain (rss);

      /* Calculate the total size of temporary needed.  */
      size = compute_overall_iter_number (nested_forall_info, inner_size,
					  &inner_size_body, block);

      /* Check whether the size is negative.  */
      cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
			      gfc_index_zero_node);
      size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
			      cond, gfc_index_zero_node, size);
      size = gfc_evaluate_now (size, block);

      /* Allocate temporary for WHERE mask if needed.  */
      if (need_cmask)
	cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
						 &pcmask);

      /* Allocate temporary for !mask if needed.  */
      if (need_pmask)
	pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
						 &ppmask);
    }

  while (cblock)
    {
      /* Each time around this loop, the where clause is conditional
	 on the value of mask and invert, which are updated at the
	 bottom of the loop.  */

      /* Has mask-expr.  */
      if (cblock->expr1)
	{
	  /* Ensure that the WHERE mask will be evaluated exactly once.
	     If there are no statements in this WHERE/ELSEWHERE clause,
	     then we don't need to update the control mask (cmask).
	     If this is the last clause of the WHERE construct, then
	     we don't need to update the pending control mask (pmask).  */
	  if (mask)
	    gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
				     mask, invert,
				     cblock->next ? cmask : NULL_TREE,
				     cblock->block ? pmask : NULL_TREE,
				     mask_type, block);
	  else
	    gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
				     NULL_TREE, false,
				     (cblock->next || cblock->block)
				     ? cmask : NULL_TREE,
				     NULL_TREE, mask_type, block);
	}
      /* It's a final elsewhere-stmt.  No mask-expr is present.  */

      /* The body of this where clause is controlled by cmask with
	 sense specified by invert.  */

      /* Get the assignment statement of a WHERE statement, or the first
	 statement in where-body-construct of a WHERE construct.  */
      cnext = cblock->next;
      while (cnext)
	{
	  switch (cnext->op)
	    {
	    /* WHERE assignment statement.  */
	    case EXEC_ASSIGN_CALL:

	      arg = cnext->ext.actual;
	      expr1 = expr2 = NULL;
	      for (; arg; arg = arg->next)
		{
		  if (!arg->expr)
		    continue;
		  if (expr1 == NULL)
		    expr1 = arg->expr;
		  else
		    expr2 = arg->expr;
		}
	      goto evaluate;

	    case EXEC_ASSIGN:
	      expr1 = cnext->expr1;
	      expr2 = cnext->expr2;
    evaluate:
	      if (nested_forall_info != NULL)
		{
		  need_temp = gfc_check_dependency (expr1, expr2, 0);
		  if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
		    gfc_trans_assign_need_temp (expr1, expr2,
						cmask, invert,
						nested_forall_info, block);
		  else
		    {
		      /* Variables to control maskexpr.  */
		      count1 = gfc_create_var (gfc_array_index_type, "count1");
		      count2 = gfc_create_var (gfc_array_index_type, "count2");
		      gfc_add_modify (block, count1, gfc_index_zero_node);
		      gfc_add_modify (block, count2, gfc_index_zero_node);

		      tmp = gfc_trans_where_assign (expr1, expr2,
						    cmask, invert,
						    count1, count2,
						    cnext);

		      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
							  tmp, 1);
		      gfc_add_expr_to_block (block, tmp);
		    }
		}
	      else
		{
		  /* Variables to control maskexpr.  */
		  count1 = gfc_create_var (gfc_array_index_type, "count1");
		  count2 = gfc_create_var (gfc_array_index_type, "count2");
		  gfc_add_modify (block, count1, gfc_index_zero_node);
		  gfc_add_modify (block, count2, gfc_index_zero_node);

		  tmp = gfc_trans_where_assign (expr1, expr2,
						cmask, invert,
						count1, count2,
						cnext);

		  gfc_add_expr_to_block (block, tmp);
		}
	      break;
	    /* WHERE or WHERE construct is part of a where-body-construct.  */
	    case EXEC_WHERE:
	      gfc_trans_where_2 (cnext, cmask, invert,
				 nested_forall_info, block);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* The next statement within the same where-body-construct.  */
	  cnext = cnext->next;
	}
      /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt.  */
      cblock = cblock->block;
      if (mask == NULL_TREE)
	{
	  /* If we're the initial WHERE, we can simply invert the sense
	     of the current mask to obtain the "mask" for the remaining
	     ELSEWHEREs.  */
	  invert = true;
	  mask = cmask;
	}
      else
	{
	  /* Otherwise, for nested WHERE's we need to use the pending mask.  */
	  invert = false;
	  mask = pmask;
	}
    }

  /* If we allocated a pending mask array, deallocate it now.  */
  if (ppmask)
    {
      tmp = gfc_call_free (ppmask);
      gfc_add_expr_to_block (block, tmp);
    }

  /* If we allocated a current mask array, deallocate it now.  */
  if (pcmask)
    {
      tmp = gfc_call_free (pcmask);
      gfc_add_expr_to_block (block, tmp);
    }
}
/* Translate a simple WHERE construct or statement without dependencies.
   CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
   is the mask condition, and EBLOCK if non-NULL is the "else" clause.
   Currently both CBLOCK and EBLOCK are restricted to single assignments.  */
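/* Illustrative example, not from the original source, of the restricted
   form handled here:

       WHERE (a > 0.0)
         a = b
       ELSEWHERE
         a = c
       END WHERE

   Because each branch is a single assignment, the two masked loops required
   by the standard can be coalesced into one scalarized loop containing a
   conditional assignment.  */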
static tree
gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
{
  stmtblock_t block, body;
  gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
  tree tmp, cexpr, tstmt, estmt;
  gfc_ss *css, *tdss, *tsss;
  gfc_se cse, tdse, tsse, edse, esse;
  gfc_loopinfo loop;
  gfc_ss *edss = 0;
  gfc_ss *esss = 0;

  /* Allow the scalarizer to workshare simple where loops.  */
  if (ompws_flags & OMPWS_WORKSHARE_FLAG)
    ompws_flags |= OMPWS_SCALARIZER_WS;

  cond = cblock->expr1;
  tdst = cblock->next->expr1;
  tsrc = cblock->next->expr2;
  edst = eblock ? eblock->next->expr1 : NULL;
  esrc = eblock ? eblock->next->expr2 : NULL;

  gfc_start_block (&block);
  gfc_init_loopinfo (&loop);

  /* Handle the condition.  */
  gfc_init_se (&cse, NULL);
  css = gfc_walk_expr (cond);
  gfc_add_ss_to_loop (&loop, css);

  /* Handle the then-clause.  */
  gfc_init_se (&tdse, NULL);
  gfc_init_se (&tsse, NULL);
  tdss = gfc_walk_expr (tdst);
  tsss = gfc_walk_expr (tsrc);
  if (tsss == gfc_ss_terminator)
    {
      tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc);
      tsss->info->where = 1;
    }
  gfc_add_ss_to_loop (&loop, tdss);
  gfc_add_ss_to_loop (&loop, tsss);

  if (eblock)
    {
      /* Handle the else clause.  */
      gfc_init_se (&edse, NULL);
      gfc_init_se (&esse, NULL);
      edss = gfc_walk_expr (edst);
      esss = gfc_walk_expr (esrc);
      if (esss == gfc_ss_terminator)
	{
	  esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc);
	  esss->info->where = 1;
	}
      gfc_add_ss_to_loop (&loop, edss);
      gfc_add_ss_to_loop (&loop, esss);
    }

  gfc_conv_ss_startstride (&loop);
  gfc_conv_loop_setup (&loop, &tdst->where);

  gfc_mark_ss_chain_used (css, 1);
  gfc_mark_ss_chain_used (tdss, 1);
  gfc_mark_ss_chain_used (tsss, 1);
  if (eblock)
    {
      gfc_mark_ss_chain_used (edss, 1);
      gfc_mark_ss_chain_used (esss, 1);
    }

  gfc_start_scalarized_body (&loop, &body);

  gfc_copy_loopinfo_to_se (&cse, &loop);
  gfc_copy_loopinfo_to_se (&tdse, &loop);
  gfc_copy_loopinfo_to_se (&tsse, &loop);
  if (eblock)
    {
      gfc_copy_loopinfo_to_se (&edse, &loop);
      gfc_copy_loopinfo_to_se (&esse, &loop);
    }

  gfc_conv_expr (&cse, cond);
  gfc_add_block_to_block (&body, &cse.pre);
  cexpr = cse.expr;

  gfc_conv_expr (&tsse, tsrc);
  if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
    gfc_conv_tmp_array_ref (&tdse);
  else
    gfc_conv_expr (&tdse, tdst);

  if (eblock)
    {
      gfc_conv_expr (&esse, esrc);
      if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
	gfc_conv_tmp_array_ref (&edse);
      else
	gfc_conv_expr (&edse, edst);
    }

  tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
  estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
					    false, true)
		 : build_empty_stmt (input_location);
  tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
  gfc_add_expr_to_block (&body, tmp);
  gfc_add_block_to_block (&body, &cse.post);

  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&block, &loop.pre);
  gfc_add_block_to_block (&block, &loop.post);
  gfc_cleanup_loop (&loop);

  return gfc_finish_block (&block);
}
/* As the WHERE or WHERE construct statement can be nested, we call
   gfc_trans_where_2 to do the translation, and pass the initial
   NULL values for both the control mask and the pending control mask.  */
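/* Illustrative sketch, not from the original source: a statement such as

       WHERE (a > 0.0) a = b

   can normally take the fast path through gfc_trans_where_3, whereas
   something like

       WHERE (a > 0.0) a = a(n:1:-1)

   has a dependence between destination and source (caught by the
   gfc_check_dependency tests below) and falls back to the general
   gfc_trans_where_2 machinery with explicit mask temporaries.  */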
tree
gfc_trans_where (gfc_code * code)
{
  stmtblock_t block;
  gfc_code *cblock;
  gfc_code *eblock;

  cblock = code->block;
  if (cblock->next
      && cblock->next->op == EXEC_ASSIGN
      && !cblock->next->next)
    {
      eblock = cblock->block;
      if (!eblock)
	{
	  /* A simple "WHERE (cond) x = y" statement or block is
	     dependence free if cond is not dependent upon writing x,
	     and the source y is unaffected by the destination x.  */
	  if (!gfc_check_dependency (cblock->next->expr1,
				     cblock->expr1, 0)
	      && !gfc_check_dependency (cblock->next->expr1,
					cblock->next->expr2, 0))
	    return gfc_trans_where_3 (cblock, NULL);
	}
      else if (!eblock->expr1
	       && !eblock->block
	       && eblock->next
	       && eblock->next->op == EXEC_ASSIGN
	       && !eblock->next->next)
	{
	  /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
	     block is dependence free if cond is not dependent on writes
	     to x1 and x2, y1 is not dependent on writes to x2, and y2
	     is not dependent on writes to x1, and both y's are not
	     dependent upon their own x's.  In addition to this, the
	     final two dependency checks below exclude all but the same
	     array reference if the where and elsewhere destinations
	     are the same.  In short, this is VERY conservative and this
	     is needed because the two loops, required by the standard
	     are coalesced in gfc_trans_where_3.  */
	  if (!gfc_check_dependency (cblock->next->expr1,
				     cblock->expr1, 0)
	      && !gfc_check_dependency (eblock->next->expr1,
					cblock->expr1, 0)
	      && !gfc_check_dependency (cblock->next->expr1,
					eblock->next->expr2, 1)
	      && !gfc_check_dependency (eblock->next->expr1,
					cblock->next->expr2, 1)
	      && !gfc_check_dependency (cblock->next->expr1,
					cblock->next->expr2, 1)
	      && !gfc_check_dependency (eblock->next->expr1,
					eblock->next->expr2, 1)
	      && !gfc_check_dependency (cblock->next->expr1,
					eblock->next->expr1, 0)
	      && !gfc_check_dependency (eblock->next->expr1,
					cblock->next->expr1, 0))
	    return gfc_trans_where_3 (cblock, eblock);
	}
    }

  gfc_start_block (&block);

  gfc_trans_where_2 (code, NULL, false, NULL, &block);

  return gfc_finish_block (&block);
}
/* CYCLE a DO loop.  The label decl has already been created by
   gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
   node at the head of the loop.  We must mark the label as used.  */
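/* Illustrative example, not from the original source:

       DO i = 1, n
         IF (skip(i)) CYCLE
         ...
       END DO

   The CYCLE statement is translated to a GOTO targeting the cycle label
   that gfc_trans_do() created for this loop.  */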
tree
gfc_trans_cycle (gfc_code * code)
{
  tree cycle_label;

  cycle_label = code->ext.which_construct->cycle_label;
  gcc_assert (cycle_label);

  TREE_USED (cycle_label) = 1;
  return build1_v (GOTO_EXPR, cycle_label);
}
/* EXIT a DO loop.  Similar to CYCLE, but now the label is in
   TREE_VALUE (backend_decl) of the gfc_code node at the head of the
   loop.  */
tree
gfc_trans_exit (gfc_code * code)
{
  tree exit_label;

  exit_label = code->ext.which_construct->exit_label;
  gcc_assert (exit_label);

  TREE_USED (exit_label) = 1;
  return build1_v (GOTO_EXPR, exit_label);
}
/* Translate the ALLOCATE statement.  */
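/* Illustrative examples, not from the original source, of the forms
   handled below:

       ALLOCATE (x(n), STAT=istat, ERRMSG=emsg)
       ALLOCATE (poly, SOURCE=obj)
       ALLOCATE (CHARACTER(LEN=m) :: str)

   STAT= and ERRMSG= feed the error-handling labels set up first; SOURCE=
   (code->expr3) drives the size computation and the initializing assignment
   or deep copy; and an explicit type-spec (code->ext.alloc.ts) supplies the
   size for typed allocations.  */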
tree
gfc_trans_allocate (gfc_code * code)
{
  gfc_alloc *al;
  gfc_expr *expr, *e, *sz;
  gfc_se se, se_sz;
  tree tmp;
  tree parm;
  tree stat;
  tree errmsg;
  tree errlen;
  tree label_errmsg;
  tree label_finish;
  tree memsz;
  tree expr3 = NULL_TREE;
  tree slen3 = NULL_TREE;
  tree nelems;
  tree class_expr = NULL_TREE;
  stmtblock_t block;
  stmtblock_t post;
  tree memsize = NULL_TREE;
  tree classexpr = NULL_TREE;

  if (!code->ext.alloc.list)
    return NULL_TREE;

  stat = tmp = memsz = NULL_TREE;
  label_errmsg = label_finish = errmsg = errlen = NULL_TREE;

  gfc_init_block (&block);
  gfc_init_block (&post);

  /* STAT= (and maybe ERRMSG=) is present.  */
  if (code->expr1)
    {
      tree gfc_int4_type_node = gfc_get_int_type (4);
      stat = gfc_create_var (gfc_int4_type_node, "stat");

      /* ERRMSG= only makes sense with STAT=.  */
      if (code->expr2)
	{
	  gfc_init_se (&se, NULL);
	  se.want_pointer = 1;
	  gfc_conv_expr_lhs (&se, code->expr2);
	  errmsg = se.expr;
	  errlen = se.string_length;
	}
      else
	{
	  errmsg = null_pointer_node;
	  errlen = build_int_cst (gfc_charlen_type_node, 0);
	}

      /* GOTO destinations.  */
      label_errmsg = gfc_build_label_decl (NULL_TREE);
      label_finish = gfc_build_label_decl (NULL_TREE);
      TREE_USED (label_finish) = 0;
    }
  for (al = code->ext.alloc.list; al != NULL; al = al->next)
    {
      expr = gfc_copy_expr (al->expr);

      if (expr->ts.type == BT_CLASS)
	gfc_add_data_component (expr);

      gfc_init_se (&se, NULL);

      se.want_pointer = 1;
      se.descriptor_only = 1;
      gfc_conv_expr (&se, expr);

      /* Evaluate expr3 just once if not a variable.  */
      if (al == code->ext.alloc.list
	  && al->expr->ts.type == BT_CLASS
	  && code->expr3
	  && code->expr3->ts.type == BT_CLASS
	  && code->expr3->expr_type != EXPR_VARIABLE)
	{
	  gfc_init_se (&se_sz, NULL);
	  gfc_conv_expr_reference (&se_sz, code->expr3);
	  gfc_conv_class_to_class (&se_sz, code->expr3,
				   code->expr3->ts, false, true, false, false);
	  gfc_add_block_to_block (&se.pre, &se_sz.pre);
	  gfc_add_block_to_block (&se.post, &se_sz.post);
	  classexpr = build_fold_indirect_ref_loc (input_location,
						   se_sz.expr);
	  classexpr = gfc_evaluate_now (classexpr, &se.pre);
	  memsize = gfc_vtable_size_get (classexpr);
	  memsize = fold_convert (sizetype, memsize);
	}

      class_expr = classexpr;

      nelems = NULL_TREE;
      if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen, label_finish,
			       memsz, &nelems, code->expr3, &code->ext.alloc.ts))
	{
	  bool unlimited_char;

	  unlimited_char = UNLIMITED_POLY (al->expr)
			   && ((code->expr3 && code->expr3->ts.type == BT_CHARACTER)
			       || (code->ext.alloc.ts.type == BT_CHARACTER
				   && code->ext.alloc.ts.u.cl
				   && code->ext.alloc.ts.u.cl->length));

	  /* A scalar or derived type.  */

	  /* Determine allocate size.  */
	  if (al->expr->ts.type == BT_CLASS
	      && !unlimited_char
	      && code->expr3
	      && memsz == NULL_TREE)
	    {
	      if (code->expr3->ts.type == BT_CLASS)
		{
		  sz = gfc_copy_expr (code->expr3);
		  gfc_add_vptr_component (sz);
		  gfc_add_size_component (sz);
		  gfc_init_se (&se_sz, NULL);
		  gfc_conv_expr (&se_sz, sz);
		  gfc_free_expr (sz);
		  memsz = se_sz.expr;
		}
	      else
		memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
	    }
	  else if (((al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
		    || unlimited_char) && code->expr3)
	    {
	      if (!code->expr3->ts.u.cl->backend_decl)
		{
		  /* Convert and use the length expression.  */
		  gfc_init_se (&se_sz, NULL);
		  if (code->expr3->expr_type == EXPR_VARIABLE
		      || code->expr3->expr_type == EXPR_CONSTANT)
		    {
		      gfc_conv_expr (&se_sz, code->expr3);
		      gfc_add_block_to_block (&se.pre, &se_sz.pre);
		      se_sz.string_length
			= gfc_evaluate_now (se_sz.string_length, &se.pre);
		      gfc_add_block_to_block (&se.pre, &se_sz.post);
		      memsz = se_sz.string_length;
		    }
		  else if (code->expr3->mold
			   && code->expr3->ts.u.cl
			   && code->expr3->ts.u.cl->length)
		    {
		      gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
		      gfc_add_block_to_block (&se.pre, &se_sz.pre);
		      se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
		      gfc_add_block_to_block (&se.pre, &se_sz.post);
		      memsz = se_sz.expr;
		    }
		  else
		    {
		      /* This would be inefficient and possibly could
			 generate wrong code if the result were not stored
			 for later reuse (expr3/slen3).  */
		      if (slen3 == NULL_TREE)
			{
			  gfc_conv_expr (&se_sz, code->expr3);
			  gfc_add_block_to_block (&se.pre, &se_sz.pre);
			  expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
			  gfc_add_block_to_block (&post, &se_sz.post);
			  slen3 = gfc_evaluate_now (se_sz.string_length,
						    &se.pre);
			}
		      memsz = slen3;
		    }
		}
	      else
		/* Otherwise use the stored string length.  */
		memsz = code->expr3->ts.u.cl->backend_decl;
	      tmp = al->expr->ts.u.cl->backend_decl;

	      /* Store the string length.  */
	      if (tmp && TREE_CODE (tmp) == VAR_DECL)
		gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
							    memsz));
	      else if (al->expr->ts.type == BT_CHARACTER
		       && al->expr->ts.deferred && se.string_length)
		gfc_add_modify (&se.pre, se.string_length,
				fold_convert (TREE_TYPE (se.string_length),
					      memsz));

	      /* Convert to size in bytes, using the character KIND.  */
	      if (unlimited_char)
		tmp = TREE_TYPE (gfc_typenode_for_spec (&code->expr3->ts));
	      else
		tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
	      tmp = TYPE_SIZE_UNIT (tmp);
	      memsz = fold_build2_loc (input_location, MULT_EXPR,
				       TREE_TYPE (tmp), tmp,
				       fold_convert (TREE_TYPE (tmp), memsz));
	    }
	  else if ((al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
		   || unlimited_char)
	    {
	      gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
	      gfc_init_se (&se_sz, NULL);
	      gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
	      gfc_add_block_to_block (&se.pre, &se_sz.pre);
	      se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
	      gfc_add_block_to_block (&se.pre, &se_sz.post);
	      /* Store the string length.  */
	      tmp = al->expr->ts.u.cl->backend_decl;
	      gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
							  se_sz.expr));
	      tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
	      tmp = TYPE_SIZE_UNIT (tmp);
	      memsz = fold_build2_loc (input_location, MULT_EXPR,
				       TREE_TYPE (tmp), tmp,
				       fold_convert (TREE_TYPE (se_sz.expr),
						     se_sz.expr));
	    }
	  else if (code->ext.alloc.ts.type != BT_UNKNOWN)
	    memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
	  else if (memsz == NULL_TREE)
	    memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
	  if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
	    {
	      memsz = se.string_length;

	      /* Convert to size in bytes, using the character KIND.  */
	      tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
	      tmp = TYPE_SIZE_UNIT (tmp);
	      memsz = fold_build2_loc (input_location, MULT_EXPR,
				       TREE_TYPE (tmp), tmp,
				       fold_convert (TREE_TYPE (tmp), memsz));
	    }

	  /* Allocate - for non-pointers with re-alloc checking.  */
	  if (gfc_expr_attr (expr).allocatable)
	    gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE,
				      stat, errmsg, errlen, label_finish, expr);
	  else
	    gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat);

	  if (al->expr->ts.type == BT_DERIVED
	      && expr->ts.u.derived->attr.alloc_comp)
	    {
	      tmp = build_fold_indirect_ref_loc (input_location, se.expr);
	      tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
	      gfc_add_expr_to_block (&se.pre, tmp);
	    }
	}

      gfc_add_block_to_block (&block, &se.pre);

      /* Error checking -- Note: ERRMSG only makes sense with STAT.  */
      if (code->expr1)
	{
	  tmp = build1_v (GOTO_EXPR, label_errmsg);
	  parm = fold_build2_loc (input_location, NE_EXPR,
				  boolean_type_node, stat,
				  build_int_cst (TREE_TYPE (stat), 0));
	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
				 gfc_unlikely (parm, PRED_FORTRAN_FAIL_ALLOC),
				 tmp, build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&block, tmp);
	}
      /* We need the vptr of CLASS objects to be initialized.  */
      e = gfc_copy_expr (al->expr);
      if (e->ts.type == BT_CLASS)
	{
	  gfc_expr *lhs, *rhs;
	  gfc_se lse;
	  gfc_ref *ref, *class_ref, *tail;

	  /* Find the last class reference.  */
	  class_ref = NULL;
	  for (ref = e->ref; ref; ref = ref->next)
	    {
	      if (ref->type == REF_COMPONENT
		  && ref->u.c.component->ts.type == BT_CLASS)
		class_ref = ref;

	      if (ref->next == NULL)
		break;
	    }

	  /* Remove and store all subsequent references after the
	     CLASS object.  */
	  if (class_ref)
	    {
	      tail = class_ref->next;
	      class_ref->next = NULL;
	    }
	  else
	    {
	      tail = e->ref;
	      e->ref = NULL;
	    }

	  lhs = gfc_expr_to_initialize (e);
	  gfc_add_vptr_component (lhs);

	  /* Remove the _vptr component and restore the original tail
	     references.  */
	  if (class_ref)
	    {
	      gfc_free_ref_list (class_ref->next);
	      class_ref->next = tail;
	    }
	  else
	    {
	      gfc_free_ref_list (e->ref);
	      e->ref = tail;
	    }

	  if (class_expr != NULL_TREE)
	    {
	      /* Polymorphic SOURCE: VPTR must be determined at run time.  */
	      gfc_init_se (&lse, NULL);
	      lse.want_pointer = 1;
	      gfc_conv_expr (&lse, lhs);
	      tmp = gfc_class_vptr_get (class_expr);
	      gfc_add_modify (&block, lse.expr,
			      fold_convert (TREE_TYPE (lse.expr), tmp));
	    }
	  else if (code->expr3 && code->expr3->ts.type == BT_CLASS)
	    {
	      /* Polymorphic SOURCE: VPTR must be determined at run time.  */
	      rhs = gfc_copy_expr (code->expr3);
	      gfc_add_vptr_component (rhs);
	      tmp = gfc_trans_pointer_assignment (lhs, rhs);
	      gfc_add_expr_to_block (&block, tmp);
	      gfc_free_expr (rhs);
	      rhs = gfc_expr_to_initialize (e);
	    }
	  else
	    {
	      /* VPTR is fixed at compile time.  */
	      gfc_symbol *vtab;
	      gfc_typespec *ts;

	      if (code->expr3)
		ts = &code->expr3->ts;
	      else if (e->ts.type == BT_DERIVED)
		ts = &e->ts;
	      else if (code->ext.alloc.ts.type == BT_DERIVED || UNLIMITED_POLY (al->expr))
		ts = &code->ext.alloc.ts;
	      else if (e->ts.type == BT_CLASS)
		ts = &CLASS_DATA (e)->ts;

	      if (ts->type == BT_DERIVED || UNLIMITED_POLY (e))
		{
		  vtab = gfc_find_vtab (ts);

		  gfc_init_se (&lse, NULL);
		  lse.want_pointer = 1;
		  gfc_conv_expr (&lse, lhs);
		  tmp = gfc_build_addr_expr (NULL_TREE,
					     gfc_get_symbol_decl (vtab));
		  gfc_add_modify (&block, lse.expr,
				  fold_convert (TREE_TYPE (lse.expr), tmp));
		}
	    }
	  gfc_free_expr (lhs);
	}
      if (code->expr3 && !code->expr3->mold)
	{
	  /* Initialization via SOURCE block
	     (or static default initializer).  */
	  gfc_expr *rhs = gfc_copy_expr (code->expr3);
	  if (class_expr != NULL_TREE)
	    {
	      tree to;
	      to = TREE_OPERAND (se.expr, 0);

	      tmp = gfc_copy_class_to_class (class_expr, to, nelems);
	    }
	  else if (al->expr->ts.type == BT_CLASS)
	    {
	      gfc_actual_arglist *actual;
	      gfc_expr *ppc;
	      gfc_code *ppc_code;
	      gfc_ref *ref, *dataref;

	      /* Do a polymorphic deep copy.  */
	      actual = gfc_get_actual_arglist ();
	      actual->expr = gfc_copy_expr (rhs);
	      if (rhs->ts.type == BT_CLASS)
		gfc_add_data_component (actual->expr);
	      actual->next = gfc_get_actual_arglist ();
	      actual->next->expr = gfc_copy_expr (al->expr);
	      actual->next->expr->ts.type = BT_CLASS;
	      gfc_add_data_component (actual->next->expr);

	      dataref = NULL;
	      /* Make sure we go up through the reference chain to
		 the _data reference, where the arrayspec is found.  */
	      for (ref = actual->next->expr->ref; ref; ref = ref->next)
		if (ref->type == REF_COMPONENT
		    && strcmp (ref->u.c.component->name, "_data") == 0)
		  dataref = ref;

	      if (dataref && dataref->u.c.component->as)
		{
		  int dim;
		  gfc_expr *temp;
		  gfc_ref *ref = dataref->next;
		  ref->u.ar.type = AR_SECTION;
		  /* We have to set up the array reference to give ranges
		     in all dimensions and ensure that the end and stride
		     are set so that the copy can be scalarized.  */
		  dim = 0;
		  for (; dim < dataref->u.c.component->as->rank; dim++)
		    {
		      ref->u.ar.dimen_type[dim] = DIMEN_RANGE;
		      if (ref->u.ar.end[dim] == NULL)
			{
			  ref->u.ar.end[dim] = ref->u.ar.start[dim];
			  temp = gfc_get_int_expr (gfc_default_integer_kind,
						   &al->expr->where, 1);
			  ref->u.ar.start[dim] = temp;
			}
		      temp = gfc_subtract (gfc_copy_expr (ref->u.ar.end[dim]),
					   gfc_copy_expr (ref->u.ar.start[dim]));
		      temp = gfc_add (gfc_get_int_expr (gfc_default_integer_kind,
							&al->expr->where, 1),
				      temp);
		    }
		}

	      if (rhs->ts.type == BT_CLASS)
		{
		  ppc = gfc_copy_expr (rhs);
		  gfc_add_vptr_component (ppc);
		}
	      else
		ppc = gfc_lval_expr_from_sym (gfc_find_vtab (&rhs->ts));
	      gfc_add_component_ref (ppc, "_copy");

	      ppc_code = gfc_get_code (EXEC_CALL);
	      ppc_code->resolved_sym = ppc->symtree->n.sym;
	      /* Although '_copy' is set to be elemental in class.c, it is
		 not staying that way.  Find out why, sometime....  */
	      ppc_code->resolved_sym->attr.elemental = 1;
	      ppc_code->ext.actual = actual;
	      ppc_code->expr1 = ppc;
	      /* Since '_copy' is elemental, the scalarizer will take care
		 of arrays in gfc_trans_call.  */
	      tmp = gfc_trans_call (ppc_code, true, NULL, NULL, false);
	      gfc_free_statements (ppc_code);
	    }
	  else if (expr3 != NULL_TREE)
	    {
	      tmp = build_fold_indirect_ref_loc (input_location, se.expr);
	      gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
				     slen3, expr3, code->expr3->ts.kind);
	    }
	  else
	    {
	      /* Switch off automatic reallocation since we have just done
		 the ALLOCATE.  */
	      int realloc_lhs = flag_realloc_lhs;
	      flag_realloc_lhs = 0;
	      tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
					  rhs, false, false);
	      flag_realloc_lhs = realloc_lhs;
	    }
	  gfc_free_expr (rhs);
	  gfc_add_expr_to_block (&block, tmp);
	}
      else if (code->expr3 && code->expr3->mold
	       && code->expr3->ts.type == BT_CLASS)
	{
	  /* Since the _vptr has already been assigned to the allocate
	     object, we can use gfc_copy_class_to_class in its
	     initialization mode.  */
	  tmp = TREE_OPERAND (se.expr, 0);
	  tmp = gfc_copy_class_to_class (NULL_TREE, tmp, nelems);
	  gfc_add_expr_to_block (&block, tmp);
	}

      gfc_free_expr (expr);
    }
  if (code->expr1)
    {
      tmp = build1_v (LABEL_EXPR, label_errmsg);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* ERRMSG - only useful if STAT is present.  */
  if (code->expr1 && code->expr2)
    {
      const char *msg = "Attempt to allocate an allocated object";
      tree slen, dlen, errmsg_str;
      stmtblock_t errmsg_block;

      gfc_init_block (&errmsg_block);

      errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
      gfc_add_modify (&errmsg_block, errmsg_str,
		      gfc_build_addr_expr (pchar_type_node,
			gfc_build_localized_cstring_const (msg)));

      slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
      dlen = gfc_get_expr_charlen (code->expr2);
      slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
			      slen);

      gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
			     slen, errmsg_str, gfc_default_character_kind);
      dlen = gfc_finish_block (&errmsg_block);

      tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
			     build_int_cst (TREE_TYPE (stat), 0));

      tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));

      gfc_add_expr_to_block (&block, tmp);
    }

  if (code->expr1)
    {
      if (TREE_USED (label_finish))
	{
	  tmp = build1_v (LABEL_EXPR, label_finish);
	  gfc_add_expr_to_block (&block, tmp);
	}

      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->expr1);
      tmp = convert (TREE_TYPE (se.expr), stat);
      gfc_add_modify (&block, se.expr, tmp);
    }

  gfc_add_block_to_block (&block, &se.post);
  gfc_add_block_to_block (&block, &post);

  return gfc_finish_block (&block);
}
/* Translate a DEALLOCATE statement.  */
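/* Illustrative example, not from the original source:

       DEALLOCATE (x, y, STAT=istat, ERRMSG=emsg)

   Each object in the list is deallocated in turn; when STAT= is present a
   failure branches to the error label instead of aborting, and ERRMSG=
   receives a descriptive message.  */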
tree
gfc_trans_deallocate (gfc_code *code)
{
  gfc_se se;
  gfc_alloc *al;
  tree apstat, pstat, stat, errmsg, errlen, tmp;
  tree label_finish, label_errmsg;
  stmtblock_t block;

  pstat = apstat = stat = errmsg = errlen = tmp = NULL_TREE;
  label_finish = label_errmsg = NULL_TREE;

  gfc_start_block (&block);

  /* Count the number of failed deallocations.  If deallocate() was
     called with STAT=, then set STAT to the count.  If deallocate
     was called with ERRMSG, then set ERRMSG to a string.  */
  if (code->expr1)
    {
      tree gfc_int4_type_node = gfc_get_int_type (4);

      stat = gfc_create_var (gfc_int4_type_node, "stat");
      pstat = gfc_build_addr_expr (NULL_TREE, stat);

      /* GOTO destinations.  */
      label_errmsg = gfc_build_label_decl (NULL_TREE);
      label_finish = gfc_build_label_decl (NULL_TREE);
      TREE_USED (label_finish) = 0;
    }

  /* Set ERRMSG - only needed if STAT is available.  */
  if (code->expr1 && code->expr2)
    {
      gfc_init_se (&se, NULL);
      se.want_pointer = 1;
      gfc_conv_expr_lhs (&se, code->expr2);
      errmsg = se.expr;
      errlen = se.string_length;
    }

  for (al = code->ext.alloc.list; al != NULL; al = al->next)
    {
      gfc_expr *expr = gfc_copy_expr (al->expr);
      gcc_assert (expr->expr_type == EXPR_VARIABLE);

      if (expr->ts.type == BT_CLASS)
	gfc_add_data_component (expr);

      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      se.want_pointer = 1;
      se.descriptor_only = 1;
      gfc_conv_expr (&se, expr);

      if (expr->rank || gfc_is_coarray (expr))
	{
	  if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp
	      && !gfc_is_finalizable (expr->ts.u.derived, NULL))
	    {
	      gfc_ref *ref;
	      gfc_ref *last = NULL;
	      for (ref = expr->ref; ref; ref = ref->next)
		if (ref->type == REF_COMPONENT)
		  last = ref;

	      /* Do not deallocate the components of a derived type
		 ultimate pointer component.  */
	      if (!(last && last->u.c.component->attr.pointer)
		  && !(!last && expr->symtree->n.sym->attr.pointer))
		{
		  tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
						   expr->rank);
		  gfc_add_expr_to_block (&se.pre, tmp);
		}
	    }

	  tmp = gfc_array_deallocate (se.expr, pstat, errmsg, errlen,
				      label_finish, expr);
	  gfc_add_expr_to_block (&se.pre, tmp);
	  if (al->expr->ts.type == BT_CLASS)
	    gfc_reset_vptr (&se.pre, al->expr);
	}
      else
	{
	  tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
						   al->expr, al->expr->ts);
	  gfc_add_expr_to_block (&se.pre, tmp);

	  /* Set to zero after deallocation.  */
	  tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
				 se.expr,
				 build_int_cst (TREE_TYPE (se.expr), 0));
	  gfc_add_expr_to_block (&se.pre, tmp);

	  if (al->expr->ts.type == BT_CLASS)
	    gfc_reset_vptr (&se.pre, al->expr);
	}

      if (code->expr1)
	{
	  tree cond;

	  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
				  build_int_cst (TREE_TYPE (stat), 0));
	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
				 gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC),
				 build1_v (GOTO_EXPR, label_errmsg),
				 build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&se.pre, tmp);
	}

      tmp = gfc_finish_block (&se.pre);
      gfc_add_expr_to_block (&block, tmp);
      gfc_free_expr (expr);
    }

  if (code->expr1)
    {
      tmp = build1_v (LABEL_EXPR, label_errmsg);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Set ERRMSG - only needed if STAT is available.  */
  if (code->expr1 && code->expr2)
    {
      const char *msg = "Attempt to deallocate an unallocated object";
      stmtblock_t errmsg_block;
      tree errmsg_str, slen, dlen, cond;

      gfc_init_block (&errmsg_block);

      errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
      gfc_add_modify (&errmsg_block, errmsg_str,
		      gfc_build_addr_expr (pchar_type_node,
			gfc_build_localized_cstring_const (msg)));
      slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
      dlen = gfc_get_expr_charlen (code->expr2);

      gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
			     slen, errmsg_str, gfc_default_character_kind);
      tmp = gfc_finish_block (&errmsg_block);

      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
			      build_int_cst (TREE_TYPE (stat), 0));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			     gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC), tmp,
			     build_empty_stmt (input_location));

      gfc_add_expr_to_block (&block, tmp);
    }

  if (code->expr1 && TREE_USED (label_finish))
    {
      tmp = build1_v (LABEL_EXPR, label_finish);
      gfc_add_expr_to_block (&block, tmp);
    }

  if (code->expr1)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->expr1);
      tmp = convert (TREE_TYPE (se.expr), stat);
      gfc_add_modify (&block, se.expr, tmp);
    }

  return gfc_finish_block (&block);
}
#include "gt-fortran-trans-stmt.h"