1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002-2014 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
27 #include "stringpool.h"
31 #include "trans-stmt.h"
32 #include "trans-types.h"
33 #include "trans-array.h"
34 #include "trans-const.h"
36 #include "dependency.h"
39 typedef struct iter_info
45 struct iter_info
*next
;
49 typedef struct forall_info
56 struct forall_info
*prev_nest
;
61 static void gfc_trans_where_2 (gfc_code
*, tree
, bool,
62 forall_info
*, stmtblock_t
*);
64 /* Translate a F95 label number to a LABEL_EXPR. */
67 gfc_trans_label_here (gfc_code
* code
)
69 return build1_v (LABEL_EXPR
, gfc_get_label_decl (code
->here
));
73 /* Given a variable expression which has been ASSIGNed to, find the decl
74 containing the auxiliary variables. For variables in common blocks this
78 gfc_conv_label_variable (gfc_se
* se
, gfc_expr
* expr
)
80 gcc_assert (expr
->symtree
->n
.sym
->attr
.assign
== 1);
81 gfc_conv_expr (se
, expr
);
82 /* Deals with variable in common block. Get the field declaration. */
83 if (TREE_CODE (se
->expr
) == COMPONENT_REF
)
84 se
->expr
= TREE_OPERAND (se
->expr
, 1);
85 /* Deals with dummy argument. Get the parameter declaration. */
86 else if (TREE_CODE (se
->expr
) == INDIRECT_REF
)
87 se
->expr
= TREE_OPERAND (se
->expr
, 0);
90 /* Translate a label assignment statement. */
93 gfc_trans_label_assign (gfc_code
* code
)
102 /* Start a new block. */
103 gfc_init_se (&se
, NULL
);
104 gfc_start_block (&se
.pre
);
105 gfc_conv_label_variable (&se
, code
->expr1
);
107 len
= GFC_DECL_STRING_LEN (se
.expr
);
108 addr
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
110 label_tree
= gfc_get_label_decl (code
->label1
);
112 if (code
->label1
->defined
== ST_LABEL_TARGET
113 || code
->label1
->defined
== ST_LABEL_DO_TARGET
)
115 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
116 len_tree
= integer_minus_one_node
;
120 gfc_expr
*format
= code
->label1
->format
;
122 label_len
= format
->value
.character
.length
;
123 len_tree
= build_int_cst (gfc_charlen_type_node
, label_len
);
124 label_tree
= gfc_build_wide_string_const (format
->ts
.kind
, label_len
+ 1,
125 format
->value
.character
.string
);
126 label_tree
= gfc_build_addr_expr (pvoid_type_node
, label_tree
);
129 gfc_add_modify (&se
.pre
, len
, len_tree
);
130 gfc_add_modify (&se
.pre
, addr
, label_tree
);
132 return gfc_finish_block (&se
.pre
);
135 /* Translate a GOTO statement. */
138 gfc_trans_goto (gfc_code
* code
)
140 locus loc
= code
->loc
;
146 if (code
->label1
!= NULL
)
147 return build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
150 gfc_init_se (&se
, NULL
);
151 gfc_start_block (&se
.pre
);
152 gfc_conv_label_variable (&se
, code
->expr1
);
153 tmp
= GFC_DECL_STRING_LEN (se
.expr
);
154 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, tmp
,
155 build_int_cst (TREE_TYPE (tmp
), -1));
156 gfc_trans_runtime_check (true, false, tmp
, &se
.pre
, &loc
,
157 "Assigned label is not a target label");
159 assigned_goto
= GFC_DECL_ASSIGN_ADDR (se
.expr
);
161 /* We're going to ignore a label list. It does not really change the
162 statement's semantics (because it is just a further restriction on
163 what's legal code); before, we were comparing label addresses here, but
164 that's a very fragile business and may break with optimization. So
167 target
= fold_build1_loc (input_location
, GOTO_EXPR
, void_type_node
,
169 gfc_add_expr_to_block (&se
.pre
, target
);
170 return gfc_finish_block (&se
.pre
);
174 /* Translate an ENTRY statement. Just adds a label for this entry point. */
176 gfc_trans_entry (gfc_code
* code
)
178 return build1_v (LABEL_EXPR
, code
->ext
.entry
->label
);
182 /* Replace a gfc_ss structure by another both in the gfc_se struct
183 and the gfc_loopinfo struct. This is used in gfc_conv_elemental_dependencies
184 to replace a variable ss by the corresponding temporary. */
187 replace_ss (gfc_se
*se
, gfc_ss
*old_ss
, gfc_ss
*new_ss
)
189 gfc_ss
**sess
, **loopss
;
191 /* The old_ss is a ss for a single variable. */
192 gcc_assert (old_ss
->info
->type
== GFC_SS_SECTION
);
194 for (sess
= &(se
->ss
); *sess
!= gfc_ss_terminator
; sess
= &((*sess
)->next
))
197 gcc_assert (*sess
!= gfc_ss_terminator
);
200 new_ss
->next
= old_ss
->next
;
203 for (loopss
= &(se
->loop
->ss
); *loopss
!= gfc_ss_terminator
;
204 loopss
= &((*loopss
)->loop_chain
))
205 if (*loopss
== old_ss
)
207 gcc_assert (*loopss
!= gfc_ss_terminator
);
210 new_ss
->loop_chain
= old_ss
->loop_chain
;
211 new_ss
->loop
= old_ss
->loop
;
213 gfc_free_ss (old_ss
);
217 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
218 elemental subroutines. Make temporaries for output arguments if any such
219 dependencies are found. Output arguments are chosen because internal_unpack
220 can be used, as is, to copy the result back to the variable. */
222 gfc_conv_elemental_dependencies (gfc_se
* se
, gfc_se
* loopse
,
223 gfc_symbol
* sym
, gfc_actual_arglist
* arg
,
224 gfc_dep_check check_variable
)
226 gfc_actual_arglist
*arg0
;
228 gfc_formal_arglist
*formal
;
236 if (loopse
->ss
== NULL
)
241 formal
= gfc_sym_get_dummy_args (sym
);
243 /* Loop over all the arguments testing for dependencies. */
244 for (; arg
!= NULL
; arg
= arg
->next
, formal
= formal
? formal
->next
: NULL
)
250 /* Obtain the info structure for the current argument. */
251 for (ss
= loopse
->ss
; ss
&& ss
!= gfc_ss_terminator
; ss
= ss
->next
)
252 if (ss
->info
->expr
== e
)
255 /* If there is a dependency, create a temporary and use it
256 instead of the variable. */
257 fsym
= formal
? formal
->sym
: NULL
;
258 if (e
->expr_type
== EXPR_VARIABLE
260 && fsym
->attr
.intent
!= INTENT_IN
261 && gfc_check_fncall_dependency (e
, fsym
->attr
.intent
,
262 sym
, arg0
, check_variable
))
264 tree initial
, temptype
;
265 stmtblock_t temp_post
;
268 tmp_ss
= gfc_get_array_ss (gfc_ss_terminator
, NULL
, ss
->dimen
,
270 gfc_mark_ss_chain_used (tmp_ss
, 1);
271 tmp_ss
->info
->expr
= ss
->info
->expr
;
272 replace_ss (loopse
, ss
, tmp_ss
);
274 /* Obtain the argument descriptor for unpacking. */
275 gfc_init_se (&parmse
, NULL
);
276 parmse
.want_pointer
= 1;
277 gfc_conv_expr_descriptor (&parmse
, e
);
278 gfc_add_block_to_block (&se
->pre
, &parmse
.pre
);
280 /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
281 initialize the array temporary with a copy of the values. */
282 if (fsym
->attr
.intent
== INTENT_INOUT
283 || (fsym
->ts
.type
==BT_DERIVED
284 && fsym
->attr
.intent
== INTENT_OUT
))
285 initial
= parmse
.expr
;
286 /* For class expressions, we always initialize with the copy of
288 else if (e
->ts
.type
== BT_CLASS
)
289 initial
= parmse
.expr
;
293 if (e
->ts
.type
!= BT_CLASS
)
295 /* Find the type of the temporary to create; we don't use the type
296 of e itself as this breaks for subcomponent-references in e
297 (where the type of e is that of the final reference, but
298 parmse.expr's type corresponds to the full derived-type). */
299 /* TODO: Fix this somehow so we don't need a temporary of the whole
300 array but instead only the components referenced. */
301 temptype
= TREE_TYPE (parmse
.expr
); /* Pointer to descriptor. */
302 gcc_assert (TREE_CODE (temptype
) == POINTER_TYPE
);
303 temptype
= TREE_TYPE (temptype
);
304 temptype
= gfc_get_element_type (temptype
);
308 /* For class arrays signal that the size of the dynamic type has to
309 be obtained from the vtable, using the 'initial' expression. */
310 temptype
= NULL_TREE
;
312 /* Generate the temporary. Cleaning up the temporary should be the
313 very last thing done, so we add the code to a new block and add it
314 to se->post as last instructions. */
315 size
= gfc_create_var (gfc_array_index_type
, NULL
);
316 data
= gfc_create_var (pvoid_type_node
, NULL
);
317 gfc_init_block (&temp_post
);
318 tmp
= gfc_trans_create_temp_array (&se
->pre
, &temp_post
, tmp_ss
,
319 temptype
, initial
, false, true,
320 false, &arg
->expr
->where
);
321 gfc_add_modify (&se
->pre
, size
, tmp
);
322 tmp
= fold_convert (pvoid_type_node
, tmp_ss
->info
->data
.array
.data
);
323 gfc_add_modify (&se
->pre
, data
, tmp
);
325 /* Update other ss' delta. */
326 gfc_set_delta (loopse
->loop
);
328 /* Copy the result back using unpack..... */
329 if (e
->ts
.type
!= BT_CLASS
)
330 tmp
= build_call_expr_loc (input_location
,
331 gfor_fndecl_in_unpack
, 2, parmse
.expr
, data
);
334 /* ... except for class results where the copy is
336 tmp
= build_fold_indirect_ref_loc (input_location
, parmse
.expr
);
337 tmp
= gfc_conv_descriptor_data_get (tmp
);
338 tmp
= build_call_expr_loc (input_location
,
339 builtin_decl_explicit (BUILT_IN_MEMCPY
),
341 fold_convert (size_type_node
, size
));
343 gfc_add_expr_to_block (&se
->post
, tmp
);
345 /* parmse.pre is already added above. */
346 gfc_add_block_to_block (&se
->post
, &parmse
.post
);
347 gfc_add_block_to_block (&se
->post
, &temp_post
);
353 /* Get the interface symbol for the procedure corresponding to the given call.
354 We can't get the procedure symbol directly as we have to handle the case
355 of (deferred) type-bound procedures. */
358 get_proc_ifc_for_call (gfc_code
*c
)
362 gcc_assert (c
->op
== EXEC_ASSIGN_CALL
|| c
->op
== EXEC_CALL
);
364 sym
= gfc_get_proc_ifc_for_expr (c
->expr1
);
366 /* Fall back/last resort try. */
368 sym
= c
->resolved_sym
;
374 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
377 gfc_trans_call (gfc_code
* code
, bool dependency_check
,
378 tree mask
, tree count1
, bool invert
)
382 int has_alternate_specifier
;
383 gfc_dep_check check_variable
;
384 tree index
= NULL_TREE
;
385 tree maskexpr
= NULL_TREE
;
388 /* A CALL starts a new block because the actual arguments may have to
389 be evaluated first. */
390 gfc_init_se (&se
, NULL
);
391 gfc_start_block (&se
.pre
);
393 gcc_assert (code
->resolved_sym
);
395 ss
= gfc_ss_terminator
;
396 if (code
->resolved_sym
->attr
.elemental
)
397 ss
= gfc_walk_elemental_function_args (ss
, code
->ext
.actual
,
398 get_proc_ifc_for_call (code
),
401 /* Is not an elemental subroutine call with array valued arguments. */
402 if (ss
== gfc_ss_terminator
)
405 /* Translate the call. */
406 has_alternate_specifier
407 = gfc_conv_procedure_call (&se
, code
->resolved_sym
, code
->ext
.actual
,
410 /* A subroutine without side-effect, by definition, does nothing! */
411 TREE_SIDE_EFFECTS (se
.expr
) = 1;
413 /* Chain the pieces together and return the block. */
414 if (has_alternate_specifier
)
416 gfc_code
*select_code
;
418 select_code
= code
->next
;
419 gcc_assert(select_code
->op
== EXEC_SELECT
);
420 sym
= select_code
->expr1
->symtree
->n
.sym
;
421 se
.expr
= convert (gfc_typenode_for_spec (&sym
->ts
), se
.expr
);
422 if (sym
->backend_decl
== NULL
)
423 sym
->backend_decl
= gfc_get_symbol_decl (sym
);
424 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
427 gfc_add_expr_to_block (&se
.pre
, se
.expr
);
429 gfc_add_block_to_block (&se
.pre
, &se
.post
);
434 /* An elemental subroutine call with array valued arguments has
442 /* gfc_walk_elemental_function_args renders the ss chain in the
443 reverse order to the actual argument order. */
444 ss
= gfc_reverse_ss (ss
);
446 /* Initialize the loop. */
447 gfc_init_se (&loopse
, NULL
);
448 gfc_init_loopinfo (&loop
);
449 gfc_add_ss_to_loop (&loop
, ss
);
451 gfc_conv_ss_startstride (&loop
);
452 /* TODO: gfc_conv_loop_setup generates a temporary for vector
453 subscripts. This could be prevented in the elemental case
454 as temporaries are handled separatedly
455 (below in gfc_conv_elemental_dependencies). */
456 gfc_conv_loop_setup (&loop
, &code
->expr1
->where
);
457 gfc_mark_ss_chain_used (ss
, 1);
459 /* Convert the arguments, checking for dependencies. */
460 gfc_copy_loopinfo_to_se (&loopse
, &loop
);
463 /* For operator assignment, do dependency checking. */
464 if (dependency_check
)
465 check_variable
= ELEM_CHECK_VARIABLE
;
467 check_variable
= ELEM_DONT_CHECK_VARIABLE
;
469 gfc_init_se (&depse
, NULL
);
470 gfc_conv_elemental_dependencies (&depse
, &loopse
, code
->resolved_sym
,
471 code
->ext
.actual
, check_variable
);
473 gfc_add_block_to_block (&loop
.pre
, &depse
.pre
);
474 gfc_add_block_to_block (&loop
.post
, &depse
.post
);
476 /* Generate the loop body. */
477 gfc_start_scalarized_body (&loop
, &body
);
478 gfc_init_block (&block
);
482 /* Form the mask expression according to the mask. */
484 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
486 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
487 TREE_TYPE (maskexpr
), maskexpr
);
490 /* Add the subroutine call to the block. */
491 gfc_conv_procedure_call (&loopse
, code
->resolved_sym
,
492 code
->ext
.actual
, code
->expr1
,
497 tmp
= build3_v (COND_EXPR
, maskexpr
, loopse
.expr
,
498 build_empty_stmt (input_location
));
499 gfc_add_expr_to_block (&loopse
.pre
, tmp
);
500 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
501 gfc_array_index_type
,
502 count1
, gfc_index_one_node
);
503 gfc_add_modify (&loopse
.pre
, count1
, tmp
);
506 gfc_add_expr_to_block (&loopse
.pre
, loopse
.expr
);
508 gfc_add_block_to_block (&block
, &loopse
.pre
);
509 gfc_add_block_to_block (&block
, &loopse
.post
);
511 /* Finish up the loop block and the loop. */
512 gfc_add_expr_to_block (&body
, gfc_finish_block (&block
));
513 gfc_trans_scalarizing_loops (&loop
, &body
);
514 gfc_add_block_to_block (&se
.pre
, &loop
.pre
);
515 gfc_add_block_to_block (&se
.pre
, &loop
.post
);
516 gfc_add_block_to_block (&se
.pre
, &se
.post
);
517 gfc_cleanup_loop (&loop
);
520 return gfc_finish_block (&se
.pre
);
524 /* Translate the RETURN statement. */
527 gfc_trans_return (gfc_code
* code
)
535 /* If code->expr is not NULL, this return statement must appear
536 in a subroutine and current_fake_result_decl has already
539 result
= gfc_get_fake_result_decl (NULL
, 0);
542 gfc_warning ("An alternate return at %L without a * dummy argument",
543 &code
->expr1
->where
);
544 return gfc_generate_return ();
547 /* Start a new block for this statement. */
548 gfc_init_se (&se
, NULL
);
549 gfc_start_block (&se
.pre
);
551 gfc_conv_expr (&se
, code
->expr1
);
553 /* Note that the actually returned expression is a simple value and
554 does not depend on any pointers or such; thus we can clean-up with
555 se.post before returning. */
556 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, TREE_TYPE (result
),
557 result
, fold_convert (TREE_TYPE (result
),
559 gfc_add_expr_to_block (&se
.pre
, tmp
);
560 gfc_add_block_to_block (&se
.pre
, &se
.post
);
562 tmp
= gfc_generate_return ();
563 gfc_add_expr_to_block (&se
.pre
, tmp
);
564 return gfc_finish_block (&se
.pre
);
567 return gfc_generate_return ();
571 /* Translate the PAUSE statement. We have to translate this statement
572 to a runtime library call. */
575 gfc_trans_pause (gfc_code
* code
)
577 tree gfc_int4_type_node
= gfc_get_int_type (4);
581 /* Start a new block for this statement. */
582 gfc_init_se (&se
, NULL
);
583 gfc_start_block (&se
.pre
);
586 if (code
->expr1
== NULL
)
588 tmp
= build_int_cst (gfc_int4_type_node
, 0);
589 tmp
= build_call_expr_loc (input_location
,
590 gfor_fndecl_pause_string
, 2,
591 build_int_cst (pchar_type_node
, 0), tmp
);
593 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
595 gfc_conv_expr (&se
, code
->expr1
);
596 tmp
= build_call_expr_loc (input_location
,
597 gfor_fndecl_pause_numeric
, 1,
598 fold_convert (gfc_int4_type_node
, se
.expr
));
602 gfc_conv_expr_reference (&se
, code
->expr1
);
603 tmp
= build_call_expr_loc (input_location
,
604 gfor_fndecl_pause_string
, 2,
605 se
.expr
, se
.string_length
);
608 gfc_add_expr_to_block (&se
.pre
, tmp
);
610 gfc_add_block_to_block (&se
.pre
, &se
.post
);
612 return gfc_finish_block (&se
.pre
);
616 /* Translate the STOP statement. We have to translate this statement
617 to a runtime library call. */
620 gfc_trans_stop (gfc_code
*code
, bool error_stop
)
622 tree gfc_int4_type_node
= gfc_get_int_type (4);
626 /* Start a new block for this statement. */
627 gfc_init_se (&se
, NULL
);
628 gfc_start_block (&se
.pre
);
630 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
&& !error_stop
)
632 /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY. */
633 tmp
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
634 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
635 gfc_add_expr_to_block (&se
.pre
, tmp
);
637 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_finalize
, 0);
638 gfc_add_expr_to_block (&se
.pre
, tmp
);
641 if (code
->expr1
== NULL
)
643 tmp
= build_int_cst (gfc_int4_type_node
, 0);
644 tmp
= build_call_expr_loc (input_location
,
646 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
647 ? gfor_fndecl_caf_error_stop_str
648 : gfor_fndecl_error_stop_string
)
649 : gfor_fndecl_stop_string
,
650 2, build_int_cst (pchar_type_node
, 0), tmp
);
652 else if (code
->expr1
->ts
.type
== BT_INTEGER
)
654 gfc_conv_expr (&se
, code
->expr1
);
655 tmp
= build_call_expr_loc (input_location
,
657 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
658 ? gfor_fndecl_caf_error_stop
659 : gfor_fndecl_error_stop_numeric
)
660 : gfor_fndecl_stop_numeric_f08
, 1,
661 fold_convert (gfc_int4_type_node
, se
.expr
));
665 gfc_conv_expr_reference (&se
, code
->expr1
);
666 tmp
= build_call_expr_loc (input_location
,
668 ? (gfc_option
.coarray
== GFC_FCOARRAY_LIB
669 ? gfor_fndecl_caf_error_stop_str
670 : gfor_fndecl_error_stop_string
)
671 : gfor_fndecl_stop_string
,
672 2, se
.expr
, se
.string_length
);
675 gfc_add_expr_to_block (&se
.pre
, tmp
);
677 gfc_add_block_to_block (&se
.pre
, &se
.post
);
679 return gfc_finish_block (&se
.pre
);
684 gfc_trans_lock_unlock (gfc_code
*code
, gfc_exec_op type ATTRIBUTE_UNUSED
)
687 tree stat
= NULL_TREE
, lock_acquired
= NULL_TREE
;
689 /* Short cut: For single images without STAT= or LOCK_ACQUIRED
690 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
691 if (!code
->expr2
&& !code
->expr4
&& gfc_option
.coarray
!= GFC_FCOARRAY_LIB
)
694 gfc_init_se (&se
, NULL
);
695 gfc_start_block (&se
.pre
);
699 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
700 gfc_init_se (&argse
, NULL
);
701 gfc_conv_expr_val (&argse
, code
->expr2
);
707 gcc_assert (code
->expr4
->expr_type
== EXPR_VARIABLE
);
708 gfc_init_se (&argse
, NULL
);
709 gfc_conv_expr_val (&argse
, code
->expr4
);
710 lock_acquired
= argse
.expr
;
713 if (stat
!= NULL_TREE
)
714 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
716 if (lock_acquired
!= NULL_TREE
)
717 gfc_add_modify (&se
.pre
, lock_acquired
,
718 fold_convert (TREE_TYPE (lock_acquired
),
721 return gfc_finish_block (&se
.pre
);
726 gfc_trans_sync (gfc_code
*code
, gfc_exec_op type
)
730 tree images
= NULL_TREE
, stat
= NULL_TREE
,
731 errmsg
= NULL_TREE
, errmsglen
= NULL_TREE
;
733 /* Short cut: For single images without bound checking or without STAT=,
734 return early. (ERRMSG= is always untouched for -fcoarray=single.) */
735 if (!code
->expr2
&& !(gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
736 && gfc_option
.coarray
!= GFC_FCOARRAY_LIB
)
739 gfc_init_se (&se
, NULL
);
740 gfc_start_block (&se
.pre
);
742 if (code
->expr1
&& code
->expr1
->rank
== 0)
744 gfc_init_se (&argse
, NULL
);
745 gfc_conv_expr_val (&argse
, code
->expr1
);
751 gcc_assert (code
->expr2
->expr_type
== EXPR_VARIABLE
);
752 gfc_init_se (&argse
, NULL
);
753 gfc_conv_expr_val (&argse
, code
->expr2
);
757 stat
= null_pointer_node
;
759 if (code
->expr3
&& gfc_option
.coarray
== GFC_FCOARRAY_LIB
760 && type
!= EXEC_SYNC_MEMORY
)
762 gcc_assert (code
->expr3
->expr_type
== EXPR_VARIABLE
);
763 gfc_init_se (&argse
, NULL
);
764 gfc_conv_expr (&argse
, code
->expr3
);
765 gfc_conv_string_parameter (&argse
);
766 errmsg
= gfc_build_addr_expr (NULL
, argse
.expr
);
767 errmsglen
= argse
.string_length
;
769 else if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
&& type
!= EXEC_SYNC_MEMORY
)
771 errmsg
= null_pointer_node
;
772 errmsglen
= build_int_cst (integer_type_node
, 0);
775 /* Check SYNC IMAGES(imageset) for valid image index.
776 FIXME: Add a check for image-set arrays. */
777 if (code
->expr1
&& (gfc_option
.rtcheck
& GFC_RTCHECK_BOUNDS
)
778 && code
->expr1
->rank
== 0)
781 if (gfc_option
.coarray
!= GFC_FCOARRAY_LIB
)
782 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
783 images
, build_int_cst (TREE_TYPE (images
), 1));
787 cond
= fold_build2_loc (input_location
, GT_EXPR
, boolean_type_node
,
788 images
, gfort_gvar_caf_num_images
);
789 cond2
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
791 build_int_cst (TREE_TYPE (images
), 1));
792 cond
= fold_build2_loc (input_location
, TRUTH_OR_EXPR
,
793 boolean_type_node
, cond
, cond2
);
795 gfc_trans_runtime_check (true, false, cond
, &se
.pre
,
796 &code
->expr1
->where
, "Invalid image number "
798 fold_convert (integer_type_node
, images
));
801 /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
802 image control statements SYNC IMAGES and SYNC ALL. */
803 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
805 tmp
= builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE
);
806 tmp
= build_call_expr_loc (input_location
, tmp
, 0);
807 gfc_add_expr_to_block (&se
.pre
, tmp
);
810 if (gfc_option
.coarray
!= GFC_FCOARRAY_LIB
|| type
== EXEC_SYNC_MEMORY
)
812 /* Set STAT to zero. */
814 gfc_add_modify (&se
.pre
, stat
, build_int_cst (TREE_TYPE (stat
), 0));
816 else if (type
== EXEC_SYNC_ALL
)
818 /* SYNC ALL => stat == null_pointer_node
819 SYNC ALL(stat=s) => stat has an integer type
821 If "stat" has the wrong integer type, use a temp variable of
822 the right type and later cast the result back into "stat". */
823 if (stat
== null_pointer_node
|| TREE_TYPE (stat
) == integer_type_node
)
825 if (TREE_TYPE (stat
) == integer_type_node
)
826 stat
= gfc_build_addr_expr (NULL
, stat
);
828 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
829 3, stat
, errmsg
, errmsglen
);
830 gfc_add_expr_to_block (&se
.pre
, tmp
);
834 tree tmp_stat
= gfc_create_var (integer_type_node
, "stat");
836 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_all
,
837 3, gfc_build_addr_expr (NULL
, tmp_stat
),
839 gfc_add_expr_to_block (&se
.pre
, tmp
);
841 gfc_add_modify (&se
.pre
, stat
,
842 fold_convert (TREE_TYPE (stat
), tmp_stat
));
849 gcc_assert (type
== EXEC_SYNC_IMAGES
);
853 len
= build_int_cst (integer_type_node
, -1);
854 images
= null_pointer_node
;
856 else if (code
->expr1
->rank
== 0)
858 len
= build_int_cst (integer_type_node
, 1);
859 images
= gfc_build_addr_expr (NULL_TREE
, images
);
864 if (code
->expr1
->ts
.kind
!= gfc_c_int_kind
)
865 gfc_fatal_error ("Sorry, only support for integer kind %d "
866 "implemented for image-set at %L",
867 gfc_c_int_kind
, &code
->expr1
->where
);
869 gfc_conv_array_parameter (&se
, code
->expr1
, true, NULL
, NULL
, &len
);
872 tmp
= gfc_typenode_for_spec (&code
->expr1
->ts
);
873 if (GFC_ARRAY_TYPE_P (tmp
) || GFC_DESCRIPTOR_TYPE_P (tmp
))
874 tmp
= gfc_get_element_type (tmp
);
876 len
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
,
877 TREE_TYPE (len
), len
,
878 fold_convert (TREE_TYPE (len
),
879 TYPE_SIZE_UNIT (tmp
)));
880 len
= fold_convert (integer_type_node
, len
);
883 /* SYNC IMAGES(imgs) => stat == null_pointer_node
884 SYNC IMAGES(imgs,stat=s) => stat has an integer type
886 If "stat" has the wrong integer type, use a temp variable of
887 the right type and later cast the result back into "stat". */
888 if (stat
== null_pointer_node
|| TREE_TYPE (stat
) == integer_type_node
)
890 if (TREE_TYPE (stat
) == integer_type_node
)
891 stat
= gfc_build_addr_expr (NULL
, stat
);
893 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
,
894 5, fold_convert (integer_type_node
, len
),
895 images
, stat
, errmsg
, errmsglen
);
896 gfc_add_expr_to_block (&se
.pre
, tmp
);
900 tree tmp_stat
= gfc_create_var (integer_type_node
, "stat");
902 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_sync_images
,
903 5, fold_convert (integer_type_node
, len
),
904 images
, gfc_build_addr_expr (NULL
, tmp_stat
),
906 gfc_add_expr_to_block (&se
.pre
, tmp
);
908 gfc_add_modify (&se
.pre
, stat
,
909 fold_convert (TREE_TYPE (stat
), tmp_stat
));
913 return gfc_finish_block (&se
.pre
);
917 /* Generate GENERIC for the IF construct. This function also deals with
918 the simple IF statement, because the front end translates the IF
919 statement into an IF construct.
951 where COND_S is the simplified version of the predicate. PRE_COND_S
952 are the pre side-effects produced by the translation of the
954 We need to build the chain recursively otherwise we run into
955 problems with folding incomplete statements. */
958 gfc_trans_if_1 (gfc_code
* code
)
965 /* Check for an unconditional ELSE clause. */
967 return gfc_trans_code (code
->next
);
969 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
970 gfc_init_se (&if_se
, NULL
);
971 gfc_start_block (&if_se
.pre
);
973 /* Calculate the IF condition expression. */
974 if (code
->expr1
->where
.lb
)
976 gfc_save_backend_locus (&saved_loc
);
977 gfc_set_backend_locus (&code
->expr1
->where
);
980 gfc_conv_expr_val (&if_se
, code
->expr1
);
982 if (code
->expr1
->where
.lb
)
983 gfc_restore_backend_locus (&saved_loc
);
985 /* Translate the THEN clause. */
986 stmt
= gfc_trans_code (code
->next
);
988 /* Translate the ELSE clause. */
990 elsestmt
= gfc_trans_if_1 (code
->block
);
992 elsestmt
= build_empty_stmt (input_location
);
994 /* Build the condition expression and add it to the condition block. */
995 loc
= code
->expr1
->where
.lb
? code
->expr1
->where
.lb
->location
: input_location
;
996 stmt
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, if_se
.expr
, stmt
,
999 gfc_add_expr_to_block (&if_se
.pre
, stmt
);
1001 /* Finish off this statement. */
1002 return gfc_finish_block (&if_se
.pre
);
1006 gfc_trans_if (gfc_code
* code
)
1011 /* Create exit label so it is available for trans'ing the body code. */
1012 exit_label
= gfc_build_label_decl (NULL_TREE
);
1013 code
->exit_label
= exit_label
;
1015 /* Translate the actual code in code->block. */
1016 gfc_init_block (&body
);
1017 gfc_add_expr_to_block (&body
, gfc_trans_if_1 (code
->block
));
1019 /* Add exit label. */
1020 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
1022 return gfc_finish_block (&body
);
1026 /* Translate an arithmetic IF expression.
1028 IF (cond) label1, label2, label3 translates to
1040 An optimized version can be generated in case of equal labels.
1041 E.g., if label1 is equal to label2, we can translate it to
1050 gfc_trans_arithmetic_if (gfc_code
* code
)
1058 /* Start a new block. */
1059 gfc_init_se (&se
, NULL
);
1060 gfc_start_block (&se
.pre
);
1062 /* Pre-evaluate COND. */
1063 gfc_conv_expr_val (&se
, code
->expr1
);
1064 se
.expr
= gfc_evaluate_now (se
.expr
, &se
.pre
);
1066 /* Build something to compare with. */
1067 zero
= gfc_build_const (TREE_TYPE (se
.expr
), integer_zero_node
);
1069 if (code
->label1
->value
!= code
->label2
->value
)
1071 /* If (cond < 0) take branch1 else take branch2.
1072 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
1073 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
1074 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label2
));
1076 if (code
->label1
->value
!= code
->label3
->value
)
1077 tmp
= fold_build2_loc (input_location
, LT_EXPR
, boolean_type_node
,
1080 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
,
1083 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1084 tmp
, branch1
, branch2
);
1087 branch1
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label1
));
1089 if (code
->label1
->value
!= code
->label3
->value
1090 && code
->label2
->value
!= code
->label3
->value
)
1092 /* if (cond <= 0) take branch1 else take branch2. */
1093 branch2
= build1_v (GOTO_EXPR
, gfc_get_label_decl (code
->label3
));
1094 tmp
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
1096 branch1
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
1097 tmp
, branch1
, branch2
);
1100 /* Append the COND_EXPR to the evaluation of COND, and return. */
1101 gfc_add_expr_to_block (&se
.pre
, branch1
);
1102 return gfc_finish_block (&se
.pre
);
1106 /* Translate a CRITICAL block. */
1108 gfc_trans_critical (gfc_code
*code
)
1113 gfc_start_block (&block
);
1115 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
1117 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_critical
, 0);
1118 gfc_add_expr_to_block (&block
, tmp
);
1121 tmp
= gfc_trans_code (code
->block
->next
);
1122 gfc_add_expr_to_block (&block
, tmp
);
1124 if (gfc_option
.coarray
== GFC_FCOARRAY_LIB
)
1126 tmp
= build_call_expr_loc (input_location
, gfor_fndecl_caf_end_critical
,
1128 gfc_add_expr_to_block (&block
, tmp
);
1132 return gfc_finish_block (&block
);
1136 /* Do proper initialization for ASSOCIATE names. */
1139 trans_associate_var (gfc_symbol
*sym
, gfc_wrapped_block
*block
)
1150 gcc_assert (sym
->assoc
);
1151 e
= sym
->assoc
->target
;
1153 class_target
= (e
->expr_type
== EXPR_VARIABLE
)
1154 && (gfc_is_class_scalar_expr (e
)
1155 || gfc_is_class_array_ref (e
, NULL
));
1157 unlimited
= UNLIMITED_POLY (e
);
1159 /* Do a `pointer assignment' with updated descriptor (or assign descriptor
1160 to array temporary) for arrays with either unknown shape or if associating
1162 if (sym
->attr
.dimension
&& !class_target
1163 && (sym
->as
->type
== AS_DEFERRED
|| sym
->assoc
->variable
))
1168 desc
= sym
->backend_decl
;
1170 /* If association is to an expression, evaluate it and create temporary.
1171 Otherwise, get descriptor of target for pointer assignment. */
1172 gfc_init_se (&se
, NULL
);
1173 if (sym
->assoc
->variable
)
1175 se
.direct_byref
= 1;
1178 gfc_conv_expr_descriptor (&se
, e
);
1180 /* If we didn't already do the pointer assignment, set associate-name
1181 descriptor to the one generated for the temporary. */
1182 if (!sym
->assoc
->variable
)
1186 gfc_add_modify (&se
.pre
, desc
, se
.expr
);
1188 /* The generated descriptor has lower bound zero (as array
1189 temporary), shift bounds so we get lower bounds of 1. */
1190 for (dim
= 0; dim
< e
->rank
; ++dim
)
1191 gfc_conv_shift_descriptor_lbound (&se
.pre
, desc
,
1192 dim
, gfc_index_one_node
);
1195 /* If this is a subreference array pointer associate name use the
1196 associate variable element size for the value of 'span'. */
1197 if (sym
->attr
.subref_array_pointer
)
1199 gcc_assert (e
->expr_type
== EXPR_VARIABLE
);
1200 tmp
= e
->symtree
->n
.sym
->backend_decl
;
1201 tmp
= gfc_get_element_type (TREE_TYPE (tmp
));
1202 tmp
= fold_convert (gfc_array_index_type
, size_in_bytes (tmp
));
1203 gfc_add_modify (&se
.pre
, GFC_DECL_SPAN(desc
), tmp
);
1206 /* Done, register stuff as init / cleanup code. */
1207 gfc_add_init_cleanup (block
, gfc_finish_block (&se
.pre
),
1208 gfc_finish_block (&se
.post
));
1211 /* Temporaries, arising from TYPE IS, just need the descriptor of class
1212 arrays to be assigned directly. */
1213 else if (class_target
&& sym
->attr
.dimension
1214 && (sym
->ts
.type
== BT_DERIVED
|| unlimited
))
1218 gfc_init_se (&se
, NULL
);
1219 se
.descriptor_only
= 1;
1220 gfc_conv_expr (&se
, e
);
1222 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se
.expr
)));
1223 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym
->backend_decl
)));
1225 gfc_add_modify (&se
.pre
, sym
->backend_decl
, se
.expr
);
1229 /* Recover the dtype, which has been overwritten by the
1230 assignment from an unlimited polymorphic object. */
1231 tmp
= gfc_conv_descriptor_dtype (sym
->backend_decl
);
1232 gfc_add_modify (&se
.pre
, tmp
,
1233 gfc_get_dtype (TREE_TYPE (sym
->backend_decl
)));
1236 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1237 gfc_finish_block (&se
.post
));
1240 /* Do a scalar pointer assignment; this is for scalar variable targets. */
1241 else if (gfc_is_associate_pointer (sym
))
1245 gcc_assert (!sym
->attr
.dimension
);
1247 gfc_init_se (&se
, NULL
);
1249 /* Class associate-names come this way because they are
1250 unconditionally associate pointers and the symbol is scalar. */
1251 if (sym
->ts
.type
== BT_CLASS
&& CLASS_DATA (sym
)->attr
.dimension
)
1253 /* For a class array we need a descriptor for the selector. */
1254 gfc_conv_expr_descriptor (&se
, e
);
1256 /* Obtain a temporary class container for the result. */
1257 gfc_conv_class_to_class (&se
, e
, sym
->ts
, false, true, false, false);
1258 se
.expr
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
1260 /* Set the offset. */
1261 desc
= gfc_class_data_get (se
.expr
);
1262 offset
= gfc_index_zero_node
;
1263 for (n
= 0; n
< e
->rank
; n
++)
1265 dim
= gfc_rank_cst
[n
];
1266 tmp
= fold_build2_loc (input_location
, MULT_EXPR
,
1267 gfc_array_index_type
,
1268 gfc_conv_descriptor_stride_get (desc
, dim
),
1269 gfc_conv_descriptor_lbound_get (desc
, dim
));
1270 offset
= fold_build2_loc (input_location
, MINUS_EXPR
,
1271 gfc_array_index_type
,
1274 gfc_conv_descriptor_offset_set (&se
.pre
, desc
, offset
);
1276 else if (sym
->ts
.type
== BT_CLASS
&& e
->ts
.type
== BT_CLASS
1277 && CLASS_DATA (e
)->attr
.dimension
)
1279 /* This is bound to be a class array element. */
1280 gfc_conv_expr_reference (&se
, e
);
1281 /* Get the _vptr component of the class object. */
1282 tmp
= gfc_get_vptr_from_expr (se
.expr
);
1283 /* Obtain a temporary class container for the result. */
1284 gfc_conv_derived_to_class (&se
, e
, sym
->ts
, tmp
, false, false);
1285 se
.expr
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
1288 gfc_conv_expr (&se
, e
);
1290 tmp
= TREE_TYPE (sym
->backend_decl
);
1291 tmp
= gfc_build_addr_expr (tmp
, se
.expr
);
1292 gfc_add_modify (&se
.pre
, sym
->backend_decl
, tmp
);
1294 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1295 gfc_finish_block (&se
.post
));
1298 /* Do a simple assignment. This is for scalar expressions, where we
1299 can simply use expression assignment. */
1304 lhs
= gfc_lval_expr_from_sym (sym
);
1305 tmp
= gfc_trans_assignment (lhs
, e
, false, true);
1306 gfc_add_init_cleanup (block
, tmp
, NULL_TREE
);
1309 /* Set the stringlength from the vtable size. */
1310 if (sym
->ts
.type
== BT_CHARACTER
&& sym
->attr
.select_type_temporary
)
1314 gfc_init_se (&se
, NULL
);
1315 gcc_assert (UNLIMITED_POLY (e
->symtree
->n
.sym
));
1316 tmp
= gfc_get_symbol_decl (e
->symtree
->n
.sym
);
1317 tmp
= gfc_vtable_size_get (tmp
);
1318 gfc_get_symbol_decl (sym
);
1319 charlen
= sym
->ts
.u
.cl
->backend_decl
;
1320 gfc_add_modify (&se
.pre
, charlen
,
1321 fold_convert (TREE_TYPE (charlen
), tmp
));
1322 gfc_add_init_cleanup (block
, gfc_finish_block( &se
.pre
),
1323 gfc_finish_block (&se
.post
));
1328 /* Translate a BLOCK construct. This is basically what we would do for a
1332 gfc_trans_block_construct (gfc_code
* code
)
1336 gfc_wrapped_block block
;
1339 gfc_association_list
*ass
;
1341 ns
= code
->ext
.block
.ns
;
1343 sym
= ns
->proc_name
;
1346 /* Process local variables. */
1347 gcc_assert (!sym
->tlink
);
1349 gfc_process_block_locals (ns
);
1351 /* Generate code including exit-label. */
1352 gfc_init_block (&body
);
1353 exit_label
= gfc_build_label_decl (NULL_TREE
);
1354 code
->exit_label
= exit_label
;
1355 gfc_add_expr_to_block (&body
, gfc_trans_code (ns
->code
));
1356 gfc_add_expr_to_block (&body
, build1_v (LABEL_EXPR
, exit_label
));
1358 /* Finish everything. */
1359 gfc_start_wrapped_block (&block
, gfc_finish_block (&body
));
1360 gfc_trans_deferred_vars (sym
, &block
);
1361 for (ass
= code
->ext
.block
.assoc
; ass
; ass
= ass
->next
)
1362 trans_associate_var (ass
->st
->n
.sym
, &block
);
1364 return gfc_finish_wrapped_block (&block
);
1368 /* Translate the simple DO construct. This is where the loop variable has
1369 integer type and step +-1. We can't use this in the general case
1370 because integer overflow and floating point errors could give incorrect
1372 We translate a do loop from:
1374 DO dovar = from, to, step
1380 [Evaluate loop bounds and step]
1382 if ((step > 0) ? (dovar <= to) : (dovar => to))
1388 cond = (dovar == to);
1390 if (cond) goto end_label;
1395 This helps the optimizers by avoiding the extra induction variable
1396 used in the general case. */
1399 gfc_trans_simple_do (gfc_code
* code
, stmtblock_t
*pblock
, tree dovar
,
1400 tree from
, tree to
, tree step
, tree exit_cond
)
1406 tree saved_dovar
= NULL
;
1411 type
= TREE_TYPE (dovar
);
1413 loc
= code
->ext
.iterator
->start
->where
.lb
->location
;
1415 /* Initialize the DO variable: dovar = from. */
1416 gfc_add_modify_loc (loc
, pblock
, dovar
,
1417 fold_convert (TREE_TYPE(dovar
), from
));
1419 /* Save value for do-tinkering checking. */
1420 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1422 saved_dovar
= gfc_create_var (type
, ".saved_dovar");
1423 gfc_add_modify_loc (loc
, pblock
, saved_dovar
, dovar
);
1426 /* Cycle and exit statements are implemented with gotos. */
1427 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1428 exit_label
= gfc_build_label_decl (NULL_TREE
);
1430 /* Put the labels where they can be found later. See gfc_trans_do(). */
1431 code
->cycle_label
= cycle_label
;
1432 code
->exit_label
= exit_label
;
1435 gfc_start_block (&body
);
1437 /* Main loop body. */
1438 tmp
= gfc_trans_code_cond (code
->block
->next
, exit_cond
);
1439 gfc_add_expr_to_block (&body
, tmp
);
1441 /* Label for cycle statements (if needed). */
1442 if (TREE_USED (cycle_label
))
1444 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1445 gfc_add_expr_to_block (&body
, tmp
);
1448 /* Check whether someone has modified the loop variable. */
1449 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1451 tmp
= fold_build2_loc (loc
, NE_EXPR
, boolean_type_node
,
1452 dovar
, saved_dovar
);
1453 gfc_trans_runtime_check (true, false, tmp
, &body
, &code
->loc
,
1454 "Loop variable has been modified");
1457 /* Exit the loop if there is an I/O result condition or error. */
1460 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1461 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1463 build_empty_stmt (loc
));
1464 gfc_add_expr_to_block (&body
, tmp
);
1467 /* Evaluate the loop condition. */
1468 cond
= fold_build2_loc (loc
, EQ_EXPR
, boolean_type_node
, dovar
,
1470 cond
= gfc_evaluate_now_loc (loc
, cond
, &body
);
1472 /* Increment the loop variable. */
1473 tmp
= fold_build2_loc (loc
, PLUS_EXPR
, type
, dovar
, step
);
1474 gfc_add_modify_loc (loc
, &body
, dovar
, tmp
);
1476 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1477 gfc_add_modify_loc (loc
, &body
, saved_dovar
, dovar
);
1479 /* The loop exit. */
1480 tmp
= fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
, exit_label
);
1481 TREE_USED (exit_label
) = 1;
1482 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1483 cond
, tmp
, build_empty_stmt (loc
));
1484 gfc_add_expr_to_block (&body
, tmp
);
1486 /* Finish the loop body. */
1487 tmp
= gfc_finish_block (&body
);
1488 tmp
= fold_build1_loc (loc
, LOOP_EXPR
, void_type_node
, tmp
);
1490 /* Only execute the loop if the number of iterations is positive. */
1491 if (tree_int_cst_sgn (step
) > 0)
1492 cond
= fold_build2_loc (loc
, LE_EXPR
, boolean_type_node
, dovar
,
1495 cond
= fold_build2_loc (loc
, GE_EXPR
, boolean_type_node
, dovar
,
1497 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, cond
, tmp
,
1498 build_empty_stmt (loc
));
1499 gfc_add_expr_to_block (pblock
, tmp
);
1501 /* Add the exit label. */
1502 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1503 gfc_add_expr_to_block (pblock
, tmp
);
1505 return gfc_finish_block (pblock
);
1508 /* Translate the DO construct. This obviously is one of the most
1509 important ones to get right with any compiler, but especially
1512 We special case some loop forms as described in gfc_trans_simple_do.
1513 For other cases we implement them with a separate loop count,
1514 as described in the standard.
1516 We translate a do loop from:
1518 DO dovar = from, to, step
1524 [evaluate loop bounds and step]
1525 empty = (step > 0 ? to < from : to > from);
1526 countm1 = (to - from) / step;
1528 if (empty) goto exit_label;
1536 if (countm1t == 0) goto exit_label;
1540 countm1 is an unsigned integer. It is equal to the loop count minus one,
1541 because the loop count itself can overflow. */
1544 gfc_trans_do (gfc_code
* code
, tree exit_cond
)
1548 tree saved_dovar
= NULL
;
1563 gfc_start_block (&block
);
1565 loc
= code
->ext
.iterator
->start
->where
.lb
->location
;
1567 /* Evaluate all the expressions in the iterator. */
1568 gfc_init_se (&se
, NULL
);
1569 gfc_conv_expr_lhs (&se
, code
->ext
.iterator
->var
);
1570 gfc_add_block_to_block (&block
, &se
.pre
);
1572 type
= TREE_TYPE (dovar
);
1574 gfc_init_se (&se
, NULL
);
1575 gfc_conv_expr_val (&se
, code
->ext
.iterator
->start
);
1576 gfc_add_block_to_block (&block
, &se
.pre
);
1577 from
= gfc_evaluate_now (se
.expr
, &block
);
1579 gfc_init_se (&se
, NULL
);
1580 gfc_conv_expr_val (&se
, code
->ext
.iterator
->end
);
1581 gfc_add_block_to_block (&block
, &se
.pre
);
1582 to
= gfc_evaluate_now (se
.expr
, &block
);
1584 gfc_init_se (&se
, NULL
);
1585 gfc_conv_expr_val (&se
, code
->ext
.iterator
->step
);
1586 gfc_add_block_to_block (&block
, &se
.pre
);
1587 step
= gfc_evaluate_now (se
.expr
, &block
);
1589 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1591 tmp
= fold_build2_loc (input_location
, EQ_EXPR
, boolean_type_node
, step
,
1592 build_zero_cst (type
));
1593 gfc_trans_runtime_check (true, false, tmp
, &block
, &code
->loc
,
1594 "DO step value is zero");
1597 /* Special case simple loops. */
1598 if (TREE_CODE (type
) == INTEGER_TYPE
1599 && (integer_onep (step
)
1600 || tree_int_cst_equal (step
, integer_minus_one_node
)))
1601 return gfc_trans_simple_do (code
, &block
, dovar
, from
, to
, step
, exit_cond
);
1604 if (TREE_CODE (type
) == INTEGER_TYPE
)
1605 utype
= unsigned_type_for (type
);
1607 utype
= unsigned_type_for (gfc_array_index_type
);
1608 countm1
= gfc_create_var (utype
, "countm1");
1610 /* Cycle and exit statements are implemented with gotos. */
1611 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1612 exit_label
= gfc_build_label_decl (NULL_TREE
);
1613 TREE_USED (exit_label
) = 1;
1615 /* Put these labels where they can be found later. */
1616 code
->cycle_label
= cycle_label
;
1617 code
->exit_label
= exit_label
;
1619 /* Initialize the DO variable: dovar = from. */
1620 gfc_add_modify (&block
, dovar
, from
);
1622 /* Save value for do-tinkering checking. */
1623 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1625 saved_dovar
= gfc_create_var (type
, ".saved_dovar");
1626 gfc_add_modify_loc (loc
, &block
, saved_dovar
, dovar
);
1629 /* Initialize loop count and jump to exit label if the loop is empty.
1630 This code is executed before we enter the loop body. We generate:
1635 countm1 = (to - from) / step;
1641 countm1 = (from - to) / -step;
1645 if (TREE_CODE (type
) == INTEGER_TYPE
)
1647 tree pos
, neg
, tou
, fromu
, stepu
, tmp2
;
1649 /* The distance from FROM to TO cannot always be represented in a signed
1650 type, thus use unsigned arithmetic, also to avoid any undefined
1652 tou
= fold_convert (utype
, to
);
1653 fromu
= fold_convert (utype
, from
);
1654 stepu
= fold_convert (utype
, step
);
1656 /* For a positive step, when to < from, exit, otherwise compute
1657 countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step */
1658 tmp
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, to
, from
);
1659 tmp2
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, utype
,
1660 fold_build2_loc (loc
, MINUS_EXPR
, utype
,
1663 pos
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1664 fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
,
1666 fold_build2 (MODIFY_EXPR
, void_type_node
,
1669 /* For a negative step, when to > from, exit, otherwise compute
1670 countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step */
1671 tmp
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
, to
, from
);
1672 tmp2
= fold_build2_loc (loc
, TRUNC_DIV_EXPR
, utype
,
1673 fold_build2_loc (loc
, MINUS_EXPR
, utype
,
1675 fold_build1_loc (loc
, NEGATE_EXPR
, utype
, stepu
));
1676 neg
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1677 fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
,
1679 fold_build2 (MODIFY_EXPR
, void_type_node
,
1682 tmp
= fold_build2_loc (loc
, LT_EXPR
, boolean_type_node
, step
,
1683 build_int_cst (TREE_TYPE (step
), 0));
1684 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
, neg
, pos
);
1686 gfc_add_expr_to_block (&block
, tmp
);
1692 /* TODO: We could use the same width as the real type.
1693 This would probably cause more problems that it solves
1694 when we implement "long double" types. */
1696 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, type
, to
, from
);
1697 tmp
= fold_build2_loc (loc
, RDIV_EXPR
, type
, tmp
, step
);
1698 tmp
= fold_build1_loc (loc
, FIX_TRUNC_EXPR
, utype
, tmp
);
1699 gfc_add_modify (&block
, countm1
, tmp
);
1701 /* We need a special check for empty loops:
1702 empty = (step > 0 ? to < from : to > from); */
1703 pos_step
= fold_build2_loc (loc
, GT_EXPR
, boolean_type_node
, step
,
1704 build_zero_cst (type
));
1705 tmp
= fold_build3_loc (loc
, COND_EXPR
, boolean_type_node
, pos_step
,
1706 fold_build2_loc (loc
, LT_EXPR
,
1707 boolean_type_node
, to
, from
),
1708 fold_build2_loc (loc
, GT_EXPR
,
1709 boolean_type_node
, to
, from
));
1710 /* If the loop is empty, go directly to the exit label. */
1711 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
, tmp
,
1712 build1_v (GOTO_EXPR
, exit_label
),
1713 build_empty_stmt (input_location
));
1714 gfc_add_expr_to_block (&block
, tmp
);
1718 gfc_start_block (&body
);
1720 /* Main loop body. */
1721 tmp
= gfc_trans_code_cond (code
->block
->next
, exit_cond
);
1722 gfc_add_expr_to_block (&body
, tmp
);
1724 /* Label for cycle statements (if needed). */
1725 if (TREE_USED (cycle_label
))
1727 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1728 gfc_add_expr_to_block (&body
, tmp
);
1731 /* Check whether someone has modified the loop variable. */
1732 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1734 tmp
= fold_build2_loc (loc
, NE_EXPR
, boolean_type_node
, dovar
,
1736 gfc_trans_runtime_check (true, false, tmp
, &body
, &code
->loc
,
1737 "Loop variable has been modified");
1740 /* Exit the loop if there is an I/O result condition or error. */
1743 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1744 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1746 build_empty_stmt (input_location
));
1747 gfc_add_expr_to_block (&body
, tmp
);
1750 /* Increment the loop variable. */
1751 tmp
= fold_build2_loc (loc
, PLUS_EXPR
, type
, dovar
, step
);
1752 gfc_add_modify_loc (loc
, &body
, dovar
, tmp
);
1754 if (gfc_option
.rtcheck
& GFC_RTCHECK_DO
)
1755 gfc_add_modify_loc (loc
, &body
, saved_dovar
, dovar
);
1757 /* Initialize countm1t. */
1758 tree countm1t
= gfc_create_var (utype
, "countm1t");
1759 gfc_add_modify_loc (loc
, &body
, countm1t
, countm1
);
1761 /* Decrement the loop count. */
1762 tmp
= fold_build2_loc (loc
, MINUS_EXPR
, utype
, countm1
,
1763 build_int_cst (utype
, 1));
1764 gfc_add_modify_loc (loc
, &body
, countm1
, tmp
);
1766 /* End with the loop condition. Loop until countm1t == 0. */
1767 cond
= fold_build2_loc (loc
, EQ_EXPR
, boolean_type_node
, countm1t
,
1768 build_int_cst (utype
, 0));
1769 tmp
= fold_build1_loc (loc
, GOTO_EXPR
, void_type_node
, exit_label
);
1770 tmp
= fold_build3_loc (loc
, COND_EXPR
, void_type_node
,
1771 cond
, tmp
, build_empty_stmt (loc
));
1772 gfc_add_expr_to_block (&body
, tmp
);
1774 /* End of loop body. */
1775 tmp
= gfc_finish_block (&body
);
1777 /* The for loop itself. */
1778 tmp
= fold_build1_loc (loc
, LOOP_EXPR
, void_type_node
, tmp
);
1779 gfc_add_expr_to_block (&block
, tmp
);
1781 /* Add the exit label. */
1782 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1783 gfc_add_expr_to_block (&block
, tmp
);
1785 return gfc_finish_block (&block
);
1789 /* Translate the DO WHILE construct.
1802 if (! cond) goto exit_label;
1808 Because the evaluation of the exit condition `cond' may have side
1809 effects, we can't do much for empty loop bodies. The backend optimizers
1810 should be smart enough to eliminate any dead loops. */
1813 gfc_trans_do_while (gfc_code
* code
)
1821 /* Everything we build here is part of the loop body. */
1822 gfc_start_block (&block
);
1824 /* Cycle and exit statements are implemented with gotos. */
1825 cycle_label
= gfc_build_label_decl (NULL_TREE
);
1826 exit_label
= gfc_build_label_decl (NULL_TREE
);
1828 /* Put the labels where they can be found later. See gfc_trans_do(). */
1829 code
->cycle_label
= cycle_label
;
1830 code
->exit_label
= exit_label
;
1832 /* Create a GIMPLE version of the exit condition. */
1833 gfc_init_se (&cond
, NULL
);
1834 gfc_conv_expr_val (&cond
, code
->expr1
);
1835 gfc_add_block_to_block (&block
, &cond
.pre
);
1836 cond
.expr
= fold_build1_loc (code
->expr1
->where
.lb
->location
,
1837 TRUTH_NOT_EXPR
, TREE_TYPE (cond
.expr
), cond
.expr
);
1839 /* Build "IF (! cond) GOTO exit_label". */
1840 tmp
= build1_v (GOTO_EXPR
, exit_label
);
1841 TREE_USED (exit_label
) = 1;
1842 tmp
= fold_build3_loc (code
->expr1
->where
.lb
->location
, COND_EXPR
,
1843 void_type_node
, cond
.expr
, tmp
,
1844 build_empty_stmt (code
->expr1
->where
.lb
->location
));
1845 gfc_add_expr_to_block (&block
, tmp
);
1847 /* The main body of the loop. */
1848 tmp
= gfc_trans_code (code
->block
->next
);
1849 gfc_add_expr_to_block (&block
, tmp
);
1851 /* Label for cycle statements (if needed). */
1852 if (TREE_USED (cycle_label
))
1854 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
1855 gfc_add_expr_to_block (&block
, tmp
);
1858 /* End of loop body. */
1859 tmp
= gfc_finish_block (&block
);
1861 gfc_init_block (&block
);
1862 /* Build the loop. */
1863 tmp
= fold_build1_loc (code
->expr1
->where
.lb
->location
, LOOP_EXPR
,
1864 void_type_node
, tmp
);
1865 gfc_add_expr_to_block (&block
, tmp
);
1867 /* Add the exit label. */
1868 tmp
= build1_v (LABEL_EXPR
, exit_label
);
1869 gfc_add_expr_to_block (&block
, tmp
);
1871 return gfc_finish_block (&block
);
1875 /* Translate the SELECT CASE construct for INTEGER case expressions,
1876 without killing all potential optimizations. The problem is that
1877 Fortran allows unbounded cases, but the back-end does not, so we
1878 need to intercept those before we enter the equivalent SWITCH_EXPR
1881 For example, we translate this,
1884 CASE (:100,101,105:115)
1894 to the GENERIC equivalent,
1898 case (minimum value for typeof(expr) ... 100:
1904 case 200 ... (maximum value for typeof(expr):
1921 gfc_trans_integer_select (gfc_code
* code
)
1931 gfc_start_block (&block
);
1933 /* Calculate the switch expression. */
1934 gfc_init_se (&se
, NULL
);
1935 gfc_conv_expr_val (&se
, code
->expr1
);
1936 gfc_add_block_to_block (&block
, &se
.pre
);
1938 end_label
= gfc_build_label_decl (NULL_TREE
);
1940 gfc_init_block (&body
);
1942 for (c
= code
->block
; c
; c
= c
->block
)
1944 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
1949 /* Assume it's the default case. */
1950 low
= high
= NULL_TREE
;
1954 low
= gfc_conv_mpz_to_tree (cp
->low
->value
.integer
,
1957 /* If there's only a lower bound, set the high bound to the
1958 maximum value of the case expression. */
1960 high
= TYPE_MAX_VALUE (TREE_TYPE (se
.expr
));
1965 /* Three cases are possible here:
1967 1) There is no lower bound, e.g. CASE (:N).
1968 2) There is a lower bound .NE. high bound, that is
1969 a case range, e.g. CASE (N:M) where M>N (we make
1970 sure that M>N during type resolution).
1971 3) There is a lower bound, and it has the same value
1972 as the high bound, e.g. CASE (N:N). This is our
1973 internal representation of CASE(N).
1975 In the first and second case, we need to set a value for
1976 high. In the third case, we don't because the GCC middle
1977 end represents a single case value by just letting high be
1978 a NULL_TREE. We can't do that because we need to be able
1979 to represent unbounded cases. */
1983 && mpz_cmp (cp
->low
->value
.integer
,
1984 cp
->high
->value
.integer
) != 0))
1985 high
= gfc_conv_mpz_to_tree (cp
->high
->value
.integer
,
1988 /* Unbounded case. */
1990 low
= TYPE_MIN_VALUE (TREE_TYPE (se
.expr
));
1993 /* Build a label. */
1994 label
= gfc_build_label_decl (NULL_TREE
);
1996 /* Add this case label.
1997 Add parameter 'label', make it match GCC backend. */
1998 tmp
= build_case_label (low
, high
, label
);
1999 gfc_add_expr_to_block (&body
, tmp
);
2002 /* Add the statements for this case. */
2003 tmp
= gfc_trans_code (c
->next
);
2004 gfc_add_expr_to_block (&body
, tmp
);
2006 /* Break to the end of the construct. */
2007 tmp
= build1_v (GOTO_EXPR
, end_label
);
2008 gfc_add_expr_to_block (&body
, tmp
);
2011 tmp
= gfc_finish_block (&body
);
2012 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2013 se
.expr
, tmp
, NULL_TREE
);
2014 gfc_add_expr_to_block (&block
, tmp
);
2016 tmp
= build1_v (LABEL_EXPR
, end_label
);
2017 gfc_add_expr_to_block (&block
, tmp
);
2019 return gfc_finish_block (&block
);
2023 /* Translate the SELECT CASE construct for LOGICAL case expressions.
2025 There are only two cases possible here, even though the standard
2026 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
2027 .FALSE., and DEFAULT.
2029 We never generate more than two blocks here. Instead, we always
2030 try to eliminate the DEFAULT case. This way, we can translate this
2031 kind of SELECT construct to a simple
2035 expression in GENERIC. */
2038 gfc_trans_logical_select (gfc_code
* code
)
2041 gfc_code
*t
, *f
, *d
;
2046 /* Assume we don't have any cases at all. */
2049 /* Now see which ones we actually do have. We can have at most two
2050 cases in a single case list: one for .TRUE. and one for .FALSE.
2051 The default case is always separate. If the cases for .TRUE. and
2052 .FALSE. are in the same case list, the block for that case list
2053 always executed, and we don't generate code a COND_EXPR. */
2054 for (c
= code
->block
; c
; c
= c
->block
)
2056 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2060 if (cp
->low
->value
.logical
== 0) /* .FALSE. */
2062 else /* if (cp->value.logical != 0), thus .TRUE. */
2070 /* Start a new block. */
2071 gfc_start_block (&block
);
2073 /* Calculate the switch expression. We always need to do this
2074 because it may have side effects. */
2075 gfc_init_se (&se
, NULL
);
2076 gfc_conv_expr_val (&se
, code
->expr1
);
2077 gfc_add_block_to_block (&block
, &se
.pre
);
2079 if (t
== f
&& t
!= NULL
)
2081 /* Cases for .TRUE. and .FALSE. are in the same block. Just
2082 translate the code for these cases, append it to the current
2084 gfc_add_expr_to_block (&block
, gfc_trans_code (t
->next
));
2088 tree true_tree
, false_tree
, stmt
;
2090 true_tree
= build_empty_stmt (input_location
);
2091 false_tree
= build_empty_stmt (input_location
);
2093 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
2094 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
2095 make the missing case the default case. */
2096 if (t
!= NULL
&& f
!= NULL
)
2106 /* Translate the code for each of these blocks, and append it to
2107 the current block. */
2109 true_tree
= gfc_trans_code (t
->next
);
2112 false_tree
= gfc_trans_code (f
->next
);
2114 stmt
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2115 se
.expr
, true_tree
, false_tree
);
2116 gfc_add_expr_to_block (&block
, stmt
);
2119 return gfc_finish_block (&block
);
2123 /* The jump table types are stored in static variables to avoid
2124 constructing them from scratch every single time. */
2125 static GTY(()) tree select_struct
[2];
2127 /* Translate the SELECT CASE construct for CHARACTER case expressions.
2128 Instead of generating compares and jumps, it is far simpler to
2129 generate a data structure describing the cases in order and call a
2130 library subroutine that locates the right case.
2131 This is particularly true because this is the only case where we
2132 might have to dispose of a temporary.
2133 The library subroutine returns a pointer to jump to or NULL if no
2134 branches are to be taken. */
2137 gfc_trans_character_select (gfc_code
*code
)
2139 tree init
, end_label
, tmp
, type
, case_num
, label
, fndecl
;
2140 stmtblock_t block
, body
;
2145 vec
<constructor_elt
, va_gc
> *inits
= NULL
;
2147 tree pchartype
= gfc_get_pchar_type (code
->expr1
->ts
.kind
);
2149 /* The jump table types are stored in static variables to avoid
2150 constructing them from scratch every single time. */
2151 static tree ss_string1
[2], ss_string1_len
[2];
2152 static tree ss_string2
[2], ss_string2_len
[2];
2153 static tree ss_target
[2];
2155 cp
= code
->block
->ext
.block
.case_list
;
2156 while (cp
->left
!= NULL
)
2159 /* Generate the body */
2160 gfc_start_block (&block
);
2161 gfc_init_se (&expr1se
, NULL
);
2162 gfc_conv_expr_reference (&expr1se
, code
->expr1
);
2164 gfc_add_block_to_block (&block
, &expr1se
.pre
);
2166 end_label
= gfc_build_label_decl (NULL_TREE
);
2168 gfc_init_block (&body
);
2170 /* Attempt to optimize length 1 selects. */
2171 if (integer_onep (expr1se
.string_length
))
2173 for (d
= cp
; d
; d
= d
->right
)
2178 gcc_assert (d
->low
->expr_type
== EXPR_CONSTANT
2179 && d
->low
->ts
.type
== BT_CHARACTER
);
2180 if (d
->low
->value
.character
.length
> 1)
2182 for (i
= 1; i
< d
->low
->value
.character
.length
; i
++)
2183 if (d
->low
->value
.character
.string
[i
] != ' ')
2185 if (i
!= d
->low
->value
.character
.length
)
2187 if (optimize
&& d
->high
&& i
== 1)
2189 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
2190 && d
->high
->ts
.type
== BT_CHARACTER
);
2191 if (d
->high
->value
.character
.length
> 1
2192 && (d
->low
->value
.character
.string
[0]
2193 == d
->high
->value
.character
.string
[0])
2194 && d
->high
->value
.character
.string
[1] != ' '
2195 && ((d
->low
->value
.character
.string
[1] < ' ')
2196 == (d
->high
->value
.character
.string
[1]
2206 gcc_assert (d
->high
->expr_type
== EXPR_CONSTANT
2207 && d
->high
->ts
.type
== BT_CHARACTER
);
2208 if (d
->high
->value
.character
.length
> 1)
2210 for (i
= 1; i
< d
->high
->value
.character
.length
; i
++)
2211 if (d
->high
->value
.character
.string
[i
] != ' ')
2213 if (i
!= d
->high
->value
.character
.length
)
2220 tree ctype
= gfc_get_char_type (code
->expr1
->ts
.kind
);
2222 for (c
= code
->block
; c
; c
= c
->block
)
2224 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2230 /* Assume it's the default case. */
2231 low
= high
= NULL_TREE
;
2235 /* CASE ('ab') or CASE ('ab':'az') will never match
2236 any length 1 character. */
2237 if (cp
->low
->value
.character
.length
> 1
2238 && cp
->low
->value
.character
.string
[1] != ' ')
2241 if (cp
->low
->value
.character
.length
> 0)
2242 r
= cp
->low
->value
.character
.string
[0];
2245 low
= build_int_cst (ctype
, r
);
2247 /* If there's only a lower bound, set the high bound
2248 to the maximum value of the case expression. */
2250 high
= TYPE_MAX_VALUE (ctype
);
2256 || (cp
->low
->value
.character
.string
[0]
2257 != cp
->high
->value
.character
.string
[0]))
2259 if (cp
->high
->value
.character
.length
> 0)
2260 r
= cp
->high
->value
.character
.string
[0];
2263 high
= build_int_cst (ctype
, r
);
2266 /* Unbounded case. */
2268 low
= TYPE_MIN_VALUE (ctype
);
2271 /* Build a label. */
2272 label
= gfc_build_label_decl (NULL_TREE
);
2274 /* Add this case label.
2275 Add parameter 'label', make it match GCC backend. */
2276 tmp
= build_case_label (low
, high
, label
);
2277 gfc_add_expr_to_block (&body
, tmp
);
2280 /* Add the statements for this case. */
2281 tmp
= gfc_trans_code (c
->next
);
2282 gfc_add_expr_to_block (&body
, tmp
);
2284 /* Break to the end of the construct. */
2285 tmp
= build1_v (GOTO_EXPR
, end_label
);
2286 gfc_add_expr_to_block (&body
, tmp
);
2289 tmp
= gfc_string_to_single_character (expr1se
.string_length
,
2291 code
->expr1
->ts
.kind
);
2292 case_num
= gfc_create_var (ctype
, "case_num");
2293 gfc_add_modify (&block
, case_num
, tmp
);
2295 gfc_add_block_to_block (&block
, &expr1se
.post
);
2297 tmp
= gfc_finish_block (&body
);
2298 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2299 case_num
, tmp
, NULL_TREE
);
2300 gfc_add_expr_to_block (&block
, tmp
);
2302 tmp
= build1_v (LABEL_EXPR
, end_label
);
2303 gfc_add_expr_to_block (&block
, tmp
);
2305 return gfc_finish_block (&block
);
2309 if (code
->expr1
->ts
.kind
== 1)
2311 else if (code
->expr1
->ts
.kind
== 4)
2316 if (select_struct
[k
] == NULL
)
2319 select_struct
[k
] = make_node (RECORD_TYPE
);
2321 if (code
->expr1
->ts
.kind
== 1)
2322 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char1");
2323 else if (code
->expr1
->ts
.kind
== 4)
2324 TYPE_NAME (select_struct
[k
]) = get_identifier ("_jump_struct_char4");
2329 #define ADD_FIELD(NAME, TYPE) \
2330 ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k], \
2331 get_identifier (stringize(NAME)), \
2335 ADD_FIELD (string1
, pchartype
);
2336 ADD_FIELD (string1_len
, gfc_charlen_type_node
);
2338 ADD_FIELD (string2
, pchartype
);
2339 ADD_FIELD (string2_len
, gfc_charlen_type_node
);
2341 ADD_FIELD (target
, integer_type_node
);
2344 gfc_finish_type (select_struct
[k
]);
2348 for (d
= cp
; d
; d
= d
->right
)
2351 for (c
= code
->block
; c
; c
= c
->block
)
2353 for (d
= c
->ext
.block
.case_list
; d
; d
= d
->next
)
2355 label
= gfc_build_label_decl (NULL_TREE
);
2356 tmp
= build_case_label ((d
->low
== NULL
&& d
->high
== NULL
)
2358 : build_int_cst (integer_type_node
, d
->n
),
2360 gfc_add_expr_to_block (&body
, tmp
);
2363 tmp
= gfc_trans_code (c
->next
);
2364 gfc_add_expr_to_block (&body
, tmp
);
2366 tmp
= build1_v (GOTO_EXPR
, end_label
);
2367 gfc_add_expr_to_block (&body
, tmp
);
2370 /* Generate the structure describing the branches */
2371 for (d
= cp
; d
; d
= d
->right
)
2373 vec
<constructor_elt
, va_gc
> *node
= NULL
;
2375 gfc_init_se (&se
, NULL
);
2379 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], null_pointer_node
);
2380 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], integer_zero_node
);
2384 gfc_conv_expr_reference (&se
, d
->low
);
2386 CONSTRUCTOR_APPEND_ELT (node
, ss_string1
[k
], se
.expr
);
2387 CONSTRUCTOR_APPEND_ELT (node
, ss_string1_len
[k
], se
.string_length
);
2390 if (d
->high
== NULL
)
2392 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], null_pointer_node
);
2393 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], integer_zero_node
);
2397 gfc_init_se (&se
, NULL
);
2398 gfc_conv_expr_reference (&se
, d
->high
);
2400 CONSTRUCTOR_APPEND_ELT (node
, ss_string2
[k
], se
.expr
);
2401 CONSTRUCTOR_APPEND_ELT (node
, ss_string2_len
[k
], se
.string_length
);
2404 CONSTRUCTOR_APPEND_ELT (node
, ss_target
[k
],
2405 build_int_cst (integer_type_node
, d
->n
));
2407 tmp
= build_constructor (select_struct
[k
], node
);
2408 CONSTRUCTOR_APPEND_ELT (inits
, NULL_TREE
, tmp
);
2411 type
= build_array_type (select_struct
[k
],
2412 build_index_type (size_int (n
-1)));
2414 init
= build_constructor (type
, inits
);
2415 TREE_CONSTANT (init
) = 1;
2416 TREE_STATIC (init
) = 1;
2417 /* Create a static variable to hold the jump table. */
2418 tmp
= gfc_create_var (type
, "jumptable");
2419 TREE_CONSTANT (tmp
) = 1;
2420 TREE_STATIC (tmp
) = 1;
2421 TREE_READONLY (tmp
) = 1;
2422 DECL_INITIAL (tmp
) = init
;
2425 /* Build the library call */
2426 init
= gfc_build_addr_expr (pvoid_type_node
, init
);
2428 if (code
->expr1
->ts
.kind
== 1)
2429 fndecl
= gfor_fndecl_select_string
;
2430 else if (code
->expr1
->ts
.kind
== 4)
2431 fndecl
= gfor_fndecl_select_string_char4
;
2435 tmp
= build_call_expr_loc (input_location
,
2437 build_int_cst (gfc_charlen_type_node
, n
),
2438 expr1se
.expr
, expr1se
.string_length
);
2439 case_num
= gfc_create_var (integer_type_node
, "case_num");
2440 gfc_add_modify (&block
, case_num
, tmp
);
2442 gfc_add_block_to_block (&block
, &expr1se
.post
);
2444 tmp
= gfc_finish_block (&body
);
2445 tmp
= fold_build3_loc (input_location
, SWITCH_EXPR
, NULL_TREE
,
2446 case_num
, tmp
, NULL_TREE
);
2447 gfc_add_expr_to_block (&block
, tmp
);
2449 tmp
= build1_v (LABEL_EXPR
, end_label
);
2450 gfc_add_expr_to_block (&block
, tmp
);
2452 return gfc_finish_block (&block
);
2456 /* Translate the three variants of the SELECT CASE construct.
2458 SELECT CASEs with INTEGER case expressions can be translated to an
2459 equivalent GENERIC switch statement, and for LOGICAL case
2460 expressions we build one or two if-else compares.
2462 SELECT CASEs with CHARACTER case expressions are a whole different
2463 story, because they don't exist in GENERIC. So we sort them and
2464 do a binary search at runtime.
2466 Fortran has no BREAK statement, and it does not allow jumps from
2467 one case block to another. That makes things a lot easier for
2471 gfc_trans_select (gfc_code
* code
)
2477 gcc_assert (code
&& code
->expr1
);
2478 gfc_init_block (&block
);
2480 /* Build the exit label and hang it in. */
2481 exit_label
= gfc_build_label_decl (NULL_TREE
);
2482 code
->exit_label
= exit_label
;
2484 /* Empty SELECT constructs are legal. */
2485 if (code
->block
== NULL
)
2486 body
= build_empty_stmt (input_location
);
2488 /* Select the correct translation function. */
2490 switch (code
->expr1
->ts
.type
)
2493 body
= gfc_trans_logical_select (code
);
2497 body
= gfc_trans_integer_select (code
);
2501 body
= gfc_trans_character_select (code
);
2505 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
2509 /* Build everything together. */
2510 gfc_add_expr_to_block (&block
, body
);
2511 gfc_add_expr_to_block (&block
, build1_v (LABEL_EXPR
, exit_label
));
2513 return gfc_finish_block (&block
);
2517 /* Traversal function to substitute a replacement symtree if the symbol
2518 in the expression is the same as that passed. f == 2 signals that
2519 that variable itself is not to be checked - only the references.
2520 This group of functions is used when the variable expression in a
2521 FORALL assignment has internal references. For example:
2522 FORALL (i = 1:4) p(p(i)) = i
2523 The only recourse here is to store a copy of 'p' for the index
2526 static gfc_symtree
*new_symtree
;
2527 static gfc_symtree
*old_symtree
;
2530 forall_replace (gfc_expr
*expr
, gfc_symbol
*sym
, int *f
)
2532 if (expr
->expr_type
!= EXPR_VARIABLE
)
2537 else if (expr
->symtree
->n
.sym
== sym
)
2538 expr
->symtree
= new_symtree
;
2544 forall_replace_symtree (gfc_expr
*e
, gfc_symbol
*sym
, int f
)
2546 gfc_traverse_expr (e
, sym
, forall_replace
, f
);
2550 forall_restore (gfc_expr
*expr
,
2551 gfc_symbol
*sym ATTRIBUTE_UNUSED
,
2552 int *f ATTRIBUTE_UNUSED
)
2554 if (expr
->expr_type
!= EXPR_VARIABLE
)
2557 if (expr
->symtree
== new_symtree
)
2558 expr
->symtree
= old_symtree
;
2564 forall_restore_symtree (gfc_expr
*e
)
2566 gfc_traverse_expr (e
, NULL
, forall_restore
, 0);
2570 forall_make_variable_temp (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2575 gfc_symbol
*new_sym
;
2576 gfc_symbol
*old_sym
;
2580 /* Build a copy of the lvalue. */
2581 old_symtree
= c
->expr1
->symtree
;
2582 old_sym
= old_symtree
->n
.sym
;
2583 e
= gfc_lval_expr_from_sym (old_sym
);
2584 if (old_sym
->attr
.dimension
)
2586 gfc_init_se (&tse
, NULL
);
2587 gfc_conv_subref_array_arg (&tse
, e
, 0, INTENT_IN
, false);
2588 gfc_add_block_to_block (pre
, &tse
.pre
);
2589 gfc_add_block_to_block (post
, &tse
.post
);
2590 tse
.expr
= build_fold_indirect_ref_loc (input_location
, tse
.expr
);
2592 if (e
->ts
.type
!= BT_CHARACTER
)
2594 /* Use the variable offset for the temporary. */
2595 tmp
= gfc_conv_array_offset (old_sym
->backend_decl
);
2596 gfc_conv_descriptor_offset_set (pre
, tse
.expr
, tmp
);
2601 gfc_init_se (&tse
, NULL
);
2602 gfc_init_se (&rse
, NULL
);
2603 gfc_conv_expr (&rse
, e
);
2604 if (e
->ts
.type
== BT_CHARACTER
)
2606 tse
.string_length
= rse
.string_length
;
2607 tmp
= gfc_get_character_type_len (gfc_default_character_kind
,
2609 tse
.expr
= gfc_conv_string_tmp (&tse
, build_pointer_type (tmp
),
2611 gfc_add_block_to_block (pre
, &tse
.pre
);
2612 gfc_add_block_to_block (post
, &tse
.post
);
2616 tmp
= gfc_typenode_for_spec (&e
->ts
);
2617 tse
.expr
= gfc_create_var (tmp
, "temp");
2620 tmp
= gfc_trans_scalar_assign (&tse
, &rse
, e
->ts
, true,
2621 e
->expr_type
== EXPR_VARIABLE
, true);
2622 gfc_add_expr_to_block (pre
, tmp
);
2626 /* Create a new symbol to represent the lvalue. */
2627 new_sym
= gfc_new_symbol (old_sym
->name
, NULL
);
2628 new_sym
->ts
= old_sym
->ts
;
2629 new_sym
->attr
.referenced
= 1;
2630 new_sym
->attr
.temporary
= 1;
2631 new_sym
->attr
.dimension
= old_sym
->attr
.dimension
;
2632 new_sym
->attr
.flavor
= old_sym
->attr
.flavor
;
2634 /* Use the temporary as the backend_decl. */
2635 new_sym
->backend_decl
= tse
.expr
;
2637 /* Create a fake symtree for it. */
2639 new_symtree
= gfc_new_symtree (&root
, old_sym
->name
);
2640 new_symtree
->n
.sym
= new_sym
;
2641 gcc_assert (new_symtree
== root
);
2643 /* Go through the expression reference replacing the old_symtree
2645 forall_replace_symtree (c
->expr1
, old_sym
, 2);
2647 /* Now we have made this temporary, we might as well use it for
2648 the right hand side. */
2649 forall_replace_symtree (c
->expr2
, old_sym
, 1);
2653 /* Handles dependencies in forall assignments. */
2655 check_forall_dependencies (gfc_code
*c
, stmtblock_t
*pre
, stmtblock_t
*post
)
2662 lsym
= c
->expr1
->symtree
->n
.sym
;
2663 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
2665 /* Now check for dependencies within the 'variable'
2666 expression itself. These are treated by making a complete
2667 copy of variable and changing all the references to it
2668 point to the copy instead. Note that the shallow copy of
2669 the variable will not suffice for derived types with
2670 pointer components. We therefore leave these to their
2672 if (lsym
->ts
.type
== BT_DERIVED
2673 && lsym
->ts
.u
.derived
->attr
.pointer_comp
)
2677 if (find_forall_index (c
->expr1
, lsym
, 2))
2679 forall_make_variable_temp (c
, pre
, post
);
2683 /* Substrings with dependencies are treated in the same
2685 if (c
->expr1
->ts
.type
== BT_CHARACTER
2687 && c
->expr2
->expr_type
== EXPR_VARIABLE
2688 && lsym
== c
->expr2
->symtree
->n
.sym
)
2690 for (lref
= c
->expr1
->ref
; lref
; lref
= lref
->next
)
2691 if (lref
->type
== REF_SUBSTRING
)
2693 for (rref
= c
->expr2
->ref
; rref
; rref
= rref
->next
)
2694 if (rref
->type
== REF_SUBSTRING
)
2698 && gfc_dep_compare_expr (rref
->u
.ss
.start
, lref
->u
.ss
.start
) < 0)
2700 forall_make_variable_temp (c
, pre
, post
);
2709 cleanup_forall_symtrees (gfc_code
*c
)
2711 forall_restore_symtree (c
->expr1
);
2712 forall_restore_symtree (c
->expr2
);
2713 free (new_symtree
->n
.sym
);
2718 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
2719 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
2720 indicates whether we should generate code to test the FORALLs mask
2721 array. OUTER is the loop header to be used for initializing mask
2724 The generated loop format is:
2725 count = (end - start + step) / step
2738 gfc_trans_forall_loop (forall_info
*forall_tmp
, tree body
,
2739 int mask_flag
, stmtblock_t
*outer
)
2747 tree var
, start
, end
, step
;
2750 /* Initialize the mask index outside the FORALL nest. */
2751 if (mask_flag
&& forall_tmp
->mask
)
2752 gfc_add_modify (outer
, forall_tmp
->maskindex
, gfc_index_zero_node
);
2754 iter
= forall_tmp
->this_loop
;
2755 nvar
= forall_tmp
->nvar
;
2756 for (n
= 0; n
< nvar
; n
++)
2759 start
= iter
->start
;
2763 exit_label
= gfc_build_label_decl (NULL_TREE
);
2764 TREE_USED (exit_label
) = 1;
2766 /* The loop counter. */
2767 count
= gfc_create_var (TREE_TYPE (var
), "count");
2769 /* The body of the loop. */
2770 gfc_init_block (&block
);
2772 /* The exit condition. */
2773 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
,
2774 count
, build_int_cst (TREE_TYPE (count
), 0));
2775 if (forall_tmp
->do_concurrent
)
2776 cond
= build2 (ANNOTATE_EXPR
, TREE_TYPE (cond
), cond
,
2777 build_int_cst (integer_type_node
,
2778 annot_expr_ivdep_kind
));
2780 tmp
= build1_v (GOTO_EXPR
, exit_label
);
2781 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2782 cond
, tmp
, build_empty_stmt (input_location
));
2783 gfc_add_expr_to_block (&block
, tmp
);
2785 /* The main loop body. */
2786 gfc_add_expr_to_block (&block
, body
);
2788 /* Increment the loop variable. */
2789 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), var
,
2791 gfc_add_modify (&block
, var
, tmp
);
2793 /* Advance to the next mask element. Only do this for the
2795 if (n
== 0 && mask_flag
&& forall_tmp
->mask
)
2797 tree maskindex
= forall_tmp
->maskindex
;
2798 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
2799 maskindex
, gfc_index_one_node
);
2800 gfc_add_modify (&block
, maskindex
, tmp
);
2803 /* Decrement the loop counter. */
2804 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), count
,
2805 build_int_cst (TREE_TYPE (var
), 1));
2806 gfc_add_modify (&block
, count
, tmp
);
2808 body
= gfc_finish_block (&block
);
2810 /* Loop var initialization. */
2811 gfc_init_block (&block
);
2812 gfc_add_modify (&block
, var
, start
);
2815 /* Initialize the loop counter. */
2816 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (var
), step
,
2818 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (var
), end
,
2820 tmp
= fold_build2_loc (input_location
, TRUNC_DIV_EXPR
, TREE_TYPE (var
),
2822 gfc_add_modify (&block
, count
, tmp
);
2824 /* The loop expression. */
2825 tmp
= build1_v (LOOP_EXPR
, body
);
2826 gfc_add_expr_to_block (&block
, tmp
);
2828 /* The exit label. */
2829 tmp
= build1_v (LABEL_EXPR
, exit_label
);
2830 gfc_add_expr_to_block (&block
, tmp
);
2832 body
= gfc_finish_block (&block
);
2839 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
2840 is nonzero, the body is controlled by all masks in the forall nest.
2841 Otherwise, the innermost loop is not controlled by it's mask. This
2842 is used for initializing that mask. */
2845 gfc_trans_nested_forall_loop (forall_info
* nested_forall_info
, tree body
,
2850 forall_info
*forall_tmp
;
2851 tree mask
, maskindex
;
2853 gfc_start_block (&header
);
2855 forall_tmp
= nested_forall_info
;
2856 while (forall_tmp
!= NULL
)
2858 /* Generate body with masks' control. */
2861 mask
= forall_tmp
->mask
;
2862 maskindex
= forall_tmp
->maskindex
;
2864 /* If a mask was specified make the assignment conditional. */
2867 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
2868 body
= build3_v (COND_EXPR
, tmp
, body
,
2869 build_empty_stmt (input_location
));
2872 body
= gfc_trans_forall_loop (forall_tmp
, body
, mask_flag
, &header
);
2873 forall_tmp
= forall_tmp
->prev_nest
;
2877 gfc_add_expr_to_block (&header
, body
);
2878 return gfc_finish_block (&header
);
2882 /* Allocate data for holding a temporary array. Returns either a local
2883 temporary array or a pointer variable. */
2886 gfc_do_allocate (tree bytesize
, tree size
, tree
* pdata
, stmtblock_t
* pblock
,
2893 if (INTEGER_CST_P (size
))
2894 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, gfc_array_index_type
,
2895 size
, gfc_index_one_node
);
2899 type
= build_range_type (gfc_array_index_type
, gfc_index_zero_node
, tmp
);
2900 type
= build_array_type (elem_type
, type
);
2901 if (gfc_can_put_var_on_stack (bytesize
))
2903 gcc_assert (INTEGER_CST_P (size
));
2904 tmpvar
= gfc_create_var (type
, "temp");
2909 tmpvar
= gfc_create_var (build_pointer_type (type
), "temp");
2910 *pdata
= convert (pvoid_type_node
, tmpvar
);
2912 tmp
= gfc_call_malloc (pblock
, TREE_TYPE (tmpvar
), bytesize
);
2913 gfc_add_modify (pblock
, tmpvar
, tmp
);
2919 /* Generate codes to copy the temporary to the actual lhs. */
2922 generate_loop_for_temp_to_lhs (gfc_expr
*expr
, tree tmp1
, tree count3
,
2923 tree count1
, tree wheremask
, bool invert
)
2927 stmtblock_t block
, body
;
2933 lss
= gfc_walk_expr (expr
);
2935 if (lss
== gfc_ss_terminator
)
2937 gfc_start_block (&block
);
2939 gfc_init_se (&lse
, NULL
);
2941 /* Translate the expression. */
2942 gfc_conv_expr (&lse
, expr
);
2944 /* Form the expression for the temporary. */
2945 tmp
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2947 /* Use the scalar assignment as is. */
2948 gfc_add_block_to_block (&block
, &lse
.pre
);
2949 gfc_add_modify (&block
, lse
.expr
, tmp
);
2950 gfc_add_block_to_block (&block
, &lse
.post
);
2952 /* Increment the count1. */
2953 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
2954 count1
, gfc_index_one_node
);
2955 gfc_add_modify (&block
, count1
, tmp
);
2957 tmp
= gfc_finish_block (&block
);
2961 gfc_start_block (&block
);
2963 gfc_init_loopinfo (&loop1
);
2964 gfc_init_se (&rse
, NULL
);
2965 gfc_init_se (&lse
, NULL
);
2967 /* Associate the lss with the loop. */
2968 gfc_add_ss_to_loop (&loop1
, lss
);
2970 /* Calculate the bounds of the scalarization. */
2971 gfc_conv_ss_startstride (&loop1
);
2972 /* Setup the scalarizing loops. */
2973 gfc_conv_loop_setup (&loop1
, &expr
->where
);
2975 gfc_mark_ss_chain_used (lss
, 1);
2977 /* Start the scalarized loop body. */
2978 gfc_start_scalarized_body (&loop1
, &body
);
2980 /* Setup the gfc_se structures. */
2981 gfc_copy_loopinfo_to_se (&lse
, &loop1
);
2984 /* Form the expression of the temporary. */
2985 if (lss
!= gfc_ss_terminator
)
2986 rse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
2987 /* Translate expr. */
2988 gfc_conv_expr (&lse
, expr
);
2990 /* Use the scalar assignment. */
2991 rse
.string_length
= lse
.string_length
;
2992 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr
->ts
, false, true, true);
2994 /* Form the mask expression according to the mask tree list. */
2997 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
2999 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3000 TREE_TYPE (wheremaskexpr
),
3002 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3004 build_empty_stmt (input_location
));
3007 gfc_add_expr_to_block (&body
, tmp
);
3009 /* Increment count1. */
3010 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3011 count1
, gfc_index_one_node
);
3012 gfc_add_modify (&body
, count1
, tmp
);
3014 /* Increment count3. */
3017 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3018 gfc_array_index_type
, count3
,
3019 gfc_index_one_node
);
3020 gfc_add_modify (&body
, count3
, tmp
);
3023 /* Generate the copying loops. */
3024 gfc_trans_scalarizing_loops (&loop1
, &body
);
3025 gfc_add_block_to_block (&block
, &loop1
.pre
);
3026 gfc_add_block_to_block (&block
, &loop1
.post
);
3027 gfc_cleanup_loop (&loop1
);
3029 tmp
= gfc_finish_block (&block
);
3035 /* Generate codes to copy rhs to the temporary. TMP1 is the address of
3036 temporary, LSS and RSS are formed in function compute_inner_temp_size(),
3037 and should not be freed. WHEREMASK is the conditional execution mask
3038 whose sense may be inverted by INVERT. */
3041 generate_loop_for_rhs_to_temp (gfc_expr
*expr2
, tree tmp1
, tree count3
,
3042 tree count1
, gfc_ss
*lss
, gfc_ss
*rss
,
3043 tree wheremask
, bool invert
)
3045 stmtblock_t block
, body1
;
3052 gfc_start_block (&block
);
3054 gfc_init_se (&rse
, NULL
);
3055 gfc_init_se (&lse
, NULL
);
3057 if (lss
== gfc_ss_terminator
)
3059 gfc_init_block (&body1
);
3060 gfc_conv_expr (&rse
, expr2
);
3061 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3065 /* Initialize the loop. */
3066 gfc_init_loopinfo (&loop
);
3068 /* We may need LSS to determine the shape of the expression. */
3069 gfc_add_ss_to_loop (&loop
, lss
);
3070 gfc_add_ss_to_loop (&loop
, rss
);
3072 gfc_conv_ss_startstride (&loop
);
3073 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3075 gfc_mark_ss_chain_used (rss
, 1);
3076 /* Start the loop body. */
3077 gfc_start_scalarized_body (&loop
, &body1
);
3079 /* Translate the expression. */
3080 gfc_copy_loopinfo_to_se (&rse
, &loop
);
3082 gfc_conv_expr (&rse
, expr2
);
3084 /* Form the expression of the temporary. */
3085 lse
.expr
= gfc_build_array_ref (tmp1
, count1
, NULL
);
3088 /* Use the scalar assignment. */
3089 lse
.string_length
= rse
.string_length
;
3090 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr2
->ts
, true,
3091 expr2
->expr_type
== EXPR_VARIABLE
, true);
3093 /* Form the mask expression according to the mask tree list. */
3096 wheremaskexpr
= gfc_build_array_ref (wheremask
, count3
, NULL
);
3098 wheremaskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
3099 TREE_TYPE (wheremaskexpr
),
3101 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
3103 build_empty_stmt (input_location
));
3106 gfc_add_expr_to_block (&body1
, tmp
);
3108 if (lss
== gfc_ss_terminator
)
3110 gfc_add_block_to_block (&block
, &body1
);
3112 /* Increment count1. */
3113 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (count1
),
3114 count1
, gfc_index_one_node
);
3115 gfc_add_modify (&block
, count1
, tmp
);
3119 /* Increment count1. */
3120 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3121 count1
, gfc_index_one_node
);
3122 gfc_add_modify (&body1
, count1
, tmp
);
3124 /* Increment count3. */
3127 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3128 gfc_array_index_type
,
3129 count3
, gfc_index_one_node
);
3130 gfc_add_modify (&body1
, count3
, tmp
);
3133 /* Generate the copying loops. */
3134 gfc_trans_scalarizing_loops (&loop
, &body1
);
3136 gfc_add_block_to_block (&block
, &loop
.pre
);
3137 gfc_add_block_to_block (&block
, &loop
.post
);
3139 gfc_cleanup_loop (&loop
);
3140 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3141 as tree nodes in SS may not be valid in different scope. */
3144 tmp
= gfc_finish_block (&block
);
3149 /* Calculate the size of temporary needed in the assignment inside forall.
3150 LSS and RSS are filled in this function. */
3153 compute_inner_temp_size (gfc_expr
*expr1
, gfc_expr
*expr2
,
3154 stmtblock_t
* pblock
,
3155 gfc_ss
**lss
, gfc_ss
**rss
)
3163 *lss
= gfc_walk_expr (expr1
);
3166 size
= gfc_index_one_node
;
3167 if (*lss
!= gfc_ss_terminator
)
3169 gfc_init_loopinfo (&loop
);
3171 /* Walk the RHS of the expression. */
3172 *rss
= gfc_walk_expr (expr2
);
3173 if (*rss
== gfc_ss_terminator
)
3174 /* The rhs is scalar. Add a ss for the expression. */
3175 *rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
3177 /* Associate the SS with the loop. */
3178 gfc_add_ss_to_loop (&loop
, *lss
);
3179 /* We don't actually need to add the rhs at this point, but it might
3180 make guessing the loop bounds a bit easier. */
3181 gfc_add_ss_to_loop (&loop
, *rss
);
3183 /* We only want the shape of the expression, not rest of the junk
3184 generated by the scalarizer. */
3185 loop
.array_parameter
= 1;
3187 /* Calculate the bounds of the scalarization. */
3188 save_flag
= gfc_option
.rtcheck
;
3189 gfc_option
.rtcheck
&= ~GFC_RTCHECK_BOUNDS
;
3190 gfc_conv_ss_startstride (&loop
);
3191 gfc_option
.rtcheck
= save_flag
;
3192 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3194 /* Figure out how many elements we need. */
3195 for (i
= 0; i
< loop
.dimen
; i
++)
3197 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
,
3198 gfc_array_index_type
,
3199 gfc_index_one_node
, loop
.from
[i
]);
3200 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3201 gfc_array_index_type
, tmp
, loop
.to
[i
]);
3202 size
= fold_build2_loc (input_location
, MULT_EXPR
,
3203 gfc_array_index_type
, size
, tmp
);
3205 gfc_add_block_to_block (pblock
, &loop
.pre
);
3206 size
= gfc_evaluate_now (size
, pblock
);
3207 gfc_add_block_to_block (pblock
, &loop
.post
);
3209 /* TODO: write a function that cleans up a loopinfo without freeing
3210 the SS chains. Currently a NOP. */
3217 /* Calculate the overall iterator number of the nested forall construct.
3218 This routine actually calculates the number of times the body of the
3219 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
3220 that by the expression INNER_SIZE. The BLOCK argument specifies the
3221 block in which to calculate the result, and the optional INNER_SIZE_BODY
3222 argument contains any statements that need to executed (inside the loop)
3223 to initialize or calculate INNER_SIZE. */
3226 compute_overall_iter_number (forall_info
*nested_forall_info
, tree inner_size
,
3227 stmtblock_t
*inner_size_body
, stmtblock_t
*block
)
3229 forall_info
*forall_tmp
= nested_forall_info
;
3233 /* We can eliminate the innermost unconditional loops with constant
3235 if (INTEGER_CST_P (inner_size
))
3238 && !forall_tmp
->mask
3239 && INTEGER_CST_P (forall_tmp
->size
))
3241 inner_size
= fold_build2_loc (input_location
, MULT_EXPR
,
3242 gfc_array_index_type
,
3243 inner_size
, forall_tmp
->size
);
3244 forall_tmp
= forall_tmp
->prev_nest
;
3247 /* If there are no loops left, we have our constant result. */
3252 /* Otherwise, create a temporary variable to compute the result. */
3253 number
= gfc_create_var (gfc_array_index_type
, "num");
3254 gfc_add_modify (block
, number
, gfc_index_zero_node
);
3256 gfc_start_block (&body
);
3257 if (inner_size_body
)
3258 gfc_add_block_to_block (&body
, inner_size_body
);
3260 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
3261 gfc_array_index_type
, number
, inner_size
);
3264 gfc_add_modify (&body
, number
, tmp
);
3265 tmp
= gfc_finish_block (&body
);
3267 /* Generate loops. */
3268 if (forall_tmp
!= NULL
)
3269 tmp
= gfc_trans_nested_forall_loop (forall_tmp
, tmp
, 1);
3271 gfc_add_expr_to_block (block
, tmp
);
3277 /* Allocate temporary for forall construct. SIZE is the size of temporary
3278 needed. PTEMP1 is returned for space free. */
3281 allocate_temp_for_forall_nest_1 (tree type
, tree size
, stmtblock_t
* block
,
3288 unit
= fold_convert (gfc_array_index_type
, TYPE_SIZE_UNIT (type
));
3289 if (!integer_onep (unit
))
3290 bytesize
= fold_build2_loc (input_location
, MULT_EXPR
,
3291 gfc_array_index_type
, size
, unit
);
3296 tmp
= gfc_do_allocate (bytesize
, size
, ptemp1
, block
, type
);
3299 tmp
= build_fold_indirect_ref_loc (input_location
, tmp
);
3304 /* Allocate temporary for forall construct according to the information in
3305 nested_forall_info. INNER_SIZE is the size of temporary needed in the
3306 assignment inside forall. PTEMP1 is returned for space free. */
3309 allocate_temp_for_forall_nest (forall_info
* nested_forall_info
, tree type
,
3310 tree inner_size
, stmtblock_t
* inner_size_body
,
3311 stmtblock_t
* block
, tree
* ptemp1
)
3315 /* Calculate the total size of temporary needed in forall construct. */
3316 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
3317 inner_size_body
, block
);
3319 return allocate_temp_for_forall_nest_1 (type
, size
, block
, ptemp1
);
3323 /* Handle assignments inside forall which need temporary.
3325 forall (i=start:end:stride; maskexpr)
3328 (where e,f<i> are arbitrary expressions possibly involving i
3329 and there is a dependency between e<i> and f<i>)
3331 masktmp(:) = maskexpr(:)
3336 for (i = start; i <= end; i += stride)
3340 for (i = start; i <= end; i += stride)
3342 if (masktmp[maskindex++])
3343 tmp[count1++] = f<i>
3347 for (i = start; i <= end; i += stride)
3349 if (masktmp[maskindex++])
3350 e<i> = tmp[count1++]
3355 gfc_trans_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3356 tree wheremask
, bool invert
,
3357 forall_info
* nested_forall_info
,
3358 stmtblock_t
* block
)
3366 stmtblock_t inner_size_body
;
3368 /* Create vars. count1 is the current iterator number of the nested
3370 count1
= gfc_create_var (gfc_array_index_type
, "count1");
3372 /* Count is the wheremask index. */
3375 count
= gfc_create_var (gfc_array_index_type
, "count");
3376 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3381 /* Initialize count1. */
3382 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3384 /* Calculate the size of temporary needed in the assignment. Return loop, lss
3385 and rss which are used in function generate_loop_for_rhs_to_temp(). */
3386 gfc_init_block (&inner_size_body
);
3387 inner_size
= compute_inner_temp_size (expr1
, expr2
, &inner_size_body
,
3390 /* The type of LHS. Used in function allocate_temp_for_forall_nest */
3391 if (expr1
->ts
.type
== BT_CHARACTER
&& expr1
->ts
.u
.cl
->length
)
3393 if (!expr1
->ts
.u
.cl
->backend_decl
)
3396 gfc_init_se (&tse
, NULL
);
3397 gfc_conv_expr (&tse
, expr1
->ts
.u
.cl
->length
);
3398 expr1
->ts
.u
.cl
->backend_decl
= tse
.expr
;
3400 type
= gfc_get_character_type_len (gfc_default_character_kind
,
3401 expr1
->ts
.u
.cl
->backend_decl
);
3404 type
= gfc_typenode_for_spec (&expr1
->ts
);
3406 /* Allocate temporary for nested forall construct according to the
3407 information in nested_forall_info and inner_size. */
3408 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
, inner_size
,
3409 &inner_size_body
, block
, &ptemp1
);
3411 /* Generate codes to copy rhs to the temporary . */
3412 tmp
= generate_loop_for_rhs_to_temp (expr2
, tmp1
, count
, count1
, lss
, rss
,
3415 /* Generate body and loops according to the information in
3416 nested_forall_info. */
3417 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3418 gfc_add_expr_to_block (block
, tmp
);
3421 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
3425 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3427 /* Generate codes to copy the temporary to lhs. */
3428 tmp
= generate_loop_for_temp_to_lhs (expr1
, tmp1
, count
, count1
,
3431 /* Generate body and loops according to the information in
3432 nested_forall_info. */
3433 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3434 gfc_add_expr_to_block (block
, tmp
);
3438 /* Free the temporary. */
3439 tmp
= gfc_call_free (ptemp1
);
3440 gfc_add_expr_to_block (block
, tmp
);
3445 /* Translate pointer assignment inside FORALL which need temporary. */
3448 gfc_trans_pointer_assign_need_temp (gfc_expr
* expr1
, gfc_expr
* expr2
,
3449 forall_info
* nested_forall_info
,
3450 stmtblock_t
* block
)
3457 gfc_array_info
*info
;
3464 tree tmp
, tmp1
, ptemp1
;
3466 count
= gfc_create_var (gfc_array_index_type
, "count");
3467 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3469 inner_size
= gfc_index_one_node
;
3470 lss
= gfc_walk_expr (expr1
);
3471 rss
= gfc_walk_expr (expr2
);
3472 if (lss
== gfc_ss_terminator
)
3474 type
= gfc_typenode_for_spec (&expr1
->ts
);
3475 type
= build_pointer_type (type
);
3477 /* Allocate temporary for nested forall construct according to the
3478 information in nested_forall_info and inner_size. */
3479 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, type
,
3480 inner_size
, NULL
, block
, &ptemp1
);
3481 gfc_start_block (&body
);
3482 gfc_init_se (&lse
, NULL
);
3483 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3484 gfc_init_se (&rse
, NULL
);
3485 rse
.want_pointer
= 1;
3486 gfc_conv_expr (&rse
, expr2
);
3487 gfc_add_block_to_block (&body
, &rse
.pre
);
3488 gfc_add_modify (&body
, lse
.expr
,
3489 fold_convert (TREE_TYPE (lse
.expr
), rse
.expr
));
3490 gfc_add_block_to_block (&body
, &rse
.post
);
3492 /* Increment count. */
3493 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3494 count
, gfc_index_one_node
);
3495 gfc_add_modify (&body
, count
, tmp
);
3497 tmp
= gfc_finish_block (&body
);
3499 /* Generate body and loops according to the information in
3500 nested_forall_info. */
3501 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3502 gfc_add_expr_to_block (block
, tmp
);
3505 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3507 gfc_start_block (&body
);
3508 gfc_init_se (&lse
, NULL
);
3509 gfc_init_se (&rse
, NULL
);
3510 rse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3511 lse
.want_pointer
= 1;
3512 gfc_conv_expr (&lse
, expr1
);
3513 gfc_add_block_to_block (&body
, &lse
.pre
);
3514 gfc_add_modify (&body
, lse
.expr
, rse
.expr
);
3515 gfc_add_block_to_block (&body
, &lse
.post
);
3516 /* Increment count. */
3517 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3518 count
, gfc_index_one_node
);
3519 gfc_add_modify (&body
, count
, tmp
);
3520 tmp
= gfc_finish_block (&body
);
3522 /* Generate body and loops according to the information in
3523 nested_forall_info. */
3524 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3525 gfc_add_expr_to_block (block
, tmp
);
3529 gfc_init_loopinfo (&loop
);
3531 /* Associate the SS with the loop. */
3532 gfc_add_ss_to_loop (&loop
, rss
);
3534 /* Setup the scalarizing loops and bounds. */
3535 gfc_conv_ss_startstride (&loop
);
3537 gfc_conv_loop_setup (&loop
, &expr2
->where
);
3539 info
= &rss
->info
->data
.array
;
3540 desc
= info
->descriptor
;
3542 /* Make a new descriptor. */
3543 parmtype
= gfc_get_element_type (TREE_TYPE (desc
));
3544 parmtype
= gfc_get_array_type_bounds (parmtype
, loop
.dimen
, 0,
3545 loop
.from
, loop
.to
, 1,
3546 GFC_ARRAY_UNKNOWN
, true);
3548 /* Allocate temporary for nested forall construct. */
3549 tmp1
= allocate_temp_for_forall_nest (nested_forall_info
, parmtype
,
3550 inner_size
, NULL
, block
, &ptemp1
);
3551 gfc_start_block (&body
);
3552 gfc_init_se (&lse
, NULL
);
3553 lse
.expr
= gfc_build_array_ref (tmp1
, count
, NULL
);
3554 lse
.direct_byref
= 1;
3555 gfc_conv_expr_descriptor (&lse
, expr2
);
3557 gfc_add_block_to_block (&body
, &lse
.pre
);
3558 gfc_add_block_to_block (&body
, &lse
.post
);
3560 /* Increment count. */
3561 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3562 count
, gfc_index_one_node
);
3563 gfc_add_modify (&body
, count
, tmp
);
3565 tmp
= gfc_finish_block (&body
);
3567 /* Generate body and loops according to the information in
3568 nested_forall_info. */
3569 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3570 gfc_add_expr_to_block (block
, tmp
);
3573 gfc_add_modify (block
, count
, gfc_index_zero_node
);
3575 parm
= gfc_build_array_ref (tmp1
, count
, NULL
);
3576 gfc_init_se (&lse
, NULL
);
3577 gfc_conv_expr_descriptor (&lse
, expr1
);
3578 gfc_add_modify (&lse
.pre
, lse
.expr
, parm
);
3579 gfc_start_block (&body
);
3580 gfc_add_block_to_block (&body
, &lse
.pre
);
3581 gfc_add_block_to_block (&body
, &lse
.post
);
3583 /* Increment count. */
3584 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3585 count
, gfc_index_one_node
);
3586 gfc_add_modify (&body
, count
, tmp
);
3588 tmp
= gfc_finish_block (&body
);
3590 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3591 gfc_add_expr_to_block (block
, tmp
);
3593 /* Free the temporary. */
3596 tmp
= gfc_call_free (ptemp1
);
3597 gfc_add_expr_to_block (block
, tmp
);
3602 /* FORALL and WHERE statements are really nasty, especially when you nest
3603 them. All the rhs of a forall assignment must be evaluated before the
3604 actual assignments are performed. Presumably this also applies to all the
3605 assignments in an inner where statement. */
3607 /* Generate code for a FORALL statement. Any temporaries are allocated as a
3608 linear array, relying on the fact that we process in the same order in all
3611 forall (i=start:end:stride; maskexpr)
3615 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
3617 count = ((end + 1 - start) / stride)
3618 masktmp(:) = maskexpr(:)
3621 for (i = start; i <= end; i += stride)
3623 if (masktmp[maskindex++])
3627 for (i = start; i <= end; i += stride)
3629 if (masktmp[maskindex++])
3633 Note that this code only works when there are no dependencies.
3634 Forall loop with array assignments and data dependencies are a real pain,
3635 because the size of the temporary cannot always be determined before the
3636 loop is executed. This problem is compounded by the presence of nested
3641 gfc_trans_forall_1 (gfc_code
* code
, forall_info
* nested_forall_info
)
3658 tree cycle_label
= NULL_TREE
;
3662 gfc_forall_iterator
*fa
;
3665 gfc_saved_var
*saved_vars
;
3666 iter_info
*this_forall
;
3670 /* Do nothing if the mask is false. */
3672 && code
->expr1
->expr_type
== EXPR_CONSTANT
3673 && !code
->expr1
->value
.logical
)
3674 return build_empty_stmt (input_location
);
3677 /* Count the FORALL index number. */
3678 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3682 /* Allocate the space for var, start, end, step, varexpr. */
3683 var
= XCNEWVEC (tree
, nvar
);
3684 start
= XCNEWVEC (tree
, nvar
);
3685 end
= XCNEWVEC (tree
, nvar
);
3686 step
= XCNEWVEC (tree
, nvar
);
3687 varexpr
= XCNEWVEC (gfc_expr
*, nvar
);
3688 saved_vars
= XCNEWVEC (gfc_saved_var
, nvar
);
3690 /* Allocate the space for info. */
3691 info
= XCNEW (forall_info
);
3693 gfc_start_block (&pre
);
3694 gfc_init_block (&post
);
3695 gfc_init_block (&block
);
3698 for (fa
= code
->ext
.forall_iterator
; fa
; fa
= fa
->next
)
3700 gfc_symbol
*sym
= fa
->var
->symtree
->n
.sym
;
3702 /* Allocate space for this_forall. */
3703 this_forall
= XCNEW (iter_info
);
3705 /* Create a temporary variable for the FORALL index. */
3706 tmp
= gfc_typenode_for_spec (&sym
->ts
);
3707 var
[n
] = gfc_create_var (tmp
, sym
->name
);
3708 gfc_shadow_sym (sym
, var
[n
], &saved_vars
[n
]);
3710 /* Record it in this_forall. */
3711 this_forall
->var
= var
[n
];
3713 /* Replace the index symbol's backend_decl with the temporary decl. */
3714 sym
->backend_decl
= var
[n
];
3716 /* Work out the start, end and stride for the loop. */
3717 gfc_init_se (&se
, NULL
);
3718 gfc_conv_expr_val (&se
, fa
->start
);
3719 /* Record it in this_forall. */
3720 this_forall
->start
= se
.expr
;
3721 gfc_add_block_to_block (&block
, &se
.pre
);
3724 gfc_init_se (&se
, NULL
);
3725 gfc_conv_expr_val (&se
, fa
->end
);
3726 /* Record it in this_forall. */
3727 this_forall
->end
= se
.expr
;
3728 gfc_make_safe_expr (&se
);
3729 gfc_add_block_to_block (&block
, &se
.pre
);
3732 gfc_init_se (&se
, NULL
);
3733 gfc_conv_expr_val (&se
, fa
->stride
);
3734 /* Record it in this_forall. */
3735 this_forall
->step
= se
.expr
;
3736 gfc_make_safe_expr (&se
);
3737 gfc_add_block_to_block (&block
, &se
.pre
);
3740 /* Set the NEXT field of this_forall to NULL. */
3741 this_forall
->next
= NULL
;
3742 /* Link this_forall to the info construct. */
3743 if (info
->this_loop
)
3745 iter_info
*iter_tmp
= info
->this_loop
;
3746 while (iter_tmp
->next
!= NULL
)
3747 iter_tmp
= iter_tmp
->next
;
3748 iter_tmp
->next
= this_forall
;
3751 info
->this_loop
= this_forall
;
3757 /* Calculate the size needed for the current forall level. */
3758 size
= gfc_index_one_node
;
3759 for (n
= 0; n
< nvar
; n
++)
3761 /* size = (end + step - start) / step. */
3762 tmp
= fold_build2_loc (input_location
, MINUS_EXPR
, TREE_TYPE (start
[n
]),
3764 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, TREE_TYPE (end
[n
]),
3766 tmp
= fold_build2_loc (input_location
, FLOOR_DIV_EXPR
, TREE_TYPE (tmp
),
3768 tmp
= convert (gfc_array_index_type
, tmp
);
3770 size
= fold_build2_loc (input_location
, MULT_EXPR
, gfc_array_index_type
,
3774 /* Record the nvar and size of current forall level. */
3780 /* If the mask is .true., consider the FORALL unconditional. */
3781 if (code
->expr1
->expr_type
== EXPR_CONSTANT
3782 && code
->expr1
->value
.logical
)
3790 /* First we need to allocate the mask. */
3793 /* As the mask array can be very big, prefer compact boolean types. */
3794 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3795 mask
= allocate_temp_for_forall_nest (nested_forall_info
, mask_type
,
3796 size
, NULL
, &block
, &pmask
);
3797 maskindex
= gfc_create_var_np (gfc_array_index_type
, "mi");
3799 /* Record them in the info structure. */
3800 info
->maskindex
= maskindex
;
3805 /* No mask was specified. */
3806 maskindex
= NULL_TREE
;
3807 mask
= pmask
= NULL_TREE
;
3810 /* Link the current forall level to nested_forall_info. */
3811 info
->prev_nest
= nested_forall_info
;
3812 nested_forall_info
= info
;
3814 /* Copy the mask into a temporary variable if required.
3815 For now we assume a mask temporary is needed. */
3818 /* As the mask array can be very big, prefer compact boolean types. */
3819 tree mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
3821 gfc_add_modify (&block
, maskindex
, gfc_index_zero_node
);
3823 /* Start of mask assignment loop body. */
3824 gfc_start_block (&body
);
3826 /* Evaluate the mask expression. */
3827 gfc_init_se (&se
, NULL
);
3828 gfc_conv_expr_val (&se
, code
->expr1
);
3829 gfc_add_block_to_block (&body
, &se
.pre
);
3831 /* Store the mask. */
3832 se
.expr
= convert (mask_type
, se
.expr
);
3834 tmp
= gfc_build_array_ref (mask
, maskindex
, NULL
);
3835 gfc_add_modify (&body
, tmp
, se
.expr
);
3837 /* Advance to the next mask element. */
3838 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
3839 maskindex
, gfc_index_one_node
);
3840 gfc_add_modify (&body
, maskindex
, tmp
);
3842 /* Generate the loops. */
3843 tmp
= gfc_finish_block (&body
);
3844 tmp
= gfc_trans_nested_forall_loop (info
, tmp
, 0);
3845 gfc_add_expr_to_block (&block
, tmp
);
3848 if (code
->op
== EXEC_DO_CONCURRENT
)
3850 gfc_init_block (&body
);
3851 cycle_label
= gfc_build_label_decl (NULL_TREE
);
3852 code
->cycle_label
= cycle_label
;
3853 tmp
= gfc_trans_code (code
->block
->next
);
3854 gfc_add_expr_to_block (&body
, tmp
);
3856 if (TREE_USED (cycle_label
))
3858 tmp
= build1_v (LABEL_EXPR
, cycle_label
);
3859 gfc_add_expr_to_block (&body
, tmp
);
3862 tmp
= gfc_finish_block (&body
);
3863 nested_forall_info
->do_concurrent
= true;
3864 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp
, 1);
3865 gfc_add_expr_to_block (&block
, tmp
);
3869 c
= code
->block
->next
;
3871 /* TODO: loop merging in FORALL statements. */
3872 /* Now that we've got a copy of the mask, generate the assignment loops. */
3878 /* A scalar or array assignment. DO the simple check for
3879 lhs to rhs dependencies. These make a temporary for the
3880 rhs and form a second forall block to copy to variable. */
3881 need_temp
= check_forall_dependencies(c
, &pre
, &post
);
3883 /* Temporaries due to array assignment data dependencies introduce
3884 no end of problems. */
3886 gfc_trans_assign_need_temp (c
->expr1
, c
->expr2
, NULL
, false,
3887 nested_forall_info
, &block
);
3890 /* Use the normal assignment copying routines. */
3891 assign
= gfc_trans_assignment (c
->expr1
, c
->expr2
, false, true);
3893 /* Generate body and loops. */
3894 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3896 gfc_add_expr_to_block (&block
, tmp
);
3899 /* Cleanup any temporary symtrees that have been made to deal
3900 with dependencies. */
3902 cleanup_forall_symtrees (c
);
3907 /* Translate WHERE or WHERE construct nested in FORALL. */
3908 gfc_trans_where_2 (c
, NULL
, false, nested_forall_info
, &block
);
3911 /* Pointer assignment inside FORALL. */
3912 case EXEC_POINTER_ASSIGN
:
3913 need_temp
= gfc_check_dependency (c
->expr1
, c
->expr2
, 0);
3915 gfc_trans_pointer_assign_need_temp (c
->expr1
, c
->expr2
,
3916 nested_forall_info
, &block
);
3919 /* Use the normal assignment copying routines. */
3920 assign
= gfc_trans_pointer_assignment (c
->expr1
, c
->expr2
);
3922 /* Generate body and loops. */
3923 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
3925 gfc_add_expr_to_block (&block
, tmp
);
3930 tmp
= gfc_trans_forall_1 (c
, nested_forall_info
);
3931 gfc_add_expr_to_block (&block
, tmp
);
3934 /* Explicit subroutine calls are prevented by the frontend but interface
3935 assignments can legitimately produce them. */
3936 case EXEC_ASSIGN_CALL
:
3937 assign
= gfc_trans_call (c
, true, NULL_TREE
, NULL_TREE
, false);
3938 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
, assign
, 1);
3939 gfc_add_expr_to_block (&block
, tmp
);
3950 /* Restore the original index variables. */
3951 for (fa
= code
->ext
.forall_iterator
, n
= 0; fa
; fa
= fa
->next
, n
++)
3952 gfc_restore_sym (fa
->var
->symtree
->n
.sym
, &saved_vars
[n
]);
3954 /* Free the space for var, start, end, step, varexpr. */
3962 for (this_forall
= info
->this_loop
; this_forall
;)
3964 iter_info
*next
= this_forall
->next
;
3969 /* Free the space for this forall_info. */
3974 /* Free the temporary for the mask. */
3975 tmp
= gfc_call_free (pmask
);
3976 gfc_add_expr_to_block (&block
, tmp
);
3979 pushdecl (maskindex
);
3981 gfc_add_block_to_block (&pre
, &block
);
3982 gfc_add_block_to_block (&pre
, &post
);
3984 return gfc_finish_block (&pre
);
3988 /* Translate the FORALL statement or construct. */
tree
gfc_trans_forall (gfc_code * code)
{
  /* A top-level FORALL has no enclosing FORALL context, so start the
     recursive translation with NULL nesting information.  */
  return gfc_trans_forall_1 (code, NULL);
}
3996 /* Translate the DO CONCURRENT construct. */
tree
gfc_trans_do_concurrent (gfc_code * code)
{
  /* DO CONCURRENT reuses the FORALL translation machinery
     (gfc_trans_forall_1 checks code->op for EXEC_DO_CONCURRENT).
     There is no enclosing FORALL, so pass NULL nesting info.  */
  return gfc_trans_forall_1 (code, NULL);
}
4004 /* Evaluate the WHERE mask expression, copy its value to a temporary.
4005 If the WHERE construct is nested in FORALL, compute the overall temporary
4006 needed by the WHERE mask expression multiplied by the iterator number of
4008 ME is the WHERE mask expression.
4009 MASK is the current execution mask upon input, whose sense may or may
4010 not be inverted as specified by the INVERT argument.
4011 CMASK is the updated execution mask on output, or NULL if not required.
4012 PMASK is the pending execution mask on output, or NULL if not required.
4013 BLOCK is the block in which to place the condition evaluation loops. */
4016 gfc_evaluate_where_mask (gfc_expr
* me
, forall_info
* nested_forall_info
,
4017 tree mask
, bool invert
, tree cmask
, tree pmask
,
4018 tree mask_type
, stmtblock_t
* block
)
4023 stmtblock_t body
, body1
;
4024 tree count
, cond
, mtmp
;
4027 gfc_init_loopinfo (&loop
);
4029 lss
= gfc_walk_expr (me
);
4030 rss
= gfc_walk_expr (me
);
4032 /* Variable to index the temporary. */
4033 count
= gfc_create_var (gfc_array_index_type
, "count");
4034 /* Initialize count. */
4035 gfc_add_modify (block
, count
, gfc_index_zero_node
);
4037 gfc_start_block (&body
);
4039 gfc_init_se (&rse
, NULL
);
4040 gfc_init_se (&lse
, NULL
);
4042 if (lss
== gfc_ss_terminator
)
4044 gfc_init_block (&body1
);
4048 /* Initialize the loop. */
4049 gfc_init_loopinfo (&loop
);
4051 /* We may need LSS to determine the shape of the expression. */
4052 gfc_add_ss_to_loop (&loop
, lss
);
4053 gfc_add_ss_to_loop (&loop
, rss
);
4055 gfc_conv_ss_startstride (&loop
);
4056 gfc_conv_loop_setup (&loop
, &me
->where
);
4058 gfc_mark_ss_chain_used (rss
, 1);
4059 /* Start the loop body. */
4060 gfc_start_scalarized_body (&loop
, &body1
);
4062 /* Translate the expression. */
4063 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4065 gfc_conv_expr (&rse
, me
);
4068 /* Variable to evaluate mask condition. */
4069 cond
= gfc_create_var (mask_type
, "cond");
4070 if (mask
&& (cmask
|| pmask
))
4071 mtmp
= gfc_create_var (mask_type
, "mask");
4072 else mtmp
= NULL_TREE
;
4074 gfc_add_block_to_block (&body1
, &lse
.pre
);
4075 gfc_add_block_to_block (&body1
, &rse
.pre
);
4077 gfc_add_modify (&body1
, cond
, fold_convert (mask_type
, rse
.expr
));
4079 if (mask
&& (cmask
|| pmask
))
4081 tmp
= gfc_build_array_ref (mask
, count
, NULL
);
4083 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, tmp
);
4084 gfc_add_modify (&body1
, mtmp
, tmp
);
4089 tmp1
= gfc_build_array_ref (cmask
, count
, NULL
);
4092 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
,
4094 gfc_add_modify (&body1
, tmp1
, tmp
);
4099 tmp1
= gfc_build_array_ref (pmask
, count
, NULL
);
4100 tmp
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
, mask_type
, cond
);
4102 tmp
= fold_build2_loc (input_location
, TRUTH_AND_EXPR
, mask_type
, mtmp
,
4104 gfc_add_modify (&body1
, tmp1
, tmp
);
4107 gfc_add_block_to_block (&body1
, &lse
.post
);
4108 gfc_add_block_to_block (&body1
, &rse
.post
);
4110 if (lss
== gfc_ss_terminator
)
4112 gfc_add_block_to_block (&body
, &body1
);
4116 /* Increment count. */
4117 tmp1
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4118 count
, gfc_index_one_node
);
4119 gfc_add_modify (&body1
, count
, tmp1
);
4121 /* Generate the copying loops. */
4122 gfc_trans_scalarizing_loops (&loop
, &body1
);
4124 gfc_add_block_to_block (&body
, &loop
.pre
);
4125 gfc_add_block_to_block (&body
, &loop
.post
);
4127 gfc_cleanup_loop (&loop
);
4128 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
4129 as tree nodes in SS may not be valid in different scope. */
4132 tmp1
= gfc_finish_block (&body
);
4133 /* If the WHERE construct is inside FORALL, fill the full temporary. */
4134 if (nested_forall_info
!= NULL
)
4135 tmp1
= gfc_trans_nested_forall_loop (nested_forall_info
, tmp1
, 1);
4137 gfc_add_expr_to_block (block
, tmp1
);
4141 /* Translate an assignment statement in a WHERE statement or construct
4142 statement. The MASK expression is used to control which elements
4143 of EXPR1 shall be assigned. The sense of MASK is specified by
4147 gfc_trans_where_assign (gfc_expr
*expr1
, gfc_expr
*expr2
,
4148 tree mask
, bool invert
,
4149 tree count1
, tree count2
,
4155 gfc_ss
*lss_section
;
4162 tree index
, maskexpr
;
4164 /* A defined assignment. */
4165 if (cnext
&& cnext
->resolved_sym
)
4166 return gfc_trans_call (cnext
, true, mask
, count1
, invert
);
4169 /* TODO: handle this special case.
4170 Special case a single function returning an array. */
4171 if (expr2
->expr_type
== EXPR_FUNCTION
&& expr2
->rank
> 0)
4173 tmp
= gfc_trans_arrayfunc_assign (expr1
, expr2
);
4179 /* Assignment of the form lhs = rhs. */
4180 gfc_start_block (&block
);
4182 gfc_init_se (&lse
, NULL
);
4183 gfc_init_se (&rse
, NULL
);
4186 lss
= gfc_walk_expr (expr1
);
4189 /* In each where-assign-stmt, the mask-expr and the variable being
4190 defined shall be arrays of the same shape. */
4191 gcc_assert (lss
!= gfc_ss_terminator
);
4193 /* The assignment needs scalarization. */
4196 /* Find a non-scalar SS from the lhs. */
4197 while (lss_section
!= gfc_ss_terminator
4198 && lss_section
->info
->type
!= GFC_SS_SECTION
)
4199 lss_section
= lss_section
->next
;
4201 gcc_assert (lss_section
!= gfc_ss_terminator
);
4203 /* Initialize the scalarizer. */
4204 gfc_init_loopinfo (&loop
);
4207 rss
= gfc_walk_expr (expr2
);
4208 if (rss
== gfc_ss_terminator
)
4210 /* The rhs is scalar. Add a ss for the expression. */
4211 rss
= gfc_get_scalar_ss (gfc_ss_terminator
, expr2
);
4212 rss
->info
->where
= 1;
4215 /* Associate the SS with the loop. */
4216 gfc_add_ss_to_loop (&loop
, lss
);
4217 gfc_add_ss_to_loop (&loop
, rss
);
4219 /* Calculate the bounds of the scalarization. */
4220 gfc_conv_ss_startstride (&loop
);
4222 /* Resolve any data dependencies in the statement. */
4223 gfc_conv_resolve_dependencies (&loop
, lss_section
, rss
);
4225 /* Setup the scalarizing loops. */
4226 gfc_conv_loop_setup (&loop
, &expr2
->where
);
4228 /* Setup the gfc_se structures. */
4229 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4230 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4233 gfc_mark_ss_chain_used (rss
, 1);
4234 if (loop
.temp_ss
== NULL
)
4237 gfc_mark_ss_chain_used (lss
, 1);
4241 lse
.ss
= loop
.temp_ss
;
4242 gfc_mark_ss_chain_used (lss
, 3);
4243 gfc_mark_ss_chain_used (loop
.temp_ss
, 3);
4246 /* Start the scalarized loop body. */
4247 gfc_start_scalarized_body (&loop
, &body
);
4249 /* Translate the expression. */
4250 gfc_conv_expr (&rse
, expr2
);
4251 if (lss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4252 gfc_conv_tmp_array_ref (&lse
);
4254 gfc_conv_expr (&lse
, expr1
);
4256 /* Form the mask expression according to the mask. */
4258 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4260 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4261 TREE_TYPE (maskexpr
), maskexpr
);
4263 /* Use the scalar assignment as is. */
4264 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
,
4265 loop
.temp_ss
!= NULL
, false, true);
4267 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
, build_empty_stmt (input_location
));
4269 gfc_add_expr_to_block (&body
, tmp
);
4271 if (lss
== gfc_ss_terminator
)
4273 /* Increment count1. */
4274 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
, gfc_array_index_type
,
4275 count1
, gfc_index_one_node
);
4276 gfc_add_modify (&body
, count1
, tmp
);
4278 /* Use the scalar assignment as is. */
4279 gfc_add_block_to_block (&block
, &body
);
4283 gcc_assert (lse
.ss
== gfc_ss_terminator
4284 && rse
.ss
== gfc_ss_terminator
);
4286 if (loop
.temp_ss
!= NULL
)
4288 /* Increment count1 before finish the main body of a scalarized
4290 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4291 gfc_array_index_type
, count1
, gfc_index_one_node
);
4292 gfc_add_modify (&body
, count1
, tmp
);
4293 gfc_trans_scalarized_loop_boundary (&loop
, &body
);
4295 /* We need to copy the temporary to the actual lhs. */
4296 gfc_init_se (&lse
, NULL
);
4297 gfc_init_se (&rse
, NULL
);
4298 gfc_copy_loopinfo_to_se (&lse
, &loop
);
4299 gfc_copy_loopinfo_to_se (&rse
, &loop
);
4301 rse
.ss
= loop
.temp_ss
;
4304 gfc_conv_tmp_array_ref (&rse
);
4305 gfc_conv_expr (&lse
, expr1
);
4307 gcc_assert (lse
.ss
== gfc_ss_terminator
4308 && rse
.ss
== gfc_ss_terminator
);
4310 /* Form the mask expression according to the mask tree list. */
4312 maskexpr
= gfc_build_array_ref (mask
, index
, NULL
);
4314 maskexpr
= fold_build1_loc (input_location
, TRUTH_NOT_EXPR
,
4315 TREE_TYPE (maskexpr
), maskexpr
);
4317 /* Use the scalar assignment as is. */
4318 tmp
= gfc_trans_scalar_assign (&lse
, &rse
, expr1
->ts
, false, false,
4320 tmp
= build3_v (COND_EXPR
, maskexpr
, tmp
,
4321 build_empty_stmt (input_location
));
4322 gfc_add_expr_to_block (&body
, tmp
);
4324 /* Increment count2. */
4325 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4326 gfc_array_index_type
, count2
,
4327 gfc_index_one_node
);
4328 gfc_add_modify (&body
, count2
, tmp
);
4332 /* Increment count1. */
4333 tmp
= fold_build2_loc (input_location
, PLUS_EXPR
,
4334 gfc_array_index_type
, count1
,
4335 gfc_index_one_node
);
4336 gfc_add_modify (&body
, count1
, tmp
);
4339 /* Generate the copying loops. */
4340 gfc_trans_scalarizing_loops (&loop
, &body
);
4342 /* Wrap the whole thing up. */
4343 gfc_add_block_to_block (&block
, &loop
.pre
);
4344 gfc_add_block_to_block (&block
, &loop
.post
);
4345 gfc_cleanup_loop (&loop
);
4348 return gfc_finish_block (&block
);
4352 /* Translate the WHERE construct or statement.
4353 This function can be called iteratively to translate the nested WHERE
4354 construct or statement.
4355 MASK is the control mask. */
4358 gfc_trans_where_2 (gfc_code
* code
, tree mask
, bool invert
,
4359 forall_info
* nested_forall_info
, stmtblock_t
* block
)
4361 stmtblock_t inner_size_body
;
4362 tree inner_size
, size
;
4371 tree count1
, count2
;
4375 tree pcmask
= NULL_TREE
;
4376 tree ppmask
= NULL_TREE
;
4377 tree cmask
= NULL_TREE
;
4378 tree pmask
= NULL_TREE
;
4379 gfc_actual_arglist
*arg
;
4381 /* the WHERE statement or the WHERE construct statement. */
4382 cblock
= code
->block
;
4384 /* As the mask array can be very big, prefer compact boolean types. */
4385 mask_type
= gfc_get_logical_type (gfc_logical_kinds
[0].kind
);
4387 /* Determine which temporary masks are needed. */
4390 /* One clause: No ELSEWHEREs. */
4391 need_cmask
= (cblock
->next
!= 0);
4394 else if (cblock
->block
->block
)
4396 /* Three or more clauses: Conditional ELSEWHEREs. */
4400 else if (cblock
->next
)
4402 /* Two clauses, the first non-empty. */
4404 need_pmask
= (mask
!= NULL_TREE
4405 && cblock
->block
->next
!= 0);
4407 else if (!cblock
->block
->next
)
4409 /* Two clauses, both empty. */
4413 /* Two clauses, the first empty, the second non-empty. */
4416 need_cmask
= (cblock
->block
->expr1
!= 0);
4425 if (need_cmask
|| need_pmask
)
4427 /* Calculate the size of temporary needed by the mask-expr. */
4428 gfc_init_block (&inner_size_body
);
4429 inner_size
= compute_inner_temp_size (cblock
->expr1
, cblock
->expr1
,
4430 &inner_size_body
, &lss
, &rss
);
4432 gfc_free_ss_chain (lss
);
4433 gfc_free_ss_chain (rss
);
4435 /* Calculate the total size of temporary needed. */
4436 size
= compute_overall_iter_number (nested_forall_info
, inner_size
,
4437 &inner_size_body
, block
);
4439 /* Check whether the size is negative. */
4440 cond
= fold_build2_loc (input_location
, LE_EXPR
, boolean_type_node
, size
,
4441 gfc_index_zero_node
);
4442 size
= fold_build3_loc (input_location
, COND_EXPR
, gfc_array_index_type
,
4443 cond
, gfc_index_zero_node
, size
);
4444 size
= gfc_evaluate_now (size
, block
);
4446 /* Allocate temporary for WHERE mask if needed. */
4448 cmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4451 /* Allocate temporary for !mask if needed. */
4453 pmask
= allocate_temp_for_forall_nest_1 (mask_type
, size
, block
,
4459 /* Each time around this loop, the where clause is conditional
4460 on the value of mask and invert, which are updated at the
4461 bottom of the loop. */
4463 /* Has mask-expr. */
4466 /* Ensure that the WHERE mask will be evaluated exactly once.
4467 If there are no statements in this WHERE/ELSEWHERE clause,
4468 then we don't need to update the control mask (cmask).
4469 If this is the last clause of the WHERE construct, then
4470 we don't need to update the pending control mask (pmask). */
4472 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4474 cblock
->next
? cmask
: NULL_TREE
,
4475 cblock
->block
? pmask
: NULL_TREE
,
4478 gfc_evaluate_where_mask (cblock
->expr1
, nested_forall_info
,
4480 (cblock
->next
|| cblock
->block
)
4481 ? cmask
: NULL_TREE
,
4482 NULL_TREE
, mask_type
, block
);
4486 /* It's a final elsewhere-stmt. No mask-expr is present. */
4490 /* The body of this where clause are controlled by cmask with
4491 sense specified by invert. */
4493 /* Get the assignment statement of a WHERE statement, or the first
4494 statement in where-body-construct of a WHERE construct. */
4495 cnext
= cblock
->next
;
4500 /* WHERE assignment statement. */
4501 case EXEC_ASSIGN_CALL
:
4503 arg
= cnext
->ext
.actual
;
4504 expr1
= expr2
= NULL
;
4505 for (; arg
; arg
= arg
->next
)
4517 expr1
= cnext
->expr1
;
4518 expr2
= cnext
->expr2
;
4520 if (nested_forall_info
!= NULL
)
4522 need_temp
= gfc_check_dependency (expr1
, expr2
, 0);
4523 if (need_temp
&& cnext
->op
!= EXEC_ASSIGN_CALL
)
4524 gfc_trans_assign_need_temp (expr1
, expr2
,
4526 nested_forall_info
, block
);
4529 /* Variables to control maskexpr. */
4530 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4531 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4532 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4533 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4535 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4540 tmp
= gfc_trans_nested_forall_loop (nested_forall_info
,
4542 gfc_add_expr_to_block (block
, tmp
);
4547 /* Variables to control maskexpr. */
4548 count1
= gfc_create_var (gfc_array_index_type
, "count1");
4549 count2
= gfc_create_var (gfc_array_index_type
, "count2");
4550 gfc_add_modify (block
, count1
, gfc_index_zero_node
);
4551 gfc_add_modify (block
, count2
, gfc_index_zero_node
);
4553 tmp
= gfc_trans_where_assign (expr1
, expr2
,
4557 gfc_add_expr_to_block (block
, tmp
);
4562 /* WHERE or WHERE construct is part of a where-body-construct. */
4564 gfc_trans_where_2 (cnext
, cmask
, invert
,
4565 nested_forall_info
, block
);
4572 /* The next statement within the same where-body-construct. */
4573 cnext
= cnext
->next
;
4575 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
4576 cblock
= cblock
->block
;
4577 if (mask
== NULL_TREE
)
4579 /* If we're the initial WHERE, we can simply invert the sense
4580 of the current mask to obtain the "mask" for the remaining
4587 /* Otherwise, for nested WHERE's we need to use the pending mask. */
4593 /* If we allocated a pending mask array, deallocate it now. */
4596 tmp
= gfc_call_free (ppmask
);
4597 gfc_add_expr_to_block (block
, tmp
);
4600 /* If we allocated a current mask array, deallocate it now. */
4603 tmp
= gfc_call_free (pcmask
);
4604 gfc_add_expr_to_block (block
, tmp
);
4608 /* Translate a simple WHERE construct or statement without dependencies.
4609 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
4610 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
4611 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
4614 gfc_trans_where_3 (gfc_code
* cblock
, gfc_code
* eblock
)
4616 stmtblock_t block
, body
;
4617 gfc_expr
*cond
, *tdst
, *tsrc
, *edst
, *esrc
;
4618 tree tmp
, cexpr
, tstmt
, estmt
;
4619 gfc_ss
*css
, *tdss
, *tsss
;
4620 gfc_se cse
, tdse
, tsse
, edse
, esse
;
4625 /* Allow the scalarizer to workshare simple where loops. */
4626 if (ompws_flags
& OMPWS_WORKSHARE_FLAG
)
4627 ompws_flags
|= OMPWS_SCALARIZER_WS
;
4629 cond
= cblock
->expr1
;
4630 tdst
= cblock
->next
->expr1
;
4631 tsrc
= cblock
->next
->expr2
;
4632 edst
= eblock
? eblock
->next
->expr1
: NULL
;
4633 esrc
= eblock
? eblock
->next
->expr2
: NULL
;
4635 gfc_start_block (&block
);
4636 gfc_init_loopinfo (&loop
);
4638 /* Handle the condition. */
4639 gfc_init_se (&cse
, NULL
);
4640 css
= gfc_walk_expr (cond
);
4641 gfc_add_ss_to_loop (&loop
, css
);
4643 /* Handle the then-clause. */
4644 gfc_init_se (&tdse
, NULL
);
4645 gfc_init_se (&tsse
, NULL
);
4646 tdss
= gfc_walk_expr (tdst
);
4647 tsss
= gfc_walk_expr (tsrc
);
4648 if (tsss
== gfc_ss_terminator
)
4650 tsss
= gfc_get_scalar_ss (gfc_ss_terminator
, tsrc
);
4651 tsss
->info
->where
= 1;
4653 gfc_add_ss_to_loop (&loop
, tdss
);
4654 gfc_add_ss_to_loop (&loop
, tsss
);
4658 /* Handle the else clause. */
4659 gfc_init_se (&edse
, NULL
);
4660 gfc_init_se (&esse
, NULL
);
4661 edss
= gfc_walk_expr (edst
);
4662 esss
= gfc_walk_expr (esrc
);
4663 if (esss
== gfc_ss_terminator
)
4665 esss
= gfc_get_scalar_ss (gfc_ss_terminator
, esrc
);
4666 esss
->info
->where
= 1;
4668 gfc_add_ss_to_loop (&loop
, edss
);
4669 gfc_add_ss_to_loop (&loop
, esss
);
4672 gfc_conv_ss_startstride (&loop
);
4673 gfc_conv_loop_setup (&loop
, &tdst
->where
);
4675 gfc_mark_ss_chain_used (css
, 1);
4676 gfc_mark_ss_chain_used (tdss
, 1);
4677 gfc_mark_ss_chain_used (tsss
, 1);
4680 gfc_mark_ss_chain_used (edss
, 1);
4681 gfc_mark_ss_chain_used (esss
, 1);
4684 gfc_start_scalarized_body (&loop
, &body
);
4686 gfc_copy_loopinfo_to_se (&cse
, &loop
);
4687 gfc_copy_loopinfo_to_se (&tdse
, &loop
);
4688 gfc_copy_loopinfo_to_se (&tsse
, &loop
);
4694 gfc_copy_loopinfo_to_se (&edse
, &loop
);
4695 gfc_copy_loopinfo_to_se (&esse
, &loop
);
4700 gfc_conv_expr (&cse
, cond
);
4701 gfc_add_block_to_block (&body
, &cse
.pre
);
4704 gfc_conv_expr (&tsse
, tsrc
);
4705 if (tdss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4706 gfc_conv_tmp_array_ref (&tdse
);
4708 gfc_conv_expr (&tdse
, tdst
);
4712 gfc_conv_expr (&esse
, esrc
);
4713 if (edss
!= gfc_ss_terminator
&& loop
.temp_ss
!= NULL
)
4714 gfc_conv_tmp_array_ref (&edse
);
4716 gfc_conv_expr (&edse
, edst
);
4719 tstmt
= gfc_trans_scalar_assign (&tdse
, &tsse
, tdst
->ts
, false, false, true);
4720 estmt
= eblock
? gfc_trans_scalar_assign (&edse
, &esse
, edst
->ts
, false,
4722 : build_empty_stmt (input_location
);
4723 tmp
= build3_v (COND_EXPR
, cexpr
, tstmt
, estmt
);
4724 gfc_add_expr_to_block (&body
, tmp
);
4725 gfc_add_block_to_block (&body
, &cse
.post
);
4727 gfc_trans_scalarizing_loops (&loop
, &body
);
4728 gfc_add_block_to_block (&block
, &loop
.pre
);
4729 gfc_add_block_to_block (&block
, &loop
.post
);
4730 gfc_cleanup_loop (&loop
);
4732 return gfc_finish_block (&block
);
4735 /* As the WHERE or WHERE construct statement can be nested, we call
4736 gfc_trans_where_2 to do the translation, and pass the initial
4737 NULL values for both the control mask and the pending control mask. */
4740 gfc_trans_where (gfc_code
* code
)
4746 cblock
= code
->block
;
4748 && cblock
->next
->op
== EXEC_ASSIGN
4749 && !cblock
->next
->next
)
4751 eblock
= cblock
->block
;
4754 /* A simple "WHERE (cond) x = y" statement or block is
4755 dependence free if cond is not dependent upon writing x,
4756 and the source y is unaffected by the destination x. */
4757 if (!gfc_check_dependency (cblock
->next
->expr1
,
4759 && !gfc_check_dependency (cblock
->next
->expr1
,
4760 cblock
->next
->expr2
, 0))
4761 return gfc_trans_where_3 (cblock
, NULL
);
4763 else if (!eblock
->expr1
4766 && eblock
->next
->op
== EXEC_ASSIGN
4767 && !eblock
->next
->next
)
4769 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
4770 block is dependence free if cond is not dependent on writes
4771 to x1 and x2, y1 is not dependent on writes to x2, and y2
4772 is not dependent on writes to x1, and both y's are not
4773 dependent upon their own x's. In addition to this, the
4774 final two dependency checks below exclude all but the same
4775 array reference if the where and elswhere destinations
4776 are the same. In short, this is VERY conservative and this
4777 is needed because the two loops, required by the standard
4778 are coalesced in gfc_trans_where_3. */
4779 if (!gfc_check_dependency (cblock
->next
->expr1
,
4781 && !gfc_check_dependency (eblock
->next
->expr1
,
4783 && !gfc_check_dependency (cblock
->next
->expr1
,
4784 eblock
->next
->expr2
, 1)
4785 && !gfc_check_dependency (eblock
->next
->expr1
,
4786 cblock
->next
->expr2
, 1)
4787 && !gfc_check_dependency (cblock
->next
->expr1
,
4788 cblock
->next
->expr2
, 1)
4789 && !gfc_check_dependency (eblock
->next
->expr1
,
4790 eblock
->next
->expr2
, 1)
4791 && !gfc_check_dependency (cblock
->next
->expr1
,
4792 eblock
->next
->expr1
, 0)
4793 && !gfc_check_dependency (eblock
->next
->expr1
,
4794 cblock
->next
->expr1
, 0))
4795 return gfc_trans_where_3 (cblock
, eblock
);
4799 gfc_start_block (&block
);
4801 gfc_trans_where_2 (code
, NULL
, false, NULL
, &block
);
4803 return gfc_finish_block (&block
);
4807 /* CYCLE a DO loop. The label decl has already been created by
4808 gfc_trans_do(), it's in TREE_PURPOSE (backend_decl) of the gfc_code
4809 node at the head of the loop. We must mark the label as used. */
4812 gfc_trans_cycle (gfc_code
* code
)
4816 cycle_label
= code
->ext
.which_construct
->cycle_label
;
4817 gcc_assert (cycle_label
);
4819 TREE_USED (cycle_label
) = 1;
4820 return build1_v (GOTO_EXPR
, cycle_label
);
4824 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
4825 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
4829 gfc_trans_exit (gfc_code
* code
)
4833 exit_label
= code
->ext
.which_construct
->exit_label
;
4834 gcc_assert (exit_label
);
4836 TREE_USED (exit_label
) = 1;
4837 return build1_v (GOTO_EXPR
, exit_label
);
4841 /* Translate the ALLOCATE statement. */
/* NOTE(review): this listing has physical lines elided by the extraction
   tool -- the embedded original line numbers jump (e.g. 4844 -> 4866), and
   the return type, local declarations, braces and several conditions are
   missing.  The code below is kept byte-identical; recover the complete
   text from the original file before making functional edits.  */
4844 gfc_trans_allocate (gfc_code
* code
)
4866 tree memsize
= NULL_TREE
;
4867 tree classexpr
= NULL_TREE
;
/* Nothing to do when the ALLOCATE has an empty allocation list.  */
4869 if (!code
->ext
.alloc
.list
)
4872 stat
= tmp
= memsz
= NULL_TREE
;
4873 label_errmsg
= label_finish
= errmsg
= errlen
= NULL_TREE
;
4875 gfc_init_block (&block
);
4876 gfc_init_block (&post
);
4878 /* STAT= (and maybe ERRMSG=) is present. */
4882 tree gfc_int4_type_node
= gfc_get_int_type (4);
4883 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
4885 /* ERRMSG= only makes sense with STAT=. */
4888 gfc_init_se (&se
, NULL
);
4889 se
.want_pointer
= 1;
4890 gfc_conv_expr_lhs (&se
, code
->expr2
);
4892 errlen
= se
.string_length
;
4896 errmsg
= null_pointer_node
;
4897 errlen
= build_int_cst (gfc_charlen_type_node
, 0);
4900 /* GOTO destinations. */
4901 label_errmsg
= gfc_build_label_decl (NULL_TREE
);
4902 label_finish
= gfc_build_label_decl (NULL_TREE
);
4903 TREE_USED (label_finish
) = 0;
/* Translate each object in the allocation list in turn.  */
4909 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
4911 expr
= gfc_copy_expr (al
->expr
);
4913 if (expr
->ts
.type
== BT_CLASS
)
4914 gfc_add_data_component (expr
);
4916 gfc_init_se (&se
, NULL
);
4918 se
.want_pointer
= 1;
4919 se
.descriptor_only
= 1;
4920 gfc_conv_expr (&se
, expr
);
4922 /* Evaluate expr3 just once if not a variable. */
4923 if (al
== code
->ext
.alloc
.list
4924 && al
->expr
->ts
.type
== BT_CLASS
4926 && code
->expr3
->ts
.type
== BT_CLASS
4927 && code
->expr3
->expr_type
!= EXPR_VARIABLE
)
4929 gfc_init_se (&se_sz
, NULL
);
4930 gfc_conv_expr_reference (&se_sz
, code
->expr3
);
4931 gfc_conv_class_to_class (&se_sz
, code
->expr3
,
4932 code
->expr3
->ts
, false, true, false, false);
4933 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4934 gfc_add_block_to_block (&se
.post
, &se_sz
.post
);
4935 classexpr
= build_fold_indirect_ref_loc (input_location
,
4937 classexpr
= gfc_evaluate_now (classexpr
, &se
.pre
);
4938 memsize
= gfc_vtable_size_get (classexpr
);
4939 memsize
= fold_convert (sizetype
, memsize
);
4943 class_expr
= classexpr
;
/* gfc_array_allocate handles array objects and returns true when it
   did; the false branch below handles scalars/derived types.  */
4946 if (!gfc_array_allocate (&se
, expr
, stat
, errmsg
, errlen
, label_finish
,
4947 memsz
, &nelems
, code
->expr3
, &code
->ext
.alloc
.ts
))
4949 bool unlimited_char
;
4951 unlimited_char
= UNLIMITED_POLY (al
->expr
)
4952 && ((code
->expr3
&& code
->expr3
->ts
.type
== BT_CHARACTER
)
4953 || (code
->ext
.alloc
.ts
.type
== BT_CHARACTER
4954 && code
->ext
.alloc
.ts
.u
.cl
4955 && code
->ext
.alloc
.ts
.u
.cl
->length
));
4957 /* A scalar or derived type. */
4959 /* Determine allocate size. */
4960 if (al
->expr
->ts
.type
== BT_CLASS
4963 && memsz
== NULL_TREE
)
4965 if (code
->expr3
->ts
.type
== BT_CLASS
)
4967 sz
= gfc_copy_expr (code
->expr3
);
4968 gfc_add_vptr_component (sz
);
4969 gfc_add_size_component (sz
);
4970 gfc_init_se (&se_sz
, NULL
);
4971 gfc_conv_expr (&se_sz
, sz
);
4976 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->expr3
->ts
));
4978 else if (((al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
4979 || unlimited_char
) && code
->expr3
)
4981 if (!code
->expr3
->ts
.u
.cl
->backend_decl
)
4983 /* Convert and use the length expression. */
4984 gfc_init_se (&se_sz
, NULL
);
4985 if (code
->expr3
->expr_type
== EXPR_VARIABLE
4986 || code
->expr3
->expr_type
== EXPR_CONSTANT
)
4988 gfc_conv_expr (&se_sz
, code
->expr3
);
4989 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
4991 = gfc_evaluate_now (se_sz
.string_length
, &se
.pre
);
4992 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
4993 memsz
= se_sz
.string_length
;
4995 else if (code
->expr3
->mold
4996 && code
->expr3
->ts
.u
.cl
4997 && code
->expr3
->ts
.u
.cl
->length
)
4999 gfc_conv_expr (&se_sz
, code
->expr3
->ts
.u
.cl
->length
);
5000 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5001 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5002 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5007 /* This is would be inefficient and possibly could
5008 generate wrong code if the result were not stored
5010 if (slen3
== NULL_TREE
)
5012 gfc_conv_expr (&se_sz
, code
->expr3
);
5013 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5014 expr3
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5015 gfc_add_block_to_block (&post
, &se_sz
.post
);
5016 slen3
= gfc_evaluate_now (se_sz
.string_length
,
5023 /* Otherwise use the stored string length. */
5024 memsz
= code
->expr3
->ts
.u
.cl
->backend_decl
;
5025 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
5027 /* Store the string length. */
5028 if (tmp
&& TREE_CODE (tmp
) == VAR_DECL
)
5029 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
5031 else if (al
->expr
->ts
.type
== BT_CHARACTER
5032 && al
->expr
->ts
.deferred
&& se
.string_length
)
5033 gfc_add_modify (&se
.pre
, se
.string_length
,
5034 fold_convert (TREE_TYPE (se
.string_length
),
5037 /* Convert to size in bytes, using the character KIND. */
5039 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->expr3
->ts
));
5041 tmp
= TREE_TYPE (gfc_typenode_for_spec (&al
->expr
->ts
));
5042 tmp
= TYPE_SIZE_UNIT (tmp
);
5043 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5044 TREE_TYPE (tmp
), tmp
,
5045 fold_convert (TREE_TYPE (tmp
), memsz
));
5047 else if ((al
->expr
->ts
.type
== BT_CHARACTER
&& al
->expr
->ts
.deferred
)
5050 gcc_assert (code
->ext
.alloc
.ts
.u
.cl
&& code
->ext
.alloc
.ts
.u
.cl
->length
);
5051 gfc_init_se (&se_sz
, NULL
);
5052 gfc_conv_expr (&se_sz
, code
->ext
.alloc
.ts
.u
.cl
->length
);
5053 gfc_add_block_to_block (&se
.pre
, &se_sz
.pre
);
5054 se_sz
.expr
= gfc_evaluate_now (se_sz
.expr
, &se
.pre
);
5055 gfc_add_block_to_block (&se
.pre
, &se_sz
.post
);
5056 /* Store the string length. */
5057 tmp
= al
->expr
->ts
.u
.cl
->backend_decl
;
5058 gfc_add_modify (&se
.pre
, tmp
, fold_convert (TREE_TYPE (tmp
),
5060 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5061 tmp
= TYPE_SIZE_UNIT (tmp
);
5062 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5063 TREE_TYPE (tmp
), tmp
,
5064 fold_convert (TREE_TYPE (se_sz
.expr
),
5067 else if (code
->ext
.alloc
.ts
.type
!= BT_UNKNOWN
)
5068 memsz
= TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5069 else if (memsz
== NULL_TREE
)
5070 memsz
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se
.expr
)));
5072 if (expr
->ts
.type
== BT_CHARACTER
&& memsz
== NULL_TREE
)
5074 memsz
= se
.string_length
;
5076 /* Convert to size in bytes, using the character KIND. */
5077 tmp
= TREE_TYPE (gfc_typenode_for_spec (&code
->ext
.alloc
.ts
));
5078 tmp
= TYPE_SIZE_UNIT (tmp
);
5079 memsz
= fold_build2_loc (input_location
, MULT_EXPR
,
5080 TREE_TYPE (tmp
), tmp
,
5081 fold_convert (TREE_TYPE (tmp
), memsz
));
5084 /* Allocate - for non-pointers with re-alloc checking. */
5085 if (gfc_expr_attr (expr
).allocatable
)
5086 gfc_allocate_allocatable (&se
.pre
, se
.expr
, memsz
, NULL_TREE
,
5087 stat
, errmsg
, errlen
, label_finish
, expr
);
5089 gfc_allocate_using_malloc (&se
.pre
, se
.expr
, memsz
, stat
);
/* Freshly allocated derived types with allocatable components get
   their components nullified.  */
5091 if (al
->expr
->ts
.type
== BT_DERIVED
5092 && expr
->ts
.u
.derived
->attr
.alloc_comp
)
5094 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
5095 tmp
= gfc_nullify_alloc_comp (expr
->ts
.u
.derived
, tmp
, 0);
5096 gfc_add_expr_to_block (&se
.pre
, tmp
);
5100 gfc_add_block_to_block (&block
, &se
.pre
);
5102 /* Error checking -- Note: ERRMSG only makes sense with STAT. */
5105 tmp
= build1_v (GOTO_EXPR
, label_errmsg
);
5106 parm
= fold_build2_loc (input_location
, NE_EXPR
,
5107 boolean_type_node
, stat
,
5108 build_int_cst (TREE_TYPE (stat
), 0));
5109 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5110 gfc_unlikely (parm
, PRED_FORTRAN_FAIL_ALLOC
),
5111 tmp
, build_empty_stmt (input_location
));
5112 gfc_add_expr_to_block (&block
, tmp
);
5115 /* We need the vptr of CLASS objects to be initialized. */
5116 e
= gfc_copy_expr (al
->expr
);
5117 if (e
->ts
.type
== BT_CLASS
)
5119 gfc_expr
*lhs
, *rhs
;
5121 gfc_ref
*ref
, *class_ref
, *tail
;
5123 /* Find the last class reference. */
5125 for (ref
= e
->ref
; ref
; ref
= ref
->next
)
5127 if (ref
->type
== REF_COMPONENT
5128 && ref
->u
.c
.component
->ts
.type
== BT_CLASS
)
5131 if (ref
->next
== NULL
)
5135 /* Remove and store all subsequent references after the
5139 tail
= class_ref
->next
;
5140 class_ref
->next
= NULL
;
5148 lhs
= gfc_expr_to_initialize (e
);
5149 gfc_add_vptr_component (lhs
);
5151 /* Remove the _vptr component and restore the original tail
5155 gfc_free_ref_list (class_ref
->next
);
5156 class_ref
->next
= tail
;
5160 gfc_free_ref_list (e
->ref
);
5164 if (class_expr
!= NULL_TREE
)
5166 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5167 gfc_init_se (&lse
, NULL
);
5168 lse
.want_pointer
= 1;
5169 gfc_conv_expr (&lse
, lhs
);
5170 tmp
= gfc_class_vptr_get (class_expr
);
5171 gfc_add_modify (&block
, lse
.expr
,
5172 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
5174 else if (code
->expr3
&& code
->expr3
->ts
.type
== BT_CLASS
)
5176 /* Polymorphic SOURCE: VPTR must be determined at run time. */
5177 rhs
= gfc_copy_expr (code
->expr3
);
5178 gfc_add_vptr_component (rhs
);
5179 tmp
= gfc_trans_pointer_assignment (lhs
, rhs
);
5180 gfc_add_expr_to_block (&block
, tmp
);
5181 gfc_free_expr (rhs
);
5182 rhs
= gfc_expr_to_initialize (e
);
5186 /* VPTR is fixed at compile time. */
5190 ts
= &code
->expr3
->ts
;
5191 else if (e
->ts
.type
== BT_DERIVED
)
5193 else if (code
->ext
.alloc
.ts
.type
== BT_DERIVED
|| UNLIMITED_POLY (al
->expr
))
5194 ts
= &code
->ext
.alloc
.ts
;
5195 else if (e
->ts
.type
== BT_CLASS
)
5196 ts
= &CLASS_DATA (e
)->ts
;
5200 if (ts
->type
== BT_DERIVED
|| UNLIMITED_POLY (e
))
5202 vtab
= gfc_find_vtab (ts
);
5204 gfc_init_se (&lse
, NULL
);
5205 lse
.want_pointer
= 1;
5206 gfc_conv_expr (&lse
, lhs
);
5207 tmp
= gfc_build_addr_expr (NULL_TREE
,
5208 gfc_get_symbol_decl (vtab
));
5209 gfc_add_modify (&block
, lse
.expr
,
5210 fold_convert (TREE_TYPE (lse
.expr
), tmp
));
5213 gfc_free_expr (lhs
);
5218 if (code
->expr3
&& !code
->expr3
->mold
)
5220 /* Initialization via SOURCE block
5221 (or static default initializer). */
5222 gfc_expr
*rhs
= gfc_copy_expr (code
->expr3
);
5223 if (class_expr
!= NULL_TREE
)
5226 to
= TREE_OPERAND (se
.expr
, 0);
5228 tmp
= gfc_copy_class_to_class (class_expr
, to
, nelems
);
5230 else if (al
->expr
->ts
.type
== BT_CLASS
)
5232 gfc_actual_arglist
*actual
;
5235 gfc_ref
*ref
, *dataref
;
5237 /* Do a polymorphic deep copy. */
5238 actual
= gfc_get_actual_arglist ();
5239 actual
->expr
= gfc_copy_expr (rhs
);
5240 if (rhs
->ts
.type
== BT_CLASS
)
5241 gfc_add_data_component (actual
->expr
);
5242 actual
->next
= gfc_get_actual_arglist ();
5243 actual
->next
->expr
= gfc_copy_expr (al
->expr
);
5244 actual
->next
->expr
->ts
.type
= BT_CLASS
;
5245 gfc_add_data_component (actual
->next
->expr
);
5248 /* Make sure we go up through the reference chain to
5249 the _data reference, where the arrayspec is found. */
5250 for (ref
= actual
->next
->expr
->ref
; ref
; ref
= ref
->next
)
5251 if (ref
->type
== REF_COMPONENT
5252 && strcmp (ref
->u
.c
.component
->name
, "_data") == 0)
5255 if (dataref
&& dataref
->u
.c
.component
->as
)
5259 gfc_ref
*ref
= dataref
->next
;
5260 ref
->u
.ar
.type
= AR_SECTION
;
5261 /* We have to set up the array reference to give ranges
5262 in all dimensions and ensure that the end and stride
5263 are set so that the copy can be scalarized. */
5265 for (; dim
< dataref
->u
.c
.component
->as
->rank
; dim
++)
5267 ref
->u
.ar
.dimen_type
[dim
] = DIMEN_RANGE
;
5268 if (ref
->u
.ar
.end
[dim
] == NULL
)
5270 ref
->u
.ar
.end
[dim
] = ref
->u
.ar
.start
[dim
];
5271 temp
= gfc_get_int_expr (gfc_default_integer_kind
,
5272 &al
->expr
->where
, 1);
5273 ref
->u
.ar
.start
[dim
] = temp
;
5275 temp
= gfc_subtract (gfc_copy_expr (ref
->u
.ar
.end
[dim
]),
5276 gfc_copy_expr (ref
->u
.ar
.start
[dim
]));
5277 temp
= gfc_add (gfc_get_int_expr (gfc_default_integer_kind
,
5278 &al
->expr
->where
, 1),
5282 if (rhs
->ts
.type
== BT_CLASS
)
5284 ppc
= gfc_copy_expr (rhs
);
5285 gfc_add_vptr_component (ppc
);
5288 ppc
= gfc_lval_expr_from_sym (gfc_find_vtab (&rhs
->ts
));
5289 gfc_add_component_ref (ppc
, "_copy");
5291 ppc_code
= gfc_get_code (EXEC_CALL
);
5292 ppc_code
->resolved_sym
= ppc
->symtree
->n
.sym
;
5293 /* Although '_copy' is set to be elemental in class.c, it is
5294 not staying that way. Find out why, sometime.... */
5295 ppc_code
->resolved_sym
->attr
.elemental
= 1;
5296 ppc_code
->ext
.actual
= actual
;
5297 ppc_code
->expr1
= ppc
;
5298 /* Since '_copy' is elemental, the scalarizer will take care
5299 of arrays in gfc_trans_call. */
5300 tmp
= gfc_trans_call (ppc_code
, true, NULL
, NULL
, false);
5301 gfc_free_statements (ppc_code
);
5303 else if (expr3
!= NULL_TREE
)
5305 tmp
= build_fold_indirect_ref_loc (input_location
, se
.expr
);
5306 gfc_trans_string_copy (&block
, slen3
, tmp
, code
->expr3
->ts
.kind
,
5307 slen3
, expr3
, code
->expr3
->ts
.kind
);
5312 /* Switch off automatic reallocation since we have just done
5314 int realloc_lhs
= gfc_option
.flag_realloc_lhs
;
5315 gfc_option
.flag_realloc_lhs
= 0;
5316 tmp
= gfc_trans_assignment (gfc_expr_to_initialize (expr
),
5318 gfc_option
.flag_realloc_lhs
= realloc_lhs
;
5320 gfc_free_expr (rhs
);
5321 gfc_add_expr_to_block (&block
, tmp
);
5323 else if (code
->expr3
&& code
->expr3
->mold
5324 && code
->expr3
->ts
.type
== BT_CLASS
)
5326 /* Since the _vptr has already been assigned to the allocate
5327 object, we can use gfc_copy_class_to_class in its
5328 initialization mode. */
5329 tmp
= TREE_OPERAND (se
.expr
, 0);
5330 tmp
= gfc_copy_class_to_class (NULL_TREE
, tmp
, nelems
);
5331 gfc_add_expr_to_block (&block
, tmp
);
5334 gfc_free_expr (expr
);
/* End of the per-object loop; emit the STAT/ERRMSG handling code.  */
5340 tmp
= build1_v (LABEL_EXPR
, label_errmsg
);
5341 gfc_add_expr_to_block (&block
, tmp
);
5344 /* ERRMSG - only useful if STAT is present. */
5345 if (code
->expr1
&& code
->expr2
)
5347 const char *msg
= "Attempt to allocate an allocated object";
5348 tree slen
, dlen
, errmsg_str
;
5349 stmtblock_t errmsg_block
;
5351 gfc_init_block (&errmsg_block
);
5353 errmsg_str
= gfc_create_var (pchar_type_node
, "ERRMSG");
5354 gfc_add_modify (&errmsg_block
, errmsg_str
,
5355 gfc_build_addr_expr (pchar_type_node
,
5356 gfc_build_localized_cstring_const (msg
)));
5358 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5359 dlen
= gfc_get_expr_charlen (code
->expr2
);
5360 slen
= fold_build2_loc (input_location
, MIN_EXPR
, TREE_TYPE (slen
), dlen
,
5363 gfc_trans_string_copy (&errmsg_block
, dlen
, errmsg
, code
->expr2
->ts
.kind
,
5364 slen
, errmsg_str
, gfc_default_character_kind
);
5365 dlen
= gfc_finish_block (&errmsg_block
);
5367 tmp
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5368 build_int_cst (TREE_TYPE (stat
), 0));
5370 tmp
= build3_v (COND_EXPR
, tmp
, dlen
, build_empty_stmt (input_location
));
5372 gfc_add_expr_to_block (&block
, tmp
);
5378 if (TREE_USED (label_finish
))
5380 tmp
= build1_v (LABEL_EXPR
, label_finish
);
5381 gfc_add_expr_to_block (&block
, tmp
);
/* Copy the accumulated stat value into the user's STAT variable.  */
5384 gfc_init_se (&se
, NULL
);
5385 gfc_conv_expr_lhs (&se
, code
->expr1
);
5386 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
5387 gfc_add_modify (&block
, se
.expr
, tmp
);
5390 gfc_add_block_to_block (&block
, &se
.post
);
5391 gfc_add_block_to_block (&block
, &post
);
5393 return gfc_finish_block (&block
);
5397 /* Translate a DEALLOCATE statement. */
/* NOTE(review): as with gfc_trans_allocate above, the extraction elided
   physical lines here (embedded numbers jump, e.g. 5400 -> 5404); braces
   and some declarations are missing.  Kept byte-identical -- restore the
   full text from the original file before functional edits.  */
5400 gfc_trans_deallocate (gfc_code
*code
)
5404 tree apstat
, pstat
, stat
, errmsg
, errlen
, tmp
;
5405 tree label_finish
, label_errmsg
;
5408 pstat
= apstat
= stat
= errmsg
= errlen
= tmp
= NULL_TREE
;
5409 label_finish
= label_errmsg
= NULL_TREE
;
5411 gfc_start_block (&block
);
5413 /* Count the number of failed deallocations. If deallocate() was
5414 called with STAT= , then set STAT to the count. If deallocate
5415 was called with ERRMSG, then set ERRMG to a string. */
5418 tree gfc_int4_type_node
= gfc_get_int_type (4);
5420 stat
= gfc_create_var (gfc_int4_type_node
, "stat");
5421 pstat
= gfc_build_addr_expr (NULL_TREE
, stat
);
5423 /* GOTO destinations. */
5424 label_errmsg
= gfc_build_label_decl (NULL_TREE
);
5425 label_finish
= gfc_build_label_decl (NULL_TREE
);
5426 TREE_USED (label_finish
) = 0;
5429 /* Set ERRMSG - only needed if STAT is available. */
5430 if (code
->expr1
&& code
->expr2
)
5432 gfc_init_se (&se
, NULL
);
5433 se
.want_pointer
= 1;
5434 gfc_conv_expr_lhs (&se
, code
->expr2
);
5436 errlen
= se
.string_length
;
/* Translate each object in the deallocation list in turn.  */
5439 for (al
= code
->ext
.alloc
.list
; al
!= NULL
; al
= al
->next
)
5441 gfc_expr
*expr
= gfc_copy_expr (al
->expr
);
5442 gcc_assert (expr
->expr_type
== EXPR_VARIABLE
);
5444 if (expr
->ts
.type
== BT_CLASS
)
5445 gfc_add_data_component (expr
);
5447 gfc_init_se (&se
, NULL
);
5448 gfc_start_block (&se
.pre
);
5450 se
.want_pointer
= 1;
5451 se
.descriptor_only
= 1;
5452 gfc_conv_expr (&se
, expr
);
/* Arrays (and coarrays) take the descriptor-based path; the else
   branch below handles scalars.  */
5454 if (expr
->rank
|| gfc_is_coarray (expr
))
5456 if (expr
->ts
.type
== BT_DERIVED
&& expr
->ts
.u
.derived
->attr
.alloc_comp
5457 && !gfc_is_finalizable (expr
->ts
.u
.derived
, NULL
))
5460 gfc_ref
*last
= NULL
;
5461 for (ref
= expr
->ref
; ref
; ref
= ref
->next
)
5462 if (ref
->type
== REF_COMPONENT
)
5465 /* Do not deallocate the components of a derived type
5466 ultimate pointer component. */
5467 if (!(last
&& last
->u
.c
.component
->attr
.pointer
)
5468 && !(!last
&& expr
->symtree
->n
.sym
->attr
.pointer
))
5470 tmp
= gfc_deallocate_alloc_comp (expr
->ts
.u
.derived
, se
.expr
,
5472 gfc_add_expr_to_block (&se
.pre
, tmp
);
5475 tmp
= gfc_array_deallocate (se
.expr
, pstat
, errmsg
, errlen
,
5476 label_finish
, expr
);
5477 gfc_add_expr_to_block (&se
.pre
, tmp
);
5478 if (al
->expr
->ts
.type
== BT_CLASS
)
5479 gfc_reset_vptr (&se
.pre
, al
->expr
);
5483 tmp
= gfc_deallocate_scalar_with_status (se
.expr
, pstat
, false,
5484 al
->expr
, al
->expr
->ts
);
5485 gfc_add_expr_to_block (&se
.pre
, tmp
);
5487 /* Set to zero after deallocation. */
5488 tmp
= fold_build2_loc (input_location
, MODIFY_EXPR
, void_type_node
,
5490 build_int_cst (TREE_TYPE (se
.expr
), 0));
5491 gfc_add_expr_to_block (&se
.pre
, tmp
);
5493 if (al
->expr
->ts
.type
== BT_CLASS
)
5494 gfc_reset_vptr (&se
.pre
, al
->expr
);
/* On failure (stat != 0) jump to the ERRMSG handling label.  */
5501 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5502 build_int_cst (TREE_TYPE (stat
), 0));
5503 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5504 gfc_unlikely (cond
, PRED_FORTRAN_FAIL_ALLOC
),
5505 build1_v (GOTO_EXPR
, label_errmsg
),
5506 build_empty_stmt (input_location
));
5507 gfc_add_expr_to_block (&se
.pre
, tmp
);
5510 tmp
= gfc_finish_block (&se
.pre
);
5511 gfc_add_expr_to_block (&block
, tmp
);
5512 gfc_free_expr (expr
);
5517 tmp
= build1_v (LABEL_EXPR
, label_errmsg
);
5518 gfc_add_expr_to_block (&block
, tmp
);
5521 /* Set ERRMSG - only needed if STAT is available. */
5522 if (code
->expr1
&& code
->expr2
)
5524 const char *msg
= "Attempt to deallocate an unallocated object";
5525 stmtblock_t errmsg_block
;
5526 tree errmsg_str
, slen
, dlen
, cond
;
5528 gfc_init_block (&errmsg_block
);
5530 errmsg_str
= gfc_create_var (pchar_type_node
, "ERRMSG");
5531 gfc_add_modify (&errmsg_block
, errmsg_str
,
5532 gfc_build_addr_expr (pchar_type_node
,
5533 gfc_build_localized_cstring_const (msg
)));
5534 slen
= build_int_cst (gfc_charlen_type_node
, ((int) strlen (msg
)));
5535 dlen
= gfc_get_expr_charlen (code
->expr2
);
5537 gfc_trans_string_copy (&errmsg_block
, dlen
, errmsg
, code
->expr2
->ts
.kind
,
5538 slen
, errmsg_str
, gfc_default_character_kind
);
5539 tmp
= gfc_finish_block (&errmsg_block
);
5541 cond
= fold_build2_loc (input_location
, NE_EXPR
, boolean_type_node
, stat
,
5542 build_int_cst (TREE_TYPE (stat
), 0));
5543 tmp
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
5544 gfc_unlikely (cond
, PRED_FORTRAN_FAIL_ALLOC
), tmp
,
5545 build_empty_stmt (input_location
));
5547 gfc_add_expr_to_block (&block
, tmp
);
5550 if (code
->expr1
&& TREE_USED (label_finish
))
5552 tmp
= build1_v (LABEL_EXPR
, label_finish
);
5553 gfc_add_expr_to_block (&block
, tmp
);
/* Copy the accumulated stat value into the user's STAT variable.  */
5559 gfc_init_se (&se
, NULL
);
5560 gfc_conv_expr_lhs (&se
, code
->expr1
);
5561 tmp
= convert (TREE_TYPE (se
.expr
), stat
);
5562 gfc_add_modify (&block
, se
.expr
, tmp
);
5565 return gfc_finish_block (&block
);
5568 #include "gt-fortran-trans-stmt.h"