/* Statement translation -- generate GCC trees from gfc_code.
   Copyright (C) 2002-2014 Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "stringpool.h"
#include "gfortran.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "dependency.h"

typedef struct iter_info
{
  tree var;
  tree start;
  tree end;
  tree step;
  struct iter_info *next;
}
iter_info;

typedef struct forall_info
{
  iter_info *this_loop;
  tree mask;
  tree maskindex;
  int nvar;
  tree size;
  struct forall_info *prev_nest;
}
forall_info;

static void gfc_trans_where_2 (gfc_code *, tree, bool,
                               forall_info *, stmtblock_t *);

/* Translate a F95 label number to a LABEL_EXPR.  */

tree
gfc_trans_label_here (gfc_code * code)
{
  return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
}

/* Given a variable expression which has been ASSIGNed to, find the decl
   containing the auxiliary variables.  For variables in common blocks this
   is a field of the common block.  */

static void
gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
{
  gcc_assert (expr->symtree->n.sym->attr.assign == 1);
  gfc_conv_expr (se, expr);
  /* Deals with variable in common block.  Get the field declaration.  */
  if (TREE_CODE (se->expr) == COMPONENT_REF)
    se->expr = TREE_OPERAND (se->expr, 1);
  /* Deals with dummy argument.  Get the parameter declaration.  */
  else if (TREE_CODE (se->expr) == INDIRECT_REF)
    se->expr = TREE_OPERAND (se->expr, 0);
}

/* Translate a label assignment statement.  */

tree
gfc_trans_label_assign (gfc_code * code)
{
  tree label_tree;
  gfc_se se;
  tree len;
  tree addr;
  tree len_tree;
  int label_len;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr1);

  len = GFC_DECL_STRING_LEN (se.expr);
  addr = GFC_DECL_ASSIGN_ADDR (se.expr);

  label_tree = gfc_get_label_decl (code->label1);

  if (code->label1->defined == ST_LABEL_TARGET
      || code->label1->defined == ST_LABEL_DO_TARGET)
    {
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
      len_tree = integer_minus_one_node;
    }
  else
    {
      gfc_expr *format = code->label1->format;

      label_len = format->value.character.length;
      len_tree = build_int_cst (gfc_charlen_type_node, label_len);
      label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
                                                format->value.character.string);
      label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
    }

  gfc_add_modify (&se.pre, len, len_tree);
  gfc_add_modify (&se.pre, addr, label_tree);

  return gfc_finish_block (&se.pre);
}

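/* Rough illustration (simplified, not copied from the sources): for

       ASSIGN 100 TO ILABEL

   the code above stores the address of the GCC label decl for statement
   label 100 in ILABEL's auxiliary ASSIGN_ADDR variable and sets the
   auxiliary string length to -1; if label 100 is a FORMAT statement
   instead, the address and length of a string constant holding the format
   are stored, so that a later assigned GOTO or I/O statement can tell the
   two uses apart at run time.  */
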
/* Translate a GOTO statement.  */

tree
gfc_trans_goto (gfc_code * code)
{
  locus loc = code->loc;
  tree assigned_goto;
  tree target;
  tree tmp;
  gfc_se se;

  if (code->label1 != NULL)
    return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));

  /* ASSIGNED GOTO.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);
  gfc_conv_label_variable (&se, code->expr1);
  tmp = GFC_DECL_STRING_LEN (se.expr);
  tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, tmp,
                         build_int_cst (TREE_TYPE (tmp), -1));
  gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
                           "Assigned label is not a target label");

  assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);

  /* We're going to ignore a label list.  It does not really change the
     statement's semantics (because it is just a further restriction on
     what's legal code); before, we were comparing label addresses here, but
     that's a very fragile business and may break with optimization.  So
     we just ignore it.  */

  target = fold_build1_loc (input_location, GOTO_EXPR, void_type_node,
                            assigned_goto);
  gfc_add_expr_to_block (&se.pre, target);
  return gfc_finish_block (&se.pre);
}

/* Translate an ENTRY statement.  Just adds a label for this entry point.  */

tree
gfc_trans_entry (gfc_code * code)
{
  return build1_v (LABEL_EXPR, code->ext.entry->label);
}

/* Replace a gfc_ss structure by another both in the gfc_se struct
   and the gfc_loopinfo struct.  This is used in gfc_conv_elemental_dependencies
   to replace a variable ss by the corresponding temporary.  */

static void
replace_ss (gfc_se *se, gfc_ss *old_ss, gfc_ss *new_ss)
{
  gfc_ss **sess, **loopss;

  /* The old_ss is a ss for a single variable.  */
  gcc_assert (old_ss->info->type == GFC_SS_SECTION);

  for (sess = &(se->ss); *sess != gfc_ss_terminator; sess = &((*sess)->next))
    if (*sess == old_ss)
      break;
  gcc_assert (*sess != gfc_ss_terminator);

  *sess = new_ss;
  new_ss->next = old_ss->next;

  for (loopss = &(se->loop->ss); *loopss != gfc_ss_terminator;
       loopss = &((*loopss)->loop_chain))
    if (*loopss == old_ss)
      break;
  gcc_assert (*loopss != gfc_ss_terminator);

  *loopss = new_ss;
  new_ss->loop_chain = old_ss->loop_chain;
  new_ss->loop = old_ss->loop;

  gfc_free_ss (old_ss);
}

/* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
   elemental subroutines.  Make temporaries for output arguments if any such
   dependencies are found.  Output arguments are chosen because internal_unpack
   can be used, as is, to copy the result back to the variable.  */

static void
gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
                                 gfc_symbol * sym, gfc_actual_arglist * arg,
                                 gfc_dep_check check_variable)
{
  gfc_actual_arglist *arg0;
  gfc_expr *e;
  gfc_formal_arglist *formal;
  gfc_se parmse;
  gfc_ss *ss;
  gfc_symbol *fsym;
  tree data;
  tree size;
  tree tmp;

  if (loopse->ss == NULL)
    return;

  ss = loopse->ss;
  arg0 = arg;
  formal = gfc_sym_get_dummy_args (sym);

  /* Loop over all the arguments testing for dependencies.  */
  for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
    {
      e = arg->expr;
      if (e == NULL)
        continue;

      /* Obtain the info structure for the current argument.  */
      for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
        if (ss->info->expr == e)
          break;

      /* If there is a dependency, create a temporary and use it
         instead of the variable.  */
      fsym = formal ? formal->sym : NULL;
      if (e->expr_type == EXPR_VARIABLE
            && e->rank && fsym
            && fsym->attr.intent != INTENT_IN
            && gfc_check_fncall_dependency (e, fsym->attr.intent,
                                            sym, arg0, check_variable))
        {
          tree initial, temptype;
          stmtblock_t temp_post;
          gfc_ss *tmp_ss;

          tmp_ss = gfc_get_array_ss (gfc_ss_terminator, NULL, ss->dimen,
                                     GFC_SS_SECTION);
          gfc_mark_ss_chain_used (tmp_ss, 1);
          tmp_ss->info->expr = ss->info->expr;
          replace_ss (loopse, ss, tmp_ss);

          /* Obtain the argument descriptor for unpacking.  */
          gfc_init_se (&parmse, NULL);
          parmse.want_pointer = 1;
          gfc_conv_expr_descriptor (&parmse, e);
          gfc_add_block_to_block (&se->pre, &parmse.pre);

          /* If we've got INTENT(INOUT) or a derived type with INTENT(OUT),
             initialize the array temporary with a copy of the values.  */
          if (fsym->attr.intent == INTENT_INOUT
                || (fsym->ts.type ==BT_DERIVED
                      && fsym->attr.intent == INTENT_OUT))
            initial = parmse.expr;
          /* For class expressions, we always initialize with the copy of
             the values.  */
          else if (e->ts.type == BT_CLASS)
            initial = parmse.expr;
          else
            initial = NULL_TREE;

          if (e->ts.type != BT_CLASS)
            {
             /* Find the type of the temporary to create; we don't use the type
                of e itself as this breaks for subcomponent-references in e
                (where the type of e is that of the final reference, but
                parmse.expr's type corresponds to the full derived-type).  */
             /* TODO: Fix this somehow so we don't need a temporary of the whole
                array but instead only the components referenced.  */
              temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor.  */
              gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
              temptype = TREE_TYPE (temptype);
              temptype = gfc_get_element_type (temptype);
            }
          else
            /* For class arrays signal that the size of the dynamic type has to
               be obtained from the vtable, using the 'initial' expression.  */
            temptype = NULL_TREE;

          /* Generate the temporary.  Cleaning up the temporary should be the
             very last thing done, so we add the code to a new block and add it
             to se->post as last instructions.  */
          size = gfc_create_var (gfc_array_index_type, NULL);
          data = gfc_create_var (pvoid_type_node, NULL);
          gfc_init_block (&temp_post);
          tmp = gfc_trans_create_temp_array (&se->pre, &temp_post, tmp_ss,
                                             temptype, initial, false, true,
                                             false, &arg->expr->where);
          gfc_add_modify (&se->pre, size, tmp);
          tmp = fold_convert (pvoid_type_node, tmp_ss->info->data.array.data);
          gfc_add_modify (&se->pre, data, tmp);

          /* Update other ss' delta.  */
          gfc_set_delta (loopse->loop);

          /* Copy the result back using unpack.....  */
          if (e->ts.type != BT_CLASS)
            tmp = build_call_expr_loc (input_location,
                        gfor_fndecl_in_unpack, 2, parmse.expr, data);
          else
            {
              /* ... except for class results where the copy is done via
                 memcpy on the descriptor data.  */
              tmp = build_fold_indirect_ref_loc (input_location, parmse.expr);
              tmp = gfc_conv_descriptor_data_get (tmp);
              tmp = build_call_expr_loc (input_location,
                                         builtin_decl_explicit (BUILT_IN_MEMCPY),
                                         3, tmp, data,
                                         fold_convert (size_type_node, size));
            }
          gfc_add_expr_to_block (&se->post, tmp);

          /* parmse.pre is already added above.  */
          gfc_add_block_to_block (&se->post, &parmse.post);
          gfc_add_block_to_block (&se->post, &temp_post);
        }
    }
}

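/* Rough illustration (simplified example, not from the sources): for

       elemental subroutine esub (x, y)
         real, intent(in)  :: x
         real, intent(out) :: y
       end subroutine

       call esub (a(1:5), a(2:6))

   the INTENT(OUT) actual argument overlaps the INTENT(IN) one, so the code
   above would redirect the scalarizer to an array temporary for the second
   argument and, after the loop, copy the temporary back into a(2:6) with
   internal_unpack (or with memcpy for class results).  */
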
/* Get the interface symbol for the procedure corresponding to the given call.
   We can't get the procedure symbol directly as we have to handle the case
   of (deferred) type-bound procedures.  */

static gfc_symbol *
get_proc_ifc_for_call (gfc_code *c)
{
  gfc_symbol *sym;

  gcc_assert (c->op == EXEC_ASSIGN_CALL || c->op == EXEC_CALL);

  sym = gfc_get_proc_ifc_for_expr (c->expr1);

  /* Fall back/last resort try.  */
  if (sym == NULL)
    sym = c->resolved_sym;

  return sym;
}

/* Translate the CALL statement.  Builds a call to an F95 subroutine.  */

tree
gfc_trans_call (gfc_code * code, bool dependency_check,
                tree mask, tree count1, bool invert)
{
  gfc_se se;
  gfc_ss *ss;
  int has_alternate_specifier;
  gfc_dep_check check_variable;
  tree index = NULL_TREE;
  tree maskexpr = NULL_TREE;
  tree tmp;

  /* A CALL starts a new block because the actual arguments may have to
     be evaluated first.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  gcc_assert (code->resolved_sym);

  ss = gfc_ss_terminator;
  if (code->resolved_sym->attr.elemental)
    ss = gfc_walk_elemental_function_args (ss, code->ext.actual,
                                           get_proc_ifc_for_call (code),
                                           GFC_SS_REFERENCE);

  /* Is not an elemental subroutine call with array valued arguments.  */
  if (ss == gfc_ss_terminator)
    {
      /* Translate the call.  */
      has_alternate_specifier
        = gfc_conv_procedure_call (&se, code->resolved_sym, code->ext.actual,
                                   code->expr1, NULL);

      /* A subroutine without side-effect, by definition, does nothing!  */
      TREE_SIDE_EFFECTS (se.expr) = 1;

      /* Chain the pieces together and return the block.  */
      if (has_alternate_specifier)
        {
          gfc_code *select_code;
          gfc_symbol *sym;

          select_code = code->next;
          gcc_assert(select_code->op == EXEC_SELECT);
          sym = select_code->expr1->symtree->n.sym;
          se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
          if (sym->backend_decl == NULL)
            sym->backend_decl = gfc_get_symbol_decl (sym);
          gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
        }
      else
        gfc_add_expr_to_block (&se.pre, se.expr);

      gfc_add_block_to_block (&se.pre, &se.post);
    }
  else
    {
      /* An elemental subroutine call with array valued arguments has
         to be scalarized.  */
      gfc_loopinfo loop;
      stmtblock_t body;
      stmtblock_t block;
      gfc_se loopse;
      gfc_se depse;

      /* gfc_walk_elemental_function_args renders the ss chain in the
         reverse order to the actual argument order.  */
      ss = gfc_reverse_ss (ss);

      /* Initialize the loop.  */
      gfc_init_se (&loopse, NULL);
      gfc_init_loopinfo (&loop);
      gfc_add_ss_to_loop (&loop, ss);

      gfc_conv_ss_startstride (&loop);
      /* TODO: gfc_conv_loop_setup generates a temporary for vector
         subscripts.  This could be prevented in the elemental case
         as temporaries are handled separatedly
         (below in gfc_conv_elemental_dependencies).  */
      gfc_conv_loop_setup (&loop, &code->expr1->where);
      gfc_mark_ss_chain_used (ss, 1);

      /* Convert the arguments, checking for dependencies.  */
      gfc_copy_loopinfo_to_se (&loopse, &loop);
      loopse.ss = ss;

      /* For operator assignment, do dependency checking.  */
      if (dependency_check)
        check_variable = ELEM_CHECK_VARIABLE;
      else
        check_variable = ELEM_DONT_CHECK_VARIABLE;

      gfc_init_se (&depse, NULL);
      gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
                                       code->ext.actual, check_variable);

      gfc_add_block_to_block (&loop.pre,  &depse.pre);
      gfc_add_block_to_block (&loop.post, &depse.post);

      /* Generate the loop body.  */
      gfc_start_scalarized_body (&loop, &body);
      gfc_init_block (&block);

      if (mask && count1)
        {
          /* Form the mask expression according to the mask.  */
          index = count1;
          maskexpr = gfc_build_array_ref (mask, index, NULL);
          if (invert)
            maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
                                        TREE_TYPE (maskexpr), maskexpr);
        }

      /* Add the subroutine call to the block.  */
      gfc_conv_procedure_call (&loopse, code->resolved_sym,
                               code->ext.actual, code->expr1,
                               NULL);

      if (mask && count1)
        {
          tmp = build3_v (COND_EXPR, maskexpr, loopse.expr,
                          build_empty_stmt (input_location));
          gfc_add_expr_to_block (&loopse.pre, tmp);
          tmp = fold_build2_loc (input_location, PLUS_EXPR,
                                 gfc_array_index_type,
                                 count1, gfc_index_one_node);
          gfc_add_modify (&loopse.pre, count1, tmp);
        }
      else
        gfc_add_expr_to_block (&loopse.pre, loopse.expr);

      gfc_add_block_to_block (&block, &loopse.pre);
      gfc_add_block_to_block (&block, &loopse.post);

      /* Finish up the loop block and the loop.  */
      gfc_add_expr_to_block (&body, gfc_finish_block (&block));
      gfc_trans_scalarizing_loops (&loop, &body);
      gfc_add_block_to_block (&se.pre, &loop.pre);
      gfc_add_block_to_block (&se.pre, &loop.post);
      gfc_add_block_to_block (&se.pre, &se.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&se.pre);
}

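/* Rough sketch of the elemental branch above (illustrative only): a call
   like

       call esub (a, b)      ! a, b rank-1 arrays of the same shape

   is scalarized into approximately

       for (S.0 = 0; S.0 <= ubound; S.0++)
         esub (&a[S.0], &b[S.0]);

   The mask/count1 arguments are only used when the call originates from a
   masked (WHERE) defined-operator assignment, in which case each call is
   wrapped in a COND_EXPR on the corresponding mask element.  */
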
/* Translate the RETURN statement.  */

tree
gfc_trans_return (gfc_code * code)
{
  if (code->expr1)
    {
      gfc_se se;
      tree tmp;
      tree result;

      /* If code->expr is not NULL, this return statement must appear
         in a subroutine and current_fake_result_decl has already
         been generated.  */

      result = gfc_get_fake_result_decl (NULL, 0);
      if (!result)
        {
          gfc_warning ("An alternate return at %L without a * dummy argument",
                       &code->expr1->where);
          return gfc_generate_return ();
        }

      /* Start a new block for this statement.  */
      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      gfc_conv_expr (&se, code->expr1);

      /* Note that the actually returned expression is a simple value and
         does not depend on any pointers or such; thus we can clean-up with
         se.post before returning.  */
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, TREE_TYPE (result),
                             result, fold_convert (TREE_TYPE (result),
                                                   se.expr));
      gfc_add_expr_to_block (&se.pre, tmp);
      gfc_add_block_to_block (&se.pre, &se.post);

      tmp = gfc_generate_return ();
      gfc_add_expr_to_block (&se.pre, tmp);
      return gfc_finish_block (&se.pre);
    }

  return gfc_generate_return ();
}

/* Translate the PAUSE statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_pause (gfc_code * code)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr1 == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, 0);
      tmp = build_call_expr_loc (input_location,
                                 gfor_fndecl_pause_string, 2,
                                 build_int_cst (pchar_type_node, 0), tmp);
    }
  else if (code->expr1->ts.type == BT_INTEGER)
    {
      gfc_conv_expr (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
                                 gfor_fndecl_pause_numeric, 1,
                                 fold_convert (gfc_int4_type_node, se.expr));
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
                                 gfor_fndecl_pause_string, 2,
                                 se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}

/* Translate the STOP statement.  We have to translate this statement
   to a runtime library call.  */

tree
gfc_trans_stop (gfc_code *code, bool error_stop)
{
  tree gfc_int4_type_node = gfc_get_int_type (4);
  gfc_se se;
  tree tmp;

  /* Start a new block for this statement.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (gfc_option.coarray == GFC_FCOARRAY_LIB && !error_stop)
    {
      /* Per F2008, 8.5.1 STOP implies a SYNC MEMORY.  */
      tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
      tmp = build_call_expr_loc (input_location, tmp, 0);
      gfc_add_expr_to_block (&se.pre, tmp);

      tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_finalize, 0);
      gfc_add_expr_to_block (&se.pre, tmp);
    }

  if (code->expr1 == NULL)
    {
      tmp = build_int_cst (gfc_int4_type_node, 0);
      tmp = build_call_expr_loc (input_location,
                                 error_stop
                                 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
                                    ? gfor_fndecl_caf_error_stop_str
                                    : gfor_fndecl_error_stop_string)
                                 : gfor_fndecl_stop_string,
                                 2, build_int_cst (pchar_type_node, 0), tmp);
    }
  else if (code->expr1->ts.type == BT_INTEGER)
    {
      gfc_conv_expr (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
                                 error_stop
                                 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
                                    ? gfor_fndecl_caf_error_stop
                                    : gfor_fndecl_error_stop_numeric)
                                 : gfor_fndecl_stop_numeric_f08, 1,
                                 fold_convert (gfc_int4_type_node, se.expr));
    }
  else
    {
      gfc_conv_expr_reference (&se, code->expr1);
      tmp = build_call_expr_loc (input_location,
                                 error_stop
                                 ? (gfc_option.coarray == GFC_FCOARRAY_LIB
                                    ? gfor_fndecl_caf_error_stop_str
                                    : gfor_fndecl_error_stop_string)
                                 : gfor_fndecl_stop_string,
                                 2, se.expr, se.string_length);
    }

  gfc_add_expr_to_block (&se.pre, tmp);

  gfc_add_block_to_block (&se.pre, &se.post);

  return gfc_finish_block (&se.pre);
}

tree
gfc_trans_lock_unlock (gfc_code *code, gfc_exec_op type ATTRIBUTE_UNUSED)
{
  gfc_se se, argse;
  tree stat = NULL_TREE, lock_acquired = NULL_TREE;

  /* Short cut: For single images without STAT= or LOCK_ACQUIRED
     return early. (ERRMSG= is always untouched for -fcoarray=single.)  */
  if (!code->expr2 && !code->expr4 && gfc_option.coarray != GFC_FCOARRAY_LIB)
    return NULL_TREE;

  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr2)
    {
      gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (&argse, code->expr2);
      stat = argse.expr;
    }

  if (code->expr4)
    {
      gcc_assert (code->expr4->expr_type == EXPR_VARIABLE);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (&argse, code->expr4);
      lock_acquired = argse.expr;
    }

  if (stat != NULL_TREE)
    gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));

  if (lock_acquired != NULL_TREE)
    gfc_add_modify (&se.pre, lock_acquired,
                    fold_convert (TREE_TYPE (lock_acquired),
                                  boolean_true_node));

  return gfc_finish_block (&se.pre);
}

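/* Rough illustration (simplified): without -fcoarray=lib,

       LOCK (lck, STAT=istat, ACQUIRED_LOCK=got)

   is reduced by the translation above to straight-line assignments
   istat = 0 and got = .true., since with a single image the lock can be
   treated as immediately acquired; when neither STAT= nor ACQUIRED_LOCK=
   is present the statement generates no code at all.  */
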
tree
gfc_trans_sync (gfc_code *code, gfc_exec_op type)
{
  gfc_se se, argse;
  tree tmp;
  tree images = NULL_TREE, stat = NULL_TREE,
       errmsg = NULL_TREE, errmsglen = NULL_TREE;

  /* Short cut: For single images without bound checking or without STAT=,
     return early. (ERRMSG= is always untouched for -fcoarray=single.)  */
  if (!code->expr2 && !(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && gfc_option.coarray != GFC_FCOARRAY_LIB)
    return NULL_TREE;

  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  if (code->expr1 && code->expr1->rank == 0)
    {
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (&argse, code->expr1);
      images = argse.expr;
    }

  if (code->expr2)
    {
      gcc_assert (code->expr2->expr_type == EXPR_VARIABLE);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr_val (&argse, code->expr2);
      stat = argse.expr;
    }
  else
    stat = null_pointer_node;

  if (code->expr3 && gfc_option.coarray == GFC_FCOARRAY_LIB
      && type != EXEC_SYNC_MEMORY)
    {
      gcc_assert (code->expr3->expr_type == EXPR_VARIABLE);
      gfc_init_se (&argse, NULL);
      gfc_conv_expr (&argse, code->expr3);
      gfc_conv_string_parameter (&argse);
      errmsg = gfc_build_addr_expr (NULL, argse.expr);
      errmsglen = argse.string_length;
    }
  else if (gfc_option.coarray == GFC_FCOARRAY_LIB && type != EXEC_SYNC_MEMORY)
    {
      errmsg = null_pointer_node;
      errmsglen = build_int_cst (integer_type_node, 0);
    }

  /* Check SYNC IMAGES(imageset) for valid image index.
     FIXME: Add a check for image-set arrays.  */
  if (code->expr1 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && code->expr1->rank == 0)
    {
      tree cond;

      if (gfc_option.coarray != GFC_FCOARRAY_LIB)
        cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
                                images, build_int_cst (TREE_TYPE (images), 1));
      else
        {
          tree cond2;

          tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_num_images,
                                     2, integer_zero_node,
                                     build_int_cst (integer_type_node, -1));
          cond = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
                                  images, tmp);
          cond2 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
                                   images,
                                   build_int_cst (TREE_TYPE (images), 1));
          cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
                                  boolean_type_node, cond, cond2);
        }
      gfc_trans_runtime_check (true, false, cond, &se.pre,
                               &code->expr1->where, "Invalid image number "
                               "%d in SYNC IMAGES",
                               fold_convert (integer_type_node, images));
    }

  /* Per F2008, 8.5.1, a SYNC MEMORY is implied by calling the
     image control statements SYNC IMAGES and SYNC ALL.  */
  if (gfc_option.coarray == GFC_FCOARRAY_LIB)
    {
      tmp = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
      tmp = build_call_expr_loc (input_location, tmp, 0);
      gfc_add_expr_to_block (&se.pre, tmp);
    }

  if (gfc_option.coarray != GFC_FCOARRAY_LIB || type == EXEC_SYNC_MEMORY)
    {
      /* Set STAT to zero.  */
      if (code->expr2)
        gfc_add_modify (&se.pre, stat, build_int_cst (TREE_TYPE (stat), 0));
    }
  else if (type == EXEC_SYNC_ALL)
    {
      /* SYNC ALL           =>   stat == null_pointer_node
         SYNC ALL(stat=s)   =>   stat has an integer type

         If "stat" has the wrong integer type, use a temp variable of
         the right type and later cast the result back into "stat".  */
      if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
        {
          if (TREE_TYPE (stat) == integer_type_node)
            stat = gfc_build_addr_expr (NULL, stat);

          tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
                                     3, stat, errmsg, errmsglen);
          gfc_add_expr_to_block (&se.pre, tmp);
        }
      else
        {
          tree tmp_stat = gfc_create_var (integer_type_node, "stat");

          tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_all,
                                     3, gfc_build_addr_expr (NULL, tmp_stat),
                                     errmsg, errmsglen);
          gfc_add_expr_to_block (&se.pre, tmp);

          gfc_add_modify (&se.pre, stat,
                          fold_convert (TREE_TYPE (stat), tmp_stat));
        }
    }
  else
    {
      tree len;

      gcc_assert (type == EXEC_SYNC_IMAGES);

      if (!code->expr1)
        {
          len = build_int_cst (integer_type_node, -1);
          images = null_pointer_node;
        }
      else if (code->expr1->rank == 0)
        {
          len = build_int_cst (integer_type_node, 1);
          images = gfc_build_addr_expr (NULL_TREE, images);
        }
      else
        {
          /* An image-set array.  */
          if (code->expr1->ts.kind != gfc_c_int_kind)
            gfc_fatal_error ("Sorry, only support for integer kind %d "
                             "implemented for image-set at %L",
                             gfc_c_int_kind, &code->expr1->where);

          gfc_conv_array_parameter (&se, code->expr1, true, NULL, NULL, &len);
          images = se.expr;

          tmp = gfc_typenode_for_spec (&code->expr1->ts);
          if (GFC_ARRAY_TYPE_P (tmp) || GFC_DESCRIPTOR_TYPE_P (tmp))
            tmp = gfc_get_element_type (tmp);

          len = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
                                 TREE_TYPE (len), len,
                                 fold_convert (TREE_TYPE (len),
                                               TYPE_SIZE_UNIT (tmp)));
          len = fold_convert (integer_type_node, len);
        }

      /* SYNC IMAGES(imgs)        => stat == null_pointer_node
         SYNC IMAGES(imgs,stat=s) => stat has an integer type

         If "stat" has the wrong integer type, use a temp variable of
         the right type and later cast the result back into "stat".  */
      if (stat == null_pointer_node || TREE_TYPE (stat) == integer_type_node)
        {
          if (TREE_TYPE (stat) == integer_type_node)
            stat = gfc_build_addr_expr (NULL, stat);

          tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
                                     5, fold_convert (integer_type_node, len),
                                     images, stat, errmsg, errmsglen);
          gfc_add_expr_to_block (&se.pre, tmp);
        }
      else
        {
          tree tmp_stat = gfc_create_var (integer_type_node, "stat");

          tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_sync_images,
                                     5, fold_convert (integer_type_node, len),
                                     images, gfc_build_addr_expr (NULL, tmp_stat),
                                     errmsg, errmsglen);
          gfc_add_expr_to_block (&se.pre, tmp);

          gfc_add_modify (&se.pre, stat,
                          fold_convert (TREE_TYPE (stat), tmp_stat));
        }
    }

  return gfc_finish_block (&se.pre);
}

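/* Rough illustration (simplified): with -fcoarray=lib,

       SYNC ALL (STAT=istat)

   becomes roughly a __sync_synchronize () memory barrier followed by
   _gfortran_caf_sync_all (&istat, NULL, 0), while with -fcoarray=single
   the same statement only stores 0 into istat (plus, for SYNC IMAGES with
   bounds checking, a runtime check that the image number is 1).  */
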
/* Generate GENERIC for the IF construct.  This function also deals with
   the simple IF statement, because the front end translates the IF
   statement into an IF construct.

   Each IF/ELSE IF level is translated into a COND_EXPR whose else arm
   holds the translation of the next level, where COND_S is the simplified
   version of the predicate and PRE_COND_S are the pre side-effects produced
   by the translation of the conditional expressions.

   We need to build the chain recursively otherwise we run into
   problems with folding incomplete statements.  */
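
/* For instance (illustrative sketch, not taken from the original comment),

       IF (a) THEN
          b
       ELSE IF (c) THEN
          d
       ELSE
          e
       END IF

   is built bottom-up as nested COND_EXPRs, roughly

       pre_a;
       if (a) { b; } else { pre_c; if (c) { d; } else { e; } }

   which is why gfc_trans_if_1 below recurses on code->block to translate
   the ELSE part.  */
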
static tree
gfc_trans_if_1 (gfc_code * code)
{
  gfc_se if_se;
  tree stmt, elsestmt;
  locus saved_loc;
  location_t loc;

  /* Check for an unconditional ELSE clause.  */
  if (!code->expr1)
    return gfc_trans_code (code->next);

  /* Initialize a statement builder for each block. Puts in NULL_TREEs.  */
  gfc_init_se (&if_se, NULL);
  gfc_start_block (&if_se.pre);

  /* Calculate the IF condition expression.  */
  if (code->expr1->where.lb)
    {
      gfc_save_backend_locus (&saved_loc);
      gfc_set_backend_locus (&code->expr1->where);
    }

  gfc_conv_expr_val (&if_se, code->expr1);

  if (code->expr1->where.lb)
    gfc_restore_backend_locus (&saved_loc);

  /* Translate the THEN clause.  */
  stmt = gfc_trans_code (code->next);

  /* Translate the ELSE clause.  */
  if (code->block)
    elsestmt = gfc_trans_if_1 (code->block);
  else
    elsestmt = build_empty_stmt (input_location);

  /* Build the condition expression and add it to the condition block.  */
  loc = code->expr1->where.lb ? code->expr1->where.lb->location : input_location;
  stmt = fold_build3_loc (loc, COND_EXPR, void_type_node, if_se.expr, stmt,
                          elsestmt);

  gfc_add_expr_to_block (&if_se.pre, stmt);

  /* Finish off this statement.  */
  return gfc_finish_block (&if_se.pre);
}

tree
gfc_trans_if (gfc_code * code)
{
  stmtblock_t body;
  tree exit_label;

  /* Create exit label so it is available for trans'ing the body code.  */
  exit_label = gfc_build_label_decl (NULL_TREE);
  code->exit_label = exit_label;

  /* Translate the actual code in code->block.  */
  gfc_init_block (&body);
  gfc_add_expr_to_block (&body, gfc_trans_if_1 (code->block));

  /* Add exit label.  */
  gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));

  return gfc_finish_block (&body);
}

/* Translate an arithmetic IF expression.

   IF (cond) label1, label2, label3 translates to a pair of conditional
   jumps on the sign of cond.

   An optimized version can be generated in case of equal labels.
   E.g., if label1 is equal to label2, we can translate it to a single
   comparison and branch.  */
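
/* Illustrative sketch of the general shape produced below:

       IF (cond) 100, 200, 300

   becomes roughly

       if (cond <= 0)
         {
           if (cond < 0)
             goto label_100;
           else
             goto label_200;
         }
       else
         goto label_300;

   and when two of the labels coincide, one of the comparisons is
   dropped.  */
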
tree
gfc_trans_arithmetic_if (gfc_code * code)
{
  gfc_se se;
  tree tmp;
  tree branch1;
  tree branch2;
  tree zero;

  /* Start a new block.  */
  gfc_init_se (&se, NULL);
  gfc_start_block (&se.pre);

  /* Pre-evaluate COND.  */
  gfc_conv_expr_val (&se, code->expr1);
  se.expr = gfc_evaluate_now (se.expr, &se.pre);

  /* Build something to compare with.  */
  zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);

  if (code->label1->value != code->label2->value)
    {
      /* If (cond < 0) take branch1 else take branch2.
         First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases.  */
      branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));

      if (code->label1->value != code->label3->value)
        tmp = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
                               se.expr, zero);
      else
        tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
                               se.expr, zero);

      branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                                 tmp, branch1, branch2);
    }
  else
    branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label1));

  if (code->label1->value != code->label3->value
      && code->label2->value != code->label3->value)
    {
      /* if (cond <= 0) take branch1 else take branch2.  */
      branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
      tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
                             se.expr, zero);
      branch1 = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                                 tmp, branch1, branch2);
    }

  /* Append the COND_EXPR to the evaluation of COND, and return.  */
  gfc_add_expr_to_block (&se.pre, branch1);
  return gfc_finish_block (&se.pre);
}

/* Translate a CRITICAL block.  */

tree
gfc_trans_critical (gfc_code *code)
{
  stmtblock_t block;
  tree tmp, token = NULL_TREE;

  gfc_start_block (&block);

  if (gfc_option.coarray == GFC_FCOARRAY_LIB)
    {
      token = gfc_get_symbol_decl (code->resolved_sym);
      token = GFC_TYPE_ARRAY_CAF_TOKEN (TREE_TYPE (token));
      tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_lock, 7,
                                 token, integer_zero_node, integer_one_node,
                                 null_pointer_node, null_pointer_node,
                                 null_pointer_node, integer_zero_node);
      gfc_add_expr_to_block (&block, tmp);
    }

  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  if (gfc_option.coarray == GFC_FCOARRAY_LIB)
    {
      tmp = build_call_expr_loc (input_location, gfor_fndecl_caf_unlock, 6,
                                 token, integer_zero_node, integer_one_node,
                                 null_pointer_node, null_pointer_node,
                                 integer_zero_node);
      gfc_add_expr_to_block (&block, tmp);
    }

  return gfc_finish_block (&block);
}

/* Do proper initialization for ASSOCIATE names.  */

static void
trans_associate_var (gfc_symbol *sym, gfc_wrapped_block *block)
{
  gfc_expr *e;
  tree tmp;
  bool class_target;
  bool unlimited;
  tree desc;
  tree offset;
  tree dim;
  int n;

  gcc_assert (sym->assoc);
  e = sym->assoc->target;

  class_target = (e->expr_type == EXPR_VARIABLE)
                    && (gfc_is_class_scalar_expr (e)
                        || gfc_is_class_array_ref (e, NULL));

  unlimited = UNLIMITED_POLY (e);

  /* Do a `pointer assignment' with updated descriptor (or assign descriptor
     to array temporary) for arrays with either unknown shape or if associating
     to a variable.  */
  if (sym->attr.dimension && !class_target
      && (sym->as->type == AS_DEFERRED || sym->assoc->variable))
    {
      gfc_se se;
      bool cst_array_ctor;

      desc = sym->backend_decl;
      cst_array_ctor = e->expr_type == EXPR_ARRAY
              && gfc_constant_array_constructor_p (e->value.constructor);

      /* If association is to an expression, evaluate it and create temporary.
         Otherwise, get descriptor of target for pointer assignment.  */
      gfc_init_se (&se, NULL);
      if (sym->assoc->variable || cst_array_ctor)
        se.direct_byref = 1;

      gfc_conv_expr_descriptor (&se, e);

      /* If we didn't already do the pointer assignment, set associate-name
         descriptor to the one generated for the temporary.  */
      if (!sym->assoc->variable && !cst_array_ctor)
        {
          int dim;

          gfc_add_modify (&se.pre, desc, se.expr);

          /* The generated descriptor has lower bound zero (as array
             temporary), shift bounds so we get lower bounds of 1.  */
          for (dim = 0; dim < e->rank; ++dim)
            gfc_conv_shift_descriptor_lbound (&se.pre, desc,
                                              dim, gfc_index_one_node);
        }

      /* If this is a subreference array pointer associate name use the
         associate variable element size for the value of 'span'.  */
      if (sym->attr.subref_array_pointer)
        {
          gcc_assert (e->expr_type == EXPR_VARIABLE);
          tmp = e->symtree->n.sym->backend_decl;
          tmp = gfc_get_element_type (TREE_TYPE (tmp));
          tmp = fold_convert (gfc_array_index_type, size_in_bytes (tmp));
          gfc_add_modify (&se.pre, GFC_DECL_SPAN(desc), tmp);
        }

      /* Done, register stuff as init / cleanup code.  */
      gfc_add_init_cleanup (block, gfc_finish_block (&se.pre),
                            gfc_finish_block (&se.post));
    }

  /* Temporaries, arising from TYPE IS, just need the descriptor of class
     arrays to be assigned directly.  */
  else if (class_target && sym->attr.dimension
           && (sym->ts.type == BT_DERIVED || unlimited))
    {
      gfc_se se;

      gfc_init_se (&se, NULL);
      se.descriptor_only = 1;
      gfc_conv_expr (&se, e);

      gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se.expr)));
      gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (sym->backend_decl)));

      gfc_add_modify (&se.pre, sym->backend_decl, se.expr);

      if (unlimited)
        {
          /* Recover the dtype, which has been overwritten by the
             assignment from an unlimited polymorphic object.  */
          tmp = gfc_conv_descriptor_dtype (sym->backend_decl);
          gfc_add_modify (&se.pre, tmp,
                          gfc_get_dtype (TREE_TYPE (sym->backend_decl)));
        }

      gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
                            gfc_finish_block (&se.post));
    }

  /* Do a scalar pointer assignment; this is for scalar variable targets.  */
  else if (gfc_is_associate_pointer (sym))
    {
      gfc_se se;

      gcc_assert (!sym->attr.dimension);

      gfc_init_se (&se, NULL);

      /* Class associate-names come this way because they are
         unconditionally associate pointers and the symbol is scalar.  */
      if (sym->ts.type == BT_CLASS && CLASS_DATA (sym)->attr.dimension)
        {
          /* For a class array we need a descriptor for the selector.  */
          gfc_conv_expr_descriptor (&se, e);

          /* Obtain a temporary class container for the result.  */
          gfc_conv_class_to_class (&se, e, sym->ts, false, true, false, false);
          se.expr = build_fold_indirect_ref_loc (input_location, se.expr);

          /* Set the offset.  */
          desc = gfc_class_data_get (se.expr);
          offset = gfc_index_zero_node;
          for (n = 0; n < e->rank; n++)
            {
              dim = gfc_rank_cst[n];
              tmp = fold_build2_loc (input_location, MULT_EXPR,
                                     gfc_array_index_type,
                                     gfc_conv_descriptor_stride_get (desc, dim),
                                     gfc_conv_descriptor_lbound_get (desc, dim));
              offset = fold_build2_loc (input_location, MINUS_EXPR,
                                        gfc_array_index_type,
                                        offset, tmp);
            }
          gfc_conv_descriptor_offset_set (&se.pre, desc, offset);
        }
      else if (sym->ts.type == BT_CLASS && e->ts.type == BT_CLASS
               && CLASS_DATA (e)->attr.dimension)
        {
          /* This is bound to be a class array element.  */
          gfc_conv_expr_reference (&se, e);
          /* Get the _vptr component of the class object.  */
          tmp = gfc_get_vptr_from_expr (se.expr);
          /* Obtain a temporary class container for the result.  */
          gfc_conv_derived_to_class (&se, e, sym->ts, tmp, false, false);
          se.expr = build_fold_indirect_ref_loc (input_location, se.expr);
        }
      else
        gfc_conv_expr (&se, e);

      tmp = TREE_TYPE (sym->backend_decl);
      tmp = gfc_build_addr_expr (tmp, se.expr);
      gfc_add_modify (&se.pre, sym->backend_decl, tmp);

      gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
                            gfc_finish_block (&se.post));
    }

  /* Do a simple assignment.  This is for scalar expressions, where we
     can simply use expression assignment.  */
  else
    {
      gfc_expr *lhs;

      lhs = gfc_lval_expr_from_sym (sym);
      tmp = gfc_trans_assignment (lhs, e, false, true);
      gfc_add_init_cleanup (block, tmp, NULL_TREE);
    }

  /* Set the stringlength from the vtable size.  */
  if (sym->ts.type == BT_CHARACTER && sym->attr.select_type_temporary)
    {
      tree charlen;
      gfc_se se;

      gfc_init_se (&se, NULL);
      gcc_assert (UNLIMITED_POLY (e->symtree->n.sym));
      tmp = gfc_get_symbol_decl (e->symtree->n.sym);
      tmp = gfc_vtable_size_get (tmp);
      gfc_get_symbol_decl (sym);
      charlen = sym->ts.u.cl->backend_decl;
      gfc_add_modify (&se.pre, charlen,
                      fold_convert (TREE_TYPE (charlen), tmp));
      gfc_add_init_cleanup (block, gfc_finish_block( &se.pre),
                            gfc_finish_block (&se.post));
    }
}

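/* Rough examples of the cases handled above (illustrative only):

       ASSOCIATE (p => arr(2:10:2))   ! array target: pointer assignment with
                                      !   a rebased descriptor (lbounds 1)
       ASSOCIATE (q => obj%scalar)    ! scalar variable: scalar pointer
                                      !   assignment to q's backend decl
       ASSOCIATE (r => x + 1.0)       ! expression: ordinary assignment into
                                      !   a temporary via gfc_trans_assignment
*/
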
/* Translate a BLOCK construct.  This is basically what we would do for a
   procedure body.  */

tree
gfc_trans_block_construct (gfc_code* code)
{
  gfc_namespace* ns;
  gfc_symbol* sym;
  gfc_wrapped_block block;
  tree exit_label;
  stmtblock_t body;
  gfc_association_list *ass;

  ns = code->ext.block.ns;
  gcc_assert (ns);
  sym = ns->proc_name;
  gcc_assert (sym);

  /* Process local variables.  */
  gcc_assert (!sym->tlink);
  sym->tlink = sym;
  gfc_process_block_locals (ns);

  /* Generate code including exit-label.  */
  gfc_init_block (&body);
  exit_label = gfc_build_label_decl (NULL_TREE);
  code->exit_label = exit_label;

  gfc_add_expr_to_block (&body, gfc_trans_code (ns->code));
  gfc_add_expr_to_block (&body, build1_v (LABEL_EXPR, exit_label));

  /* Finish everything.  */
  gfc_start_wrapped_block (&block, gfc_finish_block (&body));
  gfc_trans_deferred_vars (sym, &block);
  for (ass = code->ext.block.assoc; ass; ass = ass->next)
    trans_associate_var (ass->st->n.sym, &block);

  return gfc_finish_wrapped_block (&block);
}

/* Translate the simple DO construct.  This is where the loop variable has
   integer type and step +-1.  We can't use this in the general case
   because integer overflow and floating point errors could give incorrect
   results.

   We translate a do loop from:

   DO dovar = from, to, step

      body

   END DO

   to:

   [Evaluate loop bounds and step]
   dovar = from;
   if ((step > 0) ? (dovar <= to) : (dovar >= to))
     {
       for (;;)
         {
           body;
   cycle_label:
           cond = (dovar == to);
           dovar += step;
           if (cond) goto end_label;
         }
     }
   end_label:

   This helps the optimizers by avoiding the extra induction variable
   used in the general case.  */

static tree
gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
                     tree from, tree to, tree step, tree exit_cond)
{
  stmtblock_t body;
  tree type;
  tree cond;
  tree tmp;
  tree saved_dovar = NULL;
  tree cycle_label;
  tree exit_label;
  location_t loc;

  type = TREE_TYPE (dovar);

  loc = code->ext.iterator->start->where.lb->location;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify_loc (loc, pblock, dovar,
                      fold_convert (TREE_TYPE(dovar), from));

  /* Save value for do-tinkering checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify_loc (loc, pblock, saved_dovar, dovar);
    }

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later.  See gfc_trans_do().  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node,
                             dovar, saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
                               "Loop variable has been modified");
    }

  /* Exit the loop if there is an I/O result condition or error.  */
  if (exit_cond)
    {
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
                             exit_cond, tmp,
                             build_empty_stmt (loc));
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Evaluate the loop condition.  */
  cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, dovar,
                          to);
  cond = gfc_evaluate_now_loc (loc, cond, &body);

  /* Increment the loop variable.  */
  tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
  gfc_add_modify_loc (loc, &body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify_loc (loc, &body, saved_dovar, dovar);

  /* The loop exit.  */
  tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
                         cond, tmp, build_empty_stmt (loc));
  gfc_add_expr_to_block (&body, tmp);

  /* Finish the loop body.  */
  tmp = gfc_finish_block (&body);
  tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);

  /* Only execute the loop if the number of iterations is positive.  */
  if (tree_int_cst_sgn (step) > 0)
    cond = fold_build2_loc (loc, LE_EXPR, boolean_type_node, dovar,
                            to);
  else
    cond = fold_build2_loc (loc, GE_EXPR, boolean_type_node, dovar,
                            to);
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, cond, tmp,
                         build_empty_stmt (loc));
  gfc_add_expr_to_block (pblock, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (pblock, tmp);

  return gfc_finish_block (pblock);
}

/* Translate the DO construct.  This obviously is one of the most
   important ones to get right with any compiler, but especially
   this one because of the way Fortran DO loops are defined.

   We special case some loop forms as described in gfc_trans_simple_do.
   For other cases we implement them with a separate loop count,
   as described in the standard.

   We translate a do loop from:

   DO dovar = from, to, step

      body

   END DO

   to:

   [evaluate loop bounds and step]
   empty = (step > 0 ? to < from : to > from);
   countm1 = (to - from) / step;
   dovar = from;
   if (empty) goto exit_label;
   for (;;)
     {
       body;
cycle_label:
       dovar += step;
       countm1t = countm1;
       countm1--;
       if (countm1t == 0) goto exit_label;
     }
exit_label:

   countm1 is an unsigned integer.  It is equal to the loop count minus one,
   because the loop count itself can overflow.  */

tree
gfc_trans_do (gfc_code * code, tree exit_cond)
{
  gfc_se se;
  tree dovar;
  tree saved_dovar = NULL;
  tree from;
  tree to;
  tree step;
  tree countm1;
  tree type;
  tree utype;
  tree cond;
  tree cycle_label;
  tree exit_label;
  tree tmp;
  stmtblock_t block;
  stmtblock_t body;
  location_t loc;

  gfc_start_block (&block);

  loc = code->ext.iterator->start->where.lb->location;

  /* Evaluate all the expressions in the iterator.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_lhs (&se, code->ext.iterator->var);
  gfc_add_block_to_block (&block, &se.pre);
  dovar = se.expr;
  type = TREE_TYPE (dovar);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->start);
  gfc_add_block_to_block (&block, &se.pre);
  from = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->end);
  gfc_add_block_to_block (&block, &se.pre);
  to = gfc_evaluate_now (se.expr, &block);

  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->ext.iterator->step);
  gfc_add_block_to_block (&block, &se.pre);
  step = gfc_evaluate_now (se.expr, &block);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, step,
                             build_zero_cst (type));
      gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
                               "DO step value is zero");
    }

  /* Special case simple loops.  */
  if (TREE_CODE (type) == INTEGER_TYPE
      && (integer_onep (step)
        || tree_int_cst_equal (step, integer_minus_one_node)))
    return gfc_trans_simple_do (code, &block, dovar, from, to, step, exit_cond);

  if (TREE_CODE (type) == INTEGER_TYPE)
    utype = unsigned_type_for (type);
  else
    utype = unsigned_type_for (gfc_array_index_type);
  countm1 = gfc_create_var (utype, "countm1");

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);
  TREE_USED (exit_label) = 1;

  /* Put these labels where they can be found later.  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Initialize the DO variable: dovar = from.  */
  gfc_add_modify (&block, dovar, from);

  /* Save value for do-tinkering checking.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      saved_dovar = gfc_create_var (type, ".saved_dovar");
      gfc_add_modify_loc (loc, &block, saved_dovar, dovar);
    }

  /* Initialize loop count and jump to exit label if the loop is empty.
     This code is executed before we enter the loop body. We generate:
     if (step > 0)
       {
         if (to < from)
           goto exit_label;
         countm1 = (to - from) / step;
       }
     else
       {
         if (to > from)
           goto exit_label;
         countm1 = (from - to) / -step;
       }
   */

  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      tree pos, neg, tou, fromu, stepu, tmp2;

      /* The distance from FROM to TO cannot always be represented in a signed
         type, thus use unsigned arithmetic, also to avoid any undefined
         overflow issues.  */
      tou = fold_convert (utype, to);
      fromu = fold_convert (utype, from);
      stepu = fold_convert (utype, step);

      /* For a positive step, when to < from, exit, otherwise compute
         countm1 = ((unsigned)to - (unsigned)from) / (unsigned)step  */
      tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, to, from);
      tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
                              fold_build2_loc (loc, MINUS_EXPR, utype,
                                               tou, fromu),
                              stepu);
      pos = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
                             fold_build1_loc (loc, GOTO_EXPR, void_type_node,
                                              exit_label),
                             fold_build2 (MODIFY_EXPR, void_type_node,
                                          countm1, tmp2));

      /* For a negative step, when to > from, exit, otherwise compute
         countm1 = ((unsigned)from - (unsigned)to) / -(unsigned)step  */
      tmp = fold_build2_loc (loc, GT_EXPR, boolean_type_node, to, from);
      tmp2 = fold_build2_loc (loc, TRUNC_DIV_EXPR, utype,
                              fold_build2_loc (loc, MINUS_EXPR, utype,
                                               fromu, tou),
                              fold_build1_loc (loc, NEGATE_EXPR, utype, stepu));
      neg = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
                             fold_build1_loc (loc, GOTO_EXPR, void_type_node,
                                              exit_label),
                             fold_build2 (MODIFY_EXPR, void_type_node,
                                          countm1, tmp2));

      tmp = fold_build2_loc (loc, LT_EXPR, boolean_type_node, step,
                             build_int_cst (TREE_TYPE (step), 0));
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp, neg, pos);

      gfc_add_expr_to_block (&block, tmp);
    }
  else
    {
      tree pos_step;

      /* TODO: We could use the same width as the real type.
         This would probably cause more problems that it solves
         when we implement "long double" types.  */

      tmp = fold_build2_loc (loc, MINUS_EXPR, type, to, from);
      tmp = fold_build2_loc (loc, RDIV_EXPR, type, tmp, step);
      tmp = fold_build1_loc (loc, FIX_TRUNC_EXPR, utype, tmp);
      gfc_add_modify (&block, countm1, tmp);

      /* We need a special check for empty loops:
         empty = (step > 0 ? to < from : to > from);  */
      pos_step = fold_build2_loc (loc, GT_EXPR, boolean_type_node, step,
                                  build_zero_cst (type));
      tmp = fold_build3_loc (loc, COND_EXPR, boolean_type_node, pos_step,
                             fold_build2_loc (loc, LT_EXPR,
                                              boolean_type_node, to, from),
                             fold_build2_loc (loc, GT_EXPR,
                                              boolean_type_node, to, from));
      /* If the loop is empty, go directly to the exit label.  */
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node, tmp,
                             build1_v (GOTO_EXPR, exit_label),
                             build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Loop body.  */
  gfc_start_block (&body);

  /* Main loop body.  */
  tmp = gfc_trans_code_cond (code->block->next, exit_cond);
  gfc_add_expr_to_block (&body, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Check whether someone has modified the loop variable.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    {
      tmp = fold_build2_loc (loc, NE_EXPR, boolean_type_node, dovar,
                             saved_dovar);
      gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
                               "Loop variable has been modified");
    }

  /* Exit the loop if there is an I/O result condition or error.  */
  if (exit_cond)
    {
      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
                             exit_cond, tmp,
                             build_empty_stmt (input_location));
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Increment the loop variable.  */
  tmp = fold_build2_loc (loc, PLUS_EXPR, type, dovar, step);
  gfc_add_modify_loc (loc, &body, dovar, tmp);

  if (gfc_option.rtcheck & GFC_RTCHECK_DO)
    gfc_add_modify_loc (loc, &body, saved_dovar, dovar);

  /* Initialize countm1t.  */
  tree countm1t = gfc_create_var (utype, "countm1t");
  gfc_add_modify_loc (loc, &body, countm1t, countm1);

  /* Decrement the loop count.  */
  tmp = fold_build2_loc (loc, MINUS_EXPR, utype, countm1,
                         build_int_cst (utype, 1));
  gfc_add_modify_loc (loc, &body, countm1, tmp);

  /* End with the loop condition.  Loop until countm1t == 0.  */
  cond = fold_build2_loc (loc, EQ_EXPR, boolean_type_node, countm1t,
                          build_int_cst (utype, 0));
  tmp = fold_build1_loc (loc, GOTO_EXPR, void_type_node, exit_label);
  tmp = fold_build3_loc (loc, COND_EXPR, void_type_node,
                         cond, tmp, build_empty_stmt (loc));
  gfc_add_expr_to_block (&body, tmp);

  /* End of loop body.  */
  tmp = gfc_finish_block (&body);

  /* The for loop itself.  */
  tmp = fold_build1_loc (loc, LOOP_EXPR, void_type_node, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

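/* Why countm1 is "count minus one" (illustrative example): with a 32-bit
   loop variable,

       DO i = -HUGE(i) - 1, HUGE(i)

   executes 2**32 iterations; the full trip count does not fit even in the
   unsigned type, but countm1 = 2**32 - 1 does.  Computing (to - from) / step
   in the unsigned type and stopping when the *previous* count value hits
   zero therefore avoids both the "+ 1" and any signed overflow.  */
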
/* Translate the DO WHILE construct.

   We translate

   DO WHILE (cond)
      body
   END DO

   to:

   for ( ; ; )
     {
       pre_cond;
       if (! cond) goto exit_label;
       body;
cycle_label:
     }
exit_label:

   Because the evaluation of the exit condition `cond' may have side
   effects, we can't do much for empty loop bodies.  The backend optimizers
   should be smart enough to eliminate any dead loops.  */

tree
gfc_trans_do_while (gfc_code * code)
{
  gfc_se cond;
  tree tmp;
  tree cycle_label;
  tree exit_label;
  stmtblock_t block;

  /* Everything we build here is part of the loop body.  */
  gfc_start_block (&block);

  /* Cycle and exit statements are implemented with gotos.  */
  cycle_label = gfc_build_label_decl (NULL_TREE);
  exit_label = gfc_build_label_decl (NULL_TREE);

  /* Put the labels where they can be found later. See gfc_trans_do().  */
  code->cycle_label = cycle_label;
  code->exit_label = exit_label;

  /* Create a GIMPLE version of the exit condition.  */
  gfc_init_se (&cond, NULL);
  gfc_conv_expr_val (&cond, code->expr1);
  gfc_add_block_to_block (&block, &cond.pre);
  cond.expr = fold_build1_loc (code->expr1->where.lb->location,
                               TRUTH_NOT_EXPR, TREE_TYPE (cond.expr), cond.expr);

  /* Build "IF (! cond) GOTO exit_label".  */
  tmp = build1_v (GOTO_EXPR, exit_label);
  TREE_USED (exit_label) = 1;
  tmp = fold_build3_loc (code->expr1->where.lb->location, COND_EXPR,
                         void_type_node, cond.expr, tmp,
                         build_empty_stmt (code->expr1->where.lb->location));
  gfc_add_expr_to_block (&block, tmp);

  /* The main body of the loop.  */
  tmp = gfc_trans_code (code->block->next);
  gfc_add_expr_to_block (&block, tmp);

  /* Label for cycle statements (if needed).  */
  if (TREE_USED (cycle_label))
    {
      tmp = build1_v (LABEL_EXPR, cycle_label);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* End of loop body.  */
  tmp = gfc_finish_block (&block);

  gfc_init_block (&block);
  /* Build the loop.  */
  tmp = fold_build1_loc (code->expr1->where.lb->location, LOOP_EXPR,
                         void_type_node, tmp);
  gfc_add_expr_to_block (&block, tmp);

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, exit_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

/* Translate the SELECT CASE construct for INTEGER case expressions,
   without killing all potential optimizations.  The problem is that
   Fortran allows unbounded cases, but the back-end does not, so we
   need to intercept those before we enter the equivalent SWITCH_EXPR
   we can build.

   For example, we translate this,

       SELECT CASE (expr)
          CASE (:100,101,105:115)
             block_1
          ...
       END SELECT

   to the GENERIC equivalent,

     switch (expr)
       {
         case (minimum value for typeof(expr) ... 100:
         case 101:
         case 105 ... 115:
           block_1;
           goto end_label;
         ...
         case 200 ... (maximum value for typeof(expr):
           ...
         end_label:
       }
*/

static tree
gfc_trans_integer_select (gfc_code * code)
{
  gfc_code *c;
  gfc_case *cp;
  tree end_label;
  tree tmp;
  gfc_se se;
  stmtblock_t block;
  stmtblock_t body;

  gfc_start_block (&block);

  /* Calculate the switch expression.  */
  gfc_init_se (&se, NULL);
  gfc_conv_expr_val (&se, code->expr1);
  gfc_add_block_to_block (&block, &se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  for (c = code->block; c; c = c->block)
    {
      for (cp = c->ext.block.case_list; cp; cp = cp->next)
        {
          tree low, high;
          tree label;

          /* Assume it's the default case.  */
          low = high = NULL_TREE;

          if (cp->low)
            {
              low = gfc_conv_mpz_to_tree (cp->low->value.integer,
                                          cp->low->ts.kind);

              /* If there's only a lower bound, set the high bound to the
                 maximum value of the case expression.  */
              if (!cp->high)
                high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
            }

          if (cp->high)
            {
              /* Three cases are possible here:

                 1) There is no lower bound, e.g. CASE (:N).
                 2) There is a lower bound .NE. high bound, that is
                    a case range, e.g. CASE (N:M) where M>N (we make
                    sure that M>N during type resolution).
                 3) There is a lower bound, and it has the same value
                    as the high bound, e.g. CASE (N:N).  This is our
                    internal representation of CASE(N).

                 In the first and second case, we need to set a value for
                 high.  In the third case, we don't because the GCC middle
                 end represents a single case value by just letting high be
                 a NULL_TREE.  We can't do that because we need to be able
                 to represent unbounded cases.  */

              if (!cp->low
                  || (cp->low
                      && mpz_cmp (cp->low->value.integer,
                                  cp->high->value.integer) != 0))
                high = gfc_conv_mpz_to_tree (cp->high->value.integer,
                                             cp->high->ts.kind);

              /* Unbounded case.  */
              if (!cp->low)
                low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
            }

          /* Build a label.  */
          label = gfc_build_label_decl (NULL_TREE);

          /* Add this case label.
             Add parameter 'label', make it match GCC backend.  */
          tmp = build_case_label (low, high, label);
          gfc_add_expr_to_block (&body, tmp);
        }

      /* Add the statements for this case.  */
      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      /* Break to the end of the construct.  */
      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  tmp = gfc_finish_block (&body);
  tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
                         se.expr, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}

/* Translate the SELECT CASE construct for LOGICAL case expressions.

   There are only two cases possible here, even though the standard
   does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
   .FALSE., and DEFAULT.

   We never generate more than two blocks here.  Instead, we always
   try to eliminate the DEFAULT case.  This way, we can translate this
   kind of SELECT construct to a simple

      if {} else {};

   expression in GENERIC.  */

2053 gfc_trans_logical_select (gfc_code
* code
)
2056 gfc_code
*t
, *f
, *d
;
2061 /* Assume we don't have any cases at all. */
2064 /* Now see which ones we actually do have. We can have at most two
2065 cases in a single case list: one for .TRUE. and one for .FALSE.
2066 The default case is always separate. If the cases for .TRUE. and
2067 .FALSE. are in the same case list, the block for that case list
2068 always executed, and we don't generate code a COND_EXPR. */
2069 for (c
= code
->block
; c
; c
= c
->block
)
2071 for (cp
= c
->ext
.block
.case_list
; cp
; cp
= cp
->next
)
2075 if (cp
->low
->value
.logical
== 0) /* .FALSE. */
2077 else /* if (cp->value.logical != 0), thus .TRUE. */
2085 /* Start a new block. */
2086 gfc_start_block (&block
);
2088 /* Calculate the switch expression. We always need to do this
2089 because it may have side effects. */
2090 gfc_init_se (&se
, NULL
);
2091 gfc_conv_expr_val (&se
, code
->expr1
);
2092 gfc_add_block_to_block (&block
, &se
.pre
);
2094 if (t
== f
&& t
!= NULL
)
2096 /* Cases for .TRUE. and .FALSE. are in the same block. Just
2097 translate the code for these cases, append it to the current
2099 gfc_add_expr_to_block (&block
, gfc_trans_code (t
->next
));
2103 tree true_tree
, false_tree
, stmt
;
2105 true_tree
= build_empty_stmt (input_location
);
2106 false_tree
= build_empty_stmt (input_location
);
2108 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
2109 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
2110 make the missing case the default case. */
2111 if (t
!= NULL
&& f
!= NULL
)
2121 /* Translate the code for each of these blocks, and append it to
2122 the current block. */
2124 true_tree
= gfc_trans_code (t
->next
);
2127 false_tree
= gfc_trans_code (f
->next
);
2129 stmt
= fold_build3_loc (input_location
, COND_EXPR
, void_type_node
,
2130 se
.expr
, true_tree
, false_tree
);
2131 gfc_add_expr_to_block (&block
, stmt
);
2134 return gfc_finish_block (&block
);
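
/* Schematic illustration (a sketch, not the exact generated trees): a
   LOGICAL selector such as

       SELECT CASE (flag)
       CASE (.TRUE.)
         call t_branch ()
       CASE DEFAULT
         call f_branch ()
       END SELECT

   is translated by the routine above into a single COND_EXPR, roughly
   "flag ? t_branch () : f_branch ()", with the DEFAULT block folded into
   whichever of .TRUE./.FALSE. is missing.  */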
/* The jump table types are stored in static variables to avoid
   constructing them from scratch every single time.  */
static GTY(()) tree select_struct[2];

/* Translate the SELECT CASE construct for CHARACTER case expressions.
   Instead of generating compares and jumps, it is far simpler to
   generate a data structure describing the cases in order and call a
   library subroutine that locates the right case.
   This is particularly true because this is the only case where we
   might have to dispose of a temporary.
   The library subroutine returns a pointer to jump to or NULL if no
   branches are to be taken.  */

static tree
gfc_trans_character_select (gfc_code *code)
{
  tree init, end_label, tmp, type, case_num, label, fndecl;
  stmtblock_t block, body;
  gfc_case *cp, *d;
  gfc_code *c;
  gfc_se se, expr1se;
  int n, k;
  vec<constructor_elt, va_gc> *inits = NULL;

  tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);

  /* The jump table types are stored in static variables to avoid
     constructing them from scratch every single time.  */
  static tree ss_string1[2], ss_string1_len[2];
  static tree ss_string2[2], ss_string2_len[2];
  static tree ss_target[2];

  cp = code->block->ext.block.case_list;
  while (cp->left != NULL)
    cp = cp->left;

  /* Generate the body */
  gfc_start_block (&block);
  gfc_init_se (&expr1se, NULL);
  gfc_conv_expr_reference (&expr1se, code->expr1);

  gfc_add_block_to_block (&block, &expr1se.pre);

  end_label = gfc_build_label_decl (NULL_TREE);

  gfc_init_block (&body);

  /* Attempt to optimize length 1 selects.  */
  if (integer_onep (expr1se.string_length))
    {
      for (d = cp; d; d = d->right)
	{
	  int i;
	  if (d->low)
	    {
	      gcc_assert (d->low->expr_type == EXPR_CONSTANT
			  && d->low->ts.type == BT_CHARACTER);
	      if (d->low->value.character.length > 1)
		{
		  for (i = 1; i < d->low->value.character.length; i++)
		    if (d->low->value.character.string[i] != ' ')
		      break;
		  if (i != d->low->value.character.length)
		    {
		      if (optimize && d->high && i == 1)
			{
			  gcc_assert (d->high->expr_type == EXPR_CONSTANT
				      && d->high->ts.type == BT_CHARACTER);
			  if (d->high->value.character.length > 1
			      && (d->low->value.character.string[0]
				  == d->high->value.character.string[0])
			      && d->high->value.character.string[1] != ' '
			      && ((d->low->value.character.string[1] < ' ')
				  == (d->high->value.character.string[1]
				      < ' ')))
			    continue;
			}
		      break;
		    }
		}
	    }
	  if (d->high)
	    {
	      gcc_assert (d->high->expr_type == EXPR_CONSTANT
			  && d->high->ts.type == BT_CHARACTER);
	      if (d->high->value.character.length > 1)
		{
		  for (i = 1; i < d->high->value.character.length; i++)
		    if (d->high->value.character.string[i] != ' ')
		      break;
		  if (i != d->high->value.character.length)
		    break;
		}
	    }
	}
      if (d == NULL)
	{
	  tree ctype = gfc_get_char_type (code->expr1->ts.kind);

	  for (c = code->block; c; c = c->block)
	    {
	      for (cp = c->ext.block.case_list; cp; cp = cp->next)
		{
		  tree low, high;
		  gfc_char_t r;

		  /* Assume it's the default case.  */
		  low = high = NULL_TREE;

		  if (cp->low)
		    {
		      /* CASE ('ab') or CASE ('ab':'az') will never match
			 any length 1 character.  */
		      if (cp->low->value.character.length > 1
			  && cp->low->value.character.string[1] != ' ')
			continue;

		      if (cp->low->value.character.length > 0)
			r = cp->low->value.character.string[0];
		      else
			r = ' ';
		      low = build_int_cst (ctype, r);

		      /* If there's only a lower bound, set the high bound
			 to the maximum value of the case expression.  */
		      if (cp->high == NULL)
			high = TYPE_MAX_VALUE (ctype);
		    }

		  if (cp->high)
		    {
		      if (cp->low == NULL
			  || (cp->low->value.character.string[0]
			      != cp->high->value.character.string[0]))
			{
			  if (cp->high->value.character.length > 0)
			    r = cp->high->value.character.string[0];
			  else
			    r = ' ';
			  high = build_int_cst (ctype, r);
			}

		      /* Unbounded case.  */
		      if (cp->low == NULL)
			low = TYPE_MIN_VALUE (ctype);
		    }

		  /* Build a label.  */
		  label = gfc_build_label_decl (NULL_TREE);

		  /* Add this case label.
		     Add parameter 'label', make it match GCC backend.  */
		  tmp = build_case_label (low, high, label);
		  gfc_add_expr_to_block (&body, tmp);
		}

	      /* Add the statements for this case.  */
	      tmp = gfc_trans_code (c->next);
	      gfc_add_expr_to_block (&body, tmp);

	      /* Break to the end of the construct.  */
	      tmp = build1_v (GOTO_EXPR, end_label);
	      gfc_add_expr_to_block (&body, tmp);
	    }

	  tmp = gfc_string_to_single_character (expr1se.string_length,
						expr1se.expr,
						code->expr1->ts.kind);
	  case_num = gfc_create_var (ctype, "case_num");
	  gfc_add_modify (&block, case_num, tmp);

	  gfc_add_block_to_block (&block, &expr1se.post);

	  tmp = gfc_finish_block (&body);
	  tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
				 case_num, tmp, NULL_TREE);
	  gfc_add_expr_to_block (&block, tmp);

	  tmp = build1_v (LABEL_EXPR, end_label);
	  gfc_add_expr_to_block (&block, tmp);

	  return gfc_finish_block (&block);
	}
    }

  if (code->expr1->ts.kind == 1)
    k = 0;
  else if (code->expr1->ts.kind == 4)
    k = 1;
  else
    gcc_unreachable ();

  if (select_struct[k] == NULL)
    {
      select_struct[k] = make_node (RECORD_TYPE);

      if (code->expr1->ts.kind == 1)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
      else if (code->expr1->ts.kind == 4)
	TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");

#define ADD_FIELD(NAME, TYPE)						\
  ss_##NAME[k] = gfc_add_field_to_struct (select_struct[k],		\
					  get_identifier (stringize(NAME)), \
					  TYPE)

      ADD_FIELD (string1, pchartype);
      ADD_FIELD (string1_len, gfc_charlen_type_node);

      ADD_FIELD (string2, pchartype);
      ADD_FIELD (string2_len, gfc_charlen_type_node);

      ADD_FIELD (target, integer_type_node);
#undef ADD_FIELD

      gfc_finish_type (select_struct[k]);
    }

  n = 0;
  for (d = cp; d; d = d->right)
    d->n = n++;

  for (c = code->block; c; c = c->block)
    {
      for (d = c->ext.block.case_list; d; d = d->next)
	{
	  label = gfc_build_label_decl (NULL_TREE);
	  tmp = build_case_label ((d->low == NULL && d->high == NULL)
				  ? NULL_TREE
				  : build_int_cst (integer_type_node, d->n),
				  NULL_TREE, label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      tmp = gfc_trans_code (c->next);
      gfc_add_expr_to_block (&body, tmp);

      tmp = build1_v (GOTO_EXPR, end_label);
      gfc_add_expr_to_block (&body, tmp);
    }

  /* Generate the structure describing the branches */
  for (d = cp; d; d = d->right)
    {
      vec<constructor_elt, va_gc> *node = NULL;

      gfc_init_se (&se, NULL);

      if (d->low == NULL)
	{
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], null_pointer_node);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], integer_zero_node);
	}
      else
	{
	  gfc_conv_expr_reference (&se, d->low);

	  CONSTRUCTOR_APPEND_ELT (node, ss_string1[k], se.expr);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string1_len[k], se.string_length);
	}

      if (d->high == NULL)
	{
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], null_pointer_node);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], integer_zero_node);
	}
      else
	{
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_reference (&se, d->high);

	  CONSTRUCTOR_APPEND_ELT (node, ss_string2[k], se.expr);
	  CONSTRUCTOR_APPEND_ELT (node, ss_string2_len[k], se.string_length);
	}

      CONSTRUCTOR_APPEND_ELT (node, ss_target[k],
			      build_int_cst (integer_type_node, d->n));

      tmp = build_constructor (select_struct[k], node);
      CONSTRUCTOR_APPEND_ELT (inits, NULL_TREE, tmp);
    }

  type = build_array_type (select_struct[k],
			   build_index_type (size_int (n-1)));

  init = build_constructor (type, inits);
  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;
  /* Create a static variable to hold the jump table.  */
  tmp = gfc_create_var (type, "jumptable");
  TREE_CONSTANT (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;
  init = tmp;

  /* Build the library call */
  init = gfc_build_addr_expr (pvoid_type_node, init);

  if (code->expr1->ts.kind == 1)
    fndecl = gfor_fndecl_select_string;
  else if (code->expr1->ts.kind == 4)
    fndecl = gfor_fndecl_select_string_char4;
  else
    gcc_unreachable ();

  tmp = build_call_expr_loc (input_location,
			     fndecl, 4, init,
			     build_int_cst (gfc_charlen_type_node, n),
			     expr1se.expr, expr1se.string_length);
  case_num = gfc_create_var (integer_type_node, "case_num");
  gfc_add_modify (&block, case_num, tmp);

  gfc_add_block_to_block (&block, &expr1se.post);

  tmp = gfc_finish_block (&body);
  tmp = fold_build3_loc (input_location, SWITCH_EXPR, NULL_TREE,
			 case_num, tmp, NULL_TREE);
  gfc_add_expr_to_block (&block, tmp);

  tmp = build1_v (LABEL_EXPR, end_label);
  gfc_add_expr_to_block (&block, tmp);

  return gfc_finish_block (&block);
}
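
/* Schematic illustration (a sketch, not the exact generated trees): in the
   general CHARACTER case the routine above emits one jump-table entry per
   case spec, roughly

       { string1, string1_len, string2, string2_len, target }

   where string1/string2 are the lower/upper bounds (null pointer and zero
   length for an unbounded or default bound) and target is the case number
   d->n.  At run time the select_string routine (gfor_fndecl_select_string,
   or its char4 variant) searches the table and returns the case number,
   which then drives the SWITCH_EXPR built above.  */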
/* Translate the three variants of the SELECT CASE construct.

   SELECT CASEs with INTEGER case expressions can be translated to an
   equivalent GENERIC switch statement, and for LOGICAL case
   expressions we build one or two if-else compares.

   SELECT CASEs with CHARACTER case expressions are a whole different
   story, because they don't exist in GENERIC.  So we sort them and
   do a binary search at runtime.

   Fortran has no BREAK statement, and it does not allow jumps from
   one case block to another.  That makes things a lot easier for
   the optimizers.  */

tree
gfc_trans_select (gfc_code * code)
{
  stmtblock_t block;
  tree body;
  tree exit_label;

  gcc_assert (code && code->expr1);
  gfc_init_block (&block);

  /* Build the exit label and hang it in.  */
  exit_label = gfc_build_label_decl (NULL_TREE);
  code->exit_label = exit_label;

  /* Empty SELECT constructs are legal.  */
  if (code->block == NULL)
    body = build_empty_stmt (input_location);

  /* Select the correct translation function.  */
  else
    switch (code->expr1->ts.type)
      {
      case BT_LOGICAL:
	body = gfc_trans_logical_select (code);
	break;

      case BT_INTEGER:
	body = gfc_trans_integer_select (code);
	break;

      case BT_CHARACTER:
	body = gfc_trans_character_select (code);
	break;

      default:
	gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
      }

  /* Build everything together.  */
  gfc_add_expr_to_block (&block, body);
  gfc_add_expr_to_block (&block, build1_v (LABEL_EXPR, exit_label));

  return gfc_finish_block (&block);
}
/* Traversal function to substitute a replacement symtree if the symbol
   in the expression is the same as that passed.  f == 2 signals that
   that variable itself is not to be checked - only the references.
   This group of functions is used when the variable expression in a
   FORALL assignment has internal references.  For example:
		FORALL (i = 1:4) p(p(i)) = i
   The only recourse here is to store a copy of 'p' for the index
   expression.  */

static gfc_symtree *new_symtree;
static gfc_symtree *old_symtree;

static bool
forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
{
  if (expr->expr_type != EXPR_VARIABLE)
    return false;

  if (*f == 2)
    *f = 1;
  else if (expr->symtree->n.sym == sym)
    expr->symtree = new_symtree;

  return false;
}

static void
forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
{
  gfc_traverse_expr (e, sym, forall_replace, f);
}

static bool
forall_restore (gfc_expr *expr,
		gfc_symbol *sym ATTRIBUTE_UNUSED,
		int *f ATTRIBUTE_UNUSED)
{
  if (expr->expr_type != EXPR_VARIABLE)
    return false;

  if (expr->symtree == new_symtree)
    expr->symtree = old_symtree;

  return false;
}

static void
forall_restore_symtree (gfc_expr *e)
{
  gfc_traverse_expr (e, NULL, forall_restore, 0);
}
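
/* Schematic illustration (a sketch of the intent, not taken from the
   original sources): for the example above, FORALL (i = 1:4) p(p(i)) = i,
   the traversal is started on the lhs with f == 2, so the outer 'p' (the
   variable actually being defined) is left alone while the inner reference
   p(i) is redirected to the temporary's symtree; on the rhs it is started
   with f == 1, so every reference to 'p' would be redirected.
   forall_restore undoes the substitution afterwards.  */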
static void
forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_se tse;
  gfc_se rse;
  gfc_expr *e;
  gfc_symbol *new_sym;
  gfc_symbol *old_sym;
  gfc_symtree *root;
  tree tmp;

  /* Build a copy of the lvalue.  */
  old_symtree = c->expr1->symtree;
  old_sym = old_symtree->n.sym;
  e = gfc_lval_expr_from_sym (old_sym);
  if (old_sym->attr.dimension)
    {
      gfc_init_se (&tse, NULL);
      gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN, false);
      gfc_add_block_to_block (pre, &tse.pre);
      gfc_add_block_to_block (post, &tse.post);
      tse.expr = build_fold_indirect_ref_loc (input_location, tse.expr);

      if (e->ts.type != BT_CHARACTER)
	{
	  /* Use the variable offset for the temporary.  */
	  tmp = gfc_conv_array_offset (old_sym->backend_decl);
	  gfc_conv_descriptor_offset_set (pre, tse.expr, tmp);
	}
    }
  else
    {
      gfc_init_se (&tse, NULL);
      gfc_init_se (&rse, NULL);
      gfc_conv_expr (&rse, e);
      if (e->ts.type == BT_CHARACTER)
	{
	  tse.string_length = rse.string_length;
	  tmp = gfc_get_character_type_len (gfc_default_character_kind,
					    tse.string_length);
	  tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
					  tse.string_length);
	  gfc_add_block_to_block (pre, &tse.pre);
	  gfc_add_block_to_block (post, &tse.post);
	}
      else
	{
	  tmp = gfc_typenode_for_spec (&e->ts);
	  tse.expr = gfc_create_var (tmp, "temp");
	}

      tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
				     e->expr_type == EXPR_VARIABLE, true);
      gfc_add_expr_to_block (pre, tmp);
    }

  /* Create a new symbol to represent the lvalue.  */
  new_sym = gfc_new_symbol (old_sym->name, NULL);
  new_sym->ts = old_sym->ts;
  new_sym->attr.referenced = 1;
  new_sym->attr.temporary = 1;
  new_sym->attr.dimension = old_sym->attr.dimension;
  new_sym->attr.flavor = old_sym->attr.flavor;

  /* Use the temporary as the backend_decl.  */
  new_sym->backend_decl = tse.expr;

  /* Create a fake symtree for it.  */
  root = NULL;
  new_symtree = gfc_new_symtree (&root, old_sym->name);
  new_symtree->n.sym = new_sym;
  gcc_assert (new_symtree == root);

  /* Go through the expression reference replacing the old_symtree
     with the new.  */
  forall_replace_symtree (c->expr1, old_sym, 2);

  /* Now we have made this temporary, we might as well use it for
     the right hand side.  */
  forall_replace_symtree (c->expr2, old_sym, 1);
}
/* Handles dependencies in forall assignments.  */
static int
check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
{
  gfc_ref *lref;
  gfc_ref *rref;
  int need_temp;
  gfc_symbol *lsym;

  lsym = c->expr1->symtree->n.sym;
  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);

  /* Now check for dependencies within the 'variable'
     expression itself.  These are treated by making a complete
     copy of variable and changing all the references to it
     point to the copy instead.  Note that the shallow copy of
     the variable will not suffice for derived types with
     pointer components.  We therefore leave these to their
     own devices.  */
  if (lsym->ts.type == BT_DERIVED
      && lsym->ts.u.derived->attr.pointer_comp)
    return need_temp;

  new_symtree = NULL;
  if (find_forall_index (c->expr1, lsym, 2))
    {
      forall_make_variable_temp (c, pre, post);
      need_temp = 0;
    }

  /* Substrings with dependencies are treated in the same
     way.  */
  if (c->expr1->ts.type == BT_CHARACTER
      && c->expr1->ref
      && c->expr2->expr_type == EXPR_VARIABLE
      && lsym == c->expr2->symtree->n.sym)
    {
      for (lref = c->expr1->ref; lref; lref = lref->next)
	if (lref->type == REF_SUBSTRING)
	  break;
      for (rref = c->expr2->ref; rref; rref = rref->next)
	if (rref->type == REF_SUBSTRING)
	  break;

      if (lref && rref
	  && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
	{
	  forall_make_variable_temp (c, pre, post);
	  need_temp = 0;
	}
    }

  return need_temp;
}


static void
cleanup_forall_symtrees (gfc_code *c)
{
  forall_restore_symtree (c->expr1);
  forall_restore_symtree (c->expr2);
  free (new_symtree->n.sym);
  free (new_symtree);
}
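
/* Schematic illustration (not taken from the original sources): a case the
   substring check above is meant to catch is, roughly,

       FORALL (i = 1:n) line(i)(2:4) = line(i)(1:3)

   where the rhs substring of the same variable starts before the lhs
   substring; the lhs is then copied to a temporary first via
   forall_make_variable_temp.  */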
/* Generate the loops for a FORALL block, specified by FORALL_TMP.  BODY
   is the contents of the FORALL block/stmt to be iterated.  MASK_FLAG
   indicates whether we should generate code to test the FORALLs mask
   array.  OUTER is the loop header to be used for initializing mask
   indices.

   The generated loop format is:
    count = (end - start + step) / step
    loopvar = start
    while (1)
      {
        if (count <= 0)
          goto end_of_loop
        <body>
        loopvar += step
        count --
      }
    end_of_loop:  */

static tree
gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
		       int mask_flag, stmtblock_t *outer)
{
  int n, nvar;
  tree tmp;
  tree cond;
  stmtblock_t block;
  tree exit_label;
  tree count;
  tree var, start, end, step;
  iter_info *iter;

  /* Initialize the mask index outside the FORALL nest.  */
  if (mask_flag && forall_tmp->mask)
    gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);

  iter = forall_tmp->this_loop;
  nvar = forall_tmp->nvar;
  for (n = 0; n < nvar; n++)
    {
      var = iter->var;
      start = iter->start;
      end = iter->end;
      step = iter->step;

      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* The loop counter.  */
      count = gfc_create_var (TREE_TYPE (var), "count");

      /* The body of the loop.  */
      gfc_init_block (&block);

      /* The exit condition.  */
      cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
			      count, build_int_cst (TREE_TYPE (count), 0));
      if (forall_tmp->do_concurrent)
	cond = build2 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
		       build_int_cst (integer_type_node,
				      annot_expr_ivdep_kind));

      tmp = build1_v (GOTO_EXPR, exit_label);
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			     cond, tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);

      /* The main loop body.  */
      gfc_add_expr_to_block (&block, body);

      /* Increment the loop variable.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), var,
			     step);
      gfc_add_modify (&block, var, tmp);

      /* Advance to the next mask element.  Only do this for the
	 innermost loop.  */
      if (n == 0 && mask_flag && forall_tmp->mask)
	{
	  tree maskindex = forall_tmp->maskindex;
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type,
				 maskindex, gfc_index_one_node);
	  gfc_add_modify (&block, maskindex, tmp);
	}

      /* Decrement the loop counter.  */
      tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), count,
			     build_int_cst (TREE_TYPE (var), 1));
      gfc_add_modify (&block, count, tmp);

      body = gfc_finish_block (&block);

      /* Loop var initialization.  */
      gfc_init_block (&block);
      gfc_add_modify (&block, var, start);

      /* Initialize the loop counter.  */
      tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (var), step,
			     start);
      tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (var), end,
			     tmp);
      tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR, TREE_TYPE (var),
			     tmp, step);
      gfc_add_modify (&block, count, tmp);

      /* The loop expression.  */
      tmp = build1_v (LOOP_EXPR, body);
      gfc_add_expr_to_block (&block, tmp);

      /* The exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&block, tmp);

      body = gfc_finish_block (&block);
      iter = iter->next;
    }
  return body;
}
/* Generate the body and loops according to MASK_FLAG.  If MASK_FLAG
   is nonzero, the body is controlled by all masks in the forall nest.
   Otherwise, the innermost loop is not controlled by its mask.  This
   is used for initializing that mask.  */

static tree
gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
			      int mask_flag)
{
  forall_info *forall_tmp;
  tree mask, maskindex;
  tree tmp;
  stmtblock_t header;

  gfc_start_block (&header);

  forall_tmp = nested_forall_info;
  while (forall_tmp != NULL)
    {
      /* Generate body with masks' control.  */
      if (mask_flag)
	{
	  mask = forall_tmp->mask;
	  maskindex = forall_tmp->maskindex;

	  /* If a mask was specified make the assignment conditional.  */
	  if (mask)
	    {
	      tmp = gfc_build_array_ref (mask, maskindex, NULL);
	      body = build3_v (COND_EXPR, tmp, body,
			       build_empty_stmt (input_location));
	    }
	}
      body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
      forall_tmp = forall_tmp->prev_nest;
      mask_flag = 1;
    }

  gfc_add_expr_to_block (&header, body);
  return gfc_finish_block (&header);
}
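
/* Schematic illustration (a sketch, not the exact generated trees): for a
   two-level nest the routine above builds, roughly,

       for (outer indices)
         if (mask_outer[mi_outer++])
           for (inner indices)
             if (mask_inner[mi_inner++])
               BODY;

   working from the innermost forall_info outwards, each level contributing
   its mask test (when it has one) and its counted loops.  */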
/* Allocate data for holding a temporary array.  Returns either a local
   temporary array or a pointer variable.  */

static tree
gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
		 tree elem_type)
{
  tree tmpvar;
  tree type;
  tree tmp;

  if (INTEGER_CST_P (size))
    tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			   size, gfc_index_one_node);
  else
    tmp = NULL_TREE;

  type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
  type = build_array_type (elem_type, type);
  if (gfc_can_put_var_on_stack (bytesize))
    {
      gcc_assert (INTEGER_CST_P (size));
      tmpvar = gfc_create_var (type, "temp");
      *pdata = NULL_TREE;
    }
  else
    {
      tmpvar = gfc_create_var (build_pointer_type (type), "temp");
      *pdata = convert (pvoid_type_node, tmpvar);

      tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
      gfc_add_modify (pblock, tmpvar, tmp);
    }
  return tmpvar;
}
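
/* Schematic illustration (a sketch, not the exact generated trees): when
   BYTESIZE is a small compile-time constant that gfc_can_put_var_on_stack
   accepts, the temporary ends up as an ordinary local array of SIZE
   elements and *PDATA is NULL_TREE; otherwise "temp" is a pointer, *PDATA
   records it so the caller can free it later, and the storage comes from
   gfc_call_malloc as above.  */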
/* Generate code to copy the temporary to the actual lhs.  */

static tree
generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
			       tree count1, tree wheremask, bool invert)
{
  gfc_ss *lss;
  gfc_se lse, rse;
  stmtblock_t block, body;
  gfc_loopinfo loop1;
  tree tmp;
  tree wheremaskexpr;

  lss = gfc_walk_expr (expr);

  if (lss == gfc_ss_terminator)
    {
      gfc_start_block (&block);

      gfc_init_se (&lse, NULL);

      /* Translate the expression.  */
      gfc_conv_expr (&lse, expr);

      /* Form the expression for the temporary.  */
      tmp = gfc_build_array_ref (tmp1, count1, NULL);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &lse.pre);
      gfc_add_modify (&block, lse.expr, tmp);
      gfc_add_block_to_block (&block, &lse.post);

      /* Increment the count1.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
			     count1, gfc_index_one_node);
      gfc_add_modify (&block, count1, tmp);

      tmp = gfc_finish_block (&block);
    }
  else
    {
      gfc_start_block (&block);

      gfc_init_loopinfo (&loop1);
      gfc_init_se (&rse, NULL);
      gfc_init_se (&lse, NULL);

      /* Associate the lss with the loop.  */
      gfc_add_ss_to_loop (&loop1, lss);

      /* Calculate the bounds of the scalarization.  */
      gfc_conv_ss_startstride (&loop1);
      /* Setup the scalarizing loops.  */
      gfc_conv_loop_setup (&loop1, &expr->where);

      gfc_mark_ss_chain_used (lss, 1);

      /* Start the scalarized loop body.  */
      gfc_start_scalarized_body (&loop1, &body);

      /* Setup the gfc_se structures.  */
      gfc_copy_loopinfo_to_se (&lse, &loop1);
      lse.ss = lss;

      /* Form the expression of the temporary.  */
      if (lss != gfc_ss_terminator)
	rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
      /* Translate expr.  */
      gfc_conv_expr (&lse, expr);

      /* Use the scalar assignment.  */
      rse.string_length = lse.string_length;
      tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, true, true);

      /* Form the mask expression according to the mask tree list.  */
      if (wheremask)
	{
	  wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
	  if (invert)
	    wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
					     TREE_TYPE (wheremaskexpr),
					     wheremaskexpr);
	  tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
				 wheremaskexpr, tmp,
				 build_empty_stmt (input_location));
	}

      gfc_add_expr_to_block (&body, tmp);

      /* Increment count1.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     count1, gfc_index_one_node);
      gfc_add_modify (&body, count1, tmp);

      /* Increment count3.  */
      if (count3)
	{
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, count3,
				 gfc_index_one_node);
	  gfc_add_modify (&body, count3, tmp);
	}

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop1, &body);
      gfc_add_block_to_block (&block, &loop1.pre);
      gfc_add_block_to_block (&block, &loop1.post);
      gfc_cleanup_loop (&loop1);

      tmp = gfc_finish_block (&block);
    }
  return tmp;
}
/* Generate code to copy rhs to the temporary.  TMP1 is the address of
   the temporary, LSS and RSS are formed in function compute_inner_temp_size(),
   and should not be freed.  WHEREMASK is the conditional execution mask
   whose sense may be inverted by INVERT.  */

static tree
generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
			       tree count1, gfc_ss *lss, gfc_ss *rss,
			       tree wheremask, bool invert)
{
  stmtblock_t block, body1;
  gfc_loopinfo loop;
  gfc_se lse;
  gfc_se rse;
  tree tmp;
  tree wheremaskexpr;

  gfc_start_block (&block);

  gfc_init_se (&rse, NULL);
  gfc_init_se (&lse, NULL);

  if (lss == gfc_ss_terminator)
    {
      gfc_init_block (&body1);
      gfc_conv_expr (&rse, expr2);
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
    }
  else
    {
      /* Initialize the loop.  */
      gfc_init_loopinfo (&loop);

      /* We may need LSS to determine the shape of the expression.  */
      gfc_add_ss_to_loop (&loop, lss);
      gfc_add_ss_to_loop (&loop, rss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop, &expr2->where);

      gfc_mark_ss_chain_used (rss, 1);
      /* Start the loop body.  */
      gfc_start_scalarized_body (&loop, &body1);

      /* Translate the expression.  */
      gfc_copy_loopinfo_to_se (&rse, &loop);
      rse.ss = rss;
      gfc_conv_expr (&rse, expr2);

      /* Form the expression of the temporary.  */
      lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
    }

  /* Use the scalar assignment.  */
  lse.string_length = rse.string_length;
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
				 expr2->expr_type == EXPR_VARIABLE, true);

  /* Form the mask expression according to the mask tree list.  */
  if (wheremask)
    {
      wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
      if (invert)
	wheremaskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
					 TREE_TYPE (wheremaskexpr),
					 wheremaskexpr);
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
			     wheremaskexpr, tmp,
			     build_empty_stmt (input_location));
    }

  gfc_add_expr_to_block (&body1, tmp);

  if (lss == gfc_ss_terminator)
    {
      gfc_add_block_to_block (&block, &body1);

      /* Increment count1.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (count1),
			     count1, gfc_index_one_node);
      gfc_add_modify (&block, count1, tmp);
    }
  else
    {
      /* Increment count1.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     count1, gfc_index_one_node);
      gfc_add_modify (&body1, count1, tmp);

      /* Increment count3.  */
      if (count3)
	{
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type,
				 count3, gfc_index_one_node);
	  gfc_add_modify (&body1, count3, tmp);
	}

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body1);

      gfc_add_block_to_block (&block, &loop.pre);
      gfc_add_block_to_block (&block, &loop.post);

      gfc_cleanup_loop (&loop);
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
	 as tree nodes in SS may not be valid in different scope.  */
    }

  tmp = gfc_finish_block (&block);
  return tmp;
}
/* Calculate the size of temporary needed in the assignment inside forall.
   LSS and RSS are filled in this function.  */

static tree
compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
			 stmtblock_t * pblock,
			 gfc_ss **lss, gfc_ss **rss)
{
  gfc_loopinfo loop;
  tree size;
  int i;
  int save_flag;
  tree tmp;

  *lss = gfc_walk_expr (expr1);
  *rss = NULL;

  size = gfc_index_one_node;
  if (*lss != gfc_ss_terminator)
    {
      gfc_init_loopinfo (&loop);

      /* Walk the RHS of the expression.  */
      *rss = gfc_walk_expr (expr2);
      if (*rss == gfc_ss_terminator)
	/* The rhs is scalar.  Add a ss for the expression.  */
	*rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);

      /* Associate the SS with the loop.  */
      gfc_add_ss_to_loop (&loop, *lss);
      /* We don't actually need to add the rhs at this point, but it might
	 make guessing the loop bounds a bit easier.  */
      gfc_add_ss_to_loop (&loop, *rss);

      /* We only want the shape of the expression, not rest of the junk
	 generated by the scalarizer.  */
      loop.array_parameter = 1;

      /* Calculate the bounds of the scalarization.  */
      save_flag = gfc_option.rtcheck;
      gfc_option.rtcheck &= ~GFC_RTCHECK_BOUNDS;
      gfc_conv_ss_startstride (&loop);
      gfc_option.rtcheck = save_flag;
      gfc_conv_loop_setup (&loop, &expr2->where);

      /* Figure out how many elements we need.  */
      for (i = 0; i < loop.dimen; i++)
	{
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type,
				 gfc_index_one_node, loop.from[i]);
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, tmp, loop.to[i]);
	  size = fold_build2_loc (input_location, MULT_EXPR,
				  gfc_array_index_type, size, tmp);
	}
      gfc_add_block_to_block (pblock, &loop.pre);
      size = gfc_evaluate_now (size, pblock);
      gfc_add_block_to_block (pblock, &loop.post);

      /* TODO: write a function that cleans up a loopinfo without freeing
	 the SS chains.  Currently a NOP.  */
    }

  return size;
}
/* Calculate the overall iterator number of the nested forall construct.
   This routine actually calculates the number of times the body of the
   nested forall specified by NESTED_FORALL_INFO is executed and multiplies
   that by the expression INNER_SIZE.  The BLOCK argument specifies the
   block in which to calculate the result, and the optional INNER_SIZE_BODY
   argument contains any statements that need to be executed (inside the
   loop) to initialize or calculate INNER_SIZE.  */

static tree
compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
			     stmtblock_t *inner_size_body, stmtblock_t *block)
{
  forall_info *forall_tmp = nested_forall_info;
  tree number;
  tree tmp;
  stmtblock_t body;

  /* We can eliminate the innermost unconditional loops with constant
     array bounds.  */
  if (INTEGER_CST_P (inner_size))
    {
      while (forall_tmp
	     && !forall_tmp->mask
	     && INTEGER_CST_P (forall_tmp->size))
	{
	  inner_size = fold_build2_loc (input_location, MULT_EXPR,
					gfc_array_index_type,
					inner_size, forall_tmp->size);
	  forall_tmp = forall_tmp->prev_nest;
	}

      /* If there are no loops left, we have our constant result.  */
      if (!forall_tmp)
	return inner_size;
    }

  /* Otherwise, create a temporary variable to compute the result.  */
  number = gfc_create_var (gfc_array_index_type, "num");
  gfc_add_modify (block, number, gfc_index_zero_node);

  gfc_start_block (&body);
  if (inner_size_body)
    gfc_add_block_to_block (&body, inner_size_body);
  if (forall_tmp)
    tmp = fold_build2_loc (input_location, PLUS_EXPR,
			   gfc_array_index_type, number, inner_size);
  else
    tmp = inner_size;
  gfc_add_modify (&body, number, tmp);
  tmp = gfc_finish_block (&body);

  /* Generate loops.  */
  if (forall_tmp != NULL)
    tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);

  gfc_add_expr_to_block (block, tmp);

  return number;
}
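
/* Worked example (schematic): for FORALL (i = 1:9:2, j = 1:4) with an
   unmasked nest and a constant INNER_SIZE of 3, the per-level sizes 5
   (for i, (9 + 2 - 1) / 2) and 4 (for j) are folded in at compile time,
   giving 3 * 5 * 4 = 60 with no runtime loop; as soon as a level is masked
   or has a non-constant size, the "num" accumulator and the generated
   loops above take over.  */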
/* Allocate temporary for forall construct.  SIZE is the size of temporary
   needed.  PTEMP1 is returned so the space can be freed later.  */

static tree
allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
				 tree * ptemp1)
{
  tree bytesize;
  tree unit;
  tree tmp;

  unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
  if (!integer_onep (unit))
    bytesize = fold_build2_loc (input_location, MULT_EXPR,
				gfc_array_index_type, size, unit);
  else
    bytesize = size;

  *ptemp1 = NULL;
  tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);

  if (*ptemp1)
    tmp = build_fold_indirect_ref_loc (input_location, tmp);
  return tmp;
}


/* Allocate temporary for forall construct according to the information in
   nested_forall_info.  INNER_SIZE is the size of temporary needed in the
   assignment inside forall.  PTEMP1 is returned so the space can be freed
   later.  */

static tree
allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
			       tree inner_size, stmtblock_t * inner_size_body,
			       stmtblock_t * block, tree * ptemp1)
{
  tree size;

  /* Calculate the total size of temporary needed in forall construct.  */
  size = compute_overall_iter_number (nested_forall_info, inner_size,
				      inner_size_body, block);

  return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
}
/* Handle assignments inside forall which need a temporary.

    forall (i=start:end:stride; maskexpr)
      e<i> = f<i>
    end forall
   (where e,f<i> are arbitrary expressions possibly involving i
    and there is a dependency between e<i> and f<i>)
   Translates to:
    masktmp(:) = maskexpr(:)

    maskindex = 0;
    count1 = 0;
    ALLOCATE (tmp(...))
    for (i = start; i <= end; i += stride)
      {
	if (masktmp[maskindex++])
	  tmp[count1++] = f<i>
      }
    maskindex = 0;
    count1 = 0;
    for (i = start; i <= end; i += stride)
      {
	if (masktmp[maskindex++])
	  e<i> = tmp[count1++]
      }
    DEALLOCATE (tmp)
  */
static void
gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
			    tree wheremask, bool invert,
			    forall_info * nested_forall_info,
			    stmtblock_t * block)
{
  tree type;
  tree inner_size;
  gfc_ss *lss, *rss;
  tree count, count1;
  tree tmp, tmp1;
  tree ptemp1;
  stmtblock_t inner_size_body;

  /* Create vars.  count1 is the current iterator number of the nested
     forall.  */
  count1 = gfc_create_var (gfc_array_index_type, "count1");

  /* Count is the wheremask index.  */
  if (wheremask)
    {
      count = gfc_create_var (gfc_array_index_type, "count");
      gfc_add_modify (block, count, gfc_index_zero_node);
    }
  else
    count = NULL;

  /* Initialize count1.  */
  gfc_add_modify (block, count1, gfc_index_zero_node);

  /* Calculate the size of temporary needed in the assignment.  Return loop, lss
     and rss which are used in function generate_loop_for_rhs_to_temp().  */
  gfc_init_block (&inner_size_body);
  inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
					&lss, &rss);

  /* The type of LHS.  Used in function allocate_temp_for_forall_nest */
  if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->length)
    {
      if (!expr1->ts.u.cl->backend_decl)
	{
	  gfc_se tse;
	  gfc_init_se (&tse, NULL);
	  gfc_conv_expr (&tse, expr1->ts.u.cl->length);
	  expr1->ts.u.cl->backend_decl = tse.expr;
	}
      type = gfc_get_character_type_len (gfc_default_character_kind,
					 expr1->ts.u.cl->backend_decl);
    }
  else
    type = gfc_typenode_for_spec (&expr1->ts);

  /* Allocate temporary for nested forall construct according to the
     information in nested_forall_info and inner_size.  */
  tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
					&inner_size_body, block, &ptemp1);

  /* Generate code to copy rhs to the temporary.  */
  tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
				       wheremask, invert);

  /* Generate body and loops according to the information in
     nested_forall_info.  */
  tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
  gfc_add_expr_to_block (block, tmp);

  /* Reset count1.  */
  gfc_add_modify (block, count1, gfc_index_zero_node);

  /* Reset count.  */
  if (wheremask)
    gfc_add_modify (block, count, gfc_index_zero_node);

  /* Generate code to copy the temporary to lhs.  */
  tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
				       wheremask, invert);

  /* Generate body and loops according to the information in
     nested_forall_info.  */
  tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
  gfc_add_expr_to_block (block, tmp);

  if (ptemp1)
    {
      /* Free the temporary.  */
      tmp = gfc_call_free (ptemp1);
      gfc_add_expr_to_block (block, tmp);
    }
}
/* Translate pointer assignment inside FORALL which needs a temporary.  */

static void
gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
				    forall_info * nested_forall_info,
				    stmtblock_t * block)
{
  tree type;
  tree inner_size;
  gfc_ss *lss, *rss;
  gfc_se lse;
  gfc_se rse;
  gfc_array_info *info;
  gfc_loopinfo loop;
  tree desc;
  tree parm;
  tree parmtype;
  stmtblock_t body;
  tree count;
  tree tmp, tmp1, ptemp1;

  count = gfc_create_var (gfc_array_index_type, "count");
  gfc_add_modify (block, count, gfc_index_zero_node);

  inner_size = gfc_index_one_node;
  lss = gfc_walk_expr (expr1);
  rss = gfc_walk_expr (expr2);
  if (lss == gfc_ss_terminator)
    {
      type = gfc_typenode_for_spec (&expr1->ts);
      type = build_pointer_type (type);

      /* Allocate temporary for nested forall construct according to the
	 information in nested_forall_info and inner_size.  */
      tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
					    inner_size, NULL, block, &ptemp1);
      gfc_start_block (&body);
      gfc_init_se (&lse, NULL);
      lse.expr = gfc_build_array_ref (tmp1, count, NULL);
      gfc_init_se (&rse, NULL);
      rse.want_pointer = 1;
      gfc_conv_expr (&rse, expr2);
      gfc_add_block_to_block (&body, &rse.pre);
      gfc_add_modify (&body, lse.expr,
		      fold_convert (TREE_TYPE (lse.expr), rse.expr));
      gfc_add_block_to_block (&body, &rse.post);

      /* Increment count.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     count, gfc_index_one_node);
      gfc_add_modify (&body, count, tmp);

      tmp = gfc_finish_block (&body);

      /* Generate body and loops according to the information in
	 nested_forall_info.  */
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (block, tmp);

      /* Reset count.  */
      gfc_add_modify (block, count, gfc_index_zero_node);

      gfc_start_block (&body);
      gfc_init_se (&lse, NULL);
      gfc_init_se (&rse, NULL);
      rse.expr = gfc_build_array_ref (tmp1, count, NULL);
      lse.want_pointer = 1;
      gfc_conv_expr (&lse, expr1);
      gfc_add_block_to_block (&body, &lse.pre);
      gfc_add_modify (&body, lse.expr, rse.expr);
      gfc_add_block_to_block (&body, &lse.post);
      /* Increment count.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     count, gfc_index_one_node);
      gfc_add_modify (&body, count, tmp);
      tmp = gfc_finish_block (&body);

      /* Generate body and loops according to the information in
	 nested_forall_info.  */
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (block, tmp);
    }
  else
    {
      gfc_init_loopinfo (&loop);

      /* Associate the SS with the loop.  */
      gfc_add_ss_to_loop (&loop, rss);

      /* Setup the scalarizing loops and bounds.  */
      gfc_conv_ss_startstride (&loop);

      gfc_conv_loop_setup (&loop, &expr2->where);

      info = &rss->info->data.array;
      desc = info->descriptor;

      /* Make a new descriptor.  */
      parmtype = gfc_get_element_type (TREE_TYPE (desc));
      parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, 0,
					    loop.from, loop.to, 1,
					    GFC_ARRAY_UNKNOWN, true);

      /* Allocate temporary for nested forall construct.  */
      tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
					    inner_size, NULL, block, &ptemp1);
      gfc_start_block (&body);
      gfc_init_se (&lse, NULL);
      lse.expr = gfc_build_array_ref (tmp1, count, NULL);
      lse.direct_byref = 1;
      gfc_conv_expr_descriptor (&lse, expr2);

      gfc_add_block_to_block (&body, &lse.pre);
      gfc_add_block_to_block (&body, &lse.post);

      /* Increment count.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     count, gfc_index_one_node);
      gfc_add_modify (&body, count, tmp);

      tmp = gfc_finish_block (&body);

      /* Generate body and loops according to the information in
	 nested_forall_info.  */
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (block, tmp);

      /* Reset count.  */
      gfc_add_modify (block, count, gfc_index_zero_node);

      parm = gfc_build_array_ref (tmp1, count, NULL);
      gfc_init_se (&lse, NULL);
      gfc_conv_expr_descriptor (&lse, expr1);
      gfc_add_modify (&lse.pre, lse.expr, parm);
      gfc_start_block (&body);
      gfc_add_block_to_block (&body, &lse.pre);
      gfc_add_block_to_block (&body, &lse.post);

      /* Increment count.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     count, gfc_index_one_node);
      gfc_add_modify (&body, count, tmp);

      tmp = gfc_finish_block (&body);

      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (block, tmp);
    }

  /* Free the temporary.  */
  if (ptemp1)
    {
      tmp = gfc_call_free (ptemp1);
      gfc_add_expr_to_block (block, tmp);
    }
}
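
/* Schematic illustration (a sketch, not taken from the original sources):
   a pointer assignment such as, roughly,

       FORALL (i = 1:n) p(i)%q => p(n-i+1)%q

   points at targets that the same FORALL also redefines, so the routine
   above first stores every rhs pointer (or array descriptor, in the
   non-scalar branch) into the linear temporary in one forall nest, and only
   then performs the actual pointer assignments from the temporary in a
   second nest.  */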
/* FORALL and WHERE statements are really nasty, especially when you nest
   them.  All the rhs of a forall assignment must be evaluated before the
   actual assignments are performed.  Presumably this also applies to all the
   assignments in an inner where statement.  */

/* Generate code for a FORALL statement.  Any temporaries are allocated as a
   linear array, relying on the fact that we process in the same order in all
   loops.

    forall (i=start:end:stride; maskexpr)
      e<i> = f<i>
      g<i> = h<i>
    end forall
   (where e,f,g,h<i> are arbitrary expressions possibly involving i)
   Translates to:
    count = ((end + 1 - start) / stride)
    masktmp(:) = maskexpr(:)

    maskindex = 0;
    for (i = start; i <= end; i += stride)
      {
        if (masktmp[maskindex++])
          e<i> = f<i>
      }
    maskindex = 0;
    for (i = start; i <= end; i += stride)
      {
        if (masktmp[maskindex++])
          g<i> = h<i>
      }

    Note that this code only works when there are no dependencies.
    FORALL loops with array assignments and data dependencies are a real
    pain, because the size of the temporary cannot always be determined
    before the loop is executed.  This problem is compounded by the presence
    of nested FORALL constructs.  */

static tree
gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
{
  stmtblock_t pre;
  stmtblock_t post;
  stmtblock_t block;
  stmtblock_t body;
  tree *var;
  tree *start;
  tree *end;
  tree *step;
  gfc_expr **varexpr;
  tree tmp;
  tree assign;
  tree size;
  tree maskindex;
  tree mask;
  tree pmask;
  tree cycle_label = NULL_TREE;
  int n;
  int nvar;
  int need_temp;
  gfc_forall_iterator *fa;
  gfc_se se;
  gfc_code *c;
  gfc_saved_var *saved_vars;
  iter_info *this_forall;
  forall_info *info;
  bool need_mask;

  /* Do nothing if the mask is false.  */
  if (code->expr1
      && code->expr1->expr_type == EXPR_CONSTANT
      && !code->expr1->value.logical)
    return build_empty_stmt (input_location);

  nvar = 0;
  /* Count the FORALL index number.  */
  for (fa = code->ext.forall_iterator; fa; fa = fa->next)
    nvar++;

  /* Allocate the space for var, start, end, step, varexpr.  */
  var = XCNEWVEC (tree, nvar);
  start = XCNEWVEC (tree, nvar);
  end = XCNEWVEC (tree, nvar);
  step = XCNEWVEC (tree, nvar);
  varexpr = XCNEWVEC (gfc_expr *, nvar);
  saved_vars = XCNEWVEC (gfc_saved_var, nvar);

  /* Allocate the space for info.  */
  info = XCNEW (forall_info);

  gfc_start_block (&pre);
  gfc_init_block (&post);
  gfc_init_block (&block);

  n = 0;
  for (fa = code->ext.forall_iterator; fa; fa = fa->next)
    {
      gfc_symbol *sym = fa->var->symtree->n.sym;

      /* Allocate space for this_forall.  */
      this_forall = XCNEW (iter_info);

      /* Create a temporary variable for the FORALL index.  */
      tmp = gfc_typenode_for_spec (&sym->ts);
      var[n] = gfc_create_var (tmp, sym->name);
      gfc_shadow_sym (sym, var[n], &saved_vars[n]);

      /* Record it in this_forall.  */
      this_forall->var = var[n];

      /* Replace the index symbol's backend_decl with the temporary decl.  */
      sym->backend_decl = var[n];

      /* Work out the start, end and stride for the loop.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, fa->start);
      /* Record it in this_forall.  */
      this_forall->start = se.expr;
      gfc_add_block_to_block (&block, &se.pre);
      start[n] = se.expr;

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, fa->end);
      /* Record it in this_forall.  */
      this_forall->end = se.expr;
      gfc_make_safe_expr (&se);
      gfc_add_block_to_block (&block, &se.pre);
      end[n] = se.expr;

      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, fa->stride);
      /* Record it in this_forall.  */
      this_forall->step = se.expr;
      gfc_make_safe_expr (&se);
      gfc_add_block_to_block (&block, &se.pre);
      step[n] = se.expr;

      /* Set the NEXT field of this_forall to NULL.  */
      this_forall->next = NULL;
      /* Link this_forall to the info construct.  */
      if (info->this_loop)
	{
	  iter_info *iter_tmp = info->this_loop;
	  while (iter_tmp->next != NULL)
	    iter_tmp = iter_tmp->next;
	  iter_tmp->next = this_forall;
	}
      else
	info->this_loop = this_forall;

      n++;
    }

  /* Calculate the size needed for the current forall level.  */
  size = gfc_index_one_node;
  for (n = 0; n < nvar; n++)
    {
      /* size = (end + step - start) / step.  */
      tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (start[n]),
			     step[n], start[n]);
      tmp = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (end[n]),
			     end[n], tmp);
      tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, TREE_TYPE (tmp),
			     tmp, step[n]);
      tmp = convert (gfc_array_index_type, tmp);

      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, tmp);
    }

  /* Record the nvar and size of current forall level.  */
  info->nvar = nvar;
  info->size = size;

  if (code->expr1)
    {
      /* If the mask is .true., consider the FORALL unconditional.  */
      if (code->expr1->expr_type == EXPR_CONSTANT
	  && code->expr1->value.logical)
	need_mask = false;
      else
	need_mask = true;
    }
  else
    need_mask = false;

  /* First we need to allocate the mask.  */
  if (need_mask)
    {
      /* As the mask array can be very big, prefer compact boolean types.  */
      tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
      mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
					    size, NULL, &block, &pmask);
      maskindex = gfc_create_var_np (gfc_array_index_type, "mi");

      /* Record them in the info structure.  */
      info->maskindex = maskindex;
      info->mask = mask;
    }
  else
    {
      /* No mask was specified.  */
      maskindex = NULL_TREE;
      mask = pmask = NULL_TREE;
    }

  /* Link the current forall level to nested_forall_info.  */
  info->prev_nest = nested_forall_info;
  nested_forall_info = info;

  /* Copy the mask into a temporary variable if required.
     For now we assume a mask temporary is needed.  */
  if (need_mask)
    {
      /* As the mask array can be very big, prefer compact boolean types.  */
      tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);

      gfc_add_modify (&block, maskindex, gfc_index_zero_node);

      /* Start of mask assignment loop body.  */
      gfc_start_block (&body);

      /* Evaluate the mask expression.  */
      gfc_init_se (&se, NULL);
      gfc_conv_expr_val (&se, code->expr1);
      gfc_add_block_to_block (&body, &se.pre);

      /* Store the mask.  */
      se.expr = convert (mask_type, se.expr);

      tmp = gfc_build_array_ref (mask, maskindex, NULL);
      gfc_add_modify (&body, tmp, se.expr);

      /* Advance to the next mask element.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			     maskindex, gfc_index_one_node);
      gfc_add_modify (&body, maskindex, tmp);

      /* Generate the loops.  */
      tmp = gfc_finish_block (&body);
      tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
      gfc_add_expr_to_block (&block, tmp);
    }

  if (code->op == EXEC_DO_CONCURRENT)
    {
      gfc_init_block (&body);
      cycle_label = gfc_build_label_decl (NULL_TREE);
      code->cycle_label = cycle_label;
      tmp = gfc_trans_code (code->block->next);
      gfc_add_expr_to_block (&body, tmp);

      if (TREE_USED (cycle_label))
	{
	  tmp = build1_v (LABEL_EXPR, cycle_label);
	  gfc_add_expr_to_block (&body, tmp);
	}

      tmp = gfc_finish_block (&body);
      nested_forall_info->do_concurrent = true;
      tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
      gfc_add_expr_to_block (&block, tmp);
      goto done;
    }

  c = code->block->next;

  /* TODO: loop merging in FORALL statements.  */
  /* Now that we've got a copy of the mask, generate the assignment loops.  */
  while (c)
    {
      switch (c->op)
	{
	case EXEC_ASSIGN:
	  /* A scalar or array assignment.  Do the simple check for
	     lhs to rhs dependencies.  These make a temporary for the
	     rhs and form a second forall block to copy to variable.  */
	  need_temp = check_forall_dependencies (c, &pre, &post);

	  /* Temporaries due to array assignment data dependencies introduce
	     no end of problems.  */
	  if (need_temp)
	    gfc_trans_assign_need_temp (c->expr1, c->expr2, NULL, false,
					nested_forall_info, &block);
	  else
	    {
	      /* Use the normal assignment copying routines.  */
	      assign = gfc_trans_assignment (c->expr1, c->expr2, false, true);

	      /* Generate body and loops.  */
	      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
						  assign, 1);
	      gfc_add_expr_to_block (&block, tmp);
	    }

	  /* Cleanup any temporary symtrees that have been made to deal
	     with dependencies.  */
	  if (new_symtree)
	    cleanup_forall_symtrees (c);

	  break;

	case EXEC_WHERE:
	  /* Translate WHERE statement or WHERE construct nested in FORALL.  */
	  gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
	  break;

	/* Pointer assignment inside FORALL.  */
	case EXEC_POINTER_ASSIGN:
	  need_temp = gfc_check_dependency (c->expr1, c->expr2, 0);
	  if (need_temp)
	    gfc_trans_pointer_assign_need_temp (c->expr1, c->expr2,
						nested_forall_info, &block);
	  else
	    {
	      /* Use the normal assignment copying routines.  */
	      assign = gfc_trans_pointer_assignment (c->expr1, c->expr2);

	      /* Generate body and loops.  */
	      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
						  assign, 1);
	      gfc_add_expr_to_block (&block, tmp);
	    }
	  break;

	case EXEC_FORALL:
	  tmp = gfc_trans_forall_1 (c, nested_forall_info);
	  gfc_add_expr_to_block (&block, tmp);
	  break;

	/* Explicit subroutine calls are prevented by the frontend but interface
	   assignments can legitimately produce them.  */
	case EXEC_ASSIGN_CALL:
	  assign = gfc_trans_call (c, true, NULL_TREE, NULL_TREE, false);
	  tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
	  gfc_add_expr_to_block (&block, tmp);
	  break;

	default:
	  gcc_unreachable ();
	}

      c = c->next;
    }

done:
  /* Restore the original index variables.  */
  for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
    gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);

  /* Free the space for var, start, end, step, varexpr.  */
  free (var);
  free (start);
  free (end);
  free (step);
  free (varexpr);
  free (saved_vars);

  for (this_forall = info->this_loop; this_forall;)
    {
      iter_info *next = this_forall->next;
      free (this_forall);
      this_forall = next;
    }

  /* Free the space for this forall_info.  */
  free (info);

  if (pmask)
    {
      /* Free the temporary for the mask.  */
      tmp = gfc_call_free (pmask);
      gfc_add_expr_to_block (&block, tmp);
    }
  if (maskindex)
    pushdecl (maskindex);

  gfc_add_block_to_block (&pre, &block);
  gfc_add_block_to_block (&pre, &post);

  return gfc_finish_block (&pre);
}
/* Translate the FORALL statement or construct.  */

tree
gfc_trans_forall (gfc_code * code)
{
  return gfc_trans_forall_1 (code, NULL);
}


/* Translate the DO CONCURRENT construct.  */

tree
gfc_trans_do_concurrent (gfc_code * code)
{
  return gfc_trans_forall_1 (code, NULL);
}
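
/* Schematic illustration (a sketch, not taken from the original sources):
   a construct like

       DO CONCURRENT (i = 1:n)
         a(i) = b(i) + c(i)
       END DO

   goes through the same gfc_trans_forall_1 machinery as FORALL; the
   differences visible above are the CYCLE label handling and the
   do_concurrent flag, which makes gfc_trans_forall_loop annotate the loop
   condition with annot_expr_ivdep_kind (an ivdep hint for the middle end).  */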
/* Evaluate the WHERE mask expression, copy its value to a temporary.
   If the WHERE construct is nested in FORALL, compute the overall temporary
   needed by the WHERE mask expression multiplied by the iterator number of
   the nested forall.
   ME is the WHERE mask expression.
   MASK is the current execution mask upon input, whose sense may or may
   not be inverted as specified by the INVERT argument.
   CMASK is the updated execution mask on output, or NULL if not required.
   PMASK is the pending execution mask on output, or NULL if not required.
   BLOCK is the block in which to place the condition evaluation loops.  */

static void
gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
			 tree mask, bool invert, tree cmask, tree pmask,
			 tree mask_type, stmtblock_t * block)
{
  tree tmp, tmp1;
  gfc_ss *lss, *rss;
  gfc_loopinfo loop;
  stmtblock_t body, body1;
  tree count, cond, mtmp;
  gfc_se lse, rse;

  gfc_init_loopinfo (&loop);

  lss = gfc_walk_expr (me);
  rss = gfc_walk_expr (me);

  /* Variable to index the temporary.  */
  count = gfc_create_var (gfc_array_index_type, "count");
  /* Initialize count.  */
  gfc_add_modify (block, count, gfc_index_zero_node);

  gfc_start_block (&body);

  gfc_init_se (&rse, NULL);
  gfc_init_se (&lse, NULL);

  if (lss == gfc_ss_terminator)
    gfc_init_block (&body1);
  else
    {
      /* Initialize the loop.  */
      gfc_init_loopinfo (&loop);

      /* We may need LSS to determine the shape of the expression.  */
      gfc_add_ss_to_loop (&loop, lss);
      gfc_add_ss_to_loop (&loop, rss);

      gfc_conv_ss_startstride (&loop);
      gfc_conv_loop_setup (&loop, &me->where);

      gfc_mark_ss_chain_used (rss, 1);
      /* Start the loop body.  */
      gfc_start_scalarized_body (&loop, &body1);

      /* Translate the expression.  */
      gfc_copy_loopinfo_to_se (&rse, &loop);
    }
  gfc_conv_expr (&rse, me);

  /* Variable to evaluate mask condition.  */
  cond = gfc_create_var (mask_type, "cond");
  if (mask && (cmask || pmask))
    mtmp = gfc_create_var (mask_type, "mask");
  else mtmp = NULL_TREE;

  gfc_add_block_to_block (&body1, &lse.pre);
  gfc_add_block_to_block (&body1, &rse.pre);

  gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));

  if (mask && (cmask || pmask))
    {
      tmp = gfc_build_array_ref (mask, count, NULL);
      if (invert)
	tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, tmp);
      gfc_add_modify (&body1, mtmp, tmp);
    }

  if (cmask)
    {
      tmp1 = gfc_build_array_ref (cmask, count, NULL);
      tmp = cond;
      if (mask)
	tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type,
			       mtmp, tmp);
      gfc_add_modify (&body1, tmp1, tmp);
    }

  if (pmask)
    {
      tmp1 = gfc_build_array_ref (pmask, count, NULL);
      tmp = fold_build1_loc (input_location, TRUTH_NOT_EXPR, mask_type, cond);
      if (mask)
	tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR, mask_type, mtmp,
			       tmp);
      gfc_add_modify (&body1, tmp1, tmp);
    }

  gfc_add_block_to_block (&body1, &lse.post);
  gfc_add_block_to_block (&body1, &rse.post);

  if (lss == gfc_ss_terminator)
    gfc_add_block_to_block (&body, &body1);
  else
    {
      /* Increment count.  */
      tmp1 = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			      count, gfc_index_one_node);
      gfc_add_modify (&body1, count, tmp1);

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body1);

      gfc_add_block_to_block (&body, &loop.pre);
      gfc_add_block_to_block (&body, &loop.post);

      gfc_cleanup_loop (&loop);
      /* TODO: Reuse lss and rss when copying temp->lhs.  Need to be careful
	 as tree nodes in SS may not be valid in different scope.  */
    }

  tmp1 = gfc_finish_block (&body);
  /* If the WHERE construct is inside FORALL, fill the full temporary.  */
  if (nested_forall_info != NULL)
    tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);

  gfc_add_expr_to_block (block, tmp1);
}
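
/* Schematic illustration (a sketch, not the exact generated trees): for

       WHERE (a > 0)
         ...
       ELSEWHERE
         ...
       END WHERE

   the first call evaluates cond = (a > 0) element by element and fills,
   roughly, cmask[count] = mask && cond and pmask[count] = mask && !cond
   (just cond and !cond when there is no incoming mask), so CMASK controls
   the WHERE body and PMASK becomes the incoming MASK when the ELSEWHERE
   part is translated.  */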
/* Translate an assignment statement in a WHERE statement or construct
   statement.  The MASK expression is used to control which elements
   of EXPR1 shall be assigned.  The sense of MASK is specified by
   INVERT.  */

static tree
gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
                        tree mask, bool invert,
                        tree count1, tree count2,
                        gfc_code *cnext)
{
  gfc_se lse;
  gfc_se rse;
  gfc_ss *lss;
  gfc_ss *lss_section;
  gfc_ss *rss;

  gfc_loopinfo loop;
  tree tmp;
  stmtblock_t block;
  stmtblock_t body;
  tree index, maskexpr;

  /* A defined assignment.  */
  if (cnext && cnext->resolved_sym)
    return gfc_trans_call (cnext, true, mask, count1, invert);

  /* TODO: handle this special case.
     Special case a single function returning an array.  */
  if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
    {
      tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
      if (tmp)
        return tmp;
    }

  /* Assignment of the form lhs = rhs.  */
  gfc_start_block (&block);

  gfc_init_se (&lse, NULL);
  gfc_init_se (&rse, NULL);

  /* Walk the lhs.  */
  lss = gfc_walk_expr (expr1);

  /* In each where-assign-stmt, the mask-expr and the variable being
     defined shall be arrays of the same shape.  */
  gcc_assert (lss != gfc_ss_terminator);

  /* The assignment needs scalarization.  */
  lss_section = lss;

  /* Find a non-scalar SS from the lhs.  */
  while (lss_section != gfc_ss_terminator
         && lss_section->info->type != GFC_SS_SECTION)
    lss_section = lss_section->next;

  gcc_assert (lss_section != gfc_ss_terminator);

  /* Initialize the scalarizer.  */
  gfc_init_loopinfo (&loop);

  /* Walk the rhs.  */
  rss = gfc_walk_expr (expr2);
  if (rss == gfc_ss_terminator)
    {
      /* The rhs is scalar.  Add a ss for the expression.  */
      rss = gfc_get_scalar_ss (gfc_ss_terminator, expr2);
      rss->info->where = 1;
    }

  /* Associate the SS with the loop.  */
  gfc_add_ss_to_loop (&loop, lss);
  gfc_add_ss_to_loop (&loop, rss);

  /* Calculate the bounds of the scalarization.  */
  gfc_conv_ss_startstride (&loop);

  /* Resolve any data dependencies in the statement.  */
  gfc_conv_resolve_dependencies (&loop, lss_section, rss);

  /* Setup the scalarizing loops.  */
  gfc_conv_loop_setup (&loop, &expr2->where);

  /* Setup the gfc_se structures.  */
  gfc_copy_loopinfo_to_se (&lse, &loop);
  gfc_copy_loopinfo_to_se (&rse, &loop);

  rse.ss = rss;
  gfc_mark_ss_chain_used (rss, 1);
  if (loop.temp_ss == NULL)
    {
      lse.ss = lss;
      gfc_mark_ss_chain_used (lss, 1);
    }
  else
    {
      lse.ss = loop.temp_ss;
      gfc_mark_ss_chain_used (lss, 3);
      gfc_mark_ss_chain_used (loop.temp_ss, 3);
    }

  /* Start the scalarized loop body.  */
  gfc_start_scalarized_body (&loop, &body);

  /* Translate the expression.  */
  gfc_conv_expr (&rse, expr2);
  if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
    gfc_conv_tmp_array_ref (&lse);
  else
    gfc_conv_expr (&lse, expr1);

  /* Form the mask expression according to the mask.  */
  index = count1;
  maskexpr = gfc_build_array_ref (mask, index, NULL);
  if (invert)
    maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
                                TREE_TYPE (maskexpr), maskexpr);

  /* Use the scalar assignment as is.  */
  tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
                                 loop.temp_ss != NULL, false, true);

  tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt (input_location));

  gfc_add_expr_to_block (&body, tmp);

  if (lss == gfc_ss_terminator)
    {
      /* Increment count1.  */
      tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                             count1, gfc_index_one_node);
      gfc_add_modify (&body, count1, tmp);

      /* Use the scalar assignment as is.  */
      gfc_add_block_to_block (&block, &body);
    }
  else
    {
      gcc_assert (lse.ss == gfc_ss_terminator
                  && rse.ss == gfc_ss_terminator);

      if (loop.temp_ss != NULL)
        {
          /* Increment count1 before finishing the main body of a
             scalarized expression.  */
          tmp = fold_build2_loc (input_location, PLUS_EXPR,
                                 gfc_array_index_type, count1,
                                 gfc_index_one_node);
          gfc_add_modify (&body, count1, tmp);
          gfc_trans_scalarized_loop_boundary (&loop, &body);

          /* We need to copy the temporary to the actual lhs.  */
          gfc_init_se (&lse, NULL);
          gfc_init_se (&rse, NULL);
          gfc_copy_loopinfo_to_se (&lse, &loop);
          gfc_copy_loopinfo_to_se (&rse, &loop);

          rse.ss = loop.temp_ss;

          gfc_conv_tmp_array_ref (&rse);
          gfc_conv_expr (&lse, expr1);

          gcc_assert (lse.ss == gfc_ss_terminator
                      && rse.ss == gfc_ss_terminator);

          /* Form the mask expression according to the mask tree list.  */
          index = count2;
          maskexpr = gfc_build_array_ref (mask, index, NULL);
          if (invert)
            maskexpr = fold_build1_loc (input_location, TRUTH_NOT_EXPR,
                                        TREE_TYPE (maskexpr), maskexpr);

          /* Use the scalar assignment as is.  */
          tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false,
                                         true);
          tmp = build3_v (COND_EXPR, maskexpr, tmp,
                          build_empty_stmt (input_location));
          gfc_add_expr_to_block (&body, tmp);

          /* Increment count2.  */
          tmp = fold_build2_loc (input_location, PLUS_EXPR,
                                 gfc_array_index_type, count2,
                                 gfc_index_one_node);
          gfc_add_modify (&body, count2, tmp);
        }
      else
        {
          /* Increment count1.  */
          tmp = fold_build2_loc (input_location, PLUS_EXPR,
                                 gfc_array_index_type, count1,
                                 gfc_index_one_node);
          gfc_add_modify (&body, count1, tmp);
        }

      /* Generate the copying loops.  */
      gfc_trans_scalarizing_loops (&loop, &body);

      /* Wrap the whole thing up.  */
      gfc_add_block_to_block (&block, &loop.pre);
      gfc_add_block_to_block (&block, &loop.post);
      gfc_cleanup_loop (&loop);
    }

  return gfc_finish_block (&block);
}
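

/* For illustration, a WHERE construct with conditional ELSEWHERE clauses,
   e.g.

       WHERE (a > 0.0)
         b = 1.0
       ELSEWHERE (a < 0.0)
         b = -1.0
       ELSEWHERE
         b = 0.0
       END WHERE

   is translated by gfc_trans_where_2 below using two logical temporaries:
   the control mask (cmask), which guards the statements of the current
   clause, and the pending control mask (pmask), which records the elements
   still to be handled by the remaining ELSEWHERE clauses.  The variable
   names above are only examples.  */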
/* Translate the WHERE construct or statement.
   This function can be called iteratively to translate the nested WHERE
   construct or statement.
   MASK is the control mask.  */

static void
gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
                   forall_info * nested_forall_info, stmtblock_t * block)
{
  stmtblock_t inner_size_body;
  tree inner_size, size;
  gfc_ss *lss, *rss;
  tree mask_type;
  gfc_expr *expr1;
  gfc_expr *expr2;
  gfc_code *cblock;
  gfc_code *cnext;
  tree tmp, cond;
  tree count1, count2;
  bool need_cmask;
  bool need_pmask;
  int need_temp;
  tree pcmask = NULL_TREE;
  tree ppmask = NULL_TREE;
  tree cmask = NULL_TREE;
  tree pmask = NULL_TREE;
  gfc_actual_arglist *arg;

  /* the WHERE statement or the WHERE construct statement.  */
  cblock = code->block;

  /* As the mask array can be very big, prefer compact boolean types.  */
  mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);

  /* Determine which temporary masks are needed.  */
  if (!cblock->block)
    {
      /* One clause: No ELSEWHEREs.  */
      need_cmask = (cblock->next != 0);
      need_pmask = false;
    }
  else if (cblock->block->block)
    {
      /* Three or more clauses: Conditional ELSEWHEREs.  */
      need_cmask = true;
      need_pmask = true;
    }
  else if (cblock->next)
    {
      /* Two clauses, the first non-empty.  */
      need_cmask = true;
      need_pmask = (mask != NULL_TREE
                    && cblock->block->next != 0);
    }
  else if (!cblock->block->next)
    {
      /* Two clauses, both empty.  */
      need_cmask = false;
      need_pmask = false;
    }
  else
    {
      /* Two clauses, the first empty, the second non-empty.  */
      need_cmask = (cblock->block->expr1 != 0);
      need_pmask = false;
    }

  if (need_cmask || need_pmask)
    {
      /* Calculate the size of temporary needed by the mask-expr.  */
      gfc_init_block (&inner_size_body);
      inner_size = compute_inner_temp_size (cblock->expr1, cblock->expr1,
                                            &inner_size_body, &lss, &rss);

      gfc_free_ss_chain (lss);
      gfc_free_ss_chain (rss);

      /* Calculate the total size of temporary needed.  */
      size = compute_overall_iter_number (nested_forall_info, inner_size,
                                          &inner_size_body, block);

      /* Check whether the size is negative.  */
      cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, size,
                              gfc_index_zero_node);
      size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
                              cond, gfc_index_zero_node, size);
      size = gfc_evaluate_now (size, block);

      /* Allocate temporary for WHERE mask if needed.  */
      if (need_cmask)
        cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
                                                 &pcmask);

      /* Allocate temporary for !mask if needed.  */
      if (need_pmask)
        pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
                                                 &ppmask);
    }

  while (cblock)
    {
      /* Each time around this loop, the where clause is conditional
         on the value of mask and invert, which are updated at the
         bottom of the loop.  */

      /* Has mask-expr.  */
      if (cblock->expr1)
        {
          /* Ensure that the WHERE mask will be evaluated exactly once.
             If there are no statements in this WHERE/ELSEWHERE clause,
             then we don't need to update the control mask (cmask).
             If this is the last clause of the WHERE construct, then
             we don't need to update the pending control mask (pmask).  */
          if (mask)
            gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
                                     mask, invert,
                                     cblock->next  ? cmask : NULL_TREE,
                                     cblock->block ? pmask : NULL_TREE,
                                     mask_type, block);
          else
            gfc_evaluate_where_mask (cblock->expr1, nested_forall_info,
                                     NULL_TREE, false,
                                     (cblock->next || cblock->block)
                                     ? cmask : NULL_TREE,
                                     NULL_TREE, mask_type, block);
        }
      /* It's a final elsewhere-stmt.  No mask-expr is present.  */
      else
        cmask = mask;

      /* The body of this where clause is controlled by cmask with
         sense specified by invert.  */

      /* Get the assignment statement of a WHERE statement, or the first
         statement in where-body-construct of a WHERE construct.  */
      cnext = cblock->next;
      while (cnext)
        {
          switch (cnext->op)
            {
            /* WHERE assignment statement.  */
            case EXEC_ASSIGN_CALL:

              arg = cnext->ext.actual;
              expr1 = expr2 = NULL;
              for (; arg; arg = arg->next)
                {
                  if (!arg->expr)
                    continue;
                  if (expr1 == NULL)
                    expr1 = arg->expr;
                  else
                    expr2 = arg->expr;
                }
              goto evaluate;

            case EXEC_ASSIGN:
              expr1 = cnext->expr1;
              expr2 = cnext->expr2;
            evaluate:
              if (nested_forall_info != NULL)
                {
                  need_temp = gfc_check_dependency (expr1, expr2, 0);
                  if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
                    gfc_trans_assign_need_temp (expr1, expr2,
                                                cmask, invert,
                                                nested_forall_info, block);
                  else
                    {
                      /* Variables to control maskexpr.  */
                      count1 = gfc_create_var (gfc_array_index_type, "count1");
                      count2 = gfc_create_var (gfc_array_index_type, "count2");
                      gfc_add_modify (block, count1, gfc_index_zero_node);
                      gfc_add_modify (block, count2, gfc_index_zero_node);

                      tmp = gfc_trans_where_assign (expr1, expr2,
                                                    cmask, invert,
                                                    count1, count2,
                                                    cnext);

                      tmp = gfc_trans_nested_forall_loop (nested_forall_info,
                                                          tmp, 1);
                      gfc_add_expr_to_block (block, tmp);
                    }
                }
              else
                {
                  /* Variables to control maskexpr.  */
                  count1 = gfc_create_var (gfc_array_index_type, "count1");
                  count2 = gfc_create_var (gfc_array_index_type, "count2");
                  gfc_add_modify (block, count1, gfc_index_zero_node);
                  gfc_add_modify (block, count2, gfc_index_zero_node);

                  tmp = gfc_trans_where_assign (expr1, expr2,
                                                cmask, invert,
                                                count1, count2,
                                                cnext);

                  gfc_add_expr_to_block (block, tmp);
                }
              break;

            /* WHERE or WHERE construct is part of a where-body-construct.  */
            case EXEC_WHERE:
              gfc_trans_where_2 (cnext, cmask, invert,
                                 nested_forall_info, block);
              break;

            default:
              gcc_unreachable ();
            }

          /* The next statement within the same where-body-construct.  */
          cnext = cnext->next;
        }
      /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt.  */
      cblock = cblock->block;
      if (mask == NULL_TREE)
        {
          /* If we're the initial WHERE, we can simply invert the sense
             of the current mask to obtain the "mask" for the remaining
             ELSEWHEREs.  */
          invert = true;
          mask = cmask;
        }
      else
        {
          /* Otherwise, for nested WHEREs we need to use the pending mask.  */
          invert = false;
          mask = pmask;
        }
    }

  /* If we allocated a pending mask array, deallocate it now.  */
  if (ppmask)
    {
      tmp = gfc_call_free (ppmask);
      gfc_add_expr_to_block (block, tmp);
    }

  /* If we allocated a current mask array, deallocate it now.  */
  if (pcmask)
    {
      tmp = gfc_call_free (pcmask);
      gfc_add_expr_to_block (block, tmp);
    }
}
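

/* For illustration, a dependence-free statement such as

       WHERE (a > 0.0)
         b = 1.0
       ELSEWHERE
         b = 0.0
       END WHERE

   needs no mask temporary at all; gfc_trans_where_3 below emits a single
   scalarized loop whose body is a COND_EXPR, roughly

       if (a[i] > 0.0) b[i] = 1.0; else b[i] = 0.0;

   which is why both clauses must be free of dependencies on each other.
   The variable names above are only examples.  */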
/* Translate a simple WHERE construct or statement without dependencies.
   CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
   is the mask condition, and EBLOCK if non-NULL is the "else" clause.
   Currently both CBLOCK and EBLOCK are restricted to single assignments.  */

static tree
gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
{
  stmtblock_t block, body;
  gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
  tree tmp, cexpr, tstmt, estmt;
  gfc_ss *css, *tdss, *tsss;
  gfc_se cse, tdse, tsse, edse, esse;
  gfc_loopinfo loop;
  gfc_ss *edss = 0;
  gfc_ss *esss = 0;

  /* Allow the scalarizer to workshare simple where loops.  */
  if (ompws_flags & OMPWS_WORKSHARE_FLAG)
    ompws_flags |= OMPWS_SCALARIZER_WS;

  cond = cblock->expr1;
  tdst = cblock->next->expr1;
  tsrc = cblock->next->expr2;
  edst = eblock ? eblock->next->expr1 : NULL;
  esrc = eblock ? eblock->next->expr2 : NULL;

  gfc_start_block (&block);
  gfc_init_loopinfo (&loop);

  /* Handle the condition.  */
  gfc_init_se (&cse, NULL);
  css = gfc_walk_expr (cond);
  gfc_add_ss_to_loop (&loop, css);

  /* Handle the then-clause.  */
  gfc_init_se (&tdse, NULL);
  gfc_init_se (&tsse, NULL);
  tdss = gfc_walk_expr (tdst);
  tsss = gfc_walk_expr (tsrc);
  if (tsss == gfc_ss_terminator)
    {
      tsss = gfc_get_scalar_ss (gfc_ss_terminator, tsrc);
      tsss->info->where = 1;
    }
  gfc_add_ss_to_loop (&loop, tdss);
  gfc_add_ss_to_loop (&loop, tsss);

  if (eblock)
    {
      /* Handle the else clause.  */
      gfc_init_se (&edse, NULL);
      gfc_init_se (&esse, NULL);
      edss = gfc_walk_expr (edst);
      esss = gfc_walk_expr (esrc);
      if (esss == gfc_ss_terminator)
        {
          esss = gfc_get_scalar_ss (gfc_ss_terminator, esrc);
          esss->info->where = 1;
        }
      gfc_add_ss_to_loop (&loop, edss);
      gfc_add_ss_to_loop (&loop, esss);
    }

  gfc_conv_ss_startstride (&loop);
  gfc_conv_loop_setup (&loop, &tdst->where);

  gfc_mark_ss_chain_used (css, 1);
  gfc_mark_ss_chain_used (tdss, 1);
  gfc_mark_ss_chain_used (tsss, 1);
  if (eblock)
    {
      gfc_mark_ss_chain_used (edss, 1);
      gfc_mark_ss_chain_used (esss, 1);
    }

  gfc_start_scalarized_body (&loop, &body);

  gfc_copy_loopinfo_to_se (&cse, &loop);
  gfc_copy_loopinfo_to_se (&tdse, &loop);
  gfc_copy_loopinfo_to_se (&tsse, &loop);
  if (eblock)
    {
      gfc_copy_loopinfo_to_se (&edse, &loop);
      gfc_copy_loopinfo_to_se (&esse, &loop);
    }

  gfc_conv_expr (&cse, cond);
  gfc_add_block_to_block (&body, &cse.pre);
  cexpr = cse.expr;

  gfc_conv_expr (&tsse, tsrc);
  if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
    gfc_conv_tmp_array_ref (&tdse);
  else
    gfc_conv_expr (&tdse, tdst);

  if (eblock)
    {
      gfc_conv_expr (&esse, esrc);
      if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
        gfc_conv_tmp_array_ref (&edse);
      else
        gfc_conv_expr (&edse, edst);
    }

  tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false, true);
  estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false,
                                            false, true)
                 : build_empty_stmt (input_location);
  tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
  gfc_add_expr_to_block (&body, tmp);
  gfc_add_block_to_block (&body, &cse.post);

  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&block, &loop.pre);
  gfc_add_block_to_block (&block, &loop.post);
  gfc_cleanup_loop (&loop);

  return gfc_finish_block (&block);
}
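

/* For illustration, the dependency screening in gfc_trans_where below
   rejects the fast path for a statement like

       WHERE (a(1:n) > 0.0)  a(2:n+1) = a(1:n)

   because the destination overlaps both the mask and the source; such a
   statement falls back to gfc_trans_where_2, which evaluates the mask
   into a temporary before any assignment is performed.  The variable
   names above are only examples.  */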
/* As the WHERE or WHERE construct statement can be nested, we call
   gfc_trans_where_2 to do the translation, and pass the initial
   NULL values for both the control mask and the pending control mask.  */

tree
gfc_trans_where (gfc_code * code)
{
  stmtblock_t block;
  gfc_code *cblock;
  gfc_code *eblock;

  cblock = code->block;
  if (cblock->next
      && cblock->next->op == EXEC_ASSIGN
      && !cblock->next->next)
    {
      eblock = cblock->block;
      if (!eblock)
        {
          /* A simple "WHERE (cond) x = y" statement or block is
             dependence free if cond is not dependent upon writing x,
             and the source y is unaffected by the destination x.  */
          if (!gfc_check_dependency (cblock->next->expr1,
                                     cblock->expr1, 0)
              && !gfc_check_dependency (cblock->next->expr1,
                                        cblock->next->expr2, 0))
            return gfc_trans_where_3 (cblock, NULL);
        }
      else if (!eblock->expr1
               && !eblock->block
               && eblock->next
               && eblock->next->op == EXEC_ASSIGN
               && !eblock->next->next)
        {
          /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
             block is dependence free if cond is not dependent on writes
             to x1 and x2, y1 is not dependent on writes to x2, and y2
             is not dependent on writes to x1, and both y's are not
             dependent upon their own x's.  In addition to this, the
             final two dependency checks below exclude all but the same
             array reference if the where and elsewhere destinations
             are the same.  In short, this is VERY conservative and is
             needed because the two loops, required by the standard,
             are coalesced in gfc_trans_where_3.  */
          if (!gfc_check_dependency (cblock->next->expr1,
                                     cblock->expr1, 0)
              && !gfc_check_dependency (eblock->next->expr1,
                                        cblock->expr1, 0)
              && !gfc_check_dependency (cblock->next->expr1,
                                        eblock->next->expr2, 1)
              && !gfc_check_dependency (eblock->next->expr1,
                                        cblock->next->expr2, 1)
              && !gfc_check_dependency (cblock->next->expr1,
                                        cblock->next->expr2, 1)
              && !gfc_check_dependency (eblock->next->expr1,
                                        eblock->next->expr2, 1)
              && !gfc_check_dependency (cblock->next->expr1,
                                        eblock->next->expr1, 0)
              && !gfc_check_dependency (eblock->next->expr1,
                                        cblock->next->expr1, 0))
            return gfc_trans_where_3 (cblock, eblock);
        }
    }

  gfc_start_block (&block);

  gfc_trans_where_2 (code, NULL, false, NULL, &block);

  return gfc_finish_block (&block);
}
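

/* For illustration, both CYCLE and EXIT are translated below into plain
   GOTO_EXPRs to labels that gfc_trans_do() has already created for the
   enclosing construct:

       DO i = 1, n
         IF (skip(i)) CYCLE   ! branch back to the loop's cycle label
         IF (done(i)) EXIT    ! branch to the label after the loop
       END DO

   so no new control-flow structure is built here.  The variable names
   above are only examples.  */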
/* CYCLE a DO loop.  The label decl has already been created by
   gfc_trans_do(); it's in TREE_PURPOSE (backend_decl) of the gfc_code
   node at the head of the loop.  We must mark the label as used.  */

tree
gfc_trans_cycle (gfc_code * code)
{
  tree cycle_label;

  cycle_label = code->ext.which_construct->cycle_label;
  gcc_assert (cycle_label);

  TREE_USED (cycle_label) = 1;
  return build1_v (GOTO_EXPR, cycle_label);
}
/* EXIT a DO loop.  Similar to CYCLE, but now the label is in
   TREE_VALUE (backend_decl) of the gfc_code node at the head of the
   loop.  */

tree
gfc_trans_exit (gfc_code * code)
{
  tree exit_label;

  exit_label = code->ext.which_construct->exit_label;
  gcc_assert (exit_label);

  TREE_USED (exit_label) = 1;
  return build1_v (GOTO_EXPR, exit_label);
}
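

/* For illustration, gfc_trans_allocate below has to cover statements such
   as

       ALLOCATE (x(n), STAT=ierr, ERRMSG=msg)
       ALLOCATE (p, SOURCE=q)
       ALLOCATE (CHARACTER(LEN=m) :: str)

   i.e. array and scalar allocations, optional STAT=/ERRMSG= error
   handling, SOURCE=/MOLD= initialization (including polymorphic deep
   copies via the _vptr and _copy components), and deferred character
   lengths.  The variable names above are only examples.  */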
/* Translate the ALLOCATE statement.  */

tree
gfc_trans_allocate (gfc_code * code)
{
  gfc_alloc *al;
  gfc_expr *e;
  gfc_expr *expr;
  gfc_expr *sz;
  gfc_se se;
  gfc_se se_sz;
  gfc_se lse;
  tree tmp;
  tree parm;
  tree stat;
  tree errmsg;
  tree errlen;
  tree label_errmsg;
  tree label_finish;
  tree memsz;
  tree nelems = NULL_TREE;
  tree expr3 = NULL_TREE;
  tree slen3 = NULL_TREE;
  tree class_expr = NULL_TREE;
  stmtblock_t block;
  stmtblock_t post;
  tree memsize = NULL_TREE;
  tree classexpr = NULL_TREE;

  if (!code->ext.alloc.list)
    return NULL_TREE;

  stat = tmp = memsz = NULL_TREE;
  label_errmsg = label_finish = errmsg = errlen = NULL_TREE;

  gfc_init_block (&block);
  gfc_init_block (&post);

  /* STAT= (and maybe ERRMSG=) is present.  */
  if (code->expr1)
    {
      tree gfc_int4_type_node = gfc_get_int_type (4);
      stat = gfc_create_var (gfc_int4_type_node, "stat");

      /* ERRMSG= only makes sense with STAT=.  */
      if (code->expr2)
        {
          gfc_init_se (&se, NULL);
          se.want_pointer = 1;
          gfc_conv_expr_lhs (&se, code->expr2);
          errmsg = se.expr;
          errlen = se.string_length;
        }
      else
        {
          errmsg = null_pointer_node;
          errlen = build_int_cst (gfc_charlen_type_node, 0);
        }

      /* GOTO destinations.  */
      label_errmsg = gfc_build_label_decl (NULL_TREE);
      label_finish = gfc_build_label_decl (NULL_TREE);
      TREE_USED (label_finish) = 0;
    }

  for (al = code->ext.alloc.list; al != NULL; al = al->next)
    {
      expr = gfc_copy_expr (al->expr);

      if (expr->ts.type == BT_CLASS)
        gfc_add_data_component (expr);

      gfc_init_se (&se, NULL);

      se.want_pointer = 1;
      se.descriptor_only = 1;
      gfc_conv_expr (&se, expr);

      /* Evaluate expr3 just once if not a variable.  */
      if (al == code->ext.alloc.list
          && al->expr->ts.type == BT_CLASS
          && code->expr3
          && code->expr3->ts.type == BT_CLASS
          && code->expr3->expr_type != EXPR_VARIABLE)
        {
          gfc_init_se (&se_sz, NULL);
          gfc_conv_expr_reference (&se_sz, code->expr3);
          gfc_conv_class_to_class (&se_sz, code->expr3,
                                   code->expr3->ts, false, true, false, false);
          gfc_add_block_to_block (&se.pre, &se_sz.pre);
          gfc_add_block_to_block (&se.post, &se_sz.post);
          classexpr = build_fold_indirect_ref_loc (input_location,
                                                   se_sz.expr);
          classexpr = gfc_evaluate_now (classexpr, &se.pre);
          memsize = gfc_vtable_size_get (classexpr);
          memsize = fold_convert (sizetype, memsize);
        }

      class_expr = classexpr;

      if (!gfc_array_allocate (&se, expr, stat, errmsg, errlen, label_finish,
                               memsz, &nelems, code->expr3, &code->ext.alloc.ts))
        {
          bool unlimited_char;

          unlimited_char = UNLIMITED_POLY (al->expr)
              && ((code->expr3 && code->expr3->ts.type == BT_CHARACTER)
                  || (code->ext.alloc.ts.type == BT_CHARACTER
                      && code->ext.alloc.ts.u.cl
                      && code->ext.alloc.ts.u.cl->length));

          /* A scalar or derived type.  */

          /* Determine allocate size.  */
          if (al->expr->ts.type == BT_CLASS
              && !unlimited_char
              && code->expr3
              && memsz == NULL_TREE)
            {
              if (code->expr3->ts.type == BT_CLASS)
                {
                  sz = gfc_copy_expr (code->expr3);
                  gfc_add_vptr_component (sz);
                  gfc_add_size_component (sz);
                  gfc_init_se (&se_sz, NULL);
                  gfc_conv_expr (&se_sz, sz);
                  gfc_free_expr (sz);
                  memsz = se_sz.expr;
                }
              else
                memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->expr3->ts));
            }
          else if (((al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
                    || unlimited_char) && code->expr3)
            {
              if (!code->expr3->ts.u.cl->backend_decl)
                {
                  /* Convert and use the length expression.  */
                  gfc_init_se (&se_sz, NULL);
                  if (code->expr3->expr_type == EXPR_VARIABLE
                      || code->expr3->expr_type == EXPR_CONSTANT)
                    {
                      gfc_conv_expr (&se_sz, code->expr3);
                      gfc_add_block_to_block (&se.pre, &se_sz.pre);
                      se_sz.string_length
                        = gfc_evaluate_now (se_sz.string_length, &se.pre);
                      gfc_add_block_to_block (&se.pre, &se_sz.post);
                      memsz = se_sz.string_length;
                    }
                  else if (code->expr3->mold
                           && code->expr3->ts.u.cl
                           && code->expr3->ts.u.cl->length)
                    {
                      gfc_conv_expr (&se_sz, code->expr3->ts.u.cl->length);
                      gfc_add_block_to_block (&se.pre, &se_sz.pre);
                      se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
                      gfc_add_block_to_block (&se.pre, &se_sz.post);
                      memsz = se_sz.expr;
                    }
                  else
                    {
                      /* This would be inefficient and possibly could
                         generate wrong code if the result were not stored
                         in a temporary.  */
                      if (slen3 == NULL_TREE)
                        {
                          gfc_conv_expr (&se_sz, code->expr3);
                          gfc_add_block_to_block (&se.pre, &se_sz.pre);
                          expr3 = gfc_evaluate_now (se_sz.expr, &se.pre);
                          gfc_add_block_to_block (&post, &se_sz.post);
                          slen3 = gfc_evaluate_now (se_sz.string_length,
                                                    &se.pre);
                        }
                      memsz = slen3;
                    }
                }
              else
                /* Otherwise use the stored string length.  */
                memsz = code->expr3->ts.u.cl->backend_decl;
              tmp = al->expr->ts.u.cl->backend_decl;

              /* Store the string length.  */
              if (tmp && TREE_CODE (tmp) == VAR_DECL)
                gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
                                                            memsz));
              else if (al->expr->ts.type == BT_CHARACTER
                       && al->expr->ts.deferred && se.string_length)
                gfc_add_modify (&se.pre, se.string_length,
                                fold_convert (TREE_TYPE (se.string_length),
                                              memsz));

              /* Convert to size in bytes, using the character KIND.  */
              if (unlimited_char)
                tmp = TREE_TYPE (gfc_typenode_for_spec (&code->expr3->ts));
              else
                tmp = TREE_TYPE (gfc_typenode_for_spec (&al->expr->ts));
              tmp = TYPE_SIZE_UNIT (tmp);
              memsz = fold_build2_loc (input_location, MULT_EXPR,
                                       TREE_TYPE (tmp), tmp,
                                       fold_convert (TREE_TYPE (tmp), memsz));
            }
          else if ((al->expr->ts.type == BT_CHARACTER && al->expr->ts.deferred)
                   || unlimited_char)
            {
              gcc_assert (code->ext.alloc.ts.u.cl && code->ext.alloc.ts.u.cl->length);
              gfc_init_se (&se_sz, NULL);
              gfc_conv_expr (&se_sz, code->ext.alloc.ts.u.cl->length);
              gfc_add_block_to_block (&se.pre, &se_sz.pre);
              se_sz.expr = gfc_evaluate_now (se_sz.expr, &se.pre);
              gfc_add_block_to_block (&se.pre, &se_sz.post);
              /* Store the string length.  */
              tmp = al->expr->ts.u.cl->backend_decl;
              gfc_add_modify (&se.pre, tmp, fold_convert (TREE_TYPE (tmp),
                                                          se_sz.expr));
              tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
              tmp = TYPE_SIZE_UNIT (tmp);
              memsz = fold_build2_loc (input_location, MULT_EXPR,
                                       TREE_TYPE (tmp), tmp,
                                       fold_convert (TREE_TYPE (se_sz.expr),
                                                     se_sz.expr));
            }
          else if (code->ext.alloc.ts.type != BT_UNKNOWN)
            memsz = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&code->ext.alloc.ts));
          else if (memsz == NULL_TREE)
            memsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));

          if (expr->ts.type == BT_CHARACTER && memsz == NULL_TREE)
            {
              memsz = se.string_length;

              /* Convert to size in bytes, using the character KIND.  */
              tmp = TREE_TYPE (gfc_typenode_for_spec (&code->ext.alloc.ts));
              tmp = TYPE_SIZE_UNIT (tmp);
              memsz = fold_build2_loc (input_location, MULT_EXPR,
                                       TREE_TYPE (tmp), tmp,
                                       fold_convert (TREE_TYPE (tmp), memsz));
            }

          /* Allocate - for non-pointers with re-alloc checking.  */
          if (gfc_expr_attr (expr).allocatable)
            gfc_allocate_allocatable (&se.pre, se.expr, memsz, NULL_TREE,
                                      stat, errmsg, errlen, label_finish, expr);
          else
            gfc_allocate_using_malloc (&se.pre, se.expr, memsz, stat);

          if (al->expr->ts.type == BT_DERIVED
              && expr->ts.u.derived->attr.alloc_comp)
            {
              tmp = build_fold_indirect_ref_loc (input_location, se.expr);
              tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, tmp, 0);
              gfc_add_expr_to_block (&se.pre, tmp);
            }
        }

      gfc_add_block_to_block (&block, &se.pre);

      /* Error checking -- Note: ERRMSG only makes sense with STAT.  */
      if (code->expr1)
        {
          tmp = build1_v (GOTO_EXPR, label_errmsg);
          parm = fold_build2_loc (input_location, NE_EXPR,
                                  boolean_type_node, stat,
                                  build_int_cst (TREE_TYPE (stat), 0));
          tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                                 gfc_unlikely (parm, PRED_FORTRAN_FAIL_ALLOC),
                                 tmp, build_empty_stmt (input_location));
          gfc_add_expr_to_block (&block, tmp);
        }

      /* We need the vptr of CLASS objects to be initialized.  */
      e = gfc_copy_expr (al->expr);
      if (e->ts.type == BT_CLASS)
        {
          gfc_expr *lhs, *rhs;
          gfc_ref *ref, *class_ref, *tail;

          /* Find the last class reference.  */
          class_ref = NULL;
          for (ref = e->ref; ref; ref = ref->next)
            {
              if (ref->type == REF_COMPONENT
                  && ref->u.c.component->ts.type == BT_CLASS)
                class_ref = ref;

              if (ref->next == NULL)
                break;
            }

          /* Remove and store all subsequent references after the
             CLASS reference.  */
          if (class_ref)
            {
              tail = class_ref->next;
              class_ref->next = NULL;
            }
          else
            {
              tail = e->ref;
              e->ref = NULL;
            }

          lhs = gfc_expr_to_initialize (e);
          gfc_add_vptr_component (lhs);

          /* Remove the _vptr component and restore the original tail
             of the reference list.  */
          if (class_ref)
            {
              gfc_free_ref_list (class_ref->next);
              class_ref->next = tail;
            }
          else
            {
              gfc_free_ref_list (e->ref);
              e->ref = tail;
            }

          if (class_expr != NULL_TREE)
            {
              /* Polymorphic SOURCE: VPTR must be determined at run time.  */
              gfc_init_se (&lse, NULL);
              lse.want_pointer = 1;
              gfc_conv_expr (&lse, lhs);
              tmp = gfc_class_vptr_get (class_expr);
              gfc_add_modify (&block, lse.expr,
                              fold_convert (TREE_TYPE (lse.expr), tmp));
            }
          else if (code->expr3 && code->expr3->ts.type == BT_CLASS)
            {
              /* Polymorphic SOURCE: VPTR must be determined at run time.  */
              rhs = gfc_copy_expr (code->expr3);
              gfc_add_vptr_component (rhs);
              tmp = gfc_trans_pointer_assignment (lhs, rhs);
              gfc_add_expr_to_block (&block, tmp);
              gfc_free_expr (rhs);
              rhs = gfc_expr_to_initialize (e);
            }
          else
            {
              /* VPTR is fixed at compile time.  */
              gfc_symbol *vtab;
              gfc_typespec *ts;

              if (code->expr3)
                ts = &code->expr3->ts;
              else if (e->ts.type == BT_DERIVED)
                ts = &e->ts;
              else if (code->ext.alloc.ts.type == BT_DERIVED || UNLIMITED_POLY (al->expr))
                ts = &code->ext.alloc.ts;
              else if (e->ts.type == BT_CLASS)
                ts = &CLASS_DATA (e)->ts;
              else
                ts = &e->ts;

              if (ts->type == BT_DERIVED || UNLIMITED_POLY (e))
                {
                  vtab = gfc_find_vtab (ts);
                  gfc_init_se (&lse, NULL);
                  lse.want_pointer = 1;
                  gfc_conv_expr (&lse, lhs);
                  tmp = gfc_build_addr_expr (NULL_TREE,
                                             gfc_get_symbol_decl (vtab));
                  gfc_add_modify (&block, lse.expr,
                                  fold_convert (TREE_TYPE (lse.expr), tmp));
                }
            }
          gfc_free_expr (lhs);
        }

      if (code->expr3 && !code->expr3->mold)
        {
          /* Initialization via SOURCE block
             (or static default initializer).  */
          gfc_expr *rhs = gfc_copy_expr (code->expr3);
          if (class_expr != NULL_TREE)
            {
              tree to;
              to = TREE_OPERAND (se.expr, 0);

              tmp = gfc_copy_class_to_class (class_expr, to, nelems);
            }
          else if (al->expr->ts.type == BT_CLASS)
            {
              gfc_actual_arglist *actual;
              gfc_expr *ppc;
              gfc_code *ppc_code;
              gfc_ref *ref, *dataref;

              /* Do a polymorphic deep copy.  */
              actual = gfc_get_actual_arglist ();
              actual->expr = gfc_copy_expr (rhs);
              if (rhs->ts.type == BT_CLASS)
                gfc_add_data_component (actual->expr);
              actual->next = gfc_get_actual_arglist ();
              actual->next->expr = gfc_copy_expr (al->expr);
              actual->next->expr->ts.type = BT_CLASS;
              gfc_add_data_component (actual->next->expr);

              dataref = NULL;
              /* Make sure we go up through the reference chain to
                 the _data reference, where the arrayspec is found.  */
              for (ref = actual->next->expr->ref; ref; ref = ref->next)
                if (ref->type == REF_COMPONENT
                    && strcmp (ref->u.c.component->name, "_data") == 0)
                  dataref = ref;

              if (dataref && dataref->u.c.component->as)
                {
                  int dim;
                  gfc_expr *temp;
                  gfc_ref *ref = dataref->next;
                  ref->u.ar.type = AR_SECTION;
                  /* We have to set up the array reference to give ranges
                     in all dimensions and ensure that the end and stride
                     are set so that the copy can be scalarized.  */
                  dim = 0;
                  for (; dim < dataref->u.c.component->as->rank; dim++)
                    {
                      ref->u.ar.dimen_type[dim] = DIMEN_RANGE;
                      if (ref->u.ar.end[dim] == NULL)
                        {
                          ref->u.ar.end[dim] = ref->u.ar.start[dim];
                          temp = gfc_get_int_expr (gfc_default_integer_kind,
                                                   &al->expr->where, 1);
                          ref->u.ar.start[dim] = temp;
                        }
                      temp = gfc_subtract (gfc_copy_expr (ref->u.ar.end[dim]),
                                           gfc_copy_expr (ref->u.ar.start[dim]));
                      temp = gfc_add (gfc_get_int_expr (gfc_default_integer_kind,
                                                        &al->expr->where, 1),
                                      temp);
                    }
                }
              if (rhs->ts.type == BT_CLASS)
                {
                  ppc = gfc_copy_expr (rhs);
                  gfc_add_vptr_component (ppc);
                }
              else
                ppc = gfc_lval_expr_from_sym (gfc_find_vtab (&rhs->ts));
              gfc_add_component_ref (ppc, "_copy");

              ppc_code = gfc_get_code (EXEC_CALL);
              ppc_code->resolved_sym = ppc->symtree->n.sym;
              /* Although '_copy' is set to be elemental in class.c, it is
                 not staying that way.  Find out why, sometime....  */
              ppc_code->resolved_sym->attr.elemental = 1;
              ppc_code->ext.actual = actual;
              ppc_code->expr1 = ppc;
              /* Since '_copy' is elemental, the scalarizer will take care
                 of arrays in gfc_trans_call.  */
              tmp = gfc_trans_call (ppc_code, true, NULL, NULL, false);
              gfc_free_statements (ppc_code);
            }
          else if (expr3 != NULL_TREE)
            {
              tmp = build_fold_indirect_ref_loc (input_location, se.expr);
              gfc_trans_string_copy (&block, slen3, tmp, code->expr3->ts.kind,
                                     slen3, expr3, code->expr3->ts.kind);
            }
          else
            {
              /* Switch off automatic reallocation since we have just done
                 the ALLOCATE.  */
              int realloc_lhs = gfc_option.flag_realloc_lhs;
              gfc_option.flag_realloc_lhs = 0;
              tmp = gfc_trans_assignment (gfc_expr_to_initialize (expr),
                                          rhs, false, false);
              gfc_option.flag_realloc_lhs = realloc_lhs;
            }
          gfc_free_expr (rhs);
          gfc_add_expr_to_block (&block, tmp);
        }
      else if (code->expr3 && code->expr3->mold
               && code->expr3->ts.type == BT_CLASS)
        {
          /* Since the _vptr has already been assigned to the allocate
             object, we can use gfc_copy_class_to_class in its
             initialization mode.  */
          tmp = TREE_OPERAND (se.expr, 0);
          tmp = gfc_copy_class_to_class (NULL_TREE, tmp, nelems);
          gfc_add_expr_to_block (&block, tmp);
        }

      gfc_free_expr (expr);
    }

  /* STAT.  */
  if (code->expr1)
    {
      tmp = build1_v (LABEL_EXPR, label_errmsg);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* ERRMSG - only useful if STAT is present.  */
  if (code->expr1 && code->expr2)
    {
      const char *msg = "Attempt to allocate an allocated object";
      tree slen, dlen, errmsg_str;
      stmtblock_t errmsg_block;

      gfc_init_block (&errmsg_block);

      errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
      gfc_add_modify (&errmsg_block, errmsg_str,
                      gfc_build_addr_expr (pchar_type_node,
                        gfc_build_localized_cstring_const (msg)));

      slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
      dlen = gfc_get_expr_charlen (code->expr2);
      slen = fold_build2_loc (input_location, MIN_EXPR, TREE_TYPE (slen), dlen,
                              slen);

      gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
                             slen, errmsg_str, gfc_default_character_kind);
      dlen = gfc_finish_block (&errmsg_block);

      tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
                             build_int_cst (TREE_TYPE (stat), 0));

      tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt (input_location));

      gfc_add_expr_to_block (&block, tmp);
    }

  /* STAT block.  */
  if (code->expr1)
    {
      if (TREE_USED (label_finish))
        {
          tmp = build1_v (LABEL_EXPR, label_finish);
          gfc_add_expr_to_block (&block, tmp);
        }

      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->expr1);
      tmp = convert (TREE_TYPE (se.expr), stat);
      gfc_add_modify (&block, se.expr, tmp);
    }

  gfc_add_block_to_block (&block, &se.post);
  gfc_add_block_to_block (&block, &post);

  return gfc_finish_block (&block);
}
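

/* For illustration, gfc_trans_deallocate below handles statements such as

       DEALLOCATE (x, y, STAT=ierr, ERRMSG=msg)

   deallocating each object in turn (including allocatable components of
   derived types), resetting the _vptr of CLASS objects, zeroing scalar
   pointers after the call, and branching to the error label as soon as
   the runtime reports a failure when STAT= is present.  The variable
   names above are only examples.  */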
/* Translate a DEALLOCATE statement.  */

tree
gfc_trans_deallocate (gfc_code *code)
{
  gfc_se se;
  gfc_alloc *al;
  tree apstat, pstat, stat, errmsg, errlen, tmp;
  tree label_finish, label_errmsg;
  stmtblock_t block;

  pstat = apstat = stat = errmsg = errlen = tmp = NULL_TREE;
  label_finish = label_errmsg = NULL_TREE;

  gfc_start_block (&block);

  /* Count the number of failed deallocations.  If deallocate() was
     called with STAT=, then set STAT to the count.  If deallocate
     was called with ERRMSG, then set ERRMSG to a string.  */
  if (code->expr1)
    {
      tree gfc_int4_type_node = gfc_get_int_type (4);

      stat = gfc_create_var (gfc_int4_type_node, "stat");
      pstat = gfc_build_addr_expr (NULL_TREE, stat);

      /* GOTO destinations.  */
      label_errmsg = gfc_build_label_decl (NULL_TREE);
      label_finish = gfc_build_label_decl (NULL_TREE);
      TREE_USED (label_finish) = 0;
    }

  /* Set ERRMSG - only needed if STAT is available.  */
  if (code->expr1 && code->expr2)
    {
      gfc_init_se (&se, NULL);
      se.want_pointer = 1;
      gfc_conv_expr_lhs (&se, code->expr2);
      errmsg = se.expr;
      errlen = se.string_length;
    }

  for (al = code->ext.alloc.list; al != NULL; al = al->next)
    {
      gfc_expr *expr = gfc_copy_expr (al->expr);
      gcc_assert (expr->expr_type == EXPR_VARIABLE);

      if (expr->ts.type == BT_CLASS)
        gfc_add_data_component (expr);

      gfc_init_se (&se, NULL);
      gfc_start_block (&se.pre);

      se.want_pointer = 1;
      se.descriptor_only = 1;
      gfc_conv_expr (&se, expr);

      if (expr->rank || gfc_is_coarray (expr))
        {
          gfc_ref *ref;

          if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp
              && !gfc_is_finalizable (expr->ts.u.derived, NULL))
            {
              gfc_ref *last = NULL;

              for (ref = expr->ref; ref; ref = ref->next)
                if (ref->type == REF_COMPONENT)
                  last = ref;

              /* Do not deallocate the components of a derived type
                 ultimate pointer component.  */
              if (!(last && last->u.c.component->attr.pointer)
                  && !(!last && expr->symtree->n.sym->attr.pointer))
                {
                  tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se.expr,
                                                   expr->rank);
                  gfc_add_expr_to_block (&se.pre, tmp);
                }
            }

          tmp = gfc_array_deallocate (se.expr, pstat, errmsg, errlen,
                                      label_finish, expr);
          gfc_add_expr_to_block (&se.pre, tmp);
          if (al->expr->ts.type == BT_CLASS)
            gfc_reset_vptr (&se.pre, al->expr);
        }
      else
        {
          tmp = gfc_deallocate_scalar_with_status (se.expr, pstat, false,
                                                   al->expr, al->expr->ts);
          gfc_add_expr_to_block (&se.pre, tmp);

          /* Set to zero after deallocation.  */
          tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
                                 se.expr,
                                 build_int_cst (TREE_TYPE (se.expr), 0));
          gfc_add_expr_to_block (&se.pre, tmp);

          if (al->expr->ts.type == BT_CLASS)
            gfc_reset_vptr (&se.pre, al->expr);
        }

      if (code->expr1)
        {
          tree cond;

          cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
                                  build_int_cst (TREE_TYPE (stat), 0));
          tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                                 gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC),
                                 build1_v (GOTO_EXPR, label_errmsg),
                                 build_empty_stmt (input_location));
          gfc_add_expr_to_block (&se.pre, tmp);
        }

      tmp = gfc_finish_block (&se.pre);
      gfc_add_expr_to_block (&block, tmp);
      gfc_free_expr (expr);
    }

  if (code->expr1)
    {
      tmp = build1_v (LABEL_EXPR, label_errmsg);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Set ERRMSG - only needed if STAT is available.  */
  if (code->expr1 && code->expr2)
    {
      const char *msg = "Attempt to deallocate an unallocated object";
      stmtblock_t errmsg_block;
      tree errmsg_str, slen, dlen, cond;

      gfc_init_block (&errmsg_block);

      errmsg_str = gfc_create_var (pchar_type_node, "ERRMSG");
      gfc_add_modify (&errmsg_block, errmsg_str,
                      gfc_build_addr_expr (pchar_type_node,
                        gfc_build_localized_cstring_const (msg)));
      slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
      dlen = gfc_get_expr_charlen (code->expr2);

      gfc_trans_string_copy (&errmsg_block, dlen, errmsg, code->expr2->ts.kind,
                             slen, errmsg_str, gfc_default_character_kind);
      tmp = gfc_finish_block (&errmsg_block);

      cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node, stat,
                              build_int_cst (TREE_TYPE (stat), 0));
      tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
                             gfc_unlikely (cond, PRED_FORTRAN_FAIL_ALLOC), tmp,
                             build_empty_stmt (input_location));

      gfc_add_expr_to_block (&block, tmp);
    }

  if (code->expr1 && TREE_USED (label_finish))
    {
      tmp = build1_v (LABEL_EXPR, label_finish);
      gfc_add_expr_to_block (&block, tmp);
    }

  /* Set STAT.  */
  if (code->expr1)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_expr_lhs (&se, code->expr1);
      tmp = convert (TREE_TYPE (se.expr), stat);
      gfc_add_modify (&block, se.expr, tmp);
    }

  return gfc_finish_block (&block);
}
#include "gt-fortran-trans-stmt.h"