1 /* Gimple IR support functions.
3 Copyright (C) 2007-2013 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
32 #include "diagnostic.h"
33 #include "value-prof.h"
37 #include "langhooks.h"
41 /* All the tuples have their operand vector (if present) at the very bottom
42 of the structure. Therefore, the offset required to find the
43 operands vector the size of the structure minus the size of the 1
44 element tree array at the end (see gimple_ops). */
45 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
46 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
47 EXPORTED_CONST
size_t gimple_ops_offset_
[] = {
48 #include "gsstruct.def"
52 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
53 static const size_t gsstruct_code_size
[] = {
54 #include "gsstruct.def"
58 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
59 const char *const gimple_code_name
[] = {
64 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
65 EXPORTED_CONST
enum gimple_statement_structure_enum gss_for_code_
[] = {
72 int gimple_alloc_counts
[(int) gimple_alloc_kind_all
];
73 int gimple_alloc_sizes
[(int) gimple_alloc_kind_all
];
/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
/* NOTE(review): the array contents were lost in extraction; restored from
   the upstream definition — verify against gimple.h's enum order.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};
83 /* Private API manipulation functions shared only with some
85 extern void gimple_set_stored_syms (gimple
, bitmap
, bitmap_obstack
*);
86 extern void gimple_set_loaded_syms (gimple
, bitmap
, bitmap_obstack
*);
88 /* Gimple tuple constructors.
89 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
90 be passed a NULL to start with an empty sequence. */
92 /* Set the code for statement G to CODE. */
95 gimple_set_code (gimple g
, enum gimple_code code
)
97 g
->gsbase
.code
= code
;
100 /* Return the number of bytes needed to hold a GIMPLE statement with
104 gimple_size (enum gimple_code code
)
106 return gsstruct_code_size
[gss_for_code (code
)];
109 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
113 gimple_alloc_stat (enum gimple_code code
, unsigned num_ops MEM_STAT_DECL
)
118 size
= gimple_size (code
);
120 size
+= sizeof (tree
) * (num_ops
- 1);
122 if (GATHER_STATISTICS
)
124 enum gimple_alloc_kind kind
= gimple_alloc_kind (code
);
125 gimple_alloc_counts
[(int) kind
]++;
126 gimple_alloc_sizes
[(int) kind
] += size
;
129 stmt
= ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT
);
130 gimple_set_code (stmt
, code
);
131 gimple_set_num_ops (stmt
, num_ops
);
133 /* Do not call gimple_set_modified here as it has other side
134 effects and this tuple is still not completely built. */
135 stmt
->gsbase
.modified
= 1;
136 gimple_init_singleton (stmt
);
141 /* Set SUBCODE to be the code of the expression computed by statement G. */
144 gimple_set_subcode (gimple g
, unsigned subcode
)
146 /* We only have 16 bits for the RHS code. Assert that we are not
148 gcc_assert (subcode
< (1 << 16));
149 g
->gsbase
.subcode
= subcode
;
154 /* Build a tuple with operands. CODE is the statement to build (which
155 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the subcode
156 for the new tuple. NUM_OPS is the number of operands to allocate. */
158 #define gimple_build_with_ops(c, s, n) \
159 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
162 gimple_build_with_ops_stat (enum gimple_code code
, unsigned subcode
,
163 unsigned num_ops MEM_STAT_DECL
)
165 gimple s
= gimple_alloc_stat (code
, num_ops PASS_MEM_STAT
);
166 gimple_set_subcode (s
, subcode
);
172 /* Build a GIMPLE_RETURN statement returning RETVAL. */
175 gimple_build_return (tree retval
)
177 gimple s
= gimple_build_with_ops (GIMPLE_RETURN
, ERROR_MARK
, 1);
179 gimple_return_set_retval (s
, retval
);
183 /* Reset alias information on call S. */
186 gimple_call_reset_alias_info (gimple s
)
188 if (gimple_call_flags (s
) & ECF_CONST
)
189 memset (gimple_call_use_set (s
), 0, sizeof (struct pt_solution
));
191 pt_solution_reset (gimple_call_use_set (s
));
192 if (gimple_call_flags (s
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
193 memset (gimple_call_clobber_set (s
), 0, sizeof (struct pt_solution
));
195 pt_solution_reset (gimple_call_clobber_set (s
));
198 /* Helper for gimple_build_call, gimple_build_call_valist,
199 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
200 components of a GIMPLE_CALL statement to function FN with NARGS
204 gimple_build_call_1 (tree fn
, unsigned nargs
)
206 gimple s
= gimple_build_with_ops (GIMPLE_CALL
, ERROR_MARK
, nargs
+ 3);
207 if (TREE_CODE (fn
) == FUNCTION_DECL
)
208 fn
= build_fold_addr_expr (fn
);
209 gimple_set_op (s
, 1, fn
);
210 gimple_call_set_fntype (s
, TREE_TYPE (TREE_TYPE (fn
)));
211 gimple_call_reset_alias_info (s
);
216 /* Build a GIMPLE_CALL statement to function FN with the arguments
217 specified in vector ARGS. */
220 gimple_build_call_vec (tree fn
, vec
<tree
> args
)
223 unsigned nargs
= args
.length ();
224 gimple call
= gimple_build_call_1 (fn
, nargs
);
226 for (i
= 0; i
< nargs
; i
++)
227 gimple_call_set_arg (call
, i
, args
[i
]);
233 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
234 arguments. The ... are the arguments. */
237 gimple_build_call (tree fn
, unsigned nargs
, ...)
243 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
|| is_gimple_call_addr (fn
));
245 call
= gimple_build_call_1 (fn
, nargs
);
247 va_start (ap
, nargs
);
248 for (i
= 0; i
< nargs
; i
++)
249 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
256 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
257 arguments. AP contains the arguments. */
260 gimple_build_call_valist (tree fn
, unsigned nargs
, va_list ap
)
265 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
|| is_gimple_call_addr (fn
));
267 call
= gimple_build_call_1 (fn
, nargs
);
269 for (i
= 0; i
< nargs
; i
++)
270 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
276 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
277 Build the basic components of a GIMPLE_CALL statement to internal
278 function FN with NARGS arguments. */
281 gimple_build_call_internal_1 (enum internal_fn fn
, unsigned nargs
)
283 gimple s
= gimple_build_with_ops (GIMPLE_CALL
, ERROR_MARK
, nargs
+ 3);
284 s
->gsbase
.subcode
|= GF_CALL_INTERNAL
;
285 gimple_call_set_internal_fn (s
, fn
);
286 gimple_call_reset_alias_info (s
);
291 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
292 the number of arguments. The ... are the arguments. */
295 gimple_build_call_internal (enum internal_fn fn
, unsigned nargs
, ...)
301 call
= gimple_build_call_internal_1 (fn
, nargs
);
302 va_start (ap
, nargs
);
303 for (i
= 0; i
< nargs
; i
++)
304 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
311 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
312 specified in vector ARGS. */
315 gimple_build_call_internal_vec (enum internal_fn fn
, vec
<tree
> args
)
320 nargs
= args
.length ();
321 call
= gimple_build_call_internal_1 (fn
, nargs
);
322 for (i
= 0; i
< nargs
; i
++)
323 gimple_call_set_arg (call
, i
, args
[i
]);
329 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
330 assumed to be in GIMPLE form already. Minimal checking is done of
334 gimple_build_call_from_tree (tree t
)
338 tree fndecl
= get_callee_fndecl (t
);
340 gcc_assert (TREE_CODE (t
) == CALL_EXPR
);
342 nargs
= call_expr_nargs (t
);
343 call
= gimple_build_call_1 (fndecl
? fndecl
: CALL_EXPR_FN (t
), nargs
);
345 for (i
= 0; i
< nargs
; i
++)
346 gimple_call_set_arg (call
, i
, CALL_EXPR_ARG (t
, i
));
348 gimple_set_block (call
, TREE_BLOCK (t
));
350 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
351 gimple_call_set_chain (call
, CALL_EXPR_STATIC_CHAIN (t
));
352 gimple_call_set_tail (call
, CALL_EXPR_TAILCALL (t
));
353 gimple_call_set_return_slot_opt (call
, CALL_EXPR_RETURN_SLOT_OPT (t
));
355 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
356 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
357 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
358 gimple_call_set_alloca_for_var (call
, CALL_ALLOCA_FOR_VAR_P (t
));
360 gimple_call_set_from_thunk (call
, CALL_FROM_THUNK_P (t
));
361 gimple_call_set_va_arg_pack (call
, CALL_EXPR_VA_ARG_PACK (t
));
362 gimple_call_set_nothrow (call
, TREE_NOTHROW (t
));
363 gimple_set_no_warning (call
, TREE_NO_WARNING (t
));
369 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
370 *OP1_P, *OP2_P and *OP3_P respectively. */
373 extract_ops_from_tree_1 (tree expr
, enum tree_code
*subcode_p
, tree
*op1_p
,
374 tree
*op2_p
, tree
*op3_p
)
376 enum gimple_rhs_class grhs_class
;
378 *subcode_p
= TREE_CODE (expr
);
379 grhs_class
= get_gimple_rhs_class (*subcode_p
);
381 if (grhs_class
== GIMPLE_TERNARY_RHS
)
383 *op1_p
= TREE_OPERAND (expr
, 0);
384 *op2_p
= TREE_OPERAND (expr
, 1);
385 *op3_p
= TREE_OPERAND (expr
, 2);
387 else if (grhs_class
== GIMPLE_BINARY_RHS
)
389 *op1_p
= TREE_OPERAND (expr
, 0);
390 *op2_p
= TREE_OPERAND (expr
, 1);
393 else if (grhs_class
== GIMPLE_UNARY_RHS
)
395 *op1_p
= TREE_OPERAND (expr
, 0);
399 else if (grhs_class
== GIMPLE_SINGLE_RHS
)
410 /* Build a GIMPLE_ASSIGN statement.
412 LHS of the assignment.
413 RHS of the assignment which can be unary or binary. */
416 gimple_build_assign_stat (tree lhs
, tree rhs MEM_STAT_DECL
)
418 enum tree_code subcode
;
421 extract_ops_from_tree_1 (rhs
, &subcode
, &op1
, &op2
, &op3
);
422 return gimple_build_assign_with_ops (subcode
, lhs
, op1
, op2
, op3
427 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
428 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
429 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
432 gimple_build_assign_with_ops (enum tree_code subcode
, tree lhs
, tree op1
,
433 tree op2
, tree op3 MEM_STAT_DECL
)
438 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
440 num_ops
= get_gimple_rhs_num_ops (subcode
) + 1;
442 p
= gimple_build_with_ops_stat (GIMPLE_ASSIGN
, (unsigned)subcode
, num_ops
444 gimple_assign_set_lhs (p
, lhs
);
445 gimple_assign_set_rhs1 (p
, op1
);
448 gcc_assert (num_ops
> 2);
449 gimple_assign_set_rhs2 (p
, op2
);
454 gcc_assert (num_ops
> 3);
455 gimple_assign_set_rhs3 (p
, op3
);
462 gimple_build_assign_with_ops (enum tree_code subcode
, tree lhs
, tree op1
,
463 tree op2 MEM_STAT_DECL
)
465 return gimple_build_assign_with_ops (subcode
, lhs
, op1
, op2
, NULL_TREE
470 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
472 DST/SRC are the destination and source respectively. You can pass
473 ungimplified trees in DST or SRC, in which case they will be
474 converted to a gimple operand if necessary.
476 This function returns the newly created GIMPLE_ASSIGN tuple. */
479 gimplify_assign (tree dst
, tree src
, gimple_seq
*seq_p
)
481 tree t
= build2 (MODIFY_EXPR
, TREE_TYPE (dst
), dst
, src
);
482 gimplify_and_add (t
, seq_p
);
484 return gimple_seq_last_stmt (*seq_p
);
488 /* Build a GIMPLE_COND statement.
490 PRED is the condition used to compare LHS and the RHS.
491 T_LABEL is the label to jump to if the condition is true.
492 F_LABEL is the label to jump to otherwise. */
495 gimple_build_cond (enum tree_code pred_code
, tree lhs
, tree rhs
,
496 tree t_label
, tree f_label
)
500 gcc_assert (TREE_CODE_CLASS (pred_code
) == tcc_comparison
);
501 p
= gimple_build_with_ops (GIMPLE_COND
, pred_code
, 4);
502 gimple_cond_set_lhs (p
, lhs
);
503 gimple_cond_set_rhs (p
, rhs
);
504 gimple_cond_set_true_label (p
, t_label
);
505 gimple_cond_set_false_label (p
, f_label
);
510 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
513 gimple_cond_get_ops_from_tree (tree cond
, enum tree_code
*code_p
,
514 tree
*lhs_p
, tree
*rhs_p
)
516 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond
)) == tcc_comparison
517 || TREE_CODE (cond
) == TRUTH_NOT_EXPR
518 || is_gimple_min_invariant (cond
)
519 || SSA_VAR_P (cond
));
521 extract_ops_from_tree (cond
, code_p
, lhs_p
, rhs_p
);
523 /* Canonicalize conditionals of the form 'if (!VAL)'. */
524 if (*code_p
== TRUTH_NOT_EXPR
)
527 gcc_assert (*lhs_p
&& *rhs_p
== NULL_TREE
);
528 *rhs_p
= build_zero_cst (TREE_TYPE (*lhs_p
));
530 /* Canonicalize conditionals of the form 'if (VAL)' */
531 else if (TREE_CODE_CLASS (*code_p
) != tcc_comparison
)
534 gcc_assert (*lhs_p
&& *rhs_p
== NULL_TREE
);
535 *rhs_p
= build_zero_cst (TREE_TYPE (*lhs_p
));
540 /* Build a GIMPLE_COND statement from the conditional expression tree
541 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
544 gimple_build_cond_from_tree (tree cond
, tree t_label
, tree f_label
)
549 gimple_cond_get_ops_from_tree (cond
, &code
, &lhs
, &rhs
);
550 return gimple_build_cond (code
, lhs
, rhs
, t_label
, f_label
);
553 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
554 boolean expression tree COND. */
557 gimple_cond_set_condition_from_tree (gimple stmt
, tree cond
)
562 gimple_cond_get_ops_from_tree (cond
, &code
, &lhs
, &rhs
);
563 gimple_cond_set_condition (stmt
, code
, lhs
, rhs
);
566 /* Build a GIMPLE_LABEL statement for LABEL. */
569 gimple_build_label (tree label
)
571 gimple p
= gimple_build_with_ops (GIMPLE_LABEL
, ERROR_MARK
, 1);
572 gimple_label_set_label (p
, label
);
576 /* Build a GIMPLE_GOTO statement to label DEST. */
579 gimple_build_goto (tree dest
)
581 gimple p
= gimple_build_with_ops (GIMPLE_GOTO
, ERROR_MARK
, 1);
582 gimple_goto_set_dest (p
, dest
);
587 /* Build a GIMPLE_NOP statement. */
590 gimple_build_nop (void)
592 return gimple_alloc (GIMPLE_NOP
, 0);
596 /* Build a GIMPLE_BIND statement.
597 VARS are the variables in BODY.
598 BLOCK is the containing block. */
601 gimple_build_bind (tree vars
, gimple_seq body
, tree block
)
603 gimple p
= gimple_alloc (GIMPLE_BIND
, 0);
604 gimple_bind_set_vars (p
, vars
);
606 gimple_bind_set_body (p
, body
);
608 gimple_bind_set_block (p
, block
);
612 /* Helper function to set the simple fields of a asm stmt.
614 STRING is a pointer to a string that is the asm blocks assembly code.
615 NINPUT is the number of register inputs.
616 NOUTPUT is the number of register outputs.
617 NCLOBBERS is the number of clobbered registers.
621 gimple_build_asm_1 (const char *string
, unsigned ninputs
, unsigned noutputs
,
622 unsigned nclobbers
, unsigned nlabels
)
625 int size
= strlen (string
);
627 /* ASMs with labels cannot have outputs. This should have been
628 enforced by the front end. */
629 gcc_assert (nlabels
== 0 || noutputs
== 0);
631 p
= gimple_build_with_ops (GIMPLE_ASM
, ERROR_MARK
,
632 ninputs
+ noutputs
+ nclobbers
+ nlabels
);
634 p
->gimple_asm
.ni
= ninputs
;
635 p
->gimple_asm
.no
= noutputs
;
636 p
->gimple_asm
.nc
= nclobbers
;
637 p
->gimple_asm
.nl
= nlabels
;
638 p
->gimple_asm
.string
= ggc_alloc_string (string
, size
);
640 if (GATHER_STATISTICS
)
641 gimple_alloc_sizes
[(int) gimple_alloc_kind (GIMPLE_ASM
)] += size
;
646 /* Build a GIMPLE_ASM statement.
648 STRING is the assembly code.
649 NINPUT is the number of register inputs.
650 NOUTPUT is the number of register outputs.
651 NCLOBBERS is the number of clobbered registers.
652 INPUTS is a vector of the input register parameters.
653 OUTPUTS is a vector of the output register parameters.
654 CLOBBERS is a vector of the clobbered register parameters.
655 LABELS is a vector of destination labels. */
658 gimple_build_asm_vec (const char *string
, vec
<tree
, va_gc
> *inputs
,
659 vec
<tree
, va_gc
> *outputs
, vec
<tree
, va_gc
> *clobbers
,
660 vec
<tree
, va_gc
> *labels
)
665 p
= gimple_build_asm_1 (string
,
666 vec_safe_length (inputs
),
667 vec_safe_length (outputs
),
668 vec_safe_length (clobbers
),
669 vec_safe_length (labels
));
671 for (i
= 0; i
< vec_safe_length (inputs
); i
++)
672 gimple_asm_set_input_op (p
, i
, (*inputs
)[i
]);
674 for (i
= 0; i
< vec_safe_length (outputs
); i
++)
675 gimple_asm_set_output_op (p
, i
, (*outputs
)[i
]);
677 for (i
= 0; i
< vec_safe_length (clobbers
); i
++)
678 gimple_asm_set_clobber_op (p
, i
, (*clobbers
)[i
]);
680 for (i
= 0; i
< vec_safe_length (labels
); i
++)
681 gimple_asm_set_label_op (p
, i
, (*labels
)[i
]);
686 /* Build a GIMPLE_CATCH statement.
688 TYPES are the catch types.
689 HANDLER is the exception handler. */
692 gimple_build_catch (tree types
, gimple_seq handler
)
694 gimple p
= gimple_alloc (GIMPLE_CATCH
, 0);
695 gimple_catch_set_types (p
, types
);
697 gimple_catch_set_handler (p
, handler
);
702 /* Build a GIMPLE_EH_FILTER statement.
704 TYPES are the filter's types.
705 FAILURE is the filter's failure action. */
708 gimple_build_eh_filter (tree types
, gimple_seq failure
)
710 gimple p
= gimple_alloc (GIMPLE_EH_FILTER
, 0);
711 gimple_eh_filter_set_types (p
, types
);
713 gimple_eh_filter_set_failure (p
, failure
);
718 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
721 gimple_build_eh_must_not_throw (tree decl
)
723 gimple p
= gimple_alloc (GIMPLE_EH_MUST_NOT_THROW
, 0);
725 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
726 gcc_assert (flags_from_decl_or_type (decl
) & ECF_NORETURN
);
727 gimple_eh_must_not_throw_set_fndecl (p
, decl
);
732 /* Build a GIMPLE_EH_ELSE statement. */
735 gimple_build_eh_else (gimple_seq n_body
, gimple_seq e_body
)
737 gimple p
= gimple_alloc (GIMPLE_EH_ELSE
, 0);
738 gimple_eh_else_set_n_body (p
, n_body
);
739 gimple_eh_else_set_e_body (p
, e_body
);
743 /* Build a GIMPLE_TRY statement.
745 EVAL is the expression to evaluate.
746 CLEANUP is the cleanup expression.
747 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
748 whether this is a try/catch or a try/finally respectively. */
751 gimple_build_try (gimple_seq eval
, gimple_seq cleanup
,
752 enum gimple_try_flags kind
)
756 gcc_assert (kind
== GIMPLE_TRY_CATCH
|| kind
== GIMPLE_TRY_FINALLY
);
757 p
= gimple_alloc (GIMPLE_TRY
, 0);
758 gimple_set_subcode (p
, kind
);
760 gimple_try_set_eval (p
, eval
);
762 gimple_try_set_cleanup (p
, cleanup
);
767 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
769 CLEANUP is the cleanup expression. */
772 gimple_build_wce (gimple_seq cleanup
)
774 gimple p
= gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR
, 0);
776 gimple_wce_set_cleanup (p
, cleanup
);
782 /* Build a GIMPLE_RESX statement. */
785 gimple_build_resx (int region
)
787 gimple p
= gimple_build_with_ops (GIMPLE_RESX
, ERROR_MARK
, 0);
788 p
->gimple_eh_ctrl
.region
= region
;
793 /* The helper for constructing a gimple switch statement.
794 INDEX is the switch's index.
795 NLABELS is the number of labels in the switch excluding the default.
796 DEFAULT_LABEL is the default label for the switch statement. */
799 gimple_build_switch_nlabels (unsigned nlabels
, tree index
, tree default_label
)
801 /* nlabels + 1 default label + 1 index. */
802 gcc_checking_assert (default_label
);
803 gimple p
= gimple_build_with_ops (GIMPLE_SWITCH
, ERROR_MARK
,
805 gimple_switch_set_index (p
, index
);
806 gimple_switch_set_default_label (p
, default_label
);
810 /* Build a GIMPLE_SWITCH statement.
812 INDEX is the switch's index.
813 DEFAULT_LABEL is the default label
814 ARGS is a vector of labels excluding the default. */
817 gimple_build_switch (tree index
, tree default_label
, vec
<tree
> args
)
819 unsigned i
, nlabels
= args
.length ();
821 gimple p
= gimple_build_switch_nlabels (nlabels
, index
, default_label
);
823 /* Copy the labels from the vector to the switch statement. */
824 for (i
= 0; i
< nlabels
; i
++)
825 gimple_switch_set_label (p
, i
+ 1, args
[i
]);
830 /* Build a GIMPLE_EH_DISPATCH statement. */
833 gimple_build_eh_dispatch (int region
)
835 gimple p
= gimple_build_with_ops (GIMPLE_EH_DISPATCH
, ERROR_MARK
, 0);
836 p
->gimple_eh_ctrl
.region
= region
;
840 /* Build a new GIMPLE_DEBUG_BIND statement.
842 VAR is bound to VALUE; block and location are taken from STMT. */
845 gimple_build_debug_bind_stat (tree var
, tree value
, gimple stmt MEM_STAT_DECL
)
847 gimple p
= gimple_build_with_ops_stat (GIMPLE_DEBUG
,
848 (unsigned)GIMPLE_DEBUG_BIND
, 2
851 gimple_debug_bind_set_var (p
, var
);
852 gimple_debug_bind_set_value (p
, value
);
854 gimple_set_location (p
, gimple_location (stmt
));
860 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
862 VAR is bound to VALUE; block and location are taken from STMT. */
865 gimple_build_debug_source_bind_stat (tree var
, tree value
,
866 gimple stmt MEM_STAT_DECL
)
868 gimple p
= gimple_build_with_ops_stat (GIMPLE_DEBUG
,
869 (unsigned)GIMPLE_DEBUG_SOURCE_BIND
, 2
872 gimple_debug_source_bind_set_var (p
, var
);
873 gimple_debug_source_bind_set_value (p
, value
);
875 gimple_set_location (p
, gimple_location (stmt
));
881 /* Build a GIMPLE_OMP_CRITICAL statement.
883 BODY is the sequence of statements for which only one thread can execute.
884 NAME is optional identifier for this critical block. */
887 gimple_build_omp_critical (gimple_seq body
, tree name
)
889 gimple p
= gimple_alloc (GIMPLE_OMP_CRITICAL
, 0);
890 gimple_omp_critical_set_name (p
, name
);
892 gimple_omp_set_body (p
, body
);
897 /* Build a GIMPLE_OMP_FOR statement.
899 BODY is sequence of statements inside the for loop.
900 KIND is the `for' variant.
901 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
902 lastprivate, reductions, ordered, schedule, and nowait.
903 COLLAPSE is the collapse count.
904 PRE_BODY is the sequence of statements that are loop invariant. */
907 gimple_build_omp_for (gimple_seq body
, int kind
, tree clauses
, size_t collapse
,
910 gimple p
= gimple_alloc (GIMPLE_OMP_FOR
, 0);
912 gimple_omp_set_body (p
, body
);
913 gimple_omp_for_set_clauses (p
, clauses
);
914 gimple_omp_for_set_kind (p
, kind
);
915 p
->gimple_omp_for
.collapse
= collapse
;
916 p
->gimple_omp_for
.iter
917 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse
);
919 gimple_omp_for_set_pre_body (p
, pre_body
);
925 /* Build a GIMPLE_OMP_PARALLEL statement.
927 BODY is sequence of statements which are executed in parallel.
928 CLAUSES, are the OMP parallel construct's clauses.
929 CHILD_FN is the function created for the parallel threads to execute.
930 DATA_ARG are the shared data argument(s). */
933 gimple_build_omp_parallel (gimple_seq body
, tree clauses
, tree child_fn
,
936 gimple p
= gimple_alloc (GIMPLE_OMP_PARALLEL
, 0);
938 gimple_omp_set_body (p
, body
);
939 gimple_omp_parallel_set_clauses (p
, clauses
);
940 gimple_omp_parallel_set_child_fn (p
, child_fn
);
941 gimple_omp_parallel_set_data_arg (p
, data_arg
);
947 /* Build a GIMPLE_OMP_TASK statement.
949 BODY is sequence of statements which are executed by the explicit task.
950 CLAUSES, are the OMP parallel construct's clauses.
951 CHILD_FN is the function created for the parallel threads to execute.
952 DATA_ARG are the shared data argument(s).
953 COPY_FN is the optional function for firstprivate initialization.
954 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
957 gimple_build_omp_task (gimple_seq body
, tree clauses
, tree child_fn
,
958 tree data_arg
, tree copy_fn
, tree arg_size
,
961 gimple p
= gimple_alloc (GIMPLE_OMP_TASK
, 0);
963 gimple_omp_set_body (p
, body
);
964 gimple_omp_task_set_clauses (p
, clauses
);
965 gimple_omp_task_set_child_fn (p
, child_fn
);
966 gimple_omp_task_set_data_arg (p
, data_arg
);
967 gimple_omp_task_set_copy_fn (p
, copy_fn
);
968 gimple_omp_task_set_arg_size (p
, arg_size
);
969 gimple_omp_task_set_arg_align (p
, arg_align
);
975 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
977 BODY is the sequence of statements in the section. */
980 gimple_build_omp_section (gimple_seq body
)
982 gimple p
= gimple_alloc (GIMPLE_OMP_SECTION
, 0);
984 gimple_omp_set_body (p
, body
);
990 /* Build a GIMPLE_OMP_MASTER statement.
992 BODY is the sequence of statements to be executed by just the master. */
995 gimple_build_omp_master (gimple_seq body
)
997 gimple p
= gimple_alloc (GIMPLE_OMP_MASTER
, 0);
999 gimple_omp_set_body (p
, body
);
1005 /* Build a GIMPLE_OMP_TASKGROUP statement.
1007 BODY is the sequence of statements to be executed by the taskgroup
1011 gimple_build_omp_taskgroup (gimple_seq body
)
1013 gimple p
= gimple_alloc (GIMPLE_OMP_TASKGROUP
, 0);
1015 gimple_omp_set_body (p
, body
);
1021 /* Build a GIMPLE_OMP_CONTINUE statement.
1023 CONTROL_DEF is the definition of the control variable.
1024 CONTROL_USE is the use of the control variable. */
1027 gimple_build_omp_continue (tree control_def
, tree control_use
)
1029 gimple p
= gimple_alloc (GIMPLE_OMP_CONTINUE
, 0);
1030 gimple_omp_continue_set_control_def (p
, control_def
);
1031 gimple_omp_continue_set_control_use (p
, control_use
);
1035 /* Build a GIMPLE_OMP_ORDERED statement.
1037 BODY is the sequence of statements inside a loop that will executed in
1041 gimple_build_omp_ordered (gimple_seq body
)
1043 gimple p
= gimple_alloc (GIMPLE_OMP_ORDERED
, 0);
1045 gimple_omp_set_body (p
, body
);
1051 /* Build a GIMPLE_OMP_RETURN statement.
1052 WAIT_P is true if this is a non-waiting return. */
1055 gimple_build_omp_return (bool wait_p
)
1057 gimple p
= gimple_alloc (GIMPLE_OMP_RETURN
, 0);
1059 gimple_omp_return_set_nowait (p
);
1065 /* Build a GIMPLE_OMP_SECTIONS statement.
1067 BODY is a sequence of section statements.
1068 CLAUSES are any of the OMP sections contsruct's clauses: private,
1069 firstprivate, lastprivate, reduction, and nowait. */
1072 gimple_build_omp_sections (gimple_seq body
, tree clauses
)
1074 gimple p
= gimple_alloc (GIMPLE_OMP_SECTIONS
, 0);
1076 gimple_omp_set_body (p
, body
);
1077 gimple_omp_sections_set_clauses (p
, clauses
);
1083 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1086 gimple_build_omp_sections_switch (void)
1088 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH
, 0);
1092 /* Build a GIMPLE_OMP_SINGLE statement.
1094 BODY is the sequence of statements that will be executed once.
1095 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1096 copyprivate, nowait. */
1099 gimple_build_omp_single (gimple_seq body
, tree clauses
)
1101 gimple p
= gimple_alloc (GIMPLE_OMP_SINGLE
, 0);
1103 gimple_omp_set_body (p
, body
);
1104 gimple_omp_single_set_clauses (p
, clauses
);
1110 /* Build a GIMPLE_OMP_TARGET statement.
1112 BODY is the sequence of statements that will be executed.
1113 CLAUSES are any of the OMP target construct's clauses. */
1116 gimple_build_omp_target (gimple_seq body
, int kind
, tree clauses
)
1118 gimple p
= gimple_alloc (GIMPLE_OMP_TARGET
, 0);
1120 gimple_omp_set_body (p
, body
);
1121 gimple_omp_target_set_clauses (p
, clauses
);
1122 gimple_omp_target_set_kind (p
, kind
);
1128 /* Build a GIMPLE_OMP_TEAMS statement.
1130 BODY is the sequence of statements that will be executed.
1131 CLAUSES are any of the OMP teams construct's clauses. */
1134 gimple_build_omp_teams (gimple_seq body
, tree clauses
)
1136 gimple p
= gimple_alloc (GIMPLE_OMP_TEAMS
, 0);
1138 gimple_omp_set_body (p
, body
);
1139 gimple_omp_teams_set_clauses (p
, clauses
);
1145 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1148 gimple_build_omp_atomic_load (tree lhs
, tree rhs
)
1150 gimple p
= gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD
, 0);
1151 gimple_omp_atomic_load_set_lhs (p
, lhs
);
1152 gimple_omp_atomic_load_set_rhs (p
, rhs
);
1156 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1158 VAL is the value we are storing. */
1161 gimple_build_omp_atomic_store (tree val
)
1163 gimple p
= gimple_alloc (GIMPLE_OMP_ATOMIC_STORE
, 0);
1164 gimple_omp_atomic_store_set_val (p
, val
);
1168 /* Build a GIMPLE_TRANSACTION statement. */
1171 gimple_build_transaction (gimple_seq body
, tree label
)
1173 gimple p
= gimple_alloc (GIMPLE_TRANSACTION
, 0);
1174 gimple_transaction_set_body (p
, body
);
1175 gimple_transaction_set_label (p
, label
);
1179 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1180 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1183 gimple_build_predict (enum br_predictor predictor
, enum prediction outcome
)
1185 gimple p
= gimple_alloc (GIMPLE_PREDICT
, 0);
1186 /* Ensure all the predictors fit into the lower bits of the subcode. */
1187 gcc_assert ((int) END_PREDICTORS
<= GF_PREDICT_TAKEN
);
1188 gimple_predict_set_predictor (p
, predictor
);
1189 gimple_predict_set_outcome (p
, outcome
);
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->gsbase.subcode)
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1213 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1214 *SEQ_P is NULL, a new sequence is allocated. */
1217 gimple_seq_add_stmt (gimple_seq
*seq_p
, gimple gs
)
1219 gimple_stmt_iterator si
;
1223 si
= gsi_last (*seq_p
);
1224 gsi_insert_after (&si
, gs
, GSI_NEW_STMT
);
1228 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1229 NULL, a new sequence is allocated. */
1232 gimple_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
1234 gimple_stmt_iterator si
;
1238 si
= gsi_last (*dst_p
);
1239 gsi_insert_seq_after (&si
, src
, GSI_NEW_STMT
);
1243 /* Helper function of empty_body_p. Return true if STMT is an empty
1247 empty_stmt_p (gimple stmt
)
1249 if (gimple_code (stmt
) == GIMPLE_NOP
)
1251 if (gimple_code (stmt
) == GIMPLE_BIND
)
1252 return empty_body_p (gimple_bind_body (stmt
));
1257 /* Return true if BODY contains nothing but empty statements. */
1260 empty_body_p (gimple_seq body
)
1262 gimple_stmt_iterator i
;
1264 if (gimple_seq_empty_p (body
))
1266 for (i
= gsi_start (body
); !gsi_end_p (i
); gsi_next (&i
))
1267 if (!empty_stmt_p (gsi_stmt (i
))
1268 && !is_gimple_debug (gsi_stmt (i
)))
1275 /* Perform a deep copy of sequence SRC and return the result. */
1278 gimple_seq_copy (gimple_seq src
)
1280 gimple_stmt_iterator gsi
;
1281 gimple_seq new_seq
= NULL
;
1284 for (gsi
= gsi_start (src
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1286 stmt
= gimple_copy (gsi_stmt (gsi
));
1287 gimple_seq_add_stmt (&new_seq
, stmt
);
1294 /* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
1295 on each one. WI is as in walk_gimple_stmt.
1297 If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
1298 value is stored in WI->CALLBACK_RESULT. Also, the statement that
1299 produced the value is returned if this statement has not been
1300 removed by a callback (wi->removed_stmt). If the statement has
1301 been removed, NULL is returned.
1303 Otherwise, all the statements are walked and NULL returned. */
1306 walk_gimple_seq_mod (gimple_seq
*pseq
, walk_stmt_fn callback_stmt
,
1307 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
1309 gimple_stmt_iterator gsi
;
1311 for (gsi
= gsi_start (*pseq
); !gsi_end_p (gsi
); )
1313 tree ret
= walk_gimple_stmt (&gsi
, callback_stmt
, callback_op
, wi
);
1316 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1319 wi
->callback_result
= ret
;
1321 return wi
->removed_stmt
? NULL
: gsi_stmt (gsi
);
1324 if (!wi
->removed_stmt
)
1329 wi
->callback_result
= NULL_TREE
;
1335 /* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
1336 changed by the callbacks. */
1339 walk_gimple_seq (gimple_seq seq
, walk_stmt_fn callback_stmt
,
1340 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
1342 gimple_seq seq2
= seq
;
1343 gimple ret
= walk_gimple_seq_mod (&seq2
, callback_stmt
, callback_op
, wi
);
1344 gcc_assert (seq2
== seq
);
1349 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1352 walk_gimple_asm (gimple stmt
, walk_tree_fn callback_op
,
1353 struct walk_stmt_info
*wi
)
1357 const char **oconstraints
;
1359 const char *constraint
;
1360 bool allows_mem
, allows_reg
, is_inout
;
1362 noutputs
= gimple_asm_noutputs (stmt
);
1363 oconstraints
= (const char **) alloca ((noutputs
) * sizeof (const char *));
1368 for (i
= 0; i
< noutputs
; i
++)
1370 op
= gimple_asm_output_op (stmt
, i
);
1371 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op
)));
1372 oconstraints
[i
] = constraint
;
1373 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
, &allows_reg
,
1376 wi
->val_only
= (allows_reg
|| !allows_mem
);
1377 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
1382 n
= gimple_asm_ninputs (stmt
);
1383 for (i
= 0; i
< n
; i
++)
1385 op
= gimple_asm_input_op (stmt
, i
);
1386 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op
)));
1387 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1388 oconstraints
, &allows_mem
, &allows_reg
);
1391 wi
->val_only
= (allows_reg
|| !allows_mem
);
1392 /* Although input "m" is not really a LHS, we need a lvalue. */
1393 wi
->is_lhs
= !wi
->val_only
;
1395 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
1403 wi
->val_only
= true;
1406 n
= gimple_asm_nlabels (stmt
);
1407 for (i
= 0; i
< n
; i
++)
1409 op
= gimple_asm_label_op (stmt
, i
);
1410 ret
= walk_tree (&TREE_VALUE (op
), callback_op
, wi
, NULL
);
1419 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1420 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1422 CALLBACK_OP is called on each operand of STMT via walk_tree.
1423 Additional parameters to walk_tree must be stored in WI. For each operand
1424 OP, walk_tree is called as:
1426 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1428 If CALLBACK_OP returns non-NULL for an operand, the remaining
1429 operands are not scanned.
1431 The return value is that returned by the last call to walk_tree, or
1432 NULL_TREE if no CALLBACK_OP is specified. */
1435 walk_gimple_op (gimple stmt
, walk_tree_fn callback_op
,
1436 struct walk_stmt_info
*wi
)
1438 struct pointer_set_t
*pset
= (wi
) ? wi
->pset
: NULL
;
1440 tree ret
= NULL_TREE
;
1442 switch (gimple_code (stmt
))
1445 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1446 is a register variable, we may use a COMPONENT_REF on the RHS. */
1449 tree lhs
= gimple_assign_lhs (stmt
);
1451 = (is_gimple_reg_type (TREE_TYPE (lhs
)) && !is_gimple_reg (lhs
))
1452 || gimple_assign_rhs_class (stmt
) != GIMPLE_SINGLE_RHS
;
1455 for (i
= 1; i
< gimple_num_ops (stmt
); i
++)
1457 ret
= walk_tree (gimple_op_ptr (stmt
, i
), callback_op
, wi
,
1463 /* Walk the LHS. If the RHS is appropriate for a memory, we
1464 may use a COMPONENT_REF on the LHS. */
1467 /* If the RHS is of a non-renamable type or is a register variable,
1468 we may use a COMPONENT_REF on the LHS. */
1469 tree rhs1
= gimple_assign_rhs1 (stmt
);
1471 = (is_gimple_reg_type (TREE_TYPE (rhs1
)) && !is_gimple_reg (rhs1
))
1472 || gimple_assign_rhs_class (stmt
) != GIMPLE_SINGLE_RHS
;
1476 ret
= walk_tree (gimple_op_ptr (stmt
, 0), callback_op
, wi
, pset
);
1482 wi
->val_only
= true;
1491 wi
->val_only
= true;
1494 ret
= walk_tree (gimple_call_chain_ptr (stmt
), callback_op
, wi
, pset
);
1498 ret
= walk_tree (gimple_call_fn_ptr (stmt
), callback_op
, wi
, pset
);
1502 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
1506 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt
, i
)));
1507 ret
= walk_tree (gimple_call_arg_ptr (stmt
, i
), callback_op
, wi
,
1513 if (gimple_call_lhs (stmt
))
1519 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt
)));
1522 ret
= walk_tree (gimple_call_lhs_ptr (stmt
), callback_op
, wi
, pset
);
1530 wi
->val_only
= true;
1535 ret
= walk_tree (gimple_catch_types_ptr (stmt
), callback_op
, wi
,
1541 case GIMPLE_EH_FILTER
:
1542 ret
= walk_tree (gimple_eh_filter_types_ptr (stmt
), callback_op
, wi
,
1549 ret
= walk_gimple_asm (stmt
, callback_op
, wi
);
1554 case GIMPLE_OMP_CONTINUE
:
1555 ret
= walk_tree (gimple_omp_continue_control_def_ptr (stmt
),
1556 callback_op
, wi
, pset
);
1560 ret
= walk_tree (gimple_omp_continue_control_use_ptr (stmt
),
1561 callback_op
, wi
, pset
);
1566 case GIMPLE_OMP_CRITICAL
:
1567 ret
= walk_tree (gimple_omp_critical_name_ptr (stmt
), callback_op
, wi
,
1573 case GIMPLE_OMP_FOR
:
1574 ret
= walk_tree (gimple_omp_for_clauses_ptr (stmt
), callback_op
, wi
,
1578 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1580 ret
= walk_tree (gimple_omp_for_index_ptr (stmt
, i
), callback_op
,
1584 ret
= walk_tree (gimple_omp_for_initial_ptr (stmt
, i
), callback_op
,
1588 ret
= walk_tree (gimple_omp_for_final_ptr (stmt
, i
), callback_op
,
1592 ret
= walk_tree (gimple_omp_for_incr_ptr (stmt
, i
), callback_op
,
1599 case GIMPLE_OMP_PARALLEL
:
1600 ret
= walk_tree (gimple_omp_parallel_clauses_ptr (stmt
), callback_op
,
1604 ret
= walk_tree (gimple_omp_parallel_child_fn_ptr (stmt
), callback_op
,
1608 ret
= walk_tree (gimple_omp_parallel_data_arg_ptr (stmt
), callback_op
,
1614 case GIMPLE_OMP_TASK
:
1615 ret
= walk_tree (gimple_omp_task_clauses_ptr (stmt
), callback_op
,
1619 ret
= walk_tree (gimple_omp_task_child_fn_ptr (stmt
), callback_op
,
1623 ret
= walk_tree (gimple_omp_task_data_arg_ptr (stmt
), callback_op
,
1627 ret
= walk_tree (gimple_omp_task_copy_fn_ptr (stmt
), callback_op
,
1631 ret
= walk_tree (gimple_omp_task_arg_size_ptr (stmt
), callback_op
,
1635 ret
= walk_tree (gimple_omp_task_arg_align_ptr (stmt
), callback_op
,
1641 case GIMPLE_OMP_SECTIONS
:
1642 ret
= walk_tree (gimple_omp_sections_clauses_ptr (stmt
), callback_op
,
1647 ret
= walk_tree (gimple_omp_sections_control_ptr (stmt
), callback_op
,
1654 case GIMPLE_OMP_SINGLE
:
1655 ret
= walk_tree (gimple_omp_single_clauses_ptr (stmt
), callback_op
, wi
,
1661 case GIMPLE_OMP_TARGET
:
1662 ret
= walk_tree (gimple_omp_target_clauses_ptr (stmt
), callback_op
, wi
,
1668 case GIMPLE_OMP_TEAMS
:
1669 ret
= walk_tree (gimple_omp_teams_clauses_ptr (stmt
), callback_op
, wi
,
1675 case GIMPLE_OMP_ATOMIC_LOAD
:
1676 ret
= walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt
), callback_op
, wi
,
1681 ret
= walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt
), callback_op
, wi
,
1687 case GIMPLE_OMP_ATOMIC_STORE
:
1688 ret
= walk_tree (gimple_omp_atomic_store_val_ptr (stmt
), callback_op
,
1694 case GIMPLE_TRANSACTION
:
1695 ret
= walk_tree (gimple_transaction_label_ptr (stmt
), callback_op
,
1701 case GIMPLE_OMP_RETURN
:
1702 ret
= walk_tree (gimple_omp_return_lhs_ptr (stmt
), callback_op
, wi
,
1708 /* Tuples that do not have operands. */
1711 case GIMPLE_PREDICT
:
1716 enum gimple_statement_structure_enum gss
;
1717 gss
= gimple_statement_structure (stmt
);
1718 if (gss
== GSS_WITH_OPS
|| gss
== GSS_WITH_MEM_OPS
)
1719 for (i
= 0; i
< gimple_num_ops (stmt
); i
++)
1721 ret
= walk_tree (gimple_op_ptr (stmt
, i
), callback_op
, wi
, pset
);
1733 /* Walk the current statement in GSI (optionally using traversal state
1734 stored in WI). If WI is NULL, no state is kept during traversal.
1735 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1736 that it has handled all the operands of the statement, its return
1737 value is returned. Otherwise, the return value from CALLBACK_STMT
1738 is discarded and its operands are scanned.
1740 If CALLBACK_STMT is NULL or it didn't handle the operands,
1741 CALLBACK_OP is called on each operand of the statement via
1742 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1743 operand, the remaining operands are not scanned. In this case, the
1744 return value from CALLBACK_OP is returned.
1746 In any other case, NULL_TREE is returned. */
1749 walk_gimple_stmt (gimple_stmt_iterator
*gsi
, walk_stmt_fn callback_stmt
,
1750 walk_tree_fn callback_op
, struct walk_stmt_info
*wi
)
1754 gimple stmt
= gsi_stmt (*gsi
);
1759 wi
->removed_stmt
= false;
1761 if (wi
->want_locations
&& gimple_has_location (stmt
))
1762 input_location
= gimple_location (stmt
);
1767 /* Invoke the statement callback. Return if the callback handled
1768 all of STMT operands by itself. */
1771 bool handled_ops
= false;
1772 tree_ret
= callback_stmt (gsi
, &handled_ops
, wi
);
1776 /* If CALLBACK_STMT did not handle operands, it should not have
1777 a value to return. */
1778 gcc_assert (tree_ret
== NULL
);
1780 if (wi
&& wi
->removed_stmt
)
1783 /* Re-read stmt in case the callback changed it. */
1784 stmt
= gsi_stmt (*gsi
);
1787 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1790 tree_ret
= walk_gimple_op (stmt
, callback_op
, wi
);
1795 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1796 switch (gimple_code (stmt
))
1799 ret
= walk_gimple_seq_mod (gimple_bind_body_ptr (stmt
), callback_stmt
,
1802 return wi
->callback_result
;
1806 ret
= walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt
), callback_stmt
,
1809 return wi
->callback_result
;
1812 case GIMPLE_EH_FILTER
:
1813 ret
= walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt
), callback_stmt
,
1816 return wi
->callback_result
;
1819 case GIMPLE_EH_ELSE
:
1820 ret
= walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt
),
1821 callback_stmt
, callback_op
, wi
);
1823 return wi
->callback_result
;
1824 ret
= walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt
),
1825 callback_stmt
, callback_op
, wi
);
1827 return wi
->callback_result
;
1831 ret
= walk_gimple_seq_mod (gimple_try_eval_ptr (stmt
), callback_stmt
, callback_op
,
1834 return wi
->callback_result
;
1836 ret
= walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt
), callback_stmt
,
1839 return wi
->callback_result
;
1842 case GIMPLE_OMP_FOR
:
1843 ret
= walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt
), callback_stmt
,
1846 return wi
->callback_result
;
1849 case GIMPLE_OMP_CRITICAL
:
1850 case GIMPLE_OMP_MASTER
:
1851 case GIMPLE_OMP_TASKGROUP
:
1852 case GIMPLE_OMP_ORDERED
:
1853 case GIMPLE_OMP_SECTION
:
1854 case GIMPLE_OMP_PARALLEL
:
1855 case GIMPLE_OMP_TASK
:
1856 case GIMPLE_OMP_SECTIONS
:
1857 case GIMPLE_OMP_SINGLE
:
1858 case GIMPLE_OMP_TARGET
:
1859 case GIMPLE_OMP_TEAMS
:
1860 ret
= walk_gimple_seq_mod (gimple_omp_body_ptr (stmt
), callback_stmt
,
1863 return wi
->callback_result
;
1866 case GIMPLE_WITH_CLEANUP_EXPR
:
1867 ret
= walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt
), callback_stmt
,
1870 return wi
->callback_result
;
1873 case GIMPLE_TRANSACTION
:
1874 ret
= walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt
),
1875 callback_stmt
, callback_op
, wi
);
1877 return wi
->callback_result
;
1881 gcc_assert (!gimple_has_substatements (stmt
));
1889 /* Set sequence SEQ to be the GIMPLE body for function FN. */
1892 gimple_set_body (tree fndecl
, gimple_seq seq
)
1894 struct function
*fn
= DECL_STRUCT_FUNCTION (fndecl
);
1897 /* If FNDECL still does not have a function structure associated
1898 with it, then it does not make sense for it to receive a
1900 gcc_assert (seq
== NULL
);
1903 fn
->gimple_body
= seq
;
1907 /* Return the body of GIMPLE statements for function FN. After the
1908 CFG pass, the function body doesn't exist anymore because it has
1909 been split up into basic blocks. In this case, it returns
1913 gimple_body (tree fndecl
)
1915 struct function
*fn
= DECL_STRUCT_FUNCTION (fndecl
);
1916 return fn
? fn
->gimple_body
: NULL
;
1919 /* Return true when FNDECL has Gimple body either in unlowered
1922 gimple_has_body_p (tree fndecl
)
1924 struct function
*fn
= DECL_STRUCT_FUNCTION (fndecl
);
1925 return (gimple_body (fndecl
) || (fn
&& fn
->cfg
));
1928 /* Return true if calls C1 and C2 are known to go to the same function. */
1931 gimple_call_same_target_p (const_gimple c1
, const_gimple c2
)
1933 if (gimple_call_internal_p (c1
))
1934 return (gimple_call_internal_p (c2
)
1935 && gimple_call_internal_fn (c1
) == gimple_call_internal_fn (c2
));
1937 return (gimple_call_fn (c1
) == gimple_call_fn (c2
)
1938 || (gimple_call_fndecl (c1
)
1939 && gimple_call_fndecl (c1
) == gimple_call_fndecl (c2
)));
1942 /* Detect flags from a GIMPLE_CALL. This is just like
1943 call_expr_flags, but for gimple tuples. */
1946 gimple_call_flags (const_gimple stmt
)
1949 tree decl
= gimple_call_fndecl (stmt
);
1952 flags
= flags_from_decl_or_type (decl
);
1953 else if (gimple_call_internal_p (stmt
))
1954 flags
= internal_fn_flags (gimple_call_internal_fn (stmt
));
1956 flags
= flags_from_decl_or_type (gimple_call_fntype (stmt
));
1958 if (stmt
->gsbase
.subcode
& GF_CALL_NOTHROW
)
1959 flags
|= ECF_NOTHROW
;
1964 /* Return the "fn spec" string for call STMT. */
1967 gimple_call_fnspec (const_gimple stmt
)
1971 type
= gimple_call_fntype (stmt
);
1975 attr
= lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type
));
1979 return TREE_VALUE (TREE_VALUE (attr
));
1982 /* Detects argument flags for argument number ARG on call STMT. */
1985 gimple_call_arg_flags (const_gimple stmt
, unsigned arg
)
1987 tree attr
= gimple_call_fnspec (stmt
);
1989 if (!attr
|| 1 + arg
>= (unsigned) TREE_STRING_LENGTH (attr
))
1992 switch (TREE_STRING_POINTER (attr
)[1 + arg
])
1999 return EAF_DIRECT
| EAF_NOCLOBBER
| EAF_NOESCAPE
;
2002 return EAF_NOCLOBBER
| EAF_NOESCAPE
;
2005 return EAF_DIRECT
| EAF_NOESCAPE
;
2008 return EAF_NOESCAPE
;
2016 /* Detects return flags for the call STMT. */
2019 gimple_call_return_flags (const_gimple stmt
)
2023 if (gimple_call_flags (stmt
) & ECF_MALLOC
)
2026 attr
= gimple_call_fnspec (stmt
);
2027 if (!attr
|| TREE_STRING_LENGTH (attr
) < 1)
2030 switch (TREE_STRING_POINTER (attr
)[0])
2036 return ERF_RETURNS_ARG
| (TREE_STRING_POINTER (attr
)[0] - '1');
2048 /* Return true if GS is a copy assignment. */
2051 gimple_assign_copy_p (gimple gs
)
2053 return (gimple_assign_single_p (gs
)
2054 && is_gimple_val (gimple_op (gs
, 1)));
2058 /* Return true if GS is a SSA_NAME copy assignment. */
2061 gimple_assign_ssa_name_copy_p (gimple gs
)
2063 return (gimple_assign_single_p (gs
)
2064 && TREE_CODE (gimple_assign_lhs (gs
)) == SSA_NAME
2065 && TREE_CODE (gimple_assign_rhs1 (gs
)) == SSA_NAME
);
2069 /* Return true if GS is an assignment with a unary RHS, but the
2070 operator has no effect on the assigned value. The logic is adapted
2071 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2072 instances in which STRIP_NOPS was previously applied to the RHS of
2075 NOTE: In the use cases that led to the creation of this function
2076 and of gimple_assign_single_p, it is typical to test for either
2077 condition and to proceed in the same manner. In each case, the
2078 assigned value is represented by the single RHS operand of the
2079 assignment. I suspect there may be cases where gimple_assign_copy_p,
2080 gimple_assign_single_p, or equivalent logic is used where a similar
2081 treatment of unary NOPs is appropriate. */
2084 gimple_assign_unary_nop_p (gimple gs
)
2086 return (is_gimple_assign (gs
)
2087 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs
))
2088 || gimple_assign_rhs_code (gs
) == NON_LVALUE_EXPR
)
2089 && gimple_assign_rhs1 (gs
) != error_mark_node
2090 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs
)))
2091 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs
)))));
2094 /* Set BB to be the basic block holding G. */
2097 gimple_set_bb (gimple stmt
, basic_block bb
)
2099 stmt
->gsbase
.bb
= bb
;
2101 /* If the statement is a label, add the label to block-to-labels map
2102 so that we can speed up edge creation for GIMPLE_GOTOs. */
2103 if (cfun
->cfg
&& gimple_code (stmt
) == GIMPLE_LABEL
)
2108 t
= gimple_label_label (stmt
);
2109 uid
= LABEL_DECL_UID (t
);
2112 unsigned old_len
= vec_safe_length (label_to_block_map
);
2113 LABEL_DECL_UID (t
) = uid
= cfun
->cfg
->last_label_uid
++;
2114 if (old_len
<= (unsigned) uid
)
2116 unsigned new_len
= 3 * uid
/ 2 + 1;
2118 vec_safe_grow_cleared (label_to_block_map
, new_len
);
2122 (*label_to_block_map
)[uid
] = bb
;
2127 /* Modify the RHS of the assignment pointed-to by GSI using the
2128 operands in the expression tree EXPR.
2130 NOTE: The statement pointed-to by GSI may be reallocated if it
2131 did not have enough operand slots.
2133 This function is useful to convert an existing tree expression into
2134 the flat representation used for the RHS of a GIMPLE assignment.
2135 It will reallocate memory as needed to expand or shrink the number
2136 of operand slots needed to represent EXPR.
2138 NOTE: If you find yourself building a tree and then calling this
2139 function, you are most certainly doing it the slow way. It is much
2140 better to build a new assignment or to use the function
2141 gimple_assign_set_rhs_with_ops, which does not require an
2142 expression tree to be built. */
2145 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator
*gsi
, tree expr
)
2147 enum tree_code subcode
;
2150 extract_ops_from_tree_1 (expr
, &subcode
, &op1
, &op2
, &op3
);
2151 gimple_assign_set_rhs_with_ops_1 (gsi
, subcode
, op1
, op2
, op3
);
2155 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2156 operands OP1, OP2 and OP3.
2158 NOTE: The statement pointed-to by GSI may be reallocated if it
2159 did not have enough operand slots. */
2162 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
2163 tree op1
, tree op2
, tree op3
)
2165 unsigned new_rhs_ops
= get_gimple_rhs_num_ops (code
);
2166 gimple stmt
= gsi_stmt (*gsi
);
2168 /* If the new CODE needs more operands, allocate a new statement. */
2169 if (gimple_num_ops (stmt
) < new_rhs_ops
+ 1)
2171 tree lhs
= gimple_assign_lhs (stmt
);
2172 gimple new_stmt
= gimple_alloc (gimple_code (stmt
), new_rhs_ops
+ 1);
2173 memcpy (new_stmt
, stmt
, gimple_size (gimple_code (stmt
)));
2174 gimple_init_singleton (new_stmt
);
2175 gsi_replace (gsi
, new_stmt
, true);
2178 /* The LHS needs to be reset as this also changes the SSA name
2180 gimple_assign_set_lhs (stmt
, lhs
);
2183 gimple_set_num_ops (stmt
, new_rhs_ops
+ 1);
2184 gimple_set_subcode (stmt
, code
);
2185 gimple_assign_set_rhs1 (stmt
, op1
);
2186 if (new_rhs_ops
> 1)
2187 gimple_assign_set_rhs2 (stmt
, op2
);
2188 if (new_rhs_ops
> 2)
2189 gimple_assign_set_rhs3 (stmt
, op3
);
2193 /* Return the LHS of a statement that performs an assignment,
2194 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2195 for a call to a function that returns no value, or for a
2196 statement other than an assignment or a call. */
2199 gimple_get_lhs (const_gimple stmt
)
2201 enum gimple_code code
= gimple_code (stmt
);
2203 if (code
== GIMPLE_ASSIGN
)
2204 return gimple_assign_lhs (stmt
);
2205 else if (code
== GIMPLE_CALL
)
2206 return gimple_call_lhs (stmt
);
2212 /* Set the LHS of a statement that performs an assignment,
2213 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2216 gimple_set_lhs (gimple stmt
, tree lhs
)
2218 enum gimple_code code
= gimple_code (stmt
);
2220 if (code
== GIMPLE_ASSIGN
)
2221 gimple_assign_set_lhs (stmt
, lhs
);
2222 else if (code
== GIMPLE_CALL
)
2223 gimple_call_set_lhs (stmt
, lhs
);
2229 /* Return a deep copy of statement STMT. All the operands from STMT
2230 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2231 and VUSE operand arrays are set to empty in the new copy. The new
2232 copy isn't part of any sequence. */
2235 gimple_copy (gimple stmt
)
2237 enum gimple_code code
= gimple_code (stmt
);
2238 unsigned num_ops
= gimple_num_ops (stmt
);
2239 gimple copy
= gimple_alloc (code
, num_ops
);
2242 /* Shallow copy all the fields from STMT. */
2243 memcpy (copy
, stmt
, gimple_size (code
));
2244 gimple_init_singleton (copy
);
2246 /* If STMT has sub-statements, deep-copy them as well. */
2247 if (gimple_has_substatements (stmt
))
2252 switch (gimple_code (stmt
))
2255 new_seq
= gimple_seq_copy (gimple_bind_body (stmt
));
2256 gimple_bind_set_body (copy
, new_seq
);
2257 gimple_bind_set_vars (copy
, unshare_expr (gimple_bind_vars (stmt
)));
2258 gimple_bind_set_block (copy
, gimple_bind_block (stmt
));
2262 new_seq
= gimple_seq_copy (gimple_catch_handler (stmt
));
2263 gimple_catch_set_handler (copy
, new_seq
);
2264 t
= unshare_expr (gimple_catch_types (stmt
));
2265 gimple_catch_set_types (copy
, t
);
2268 case GIMPLE_EH_FILTER
:
2269 new_seq
= gimple_seq_copy (gimple_eh_filter_failure (stmt
));
2270 gimple_eh_filter_set_failure (copy
, new_seq
);
2271 t
= unshare_expr (gimple_eh_filter_types (stmt
));
2272 gimple_eh_filter_set_types (copy
, t
);
2275 case GIMPLE_EH_ELSE
:
2276 new_seq
= gimple_seq_copy (gimple_eh_else_n_body (stmt
));
2277 gimple_eh_else_set_n_body (copy
, new_seq
);
2278 new_seq
= gimple_seq_copy (gimple_eh_else_e_body (stmt
));
2279 gimple_eh_else_set_e_body (copy
, new_seq
);
2283 new_seq
= gimple_seq_copy (gimple_try_eval (stmt
));
2284 gimple_try_set_eval (copy
, new_seq
);
2285 new_seq
= gimple_seq_copy (gimple_try_cleanup (stmt
));
2286 gimple_try_set_cleanup (copy
, new_seq
);
2289 case GIMPLE_OMP_FOR
:
2290 new_seq
= gimple_seq_copy (gimple_omp_for_pre_body (stmt
));
2291 gimple_omp_for_set_pre_body (copy
, new_seq
);
2292 t
= unshare_expr (gimple_omp_for_clauses (stmt
));
2293 gimple_omp_for_set_clauses (copy
, t
);
2294 copy
->gimple_omp_for
.iter
2295 = ggc_alloc_vec_gimple_omp_for_iter
2296 (gimple_omp_for_collapse (stmt
));
2297 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
2299 gimple_omp_for_set_cond (copy
, i
,
2300 gimple_omp_for_cond (stmt
, i
));
2301 gimple_omp_for_set_index (copy
, i
,
2302 gimple_omp_for_index (stmt
, i
));
2303 t
= unshare_expr (gimple_omp_for_initial (stmt
, i
));
2304 gimple_omp_for_set_initial (copy
, i
, t
);
2305 t
= unshare_expr (gimple_omp_for_final (stmt
, i
));
2306 gimple_omp_for_set_final (copy
, i
, t
);
2307 t
= unshare_expr (gimple_omp_for_incr (stmt
, i
));
2308 gimple_omp_for_set_incr (copy
, i
, t
);
2312 case GIMPLE_OMP_PARALLEL
:
2313 t
= unshare_expr (gimple_omp_parallel_clauses (stmt
));
2314 gimple_omp_parallel_set_clauses (copy
, t
);
2315 t
= unshare_expr (gimple_omp_parallel_child_fn (stmt
));
2316 gimple_omp_parallel_set_child_fn (copy
, t
);
2317 t
= unshare_expr (gimple_omp_parallel_data_arg (stmt
));
2318 gimple_omp_parallel_set_data_arg (copy
, t
);
2321 case GIMPLE_OMP_TASK
:
2322 t
= unshare_expr (gimple_omp_task_clauses (stmt
));
2323 gimple_omp_task_set_clauses (copy
, t
);
2324 t
= unshare_expr (gimple_omp_task_child_fn (stmt
));
2325 gimple_omp_task_set_child_fn (copy
, t
);
2326 t
= unshare_expr (gimple_omp_task_data_arg (stmt
));
2327 gimple_omp_task_set_data_arg (copy
, t
);
2328 t
= unshare_expr (gimple_omp_task_copy_fn (stmt
));
2329 gimple_omp_task_set_copy_fn (copy
, t
);
2330 t
= unshare_expr (gimple_omp_task_arg_size (stmt
));
2331 gimple_omp_task_set_arg_size (copy
, t
);
2332 t
= unshare_expr (gimple_omp_task_arg_align (stmt
));
2333 gimple_omp_task_set_arg_align (copy
, t
);
2336 case GIMPLE_OMP_CRITICAL
:
2337 t
= unshare_expr (gimple_omp_critical_name (stmt
));
2338 gimple_omp_critical_set_name (copy
, t
);
2341 case GIMPLE_OMP_SECTIONS
:
2342 t
= unshare_expr (gimple_omp_sections_clauses (stmt
));
2343 gimple_omp_sections_set_clauses (copy
, t
);
2344 t
= unshare_expr (gimple_omp_sections_control (stmt
));
2345 gimple_omp_sections_set_control (copy
, t
);
2348 case GIMPLE_OMP_SINGLE
:
2349 case GIMPLE_OMP_TARGET
:
2350 case GIMPLE_OMP_TEAMS
:
2351 case GIMPLE_OMP_SECTION
:
2352 case GIMPLE_OMP_MASTER
:
2353 case GIMPLE_OMP_TASKGROUP
:
2354 case GIMPLE_OMP_ORDERED
:
2356 new_seq
= gimple_seq_copy (gimple_omp_body (stmt
));
2357 gimple_omp_set_body (copy
, new_seq
);
2360 case GIMPLE_TRANSACTION
:
2361 new_seq
= gimple_seq_copy (gimple_transaction_body (stmt
));
2362 gimple_transaction_set_body (copy
, new_seq
);
2365 case GIMPLE_WITH_CLEANUP_EXPR
:
2366 new_seq
= gimple_seq_copy (gimple_wce_cleanup (stmt
));
2367 gimple_wce_set_cleanup (copy
, new_seq
);
2375 /* Make copy of operands. */
2376 for (i
= 0; i
< num_ops
; i
++)
2377 gimple_set_op (copy
, i
, unshare_expr (gimple_op (stmt
, i
)));
2379 if (gimple_has_mem_ops (stmt
))
2381 gimple_set_vdef (copy
, gimple_vdef (stmt
));
2382 gimple_set_vuse (copy
, gimple_vuse (stmt
));
2385 /* Clear out SSA operand vectors on COPY. */
2386 if (gimple_has_ops (stmt
))
2388 gimple_set_use_ops (copy
, NULL
);
2390 /* SSA operands need to be updated. */
2391 gimple_set_modified (copy
, true);
2398 /* Return true if statement S has side-effects. We consider a
2399 statement to have side effects if:
2401 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2402 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2405 gimple_has_side_effects (const_gimple s
)
2407 if (is_gimple_debug (s
))
2410 /* We don't have to scan the arguments to check for
2411 volatile arguments, though, at present, we still
2412 do a scan to check for TREE_SIDE_EFFECTS. */
2413 if (gimple_has_volatile_ops (s
))
2416 if (gimple_code (s
) == GIMPLE_ASM
2417 && gimple_asm_volatile_p (s
))
2420 if (is_gimple_call (s
))
2422 int flags
= gimple_call_flags (s
);
2424 /* An infinite loop is considered a side effect. */
2425 if (!(flags
& (ECF_CONST
| ECF_PURE
))
2426 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
2435 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2436 Return true if S can trap. When INCLUDE_MEM is true, check whether
2437 the memory operations could trap. When INCLUDE_STORES is true and
2438 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2441 gimple_could_trap_p_1 (gimple s
, bool include_mem
, bool include_stores
)
2443 tree t
, div
= NULL_TREE
;
2448 unsigned i
, start
= (is_gimple_assign (s
) && !include_stores
) ? 1 : 0;
2450 for (i
= start
; i
< gimple_num_ops (s
); i
++)
2451 if (tree_could_trap_p (gimple_op (s
, i
)))
2455 switch (gimple_code (s
))
2458 return gimple_asm_volatile_p (s
);
2461 t
= gimple_call_fndecl (s
);
2462 /* Assume that calls to weak functions may trap. */
2463 if (!t
|| !DECL_P (t
) || DECL_WEAK (t
))
2468 t
= gimple_expr_type (s
);
2469 op
= gimple_assign_rhs_code (s
);
2470 if (get_gimple_rhs_class (op
) == GIMPLE_BINARY_RHS
)
2471 div
= gimple_assign_rhs2 (s
);
2472 return (operation_could_trap_p (op
, FLOAT_TYPE_P (t
),
2473 (INTEGRAL_TYPE_P (t
)
2474 && TYPE_OVERFLOW_TRAPS (t
)),
2484 /* Return true if statement S can trap. */
2487 gimple_could_trap_p (gimple s
)
2489 return gimple_could_trap_p_1 (s
, true, true);
2492 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2495 gimple_assign_rhs_could_trap_p (gimple s
)
2497 gcc_assert (is_gimple_assign (s
));
2498 return gimple_could_trap_p_1 (s
, true, false);
2502 /* Print debugging information for gimple stmts generated. */
2505 dump_gimple_statistics (void)
2507 int i
, total_tuples
= 0, total_bytes
= 0;
2509 if (! GATHER_STATISTICS
)
2511 fprintf (stderr
, "No gimple statistics\n");
2515 fprintf (stderr
, "\nGIMPLE statements\n");
2516 fprintf (stderr
, "Kind Stmts Bytes\n");
2517 fprintf (stderr
, "---------------------------------------\n");
2518 for (i
= 0; i
< (int) gimple_alloc_kind_all
; ++i
)
2520 fprintf (stderr
, "%-20s %7d %10d\n", gimple_alloc_kind_names
[i
],
2521 gimple_alloc_counts
[i
], gimple_alloc_sizes
[i
]);
2522 total_tuples
+= gimple_alloc_counts
[i
];
2523 total_bytes
+= gimple_alloc_sizes
[i
];
2525 fprintf (stderr
, "---------------------------------------\n");
2526 fprintf (stderr
, "%-20s %7d %10d\n", "Total", total_tuples
, total_bytes
);
2527 fprintf (stderr
, "---------------------------------------\n");
2531 /* Return the number of operands needed on the RHS of a GIMPLE
2532 assignment for an expression with tree code CODE. */
2535 get_gimple_rhs_num_ops (enum tree_code code
)
2537 enum gimple_rhs_class rhs_class
= get_gimple_rhs_class (code
);
2539 if (rhs_class
== GIMPLE_UNARY_RHS
|| rhs_class
== GIMPLE_SINGLE_RHS
)
2541 else if (rhs_class
== GIMPLE_BINARY_RHS
)
2543 else if (rhs_class
== GIMPLE_TERNARY_RHS
)
2549 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2551 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2552 : ((TYPE) == tcc_binary \
2553 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2554 : ((TYPE) == tcc_constant \
2555 || (TYPE) == tcc_declaration \
2556 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2557 : ((SYM) == TRUTH_AND_EXPR \
2558 || (SYM) == TRUTH_OR_EXPR \
2559 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2560 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2561 : ((SYM) == COND_EXPR \
2562 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2563 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2564 || (SYM) == DOT_PROD_EXPR \
2565 || (SYM) == REALIGN_LOAD_EXPR \
2566 || (SYM) == VEC_COND_EXPR \
2567 || (SYM) == VEC_PERM_EXPR \
2568 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2569 : ((SYM) == CONSTRUCTOR \
2570 || (SYM) == OBJ_TYPE_REF \
2571 || (SYM) == ASSERT_EXPR \
2572 || (SYM) == ADDR_EXPR \
2573 || (SYM) == WITH_SIZE_EXPR \
2574 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2575 : GIMPLE_INVALID_RHS),
2576 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2578 const unsigned char gimple_rhs_class_table
[] = {
2579 #include "all-tree.def"
2583 #undef END_OF_BASE_TREE_CODES
2585 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2587 /* Validation of GIMPLE expressions. */
2589 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2592 is_gimple_lvalue (tree t
)
2594 return (is_gimple_addressable (t
)
2595 || TREE_CODE (t
) == WITH_SIZE_EXPR
2596 /* These are complex lvalues, but don't have addresses, so they
2598 || TREE_CODE (t
) == BIT_FIELD_REF
);
2601 /* Return true if T is a GIMPLE condition. */
2604 is_gimple_condexpr (tree t
)
2606 return (is_gimple_val (t
) || (COMPARISON_CLASS_P (t
)
2607 && !tree_could_throw_p (t
)
2608 && is_gimple_val (TREE_OPERAND (t
, 0))
2609 && is_gimple_val (TREE_OPERAND (t
, 1))));
2612 /* Return true if T is something whose address can be taken. */
2615 is_gimple_addressable (tree t
)
2617 return (is_gimple_id (t
) || handled_component_p (t
)
2618 || TREE_CODE (t
) == MEM_REF
);
2621 /* Return true if T is a valid gimple constant. */
2624 is_gimple_constant (const_tree t
)
2626 switch (TREE_CODE (t
))
2641 /* Return true if T is a gimple address. */
2644 is_gimple_address (const_tree t
)
2648 if (TREE_CODE (t
) != ADDR_EXPR
)
2651 op
= TREE_OPERAND (t
, 0);
2652 while (handled_component_p (op
))
2654 if ((TREE_CODE (op
) == ARRAY_REF
2655 || TREE_CODE (op
) == ARRAY_RANGE_REF
)
2656 && !is_gimple_val (TREE_OPERAND (op
, 1)))
2659 op
= TREE_OPERAND (op
, 0);
2662 if (CONSTANT_CLASS_P (op
) || TREE_CODE (op
) == MEM_REF
)
2665 switch (TREE_CODE (op
))
2680 /* Return true if T is a gimple invariant address. */
2683 is_gimple_invariant_address (const_tree t
)
2687 if (TREE_CODE (t
) != ADDR_EXPR
)
2690 op
= strip_invariant_refs (TREE_OPERAND (t
, 0));
2694 if (TREE_CODE (op
) == MEM_REF
)
2696 const_tree op0
= TREE_OPERAND (op
, 0);
2697 return (TREE_CODE (op0
) == ADDR_EXPR
2698 && (CONSTANT_CLASS_P (TREE_OPERAND (op0
, 0))
2699 || decl_address_invariant_p (TREE_OPERAND (op0
, 0))));
2702 return CONSTANT_CLASS_P (op
) || decl_address_invariant_p (op
);
2705 /* Return true if T is a gimple invariant address at IPA level
2706 (so addresses of variables on stack are not allowed). */
2709 is_gimple_ip_invariant_address (const_tree t
)
2713 if (TREE_CODE (t
) != ADDR_EXPR
)
2716 op
= strip_invariant_refs (TREE_OPERAND (t
, 0));
2720 if (TREE_CODE (op
) == MEM_REF
)
2722 const_tree op0
= TREE_OPERAND (op
, 0);
2723 return (TREE_CODE (op0
) == ADDR_EXPR
2724 && (CONSTANT_CLASS_P (TREE_OPERAND (op0
, 0))
2725 || decl_address_ip_invariant_p (TREE_OPERAND (op0
, 0))));
2728 return CONSTANT_CLASS_P (op
) || decl_address_ip_invariant_p (op
);
2731 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2732 form of function invariant. */
2735 is_gimple_min_invariant (const_tree t
)
2737 if (TREE_CODE (t
) == ADDR_EXPR
)
2738 return is_gimple_invariant_address (t
);
2740 return is_gimple_constant (t
);
2743 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2744 form of gimple minimal invariant. */
2747 is_gimple_ip_invariant (const_tree t
)
2749 if (TREE_CODE (t
) == ADDR_EXPR
)
2750 return is_gimple_ip_invariant_address (t
);
2752 return is_gimple_constant (t
);
2755 /* Return true if T is a variable. */
2758 is_gimple_variable (tree t
)
2760 return (TREE_CODE (t
) == VAR_DECL
2761 || TREE_CODE (t
) == PARM_DECL
2762 || TREE_CODE (t
) == RESULT_DECL
2763 || TREE_CODE (t
) == SSA_NAME
);
2766 /* Return true if T is a GIMPLE identifier (something with an address). */
2769 is_gimple_id (tree t
)
2771 return (is_gimple_variable (t
)
2772 || TREE_CODE (t
) == FUNCTION_DECL
2773 || TREE_CODE (t
) == LABEL_DECL
2774 || TREE_CODE (t
) == CONST_DECL
2775 /* Allow string constants, since they are addressable. */
2776 || TREE_CODE (t
) == STRING_CST
);
2779 /* Return true if OP, an SSA name or a DECL is a virtual operand. */
2782 virtual_operand_p (tree op
)
2784 if (TREE_CODE (op
) == SSA_NAME
)
2786 op
= SSA_NAME_VAR (op
);
2791 if (TREE_CODE (op
) == VAR_DECL
)
2792 return VAR_DECL_IS_VIRTUAL_OPERAND (op
);
2798 /* Return true if T is a non-aggregate register variable. */
2801 is_gimple_reg (tree t
)
2803 if (virtual_operand_p (t
))
2806 if (TREE_CODE (t
) == SSA_NAME
)
2809 if (!is_gimple_variable (t
))
2812 if (!is_gimple_reg_type (TREE_TYPE (t
)))
2815 /* A volatile decl is not acceptable because we can't reuse it as
2816 needed. We need to copy it into a temp first. */
2817 if (TREE_THIS_VOLATILE (t
))
2820 /* We define "registers" as things that can be renamed as needed,
2821 which with our infrastructure does not apply to memory. */
2822 if (needs_to_live_in_memory (t
))
2825 /* Hard register variables are an interesting case. For those that
2826 are call-clobbered, we don't know where all the calls are, since
2827 we don't (want to) take into account which operations will turn
2828 into libcalls at the rtl level. For those that are call-saved,
2829 we don't currently model the fact that calls may in fact change
2830 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2831 level, and so miss variable changes that might imply. All around,
2832 it seems safest to not do too much optimization with these at the
2833 tree level at all. We'll have to rely on the rtl optimizers to
2834 clean this up, as there we've got all the appropriate bits exposed. */
2835 if (TREE_CODE (t
) == VAR_DECL
&& DECL_HARD_REGISTER (t
))
2838 /* Complex and vector values must have been put into SSA-like form.
2839 That is, no assignments to the individual components. */
2840 if (TREE_CODE (TREE_TYPE (t
)) == COMPLEX_TYPE
2841 || TREE_CODE (TREE_TYPE (t
)) == VECTOR_TYPE
)
2842 return DECL_GIMPLE_REG_P (t
);
2848 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2851 is_gimple_val (tree t
)
2853 /* Make loads from volatiles and memory vars explicit. */
2854 if (is_gimple_variable (t
)
2855 && is_gimple_reg_type (TREE_TYPE (t
))
2856 && !is_gimple_reg (t
))
2859 return (is_gimple_variable (t
) || is_gimple_min_invariant (t
));
2862 /* Similarly, but accept hard registers as inputs to asm statements. */
2865 is_gimple_asm_val (tree t
)
2867 if (TREE_CODE (t
) == VAR_DECL
&& DECL_HARD_REGISTER (t
))
2870 return is_gimple_val (t
);
2873 /* Return true if T is a GIMPLE minimal lvalue. */
2876 is_gimple_min_lval (tree t
)
2878 if (!(t
= CONST_CAST_TREE (strip_invariant_refs (t
))))
2880 return (is_gimple_id (t
) || TREE_CODE (t
) == MEM_REF
);
2883 /* Return true if T is a valid function operand of a CALL_EXPR. */
2886 is_gimple_call_addr (tree t
)
2888 return (TREE_CODE (t
) == OBJ_TYPE_REF
|| is_gimple_val (t
));
2891 /* Return true if T is a valid address operand of a MEM_REF. */
2894 is_gimple_mem_ref_addr (tree t
)
2896 return (is_gimple_reg (t
)
2897 || TREE_CODE (t
) == INTEGER_CST
2898 || (TREE_CODE (t
) == ADDR_EXPR
2899 && (CONSTANT_CLASS_P (TREE_OPERAND (t
, 0))
2900 || decl_address_invariant_p (TREE_OPERAND (t
, 0)))));
2904 /* Given a memory reference expression T, return its base address.
2905 The base address of a memory reference expression is the main
2906 object being referenced. For instance, the base address for
2907 'array[i].fld[j]' is 'array'. You can think of this as stripping
2908 away the offset part from a memory address.
2910 This function calls handled_component_p to strip away all the inner
2911 parts of the memory reference until it reaches the base object. */
2914 get_base_address (tree t
)
2916 while (handled_component_p (t
))
2917 t
= TREE_OPERAND (t
, 0);
2919 if ((TREE_CODE (t
) == MEM_REF
2920 || TREE_CODE (t
) == TARGET_MEM_REF
)
2921 && TREE_CODE (TREE_OPERAND (t
, 0)) == ADDR_EXPR
)
2922 t
= TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
2924 /* ??? Either the alias oracle or all callers need to properly deal
2925 with WITH_SIZE_EXPRs before we can look through those. */
2926 if (TREE_CODE (t
) == WITH_SIZE_EXPR
)
2933 recalculate_side_effects (tree t
)
2935 enum tree_code code
= TREE_CODE (t
);
2936 int len
= TREE_OPERAND_LENGTH (t
);
2939 switch (TREE_CODE_CLASS (code
))
2941 case tcc_expression
:
2947 case PREDECREMENT_EXPR
:
2948 case PREINCREMENT_EXPR
:
2949 case POSTDECREMENT_EXPR
:
2950 case POSTINCREMENT_EXPR
:
2951 /* All of these have side-effects, no matter what their
2960 case tcc_comparison
: /* a comparison expression */
2961 case tcc_unary
: /* a unary arithmetic expression */
2962 case tcc_binary
: /* a binary arithmetic expression */
2963 case tcc_reference
: /* a reference */
2964 case tcc_vl_exp
: /* a function call */
2965 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
2966 for (i
= 0; i
< len
; ++i
)
2968 tree op
= TREE_OPERAND (t
, i
);
2969 if (op
&& TREE_SIDE_EFFECTS (op
))
2970 TREE_SIDE_EFFECTS (t
) = 1;
2975 /* No side-effects. */
2983 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2984 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2985 we failed to create one. */
2988 canonicalize_cond_expr_cond (tree t
)
2990 /* Strip conversions around boolean operations. */
2991 if (CONVERT_EXPR_P (t
)
2992 && (truth_value_p (TREE_CODE (TREE_OPERAND (t
, 0)))
2993 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 0)))
2995 t
= TREE_OPERAND (t
, 0);
2997 /* For !x use x == 0. */
2998 if (TREE_CODE (t
) == TRUTH_NOT_EXPR
)
3000 tree top0
= TREE_OPERAND (t
, 0);
3001 t
= build2 (EQ_EXPR
, TREE_TYPE (t
),
3002 top0
, build_int_cst (TREE_TYPE (top0
), 0));
3004 /* For cmp ? 1 : 0 use cmp. */
3005 else if (TREE_CODE (t
) == COND_EXPR
3006 && COMPARISON_CLASS_P (TREE_OPERAND (t
, 0))
3007 && integer_onep (TREE_OPERAND (t
, 1))
3008 && integer_zerop (TREE_OPERAND (t
, 2)))
3010 tree top0
= TREE_OPERAND (t
, 0);
3011 t
= build2 (TREE_CODE (top0
), TREE_TYPE (t
),
3012 TREE_OPERAND (top0
, 0), TREE_OPERAND (top0
, 1));
3014 /* For x ^ y use x != y. */
3015 else if (TREE_CODE (t
) == BIT_XOR_EXPR
)
3016 t
= build2 (NE_EXPR
, TREE_TYPE (t
),
3017 TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1));
3019 if (is_gimple_condexpr (t
))
3025 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3026 the positions marked by the set ARGS_TO_SKIP. */
3029 gimple_call_copy_skip_args (gimple stmt
, bitmap args_to_skip
)
3032 int nargs
= gimple_call_num_args (stmt
);
3034 vargs
.create (nargs
);
3037 for (i
= 0; i
< nargs
; i
++)
3038 if (!bitmap_bit_p (args_to_skip
, i
))
3039 vargs
.quick_push (gimple_call_arg (stmt
, i
));
3041 if (gimple_call_internal_p (stmt
))
3042 new_stmt
= gimple_build_call_internal_vec (gimple_call_internal_fn (stmt
),
3045 new_stmt
= gimple_build_call_vec (gimple_call_fn (stmt
), vargs
);
3047 if (gimple_call_lhs (stmt
))
3048 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
3050 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
3051 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
3053 if (gimple_has_location (stmt
))
3054 gimple_set_location (new_stmt
, gimple_location (stmt
));
3055 gimple_call_copy_flags (new_stmt
, stmt
);
3056 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
3058 gimple_set_modified (new_stmt
, true);
3065 /* Return true if the field decls F1 and F2 are at the same offset.
3067 This is intended to be used on GIMPLE types only. */
3070 gimple_compare_field_offset (tree f1
, tree f2
)
3072 if (DECL_OFFSET_ALIGN (f1
) == DECL_OFFSET_ALIGN (f2
))
3074 tree offset1
= DECL_FIELD_OFFSET (f1
);
3075 tree offset2
= DECL_FIELD_OFFSET (f2
);
3076 return ((offset1
== offset2
3077 /* Once gimplification is done, self-referential offsets are
3078 instantiated as operand #2 of the COMPONENT_REF built for
3079 each access and reset. Therefore, they are not relevant
3080 anymore and fields are interchangeable provided that they
3081 represent the same access. */
3082 || (TREE_CODE (offset1
) == PLACEHOLDER_EXPR
3083 && TREE_CODE (offset2
) == PLACEHOLDER_EXPR
3084 && (DECL_SIZE (f1
) == DECL_SIZE (f2
)
3085 || (TREE_CODE (DECL_SIZE (f1
)) == PLACEHOLDER_EXPR
3086 && TREE_CODE (DECL_SIZE (f2
)) == PLACEHOLDER_EXPR
)
3087 || operand_equal_p (DECL_SIZE (f1
), DECL_SIZE (f2
), 0))
3088 && DECL_ALIGN (f1
) == DECL_ALIGN (f2
))
3089 || operand_equal_p (offset1
, offset2
, 0))
3090 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1
),
3091 DECL_FIELD_BIT_OFFSET (f2
)));
3094 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3095 should be, so handle differing ones specially by decomposing
3096 the offset into a byte and bit offset manually. */
3097 if (host_integerp (DECL_FIELD_OFFSET (f1
), 0)
3098 && host_integerp (DECL_FIELD_OFFSET (f2
), 0))
3100 unsigned HOST_WIDE_INT byte_offset1
, byte_offset2
;
3101 unsigned HOST_WIDE_INT bit_offset1
, bit_offset2
;
3102 bit_offset1
= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1
));
3103 byte_offset1
= (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1
))
3104 + bit_offset1
/ BITS_PER_UNIT
);
3105 bit_offset2
= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2
));
3106 byte_offset2
= (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2
))
3107 + bit_offset2
/ BITS_PER_UNIT
);
3108 if (byte_offset1
!= byte_offset2
)
3110 return bit_offset1
% BITS_PER_UNIT
== bit_offset2
% BITS_PER_UNIT
;
3117 /* Return a type the same as TYPE except unsigned or
3118 signed according to UNSIGNEDP. */
3121 gimple_signed_or_unsigned_type (bool unsignedp
, tree type
)
3125 type1
= TYPE_MAIN_VARIANT (type
);
3126 if (type1
== signed_char_type_node
3127 || type1
== char_type_node
3128 || type1
== unsigned_char_type_node
)
3129 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
3130 if (type1
== integer_type_node
|| type1
== unsigned_type_node
)
3131 return unsignedp
? unsigned_type_node
: integer_type_node
;
3132 if (type1
== short_integer_type_node
|| type1
== short_unsigned_type_node
)
3133 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
3134 if (type1
== long_integer_type_node
|| type1
== long_unsigned_type_node
)
3135 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
3136 if (type1
== long_long_integer_type_node
3137 || type1
== long_long_unsigned_type_node
)
3139 ? long_long_unsigned_type_node
3140 : long_long_integer_type_node
;
3141 if (int128_integer_type_node
&& (type1
== int128_integer_type_node
|| type1
== int128_unsigned_type_node
))
3143 ? int128_unsigned_type_node
3144 : int128_integer_type_node
;
3145 #if HOST_BITS_PER_WIDE_INT >= 64
3146 if (type1
== intTI_type_node
|| type1
== unsigned_intTI_type_node
)
3147 return unsignedp
? unsigned_intTI_type_node
: intTI_type_node
;
3149 if (type1
== intDI_type_node
|| type1
== unsigned_intDI_type_node
)
3150 return unsignedp
? unsigned_intDI_type_node
: intDI_type_node
;
3151 if (type1
== intSI_type_node
|| type1
== unsigned_intSI_type_node
)
3152 return unsignedp
? unsigned_intSI_type_node
: intSI_type_node
;
3153 if (type1
== intHI_type_node
|| type1
== unsigned_intHI_type_node
)
3154 return unsignedp
? unsigned_intHI_type_node
: intHI_type_node
;
3155 if (type1
== intQI_type_node
|| type1
== unsigned_intQI_type_node
)
3156 return unsignedp
? unsigned_intQI_type_node
: intQI_type_node
;
3158 #define GIMPLE_FIXED_TYPES(NAME) \
3159 if (type1 == short_ ## NAME ## _type_node \
3160 || type1 == unsigned_short_ ## NAME ## _type_node) \
3161 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
3162 : short_ ## NAME ## _type_node; \
3163 if (type1 == NAME ## _type_node \
3164 || type1 == unsigned_ ## NAME ## _type_node) \
3165 return unsignedp ? unsigned_ ## NAME ## _type_node \
3166 : NAME ## _type_node; \
3167 if (type1 == long_ ## NAME ## _type_node \
3168 || type1 == unsigned_long_ ## NAME ## _type_node) \
3169 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
3170 : long_ ## NAME ## _type_node; \
3171 if (type1 == long_long_ ## NAME ## _type_node \
3172 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
3173 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
3174 : long_long_ ## NAME ## _type_node;
3176 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
3177 if (type1 == NAME ## _type_node \
3178 || type1 == u ## NAME ## _type_node) \
3179 return unsignedp ? u ## NAME ## _type_node \
3180 : NAME ## _type_node;
3182 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
3183 if (type1 == sat_ ## short_ ## NAME ## _type_node \
3184 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
3185 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
3186 : sat_ ## short_ ## NAME ## _type_node; \
3187 if (type1 == sat_ ## NAME ## _type_node \
3188 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
3189 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
3190 : sat_ ## NAME ## _type_node; \
3191 if (type1 == sat_ ## long_ ## NAME ## _type_node \
3192 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
3193 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
3194 : sat_ ## long_ ## NAME ## _type_node; \
3195 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
3196 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
3197 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
3198 : sat_ ## long_long_ ## NAME ## _type_node;
3200 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
3201 if (type1 == sat_ ## NAME ## _type_node \
3202 || type1 == sat_ ## u ## NAME ## _type_node) \
3203 return unsignedp ? sat_ ## u ## NAME ## _type_node \
3204 : sat_ ## NAME ## _type_node;
3206 GIMPLE_FIXED_TYPES (fract
);
3207 GIMPLE_FIXED_TYPES_SAT (fract
);
3208 GIMPLE_FIXED_TYPES (accum
);
3209 GIMPLE_FIXED_TYPES_SAT (accum
);
3211 GIMPLE_FIXED_MODE_TYPES (qq
);
3212 GIMPLE_FIXED_MODE_TYPES (hq
);
3213 GIMPLE_FIXED_MODE_TYPES (sq
);
3214 GIMPLE_FIXED_MODE_TYPES (dq
);
3215 GIMPLE_FIXED_MODE_TYPES (tq
);
3216 GIMPLE_FIXED_MODE_TYPES_SAT (qq
);
3217 GIMPLE_FIXED_MODE_TYPES_SAT (hq
);
3218 GIMPLE_FIXED_MODE_TYPES_SAT (sq
);
3219 GIMPLE_FIXED_MODE_TYPES_SAT (dq
);
3220 GIMPLE_FIXED_MODE_TYPES_SAT (tq
);
3221 GIMPLE_FIXED_MODE_TYPES (ha
);
3222 GIMPLE_FIXED_MODE_TYPES (sa
);
3223 GIMPLE_FIXED_MODE_TYPES (da
);
3224 GIMPLE_FIXED_MODE_TYPES (ta
);
3225 GIMPLE_FIXED_MODE_TYPES_SAT (ha
);
3226 GIMPLE_FIXED_MODE_TYPES_SAT (sa
);
3227 GIMPLE_FIXED_MODE_TYPES_SAT (da
);
3228 GIMPLE_FIXED_MODE_TYPES_SAT (ta
);
3230 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
3231 the precision; they have precision set to match their range, but
3232 may use a wider mode to match an ABI. If we change modes, we may
3233 wind up with bad conversions. For INTEGER_TYPEs in C, must check
3234 the precision as well, so as to yield correct results for
3235 bit-field types. C++ does not have these separate bit-field
3236 types, and producing a signed or unsigned variant of an
3237 ENUMERAL_TYPE may cause other problems as well. */
3238 if (!INTEGRAL_TYPE_P (type
)
3239 || TYPE_UNSIGNED (type
) == unsignedp
)
3242 #define TYPE_OK(node) \
3243 (TYPE_MODE (type) == TYPE_MODE (node) \
3244 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
3245 if (TYPE_OK (signed_char_type_node
))
3246 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
3247 if (TYPE_OK (integer_type_node
))
3248 return unsignedp
? unsigned_type_node
: integer_type_node
;
3249 if (TYPE_OK (short_integer_type_node
))
3250 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
3251 if (TYPE_OK (long_integer_type_node
))
3252 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
3253 if (TYPE_OK (long_long_integer_type_node
))
3255 ? long_long_unsigned_type_node
3256 : long_long_integer_type_node
);
3257 if (int128_integer_type_node
&& TYPE_OK (int128_integer_type_node
))
3259 ? int128_unsigned_type_node
3260 : int128_integer_type_node
);
3262 #if HOST_BITS_PER_WIDE_INT >= 64
3263 if (TYPE_OK (intTI_type_node
))
3264 return unsignedp
? unsigned_intTI_type_node
: intTI_type_node
;
3266 if (TYPE_OK (intDI_type_node
))
3267 return unsignedp
? unsigned_intDI_type_node
: intDI_type_node
;
3268 if (TYPE_OK (intSI_type_node
))
3269 return unsignedp
? unsigned_intSI_type_node
: intSI_type_node
;
3270 if (TYPE_OK (intHI_type_node
))
3271 return unsignedp
? unsigned_intHI_type_node
: intHI_type_node
;
3272 if (TYPE_OK (intQI_type_node
))
3273 return unsignedp
? unsigned_intQI_type_node
: intQI_type_node
;
3275 #undef GIMPLE_FIXED_TYPES
3276 #undef GIMPLE_FIXED_MODE_TYPES
3277 #undef GIMPLE_FIXED_TYPES_SAT
3278 #undef GIMPLE_FIXED_MODE_TYPES_SAT
3281 return build_nonstandard_integer_type (TYPE_PRECISION (type
), unsignedp
);
3285 /* Return an unsigned type the same as TYPE in other respects. */
3288 gimple_unsigned_type (tree type
)
3290 return gimple_signed_or_unsigned_type (true, type
);
3294 /* Return a signed type the same as TYPE in other respects. */
3297 gimple_signed_type (tree type
)
3299 return gimple_signed_or_unsigned_type (false, type
);
3303 /* Return the typed-based alias set for T, which may be an expression
3304 or a type. Return -1 if we don't do anything special. */
3307 gimple_get_alias_set (tree t
)
3311 /* Permit type-punning when accessing a union, provided the access
3312 is directly through the union. For example, this code does not
3313 permit taking the address of a union member and then storing
3314 through it. Even the type-punning allowed here is a GCC
3315 extension, albeit a common and useful one; the C standard says
3316 that such accesses have implementation-defined behavior. */
3318 TREE_CODE (u
) == COMPONENT_REF
|| TREE_CODE (u
) == ARRAY_REF
;
3319 u
= TREE_OPERAND (u
, 0))
3320 if (TREE_CODE (u
) == COMPONENT_REF
3321 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u
, 0))) == UNION_TYPE
)
3324 /* That's all the expressions we handle specially. */
3328 /* For convenience, follow the C standard when dealing with
3329 character types. Any object may be accessed via an lvalue that
3330 has character type. */
3331 if (t
== char_type_node
3332 || t
== signed_char_type_node
3333 || t
== unsigned_char_type_node
)
3336 /* Allow aliasing between signed and unsigned variants of the same
3337 type. We treat the signed variant as canonical. */
3338 if (TREE_CODE (t
) == INTEGER_TYPE
&& TYPE_UNSIGNED (t
))
3340 tree t1
= gimple_signed_type (t
);
3342 /* t1 == t can happen for boolean nodes which are always unsigned. */
3344 return get_alias_set (t1
);
3351 /* From a tree operand OP return the base of a load or store operation
3352 or NULL_TREE if OP is not a load or a store. */
3355 get_base_loadstore (tree op
)
3357 while (handled_component_p (op
))
3358 op
= TREE_OPERAND (op
, 0);
3360 || INDIRECT_REF_P (op
)
3361 || TREE_CODE (op
) == MEM_REF
3362 || TREE_CODE (op
) == TARGET_MEM_REF
)
3367 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3368 VISIT_ADDR if non-NULL on loads, store and address-taken operands
3369 passing the STMT, the base of the operand and DATA to it. The base
3370 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3371 or the argument of an address expression.
3372 Returns the results of these callbacks or'ed. */
3375 walk_stmt_load_store_addr_ops (gimple stmt
, void *data
,
3376 bool (*visit_load
)(gimple
, tree
, void *),
3377 bool (*visit_store
)(gimple
, tree
, void *),
3378 bool (*visit_addr
)(gimple
, tree
, void *))
3382 if (gimple_assign_single_p (stmt
))
3387 lhs
= get_base_loadstore (gimple_assign_lhs (stmt
));
3389 ret
|= visit_store (stmt
, lhs
, data
);
3391 rhs
= gimple_assign_rhs1 (stmt
);
3392 while (handled_component_p (rhs
))
3393 rhs
= TREE_OPERAND (rhs
, 0);
3396 if (TREE_CODE (rhs
) == ADDR_EXPR
)
3397 ret
|= visit_addr (stmt
, TREE_OPERAND (rhs
, 0), data
);
3398 else if (TREE_CODE (rhs
) == TARGET_MEM_REF
3399 && TREE_CODE (TMR_BASE (rhs
)) == ADDR_EXPR
)
3400 ret
|= visit_addr (stmt
, TREE_OPERAND (TMR_BASE (rhs
), 0), data
);
3401 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
3402 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs
)) == ADDR_EXPR
)
3403 ret
|= visit_addr (stmt
, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs
),
3405 else if (TREE_CODE (rhs
) == CONSTRUCTOR
)
3410 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), ix
, val
)
3411 if (TREE_CODE (val
) == ADDR_EXPR
)
3412 ret
|= visit_addr (stmt
, TREE_OPERAND (val
, 0), data
);
3413 else if (TREE_CODE (val
) == OBJ_TYPE_REF
3414 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val
)) == ADDR_EXPR
)
3415 ret
|= visit_addr (stmt
,
3416 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val
),
3419 lhs
= gimple_assign_lhs (stmt
);
3420 if (TREE_CODE (lhs
) == TARGET_MEM_REF
3421 && TREE_CODE (TMR_BASE (lhs
)) == ADDR_EXPR
)
3422 ret
|= visit_addr (stmt
, TREE_OPERAND (TMR_BASE (lhs
), 0), data
);
3426 rhs
= get_base_loadstore (rhs
);
3428 ret
|= visit_load (stmt
, rhs
, data
);
3432 && (is_gimple_assign (stmt
)
3433 || gimple_code (stmt
) == GIMPLE_COND
))
3435 for (i
= 0; i
< gimple_num_ops (stmt
); ++i
)
3437 tree op
= gimple_op (stmt
, i
);
3438 if (op
== NULL_TREE
)
3440 else if (TREE_CODE (op
) == ADDR_EXPR
)
3441 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
3442 /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
3443 tree with two operands. */
3444 else if (i
== 1 && COMPARISON_CLASS_P (op
))
3446 if (TREE_CODE (TREE_OPERAND (op
, 0)) == ADDR_EXPR
)
3447 ret
|= visit_addr (stmt
, TREE_OPERAND (TREE_OPERAND (op
, 0),
3449 if (TREE_CODE (TREE_OPERAND (op
, 1)) == ADDR_EXPR
)
3450 ret
|= visit_addr (stmt
, TREE_OPERAND (TREE_OPERAND (op
, 1),
3455 else if (is_gimple_call (stmt
))
3459 tree lhs
= gimple_call_lhs (stmt
);
3462 lhs
= get_base_loadstore (lhs
);
3464 ret
|= visit_store (stmt
, lhs
, data
);
3467 if (visit_load
|| visit_addr
)
3468 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3470 tree rhs
= gimple_call_arg (stmt
, i
);
3472 && TREE_CODE (rhs
) == ADDR_EXPR
)
3473 ret
|= visit_addr (stmt
, TREE_OPERAND (rhs
, 0), data
);
3474 else if (visit_load
)
3476 rhs
= get_base_loadstore (rhs
);
3478 ret
|= visit_load (stmt
, rhs
, data
);
3482 && gimple_call_chain (stmt
)
3483 && TREE_CODE (gimple_call_chain (stmt
)) == ADDR_EXPR
)
3484 ret
|= visit_addr (stmt
, TREE_OPERAND (gimple_call_chain (stmt
), 0),
3487 && gimple_call_return_slot_opt_p (stmt
)
3488 && gimple_call_lhs (stmt
) != NULL_TREE
3489 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt
))))
3490 ret
|= visit_addr (stmt
, gimple_call_lhs (stmt
), data
);
3492 else if (gimple_code (stmt
) == GIMPLE_ASM
)
3495 const char *constraint
;
3496 const char **oconstraints
;
3497 bool allows_mem
, allows_reg
, is_inout
;
3498 noutputs
= gimple_asm_noutputs (stmt
);
3499 oconstraints
= XALLOCAVEC (const char *, noutputs
);
3500 if (visit_store
|| visit_addr
)
3501 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
3503 tree link
= gimple_asm_output_op (stmt
, i
);
3504 tree op
= get_base_loadstore (TREE_VALUE (link
));
3505 if (op
&& visit_store
)
3506 ret
|= visit_store (stmt
, op
, data
);
3509 constraint
= TREE_STRING_POINTER
3510 (TREE_VALUE (TREE_PURPOSE (link
)));
3511 oconstraints
[i
] = constraint
;
3512 parse_output_constraint (&constraint
, i
, 0, 0, &allows_mem
,
3513 &allows_reg
, &is_inout
);
3514 if (op
&& !allows_reg
&& allows_mem
)
3515 ret
|= visit_addr (stmt
, op
, data
);
3518 if (visit_load
|| visit_addr
)
3519 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
3521 tree link
= gimple_asm_input_op (stmt
, i
);
3522 tree op
= TREE_VALUE (link
);
3524 && TREE_CODE (op
) == ADDR_EXPR
)
3525 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
3526 else if (visit_load
|| visit_addr
)
3528 op
= get_base_loadstore (op
);
3532 ret
|= visit_load (stmt
, op
, data
);
3535 constraint
= TREE_STRING_POINTER
3536 (TREE_VALUE (TREE_PURPOSE (link
)));
3537 parse_input_constraint (&constraint
, 0, 0, noutputs
,
3539 &allows_mem
, &allows_reg
);
3540 if (!allows_reg
&& allows_mem
)
3541 ret
|= visit_addr (stmt
, op
, data
);
3547 else if (gimple_code (stmt
) == GIMPLE_RETURN
)
3549 tree op
= gimple_return_retval (stmt
);
3553 && TREE_CODE (op
) == ADDR_EXPR
)
3554 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
3555 else if (visit_load
)
3557 op
= get_base_loadstore (op
);
3559 ret
|= visit_load (stmt
, op
, data
);
3564 && gimple_code (stmt
) == GIMPLE_PHI
)
3566 for (i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
3568 tree op
= gimple_phi_arg_def (stmt
, i
);
3569 if (TREE_CODE (op
) == ADDR_EXPR
)
3570 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
3574 && gimple_code (stmt
) == GIMPLE_GOTO
)
3576 tree op
= gimple_goto_dest (stmt
);
3577 if (TREE_CODE (op
) == ADDR_EXPR
)
3578 ret
|= visit_addr (stmt
, TREE_OPERAND (op
, 0), data
);
3584 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
3585 should make a faster clone for this case. */
3588 walk_stmt_load_store_ops (gimple stmt
, void *data
,
3589 bool (*visit_load
)(gimple
, tree
, void *),
3590 bool (*visit_store
)(gimple
, tree
, void *))
3592 return walk_stmt_load_store_addr_ops (stmt
, data
,
3593 visit_load
, visit_store
, NULL
);
3596 /* Helper for gimple_ior_addresses_taken_1. */
3599 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED
,
3600 tree addr
, void *data
)
3602 bitmap addresses_taken
= (bitmap
)data
;
3603 addr
= get_base_address (addr
);
3607 bitmap_set_bit (addresses_taken
, DECL_UID (addr
));
3613 /* Set the bit for the uid of all decls that have their address taken
3614 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
3615 were any in this stmt. */
3618 gimple_ior_addresses_taken (bitmap addresses_taken
, gimple stmt
)
3620 return walk_stmt_load_store_addr_ops (stmt
, addresses_taken
, NULL
, NULL
,
3621 gimple_ior_addresses_taken_1
);
3625 /* Return a printable name for symbol DECL. */
3628 gimple_decl_printable_name (tree decl
, int verbosity
)
3630 if (!DECL_NAME (decl
))
3633 if (DECL_ASSEMBLER_NAME_SET_P (decl
))
3635 const char *str
, *mangled_str
;
3636 int dmgl_opts
= DMGL_NO_OPTS
;
3640 dmgl_opts
= DMGL_VERBOSE
3644 if (TREE_CODE (decl
) == FUNCTION_DECL
)
3645 dmgl_opts
|= DMGL_PARAMS
;
3648 mangled_str
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl
));
3649 str
= cplus_demangle_v3 (mangled_str
, dmgl_opts
);
3650 return (str
) ? str
: mangled_str
;
3653 return IDENTIFIER_POINTER (DECL_NAME (decl
));
3656 /* Return TRUE iff stmt is a call to a built-in function. */
3659 is_gimple_builtin_call (gimple stmt
)
3663 if (is_gimple_call (stmt
)
3664 && (callee
= gimple_call_fndecl (stmt
))
3665 && is_builtin_fn (callee
)
3666 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
3672 /* Return true when STMTs arguments match those of FNDECL. */
3675 validate_call (gimple stmt
, tree fndecl
)
3677 tree targs
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
3678 unsigned nargs
= gimple_call_num_args (stmt
);
3679 for (unsigned i
= 0; i
< nargs
; ++i
)
3681 /* Variadic args follow. */
3684 tree arg
= gimple_call_arg (stmt
, i
);
3685 if (INTEGRAL_TYPE_P (TREE_TYPE (arg
))
3686 && INTEGRAL_TYPE_P (TREE_VALUE (targs
)))
3688 else if (POINTER_TYPE_P (TREE_TYPE (arg
))
3689 && POINTER_TYPE_P (TREE_VALUE (targs
)))
3691 else if (TREE_CODE (TREE_TYPE (arg
))
3692 != TREE_CODE (TREE_VALUE (targs
)))
3694 targs
= TREE_CHAIN (targs
);
3696 if (targs
&& !VOID_TYPE_P (TREE_VALUE (targs
)))
3701 /* Return true when STMT is builtins call to CLASS. */
3704 gimple_call_builtin_p (gimple stmt
, enum built_in_class klass
)
3707 if (is_gimple_call (stmt
)
3708 && (fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
3709 && DECL_BUILT_IN_CLASS (fndecl
) == klass
)
3710 return validate_call (stmt
, fndecl
);
3714 /* Return true when STMT is builtins call to CODE of CLASS. */
3717 gimple_call_builtin_p (gimple stmt
, enum built_in_function code
)
3720 if (is_gimple_call (stmt
)
3721 && (fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
3722 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
3723 && DECL_FUNCTION_CODE (fndecl
) == code
)
3724 return validate_call (stmt
, fndecl
);
3728 /* Return true if STMT clobbers memory. STMT is required to be a
3732 gimple_asm_clobbers_memory_p (const_gimple stmt
)
3736 for (i
= 0; i
< gimple_asm_nclobbers (stmt
); i
++)
3738 tree op
= gimple_asm_clobber_op (stmt
, i
);
3739 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op
)), "memory") == 0)
3747 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
3748 useless type conversion, otherwise return false.
3750 This function implicitly defines the middle-end type system. With
3751 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
3752 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
3753 the following invariants shall be fulfilled:
3755 1) useless_type_conversion_p is transitive.
3756 If a < b and b < c then a < c.
3758 2) useless_type_conversion_p is not symmetric.
3759 From a < b does not follow a > b.
3761 3) Types define the available set of operations applicable to values.
3762 A type conversion is useless if the operations for the target type
3763 is a subset of the operations for the source type. For example
3764 casts to void* are useless, casts from void* are not (void* can't
3765 be dereferenced or offsetted, but copied, hence its set of operations
3766 is a strict subset of that of all other data pointer types). Casts
3767 to const T* are useless (can't be written to), casts from const T*
3771 useless_type_conversion_p (tree outer_type
, tree inner_type
)
3773 /* Do the following before stripping toplevel qualifiers. */
3774 if (POINTER_TYPE_P (inner_type
)
3775 && POINTER_TYPE_P (outer_type
))
3777 /* Do not lose casts between pointers to different address spaces. */
3778 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type
))
3779 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type
)))
3783 /* From now on qualifiers on value types do not matter. */
3784 inner_type
= TYPE_MAIN_VARIANT (inner_type
);
3785 outer_type
= TYPE_MAIN_VARIANT (outer_type
);
3787 if (inner_type
== outer_type
)
3790 /* If we know the canonical types, compare them. */
3791 if (TYPE_CANONICAL (inner_type
)
3792 && TYPE_CANONICAL (inner_type
) == TYPE_CANONICAL (outer_type
))
3795 /* Changes in machine mode are never useless conversions unless we
3796 deal with aggregate types in which case we defer to later checks. */
3797 if (TYPE_MODE (inner_type
) != TYPE_MODE (outer_type
)
3798 && !AGGREGATE_TYPE_P (inner_type
))
3801 /* If both the inner and outer types are integral types, then the
3802 conversion is not necessary if they have the same mode and
3803 signedness and precision, and both or neither are boolean. */
3804 if (INTEGRAL_TYPE_P (inner_type
)
3805 && INTEGRAL_TYPE_P (outer_type
))
3807 /* Preserve changes in signedness or precision. */
3808 if (TYPE_UNSIGNED (inner_type
) != TYPE_UNSIGNED (outer_type
)
3809 || TYPE_PRECISION (inner_type
) != TYPE_PRECISION (outer_type
))
3812 /* Preserve conversions to/from BOOLEAN_TYPE if types are not
3813 of precision one. */
3814 if (((TREE_CODE (inner_type
) == BOOLEAN_TYPE
)
3815 != (TREE_CODE (outer_type
) == BOOLEAN_TYPE
))
3816 && TYPE_PRECISION (outer_type
) != 1)
3819 /* We don't need to preserve changes in the types minimum or
3820 maximum value in general as these do not generate code
3821 unless the types precisions are different. */
3825 /* Scalar floating point types with the same mode are compatible. */
3826 else if (SCALAR_FLOAT_TYPE_P (inner_type
)
3827 && SCALAR_FLOAT_TYPE_P (outer_type
))
3830 /* Fixed point types with the same mode are compatible. */
3831 else if (FIXED_POINT_TYPE_P (inner_type
)
3832 && FIXED_POINT_TYPE_P (outer_type
))
3835 /* We need to take special care recursing to pointed-to types. */
3836 else if (POINTER_TYPE_P (inner_type
)
3837 && POINTER_TYPE_P (outer_type
))
3839 /* Do not lose casts to function pointer types. */
3840 if ((TREE_CODE (TREE_TYPE (outer_type
)) == FUNCTION_TYPE
3841 || TREE_CODE (TREE_TYPE (outer_type
)) == METHOD_TYPE
)
3842 && !(TREE_CODE (TREE_TYPE (inner_type
)) == FUNCTION_TYPE
3843 || TREE_CODE (TREE_TYPE (inner_type
)) == METHOD_TYPE
))
3846 /* We do not care for const qualification of the pointed-to types
3847 as const qualification has no semantic value to the middle-end. */
3849 /* Otherwise pointers/references are equivalent. */
3853 /* Recurse for complex types. */
3854 else if (TREE_CODE (inner_type
) == COMPLEX_TYPE
3855 && TREE_CODE (outer_type
) == COMPLEX_TYPE
)
3856 return useless_type_conversion_p (TREE_TYPE (outer_type
),
3857 TREE_TYPE (inner_type
));
3859 /* Recurse for vector types with the same number of subparts. */
3860 else if (TREE_CODE (inner_type
) == VECTOR_TYPE
3861 && TREE_CODE (outer_type
) == VECTOR_TYPE
3862 && TYPE_PRECISION (inner_type
) == TYPE_PRECISION (outer_type
))
3863 return useless_type_conversion_p (TREE_TYPE (outer_type
),
3864 TREE_TYPE (inner_type
));
3866 else if (TREE_CODE (inner_type
) == ARRAY_TYPE
3867 && TREE_CODE (outer_type
) == ARRAY_TYPE
)
3869 /* Preserve string attributes. */
3870 if (TYPE_STRING_FLAG (inner_type
) != TYPE_STRING_FLAG (outer_type
))
3873 /* Conversions from array types with unknown extent to
3874 array types with known extent are not useless. */
3875 if (!TYPE_DOMAIN (inner_type
)
3876 && TYPE_DOMAIN (outer_type
))
3879 /* Nor are conversions from array types with non-constant size to
3880 array types with constant size or to different size. */
3881 if (TYPE_SIZE (outer_type
)
3882 && TREE_CODE (TYPE_SIZE (outer_type
)) == INTEGER_CST
3883 && (!TYPE_SIZE (inner_type
)
3884 || TREE_CODE (TYPE_SIZE (inner_type
)) != INTEGER_CST
3885 || !tree_int_cst_equal (TYPE_SIZE (outer_type
),
3886 TYPE_SIZE (inner_type
))))
3889 /* Check conversions between arrays with partially known extents.
3890 If the array min/max values are constant they have to match.
3891 Otherwise allow conversions to unknown and variable extents.
3892 In particular this declares conversions that may change the
3893 mode to BLKmode as useless. */
3894 if (TYPE_DOMAIN (inner_type
)
3895 && TYPE_DOMAIN (outer_type
)
3896 && TYPE_DOMAIN (inner_type
) != TYPE_DOMAIN (outer_type
))
3898 tree inner_min
= TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type
));
3899 tree outer_min
= TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type
));
3900 tree inner_max
= TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type
));
3901 tree outer_max
= TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type
));
3903 /* After gimplification a variable min/max value carries no
3904 additional information compared to a NULL value. All that
3905 matters has been lowered to be part of the IL. */
3906 if (inner_min
&& TREE_CODE (inner_min
) != INTEGER_CST
)
3907 inner_min
= NULL_TREE
;
3908 if (outer_min
&& TREE_CODE (outer_min
) != INTEGER_CST
)
3909 outer_min
= NULL_TREE
;
3910 if (inner_max
&& TREE_CODE (inner_max
) != INTEGER_CST
)
3911 inner_max
= NULL_TREE
;
3912 if (outer_max
&& TREE_CODE (outer_max
) != INTEGER_CST
)
3913 outer_max
= NULL_TREE
;
3915 /* Conversions NULL / variable <- cst are useless, but not
3916 the other way around. */
3919 || !tree_int_cst_equal (inner_min
, outer_min
)))
3923 || !tree_int_cst_equal (inner_max
, outer_max
)))
3927 /* Recurse on the element check. */
3928 return useless_type_conversion_p (TREE_TYPE (outer_type
),
3929 TREE_TYPE (inner_type
));
3932 else if ((TREE_CODE (inner_type
) == FUNCTION_TYPE
3933 || TREE_CODE (inner_type
) == METHOD_TYPE
)
3934 && TREE_CODE (inner_type
) == TREE_CODE (outer_type
))
3936 tree outer_parm
, inner_parm
;
3938 /* If the return types are not compatible bail out. */
3939 if (!useless_type_conversion_p (TREE_TYPE (outer_type
),
3940 TREE_TYPE (inner_type
)))
3943 /* Method types should belong to a compatible base class. */
3944 if (TREE_CODE (inner_type
) == METHOD_TYPE
3945 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type
),
3946 TYPE_METHOD_BASETYPE (inner_type
)))
3949 /* A conversion to an unprototyped argument list is ok. */
3950 if (!prototype_p (outer_type
))
3953 /* If the unqualified argument types are compatible the conversion
3955 if (TYPE_ARG_TYPES (outer_type
) == TYPE_ARG_TYPES (inner_type
))
3958 for (outer_parm
= TYPE_ARG_TYPES (outer_type
),
3959 inner_parm
= TYPE_ARG_TYPES (inner_type
);
3960 outer_parm
&& inner_parm
;
3961 outer_parm
= TREE_CHAIN (outer_parm
),
3962 inner_parm
= TREE_CHAIN (inner_parm
))
3963 if (!useless_type_conversion_p
3964 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm
)),
3965 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm
))))
3968 /* If there is a mismatch in the number of arguments the functions
3969 are not compatible. */
3970 if (outer_parm
|| inner_parm
)
3973 /* Defer to the target if necessary. */
3974 if (TYPE_ATTRIBUTES (inner_type
) || TYPE_ATTRIBUTES (outer_type
))
3975 return comp_type_attributes (outer_type
, inner_type
) != 0;
3980 /* For aggregates we rely on TYPE_CANONICAL exclusively and require
3981 explicit conversions for types involving to be structurally
3983 else if (AGGREGATE_TYPE_P (inner_type
)
3984 && TREE_CODE (inner_type
) == TREE_CODE (outer_type
))
3990 /* Return true if a conversion from either type of TYPE1 and TYPE2
3991 to the other is not required. Otherwise return false. */
3994 types_compatible_p (tree type1
, tree type2
)
3996 return (type1
== type2
3997 || (useless_type_conversion_p (type1
, type2
)
3998 && useless_type_conversion_p (type2
, type1
)));
4001 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
4004 dump_decl_set (FILE *file
, bitmap set
)
4011 fprintf (file
, "{ ");
4013 EXECUTE_IF_SET_IN_BITMAP (set
, 0, i
, bi
)
4015 fprintf (file
, "D.%u", i
);
4016 fprintf (file
, " ");
4019 fprintf (file
, "}");
4022 fprintf (file
, "NIL");
4025 /* Given SSA_NAMEs NAME1 and NAME2, return true if they are candidates for
4026 coalescing together, false otherwise.
4028 This must stay consistent with var_map_base_init in tree-ssa-live.c. */
4031 gimple_can_coalesce_p (tree name1
, tree name2
)
4033 /* First check the SSA_NAME's associated DECL. We only want to
4034 coalesce if they have the same DECL or both have no associated DECL. */
4035 tree var1
= SSA_NAME_VAR (name1
);
4036 tree var2
= SSA_NAME_VAR (name2
);
4037 var1
= (var1
&& (!VAR_P (var1
) || !DECL_IGNORED_P (var1
))) ? var1
: NULL_TREE
;
4038 var2
= (var2
&& (!VAR_P (var2
) || !DECL_IGNORED_P (var2
))) ? var2
: NULL_TREE
;
4042 /* Now check the types. If the types are the same, then we should
4043 try to coalesce V1 and V2. */
4044 tree t1
= TREE_TYPE (name1
);
4045 tree t2
= TREE_TYPE (name2
);
4049 /* If the types are not the same, check for a canonical type match. This
4050 (for example) allows coalescing when the types are fundamentally the
4051 same, but just have different names.
4053 Note pointer types with different address spaces may have the same
4054 canonical type. Those are rejected for coalescing by the
4055 types_compatible_p check. */
4056 if (TYPE_CANONICAL (t1
)
4057 && TYPE_CANONICAL (t1
) == TYPE_CANONICAL (t2
)
4058 && types_compatible_p (t1
, t2
))
4064 /* Return true when CALL is a call stmt that definitely doesn't
4065 free any memory or makes it unavailable otherwise. */
4067 nonfreeing_call_p (gimple call
)
4069 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
4070 && gimple_call_flags (call
) & ECF_LEAF
)
4071 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call
)))
4073 /* Just in case these become ECF_LEAF in the future. */
4075 case BUILT_IN_TM_FREE
:
4076 case BUILT_IN_REALLOC
:
4077 case BUILT_IN_STACK_RESTORE
:
4086 /* Create a new VAR_DECL and copy information from VAR to it. */
4089 copy_var_decl (tree var
, tree name
, tree type
)
4091 tree copy
= build_decl (DECL_SOURCE_LOCATION (var
), VAR_DECL
, name
, type
);
4093 TREE_ADDRESSABLE (copy
) = TREE_ADDRESSABLE (var
);
4094 TREE_THIS_VOLATILE (copy
) = TREE_THIS_VOLATILE (var
);
4095 DECL_GIMPLE_REG_P (copy
) = DECL_GIMPLE_REG_P (var
);
4096 DECL_ARTIFICIAL (copy
) = DECL_ARTIFICIAL (var
);
4097 DECL_IGNORED_P (copy
) = DECL_IGNORED_P (var
);
4098 DECL_CONTEXT (copy
) = DECL_CONTEXT (var
);
4099 TREE_NO_WARNING (copy
) = TREE_NO_WARNING (var
);
4100 TREE_USED (copy
) = 1;
4101 DECL_SEEN_IN_BIND_EXPR_P (copy
) = 1;
4102 DECL_ATTRIBUTES (copy
) = DECL_ATTRIBUTES (var
);