/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}

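/* A worked example of the size computation above (a sketch, not part
   of the original file): a GIMPLE_ASSIGN such as `a = b + c' carries 3
   operands.  Its base structure already ends in a 1-element trailing
   operand array, so only the extra slots are added:

     size = gimple_size (GIMPLE_ASSIGN)    (base struct, includes op[1])
	    + sizeof (tree) * (3 - 1);     (two additional operand slots)  */
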
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}

/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
			    unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}

/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

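/* Usage sketch (illustrative only; RETVAL and SEQ are caller-supplied):

     gimple ret_stmt = gimple_build_return (retval);
     gimple_seq_add_stmt (&seq, ret_stmt);  */
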
/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

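/* Usage sketch (illustrative only; FOO_DECL is a FUNCTION_DECL and A,
   B, TMP are valid gimple operands).  Builds `tmp = foo (a, b)':

     gimple call = gimple_build_call (foo_decl, 2, a, b);
     gimple_call_set_lhs (call, tmp);  */
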
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}

/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}

/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
					    PASS_MEM_STAT);
}

/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
				   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

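/* Usage sketch (illustrative only; X, Y, Z are caller-supplied gimple
   values).  Builds `x = y + z' directly from flat operands, through the
   gimple_build_assign_with_ops wrapper that gimple.h layers over the
   _stat function above, with no PLUS_EXPR tree built first:

     gimple stmt = gimple_build_assign_with_ops (PLUS_EXPR, x, y, z);  */
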
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);

  return gimple_seq_last_stmt (*seq_p);
}

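/* Usage sketch (illustrative only; DST and SRC may be ungimplified
   trees, and this must run inside a gimplification context):

     gimple_seq seq = NULL;
     gimple last = gimplify_assign (dst, src, &seq);  */
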
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}

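/* Usage sketch (illustrative only; A and B are gimple values, THEN_LAB
   and ELSE_LAB are LABEL_DECLs).  Builds
   `if (a < b) goto then_lab; else goto else_lab;':

     gimple cond = gimple_build_cond (LT_EXPR, a, b, then_lab, else_lab);  */
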
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
			       tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}

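/* For example, given COND `!x_1' the canonicalization above produces
   *CODE_P = EQ_EXPR, *LHS_P = x_1, *RHS_P = 0 (i.e. `x_1 == 0'), and a
   bare value `x_1' becomes `x_1 != 0'.  */
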
/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}

/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}

/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUT is the number of register inputs.
   NOUTPUT is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
		    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   NINPUT is the number of register inputs.
   NOUTPUT is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
		      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  VEC_length (tree, inputs),
			  VEC_length (tree, outputs),
			  VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}

/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}

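/* Usage sketch (illustrative only; INDEX is the selector and DEF_LAB,
   CASE1, CASE2 are CASE_LABEL_EXPRs, e.g. built with build_case_label):

     gimple s = gimple_build_switch (2, index, def_lab, case1, case2);  */
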
/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is sequence of statements inside the for loop.
   CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}

/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is sequence of statements which are executed in parallel.
   CLAUSES, are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}

/* Build a GIMPLE_OMP_TASK statement.

   BODY is sequence of statements which are executed by the explicit task.
   CLAUSES, are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}

/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}

/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */

/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}

/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}

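/* Usage sketch: statements are typically accumulated like this, with
   the sequence allocated lazily on the first addition:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_nop ());  */
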
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}

/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}

/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}

/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}

/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}

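/* Usage sketch (illustrative only): a statement callback that aborts
   the walk at the first GIMPLE_CALL.  Any non-NULL tree returned from
   the callback stops the walk and lands in wi.callback_result:

     static tree
     stop_at_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		   struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
	 {
	   *handled_ops_p = true;
	   return error_mark_node;
	 }
       return NULL_TREE;
     }

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     walk_gimple_seq (seq, stop_at_call, NULL, &wi);  */
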
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
			       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really a LHS, we need a lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}

/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || !gimple_assign_single_p (stmt);
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS has more than 1 operand, it is not appropriate
	     for the memory.  */
	  wi->val_only = !(is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
			   || TREE_CODE (gimple_assign_rhs1 (stmt))
			      == CONSTRUCTOR)
			 || !gimple_assign_single_p (stmt);
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}

/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq (gimple_eh_else_n_body (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      ret = walk_gimple_seq (gimple_eh_else_e_body (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq (gimple_transaction_body (stmt),
			     callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}

/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}

/* Return the body of GIMPLE statements for function FN.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

/* Return the "fn spec" string for call STMT.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}

/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}

/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}

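/* For example, a hypothetical fnspec "1R" decodes as: the call returns
   its first argument (ERF_RETURNS_ARG | 0 from the switch above), and
   gimple_call_arg_flags (stmt, 0) reads the 'R' to yield
   EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE.  */
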
/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && is_gimple_val (gimple_op (gs, 1)));
}

/* Return true if GS is a SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
	  && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
	      || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
	  && gimple_assign_rhs1 (gs) != error_mark_node
	  && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
	      == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}

/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}

/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}

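/* Usage sketch (illustrative only; A and B are caller-supplied gimple
   values).  Rewrites the assignment at GSI into `lhs = a + b' through
   the two-operand gimple_assign_set_rhs_with_ops wrapper from gimple.h:

     gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, b);
     update_stmt (gsi_stmt (gsi));  */
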
/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}

/* Set the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */

void
gimple_set_lhs (gimple stmt, tree lhs)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    gimple_assign_set_lhs (stmt, lhs);
  else if (code == GIMPLE_CALL)
    gimple_call_set_lhs (stmt, lhs);
  else
    gcc_unreachable ();
}

/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated the
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_lhs (gimple stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}

/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
        {
        case GIMPLE_BIND:
          new_seq = gimple_seq_copy (gimple_bind_body (stmt));
          gimple_bind_set_body (copy, new_seq);
          gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
          gimple_bind_set_block (copy, gimple_bind_block (stmt));
          break;

        case GIMPLE_CATCH:
          new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
          gimple_catch_set_handler (copy, new_seq);
          t = unshare_expr (gimple_catch_types (stmt));
          gimple_catch_set_types (copy, t);
          break;

        case GIMPLE_EH_FILTER:
          new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
          gimple_eh_filter_set_failure (copy, new_seq);
          t = unshare_expr (gimple_eh_filter_types (stmt));
          gimple_eh_filter_set_types (copy, t);
          break;

        case GIMPLE_EH_ELSE:
          new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
          gimple_eh_else_set_n_body (copy, new_seq);
          new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
          gimple_eh_else_set_e_body (copy, new_seq);
          break;

        case GIMPLE_TRY:
          new_seq = gimple_seq_copy (gimple_try_eval (stmt));
          gimple_try_set_eval (copy, new_seq);
          new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
          gimple_try_set_cleanup (copy, new_seq);
          break;

        case GIMPLE_OMP_FOR:
          new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
          gimple_omp_for_set_pre_body (copy, new_seq);
          t = unshare_expr (gimple_omp_for_clauses (stmt));
          gimple_omp_for_set_clauses (copy, t);
          copy->gimple_omp_for.iter
            = ggc_alloc_vec_gimple_omp_for_iter
                (gimple_omp_for_collapse (stmt));
          for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
            {
              gimple_omp_for_set_cond (copy, i,
                                       gimple_omp_for_cond (stmt, i));
              gimple_omp_for_set_index (copy, i,
                                        gimple_omp_for_index (stmt, i));
              t = unshare_expr (gimple_omp_for_initial (stmt, i));
              gimple_omp_for_set_initial (copy, i, t);
              t = unshare_expr (gimple_omp_for_final (stmt, i));
              gimple_omp_for_set_final (copy, i, t);
              t = unshare_expr (gimple_omp_for_incr (stmt, i));
              gimple_omp_for_set_incr (copy, i, t);
            }
          goto copy_omp_body;

        case GIMPLE_OMP_PARALLEL:
          t = unshare_expr (gimple_omp_parallel_clauses (stmt));
          gimple_omp_parallel_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
          gimple_omp_parallel_set_child_fn (copy, t);
          t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
          gimple_omp_parallel_set_data_arg (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_TASK:
          t = unshare_expr (gimple_omp_task_clauses (stmt));
          gimple_omp_task_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_task_child_fn (stmt));
          gimple_omp_task_set_child_fn (copy, t);
          t = unshare_expr (gimple_omp_task_data_arg (stmt));
          gimple_omp_task_set_data_arg (copy, t);
          t = unshare_expr (gimple_omp_task_copy_fn (stmt));
          gimple_omp_task_set_copy_fn (copy, t);
          t = unshare_expr (gimple_omp_task_arg_size (stmt));
          gimple_omp_task_set_arg_size (copy, t);
          t = unshare_expr (gimple_omp_task_arg_align (stmt));
          gimple_omp_task_set_arg_align (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_CRITICAL:
          t = unshare_expr (gimple_omp_critical_name (stmt));
          gimple_omp_critical_set_name (copy, t);
          goto copy_omp_body;

        case GIMPLE_OMP_SECTIONS:
          t = unshare_expr (gimple_omp_sections_clauses (stmt));
          gimple_omp_sections_set_clauses (copy, t);
          t = unshare_expr (gimple_omp_sections_control (stmt));
          gimple_omp_sections_set_control (copy, t);
          /* FALLTHRU  */

        case GIMPLE_OMP_SINGLE:
        case GIMPLE_OMP_SECTION:
        case GIMPLE_OMP_MASTER:
        case GIMPLE_OMP_ORDERED:
        copy_omp_body:
          new_seq = gimple_seq_copy (gimple_omp_body (stmt));
          gimple_omp_set_body (copy, new_seq);
          break;

        case GIMPLE_TRANSACTION:
          new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
          gimple_transaction_set_body (copy, new_seq);
          break;

        case GIMPLE_WITH_CLEANUP_EXPR:
          new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
          gimple_wce_set_cleanup (copy, new_seq);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Make copy of operands.  */
  if (num_ops > 0)
    {
      for (i = 0; i < num_ops; i++)
        gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

      /* Clear out SSA operand vectors on COPY.  */
      if (gimple_has_ops (stmt))
        {
          gimple_set_def_ops (copy, NULL);
          gimple_set_use_ops (copy, NULL);
        }

      if (gimple_has_mem_ops (stmt))
        {
          gimple_set_vdef (copy, gimple_vdef (stmt));
          gimple_set_vuse (copy, gimple_vuse (stmt));
        }

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}

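/* Usage sketch (illustrative, not part of the original sources): a pass
   that needs an independent duplicate of a statement might do

     gimple stmt = gsi_stmt (gsi);           // statement to duplicate
     gimple dup = gimple_copy (stmt);        // deep copy, SSA ops cleared
     gsi_insert_after (&gsi, dup, GSI_NEW_STMT);
     update_stmt (dup);                      // recompute operand caches

   Because the copy is marked modified and its DEF/USE vectors are NULL,
   update_stmt (or a later SSA update) must run before the copy is used.  */
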
/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
   a MODIFIED field.  */

void
gimple_set_modified (gimple s, bool modifiedp)
{
  if (gimple_has_ops (s))
    s->gsbase.modified = (unsigned) modifiedp;
}

/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (gimple_code (s) == GIMPLE_ASM
      && gimple_asm_volatile_p (s))
    return true;

  if (is_gimple_call (s))
    {
      int flags = gimple_call_flags (s);

      /* An infinite loop is considered a side effect.  */
      if (!(flags & (ECF_CONST | ECF_PURE))
          || (flags & ECF_LOOPING_CONST_OR_PURE))
        return true;

      return false;
    }

  return false;
}

/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
        if (tree_could_trap_p (gimple_op (s, i)))
          return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
        div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
                                      (INTEGRAL_TYPE_P (t)
                                       && TYPE_OVERFLOW_TRAPS (t)),
                                      div));

    default:
      break;
    }

  return false;
}

/* Return true if statement S can trap.  */

bool
gimple_could_trap_p (gimple s)
{
  return gimple_could_trap_p_1 (s, true, true);
}

/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */

bool
gimple_assign_rhs_could_trap_p (gimple s)
{
  gcc_assert (is_gimple_assign (s));
  return gimple_could_trap_p_1 (s, true, false);
}

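/* Usage sketch (illustrative): code motion passes typically guard
   against trapping statements before moving them, e.g.

     if (is_gimple_assign (stmt)
         && !gimple_assign_rhs_could_trap_p (stmt))
       move_computation (stmt);              // hypothetical helper

   Only the RHS needs checking there because the store to the LHS
   stays where it was.  */
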
/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
#ifdef GATHER_STATISTICS
  int i, total_tuples = 0, total_bytes = 0;

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
               gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
#else
  fprintf (stderr, "No gimple statistics\n");
#endif
}

/* Return the number of operands needed on the RHS of a GIMPLE
   assignment for an expression with tree code CODE.  */

unsigned
get_gimple_rhs_num_ops (enum tree_code code)
{
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);

  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
    return 1;
  else if (rhs_class == GIMPLE_BINARY_RHS)
    return 2;
  else if (rhs_class == GIMPLE_TERNARY_RHS)
    return 3;
  else
    gcc_unreachable ();
}

#define DEFTREECODE(SYM, STRING, TYPE, NARGS)                               \
  (unsigned char)                                                           \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS                                   \
   : ((TYPE) == tcc_binary                                                  \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS                      \
   : ((TYPE) == tcc_constant                                                \
      || (TYPE) == tcc_declaration                                          \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS                       \
   : ((SYM) == TRUTH_AND_EXPR                                               \
      || (SYM) == TRUTH_OR_EXPR                                             \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS                       \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS                             \
   : ((SYM) == COND_EXPR                                                    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR                                      \
      || (SYM) == WIDEN_MULT_MINUS_EXPR                                     \
      || (SYM) == DOT_PROD_EXPR                                             \
      || (SYM) == REALIGN_LOAD_EXPR                                         \
      || (SYM) == VEC_COND_EXPR                                             \
      || (SYM) == VEC_PERM_EXPR                                             \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS                            \
   : ((SYM) == CONSTRUCTOR                                                  \
      || (SYM) == OBJ_TYPE_REF                                              \
      || (SYM) == ASSERT_EXPR                                               \
      || (SYM) == ADDR_EXPR                                                 \
      || (SYM) == WITH_SIZE_EXPR                                            \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS                             \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

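/* Example of how the table above is consulted (illustrative):
   get_gimple_rhs_class in gimple.h simply indexes it, so

     get_gimple_rhs_class (PLUS_EXPR)   == GIMPLE_BINARY_RHS
     get_gimple_rhs_num_ops (PLUS_EXPR) == 2
     get_gimple_rhs_class (SSA_NAME)    == GIMPLE_SINGLE_RHS
     get_gimple_rhs_num_ops (COND_EXPR) == 3

   while tree codes with no GIMPLE RHS form map to GIMPLE_INVALID_RHS.  */
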
/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */

/* Validation of GIMPLE expressions.  */

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
                                && !tree_could_throw_p (t)
                                && is_gimple_val (TREE_OPERAND (t, 0))
                                && is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is something whose address can be taken.  */

bool
is_gimple_addressable (tree t)
{
  return (is_gimple_id (t) || handled_component_p (t)
          || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid gimple constant.  */

bool
is_gimple_constant (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case STRING_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
      return true;

    /* Vector constant constructors are gimple invariant.  */
    case CONSTRUCTOR:
      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
        return TREE_CONSTANT (t);
      else
        return false;

    default:
      return false;
    }
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T looks like a valid GIMPLE statement.  */

bool
is_gimple_stmt (tree t)
{
  const enum tree_code code = TREE_CODE (t);

  switch (code)
    {
    case NOP_EXPR:
      /* The only valid NOP_EXPR is the empty statement.  */
      return IS_EMPTY_STMT (t);

    case BIND_EXPR:
    case COND_EXPR:
      /* These are only valid if they're void.  */
      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));

    case SWITCH_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case LABEL_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case ASM_EXPR:
    case STATEMENT_LIST:
    case OMP_PARALLEL:
    case OMP_FOR:
    case OMP_SECTIONS:
    case OMP_SECTION:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_TASK:
      /* These are always void.  */
      return true;

    case CALL_EXPR:
    case MODIFY_EXPR:
    case PREDICT_EXPR:
      /* These are valid regardless of their type.  */
      return true;

    default:
      return false;
    }
}

/* Return true if T is a variable.  */

bool
is_gimple_variable (tree t)
{
  return (TREE_CODE (t) == VAR_DECL
          || TREE_CODE (t) == PARM_DECL
          || TREE_CODE (t) == RESULT_DECL
          || TREE_CODE (t) == SSA_NAME);
}

/* Return true if T is a GIMPLE identifier (something with an address).  */

bool
is_gimple_id (tree t)
{
  return (is_gimple_variable (t)
          || TREE_CODE (t) == FUNCTION_DECL
          || TREE_CODE (t) == LABEL_DECL
          || TREE_CODE (t) == CONST_DECL
          /* Allow string constants, since they are addressable.  */
          || TREE_CODE (t) == STRING_CST);
}

/* Return true if TYPE is a suitable type for a scalar register variable.  */

bool
is_gimple_reg_type (tree type)
{
  return !AGGREGATE_TYPE_P (type);
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that might imply.  All around,
     it seems safest to not do too much optimization with these at the
     tree level at all.  We'll have to rely on the rtl optimizers to
     clean this up, as there we've got all the appropriate bits exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}

/* Return true if T is a GIMPLE variable whose address is not needed.  */

bool
is_gimple_non_addressable (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
}

/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
          || TREE_CODE (t) == INTEGER_CST
          || (TREE_CODE (t) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
                  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}

/* Given a memory reference expression T, return its base address.
   The base address of a memory reference expression is the main
   object being referenced.  For instance, the base address for
   'array[i].fld[j]' is 'array'.  You can think of this as stripping
   away the offset part from a memory address.

   This function calls handled_component_p to strip away all the inner
   parts of the memory reference until it reaches the base object.  */

tree
get_base_address (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);

  if ((TREE_CODE (t) == MEM_REF
       || TREE_CODE (t) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);

  if (TREE_CODE (t) == SSA_NAME
      || DECL_P (t)
      || TREE_CODE (t) == STRING_CST
      || TREE_CODE (t) == CONSTRUCTOR
      || INDIRECT_REF_P (t)
      || TREE_CODE (t) == MEM_REF
      || TREE_CODE (t) == TARGET_MEM_REF)
    return t;
  else
    return NULL_TREE;
}

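/* Worked example (illustrative): for 'array[i].fld[j]' the component
   chain is stripped as

     array[i].fld[j]  ->  array[i].fld  ->  array[i]  ->  array

   so get_base_address returns the VAR_DECL 'array'.  For '*p' with 'p'
   an SSA name, the MEM_REF itself is returned since its address operand
   is not an ADDR_EXPR.  */
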
void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          return;

        default:
          break;
        }
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
        {
          tree op = TREE_OPERAND (t, i);
          if (op && TREE_SIDE_EFFECTS (op))
            TREE_SIDE_EFFECTS (t) = 1;
        }
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}

/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
          || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
             == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
                  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
           && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
           && integer_onep (TREE_OPERAND (t, 1))
           && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
                  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }

  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}

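/* Example transformations performed above (illustrative):

     (int) (a < b)   ->  a < b       // conversion stripped
     !x              ->  x == 0
     a < b ? 1 : 0   ->  a < b

   Callers treat a NULL_TREE result as "could not canonicalize" and
   keep their original condition.  */
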
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
  gimple new_stmt;

  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));

  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
                                               vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  VEC_free (tree, heap, vargs);
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  gimple_set_modified (new_stmt, true);

  return new_stmt;
}

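/* Usage sketch (illustrative): dropping the second argument of a call,
   as the IPA clone machinery does for unused parameters:

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);               // skip argument #1
     gimple new_call = gimple_call_copy_skip_args (stmt, skip);
     BITMAP_FREE (skip);

   The callee is left unchanged; callers are expected to redirect the
   new call to a matching clone themselves.  */
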
enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };

static hashval_t gimple_type_hash (const void *);

/* Structure used to maintain a cache of some type pairs compared by
   gimple_types_compatible_p when comparing aggregate types.  There are
   three possible values for SAME_P:

        -2: The pair (T1, T2) has just been inserted in the table.
         0: T1 and T2 are different types.
         1: T1 and T2 are the same type.

   The two elements in the SAME_P array are indexed by the comparison
   mode.  */

struct type_pair_d
{
  unsigned int uid1;
  unsigned int uid2;
  signed char same_p[2];
};
typedef struct type_pair_d *type_pair_t;
DEF_VEC_P(type_pair_t);
DEF_VEC_ALLOC_P(type_pair_t,heap);

#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;

/* Lookup the pair of types T1 and T2 in the type pair cache.  Insert a
   new entry if none existed.  */

static inline type_pair_t
lookup_type_pair (tree t1, tree t2)
{
  unsigned int index;
  unsigned int uid1, uid2;

  if (type_pair_cache == NULL)
    type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);

  if (TYPE_UID (t1) < TYPE_UID (t2))
    {
      uid1 = TYPE_UID (t1);
      uid2 = TYPE_UID (t2);
    }
  else
    {
      uid1 = TYPE_UID (t2);
      uid2 = TYPE_UID (t1);
    }
  gcc_checking_assert (uid1 != uid2);

  /* Using iterative_hash_hashval_t here would imply function calls;
     UIDs are known to be in a limited range, so a simple direct-mapped
     index suffices.  */
  index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
           % GIMPLE_TYPE_PAIR_SIZE);
  if (type_pair_cache[index].uid1 == uid1
      && type_pair_cache[index].uid2 == uid2)
    return &type_pair_cache[index];

  type_pair_cache[index].uid1 = uid1;
  type_pair_cache[index].uid2 = uid2;
  type_pair_cache[index].same_p[0] = -2;
  type_pair_cache[index].same_p[1] = -2;

  return &type_pair_cache[index];
}

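/* Worked example of the direct-mapped index above (illustrative,
   assuming a 64-bit HOST_WIDE_INT): for uid1 == 5 and uid2 == 9 the
   slot is

     ((5UL << 32) + 9) % 16381

   The smaller UID always ends up in the upper half-word, so the pair
   (9, 5) is canonicalized to (5, 9) before indexing and both orders
   share one cache slot.  */
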
/* Per pointer state for the SCC finding.  The on_sccstack flag
   is not strictly required, it is true when there is no hash value
   recorded for the type and false otherwise.  But querying that
   is slower.  */

struct sccs
{
  unsigned int dfsnum;
  unsigned int low;
  bool on_sccstack;
  union {
    hashval_t hash;
    signed char same_p;
  } u;
};

static unsigned int next_dfs_num;
static unsigned int gtc_next_dfs_num;

/* GIMPLE type merging cache.  A direct-mapped cache based on TYPE_UID.  */

typedef struct GTY(()) gimple_type_leader_entry_s {
  tree type;
  tree leader;
} gimple_type_leader_entry;

#define GIMPLE_TYPE_LEADER_SIZE 16381
static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
  gimple_type_leader_entry *gimple_type_leader;

/* Lookup an existing leader for T and return it or NULL_TREE, if
   there is none in the cache.  */

static inline tree
gimple_lookup_type_leader (tree t)
{
  gimple_type_leader_entry *leader;

  if (!gimple_type_leader)
    return NULL_TREE;

  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type != t)
    return NULL_TREE;

  return leader->leader;
}

/* Return true if T1 and T2 have the same name.  If both types have no
   name, return true as well.  */

static bool
compare_type_names_p (tree t1, tree t2)
{
  tree name1 = TYPE_NAME (t1);
  tree name2 = TYPE_NAME (t2);

  if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
    return false;

  if (name1 == NULL_TREE)
    return true;

  /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE.  */
  if (TREE_CODE (name1) != TREE_CODE (name2))
    return false;

  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);

  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);

  /* Identifiers can be compared with pointer equality rather
     than a string comparison.  */
  if (name1 == name2)
    return true;

  return false;
}

/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
               /* Once gimplification is done, self-referential offsets are
                  instantiated as operand #2 of the COMPONENT_REF built for
                  each access and reset.  Therefore, they are not relevant
                  anymore and fields are interchangeable provided that they
                  represent the same access.  */
               || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
                   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
                   && (DECL_SIZE (f1) == DECL_SIZE (f2)
                       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
                           && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
                       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
                   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
               || operand_equal_p (offset1, offset2, 0))
              && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
                                     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
      && host_integerp (DECL_FIELD_OFFSET (f2), 0))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
                      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
                      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
        return false;
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}

static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
                             VEC(type_pair_t, heap) **,
                             struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type pair with state *STATE to
   the pair T1, T2.  Update the merging status if it is not part of
   the SCC containing the callers pair and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gtc_visit (tree t1, tree t2,
           struct sccs *state,
           VEC(type_pair_t, heap) **sccstack,
           struct pointer_map_t *sccstate,
           struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  type_pair_t p;
  void **slot;
  tree leader1, leader2;

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
              || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall thru to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* Allocate a new cache entry for this comparison.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  if ((slot = pointer_map_contains (sccstate, p)) != NULL)
    cstate = (struct sccs *)*slot;
  /* Not yet visited.  DFS recurse.  */
  if (!cstate)
    {
      gimple_types_compatible_p_1 (t1, t2, p,
                                   sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
      state->low = MIN (state->low, cstate->low);
    }
  /* If the type is still on the SCC stack adjust the parents low.  */
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* Return the current lattice value.  We start with an equality
     assumption so types part of a SCC will be optimistically
     treated equal unless proven otherwise.  */
  return cstate->u.same_p;
}

/* Worker for gimple_types_compatible_p.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
                             VEC(type_pair_t, heap) **sccstack,
                             struct pointer_map_t *sccstate,
                             struct obstack *sccstate_obstack)
{
  struct sccs *state;

  gcc_assert (p->same_p[GTC_MERGE] == -2);

  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, p) = state;

  VEC_safe_push (type_pair_t, heap, *sccstack, p);
  state->dfsnum = gtc_next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;
  /* Start with an equality assumption.  As we DFS recurse into child
     SCCs this assumption may get revisited.  */
  state->u.same_p = 1;

  /* The struct tags shall compare equal.  */
  if (!compare_type_names_p (t1, t2))
    goto different_types;

  /* We may not merge typedef types to the same type in different
     contexts.  */
  if (TYPE_NAME (t1)
      && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (t1))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
    {
      if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
                      DECL_CONTEXT (TYPE_NAME (t2)),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    goto different_types;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case VECTOR_TYPE:
    case COMPLEX_TYPE:
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;
      goto same_types;

    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
         the number of elements are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack)
          || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
          || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
        goto different_types;
      else
        {
          tree i1 = TYPE_DOMAIN (t1);
          tree i2 = TYPE_DOMAIN (t2);

          /* For an incomplete external array, the type domain can be
             NULL_TREE.  Check this condition also.  */
          if (i1 == NULL_TREE && i2 == NULL_TREE)
            goto same_types;
          else if (i1 == NULL_TREE || i2 == NULL_TREE)
            goto different_types;
          /* If for a complete array type the possibly gimplified sizes
             are different the types are different.  */
          else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
                   || (TYPE_SIZE (i1)
                       && TYPE_SIZE (i2)
                       && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
            goto different_types;
          else
            {
              tree min1 = TYPE_MIN_VALUE (i1);
              tree min2 = TYPE_MIN_VALUE (i2);
              tree max1 = TYPE_MAX_VALUE (i1);
              tree max2 = TYPE_MAX_VALUE (i2);

              /* The minimum/maximum values have to be the same.  */
              if ((min1 == min2
                   || (min1 && min2
                       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
                            && TREE_CODE (min2) == PLACEHOLDER_EXPR)
                           || operand_equal_p (min1, min2, 0))))
                  && (max1 == max2
                      || (max1 && max2
                          && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
                               && TREE_CODE (max2) == PLACEHOLDER_EXPR)
                              || operand_equal_p (max1, max2, 0)))))
                goto same_types;
              else
                goto different_types;
            }
        }

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      /* Fallthru  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
         are the same.  */
      if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                      state, sccstack, sccstate, sccstate_obstack))
        goto different_types;

      if (!comp_type_attributes (t1, t2))
        goto different_types;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
        goto same_types;
      else
        {
          tree parms1, parms2;

          for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
               parms1 && parms2;
               parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
            {
              if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
                              state, sccstack, sccstate, sccstate_obstack))
                goto different_types;
            }

          if (parms1 || parms2)
            goto different_types;

          goto same_types;
        }

    case OFFSET_TYPE:
      {
        if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                        state, sccstack, sccstate, sccstate_obstack)
            || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
                           TYPE_OFFSET_BASETYPE (t2),
                           state, sccstack, sccstate, sccstate_obstack))
          goto different_types;

        goto same_types;
      }

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* If the two pointers have different ref-all attributes,
           they can't be the same type.  */
        if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
          goto different_types;

        /* Otherwise, pointer and reference types are the same if the
           pointed-to types are the same.  */
        if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
                       state, sccstack, sccstate, sccstate_obstack))
          goto same_types;

        goto different_types;
      }

    case INTEGER_TYPE:
    case BOOLEAN_TYPE:
      {
        tree min1 = TYPE_MIN_VALUE (t1);
        tree max1 = TYPE_MAX_VALUE (t1);
        tree min2 = TYPE_MIN_VALUE (t2);
        tree max2 = TYPE_MAX_VALUE (t2);
        bool min_equal_p = false;
        bool max_equal_p = false;

        /* If either type has a minimum value, the other type must
           have the same.  */
        if (min1 == NULL_TREE && min2 == NULL_TREE)
          min_equal_p = true;
        else if (min1 && min2 && operand_equal_p (min1, min2, 0))
          min_equal_p = true;

        /* Likewise, if either type has a maximum value, the other
           type must have the same.  */
        if (max1 == NULL_TREE && max2 == NULL_TREE)
          max_equal_p = true;
        else if (max1 && max2 && operand_equal_p (max1, max2, 0))
          max_equal_p = true;

        if (!min_equal_p || !max_equal_p)
          goto different_types;

        goto same_types;
      }

    case ENUMERAL_TYPE:
      {
        /* FIXME lto, we cannot check bounds on enumeral types because
           different front ends will produce different values.
           In C, enumeral types are integers, while in C++ each element
           will have its own symbolic value.  We should decide how enums
           are to be represented in GIMPLE and have each front end lower
           to that.  */
        tree v1, v2;

        /* For enumeral types, all the values must be the same.  */
        if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
          goto same_types;

        for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
             v1 && v2;
             v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
          {
            tree c1 = TREE_VALUE (v1);
            tree c2 = TREE_VALUE (v2);

            if (TREE_CODE (c1) == CONST_DECL)
              c1 = DECL_INITIAL (c1);

            if (TREE_CODE (c2) == CONST_DECL)
              c2 = DECL_INITIAL (c2);

            if (tree_int_cst_equal (c1, c2) != 1)
              goto different_types;

            if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
              goto different_types;
          }

        /* If one enumeration has more values than the other, they
           are not the same.  */
        if (v1 || v2)
          goto different_types;

        goto same_types;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree f1, f2;

        /* For aggregate types, all the fields must be the same.  */
        for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
             f1 && f2;
             f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
          {
            /* Different field kinds are not compatible.  */
            if (TREE_CODE (f1) != TREE_CODE (f2))
              goto different_types;
            /* Field decls must have the same name and offset.  */
            if (TREE_CODE (f1) == FIELD_DECL
                && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
                    || !gimple_compare_field_offset (f1, f2)))
              goto different_types;
            /* All entities should have the same name and type.  */
            if (DECL_NAME (f1) != DECL_NAME (f2)
                || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
                               state, sccstack, sccstate, sccstate_obstack))
              goto different_types;
          }

        /* If one aggregate has more fields than the other, they
           are not the same.  */
        if (f1 || f2)
          goto different_types;

        goto same_types;
      }

    default:
      gcc_unreachable ();
    }

  /* Common exit path for types that are not compatible.  */
different_types:
  state->u.same_p = 0;
  goto pop;

  /* Common exit path for types that are compatible.  */
same_types:
  gcc_assert (state->u.same_p == 1);

pop:
  if (state->low == state->dfsnum)
    {
      type_pair_t x;

      /* Pop off the SCC and set its cache values to the final
         comparison result.  */
      do
        {
          struct sccs *cstate;
          x = VEC_pop (type_pair_t, *sccstack);
          cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          x->same_p[GTC_MERGE] = state->u.same_p;
        }
      while (x != p);
    }

  return state->u.same_p;
}

/* Return true iff T1 and T2 are structurally identical for merging
   purposes; an incomplete type and a complete type are considered
   different.  */

static bool
gimple_types_compatible_p (tree t1, tree t2)
{
  VEC(type_pair_t, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  type_pair_t p = NULL;
  bool res;
  tree leader1, leader2;

  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  /* Can't be the same type if they have different CV qualifiers.  */
  if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Do some simple checks before doing three hashtable queries.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
          || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
        return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
          && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
              || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
        return false;

      /* That's all we need to check for float and fixed-point types.  */
      if (SCALAR_FLOAT_TYPE_P (t1)
          || FIXED_POINT_TYPE_P (t1))
        return true;

      /* For other types fall thru to more complex checks.  */
    }

  /* If the types have been previously registered and found equal
     they still are.  */
  leader1 = gimple_lookup_type_leader (t1);
  leader2 = gimple_lookup_type_leader (t2);
  if (leader1 == t2
      || t1 == leader2
      || (leader1 && leader1 == leader2))
    return true;

  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keeping the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
  if (gimple_type_hash (t1) != gimple_type_hash (t2))
    return false;

  /* If we've visited this type pair before (in the case of aggregates
     with self-referential types), and we made a decision, return it.  */
  p = lookup_type_pair (t1, t2);
  if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
    {
      /* We have already decided whether T1 and T2 are the
         same, return the cached result.  */
      return p->same_p[GTC_MERGE] == 1;
    }

  /* Now set up the SCC machinery for the comparison.  */
  gtc_next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  res = gimple_types_compatible_p_1 (t1, t2, p,
                                     &sccstack, sccstate, &sccstate_obstack);
  VEC_free (type_pair_t, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return res;
}

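/* Note (illustrative): this predicate is not meant to be called from
   passes directly; it is wired into the gimple_types hash table as its
   equality callback (see gimple_type_eq and gimple_register_type
   below), so a lookup like

     slot = htab_find_slot (gimple_types, t, INSERT);

   ends up comparing T against existing entries with
   gimple_types_compatible_p whenever their gimple_type_hash values
   collide.  */
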
static hashval_t
iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
                            struct pointer_map_t *, struct obstack *);

/* DFS visit the edge from the callers type with state *STATE to T.
   Update the callers type hash V with the hash for T if it is not part
   of the SCC containing the callers type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */

static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
       VEC (tree, heap) **sccstack,
       struct pointer_map_t *sccstate,
       struct obstack *sccstate_obstack)
{
  struct sccs *cstate = NULL;
  struct tree_int_map m;
  void **slot;

  /* If there is a hash value recorded for this type then it can't
     possibly be part of our parent SCC.  Simply mix in its hash.  */
  m.base.from = t;
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);

  if ((slot = pointer_map_contains (sccstate, t)) != NULL)
    cstate = (struct sccs *)*slot;
  if (!cstate)
    {
      hashval_t tem;
      /* Not yet visited.  DFS recurse.  */
      tem = iterative_hash_gimple_type (t, v,
                                        sccstack, sccstate, sccstate_obstack);
      if (!cstate)
        cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
      state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
         of the parents SCC mix in its hash value.  Otherwise we will
         ignore the type for hashing purposes and return the unaltered
         hash value.  */
      if (!cstate->on_sccstack)
        return tem;
    }
  if (cstate->dfsnum < state->dfsnum
      && cstate->on_sccstack)
    state->low = MIN (cstate->dfsnum, state->low);

  /* We are part of our parents SCC, skip this type during hashing
     and return the unaltered hash value.  */
  return v;
}

/* Hash NAME with the previous hash value V and return it.  */

static hashval_t
iterative_hash_name (tree name, hashval_t v)
{
  if (name == NULL_TREE)
    return v;
  v = iterative_hash_hashval_t (TREE_CODE (name), v);
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  if (name == NULL_TREE)
    return v;
  gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
  return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
}

/* A type, hashvalue pair for sorting SCC members.  */

struct type_hash_pair {
  tree type;
  hashval_t hash;
};

/* Compare two type, hashvalue pairs.  */

static int
type_hash_pair_compare (const void *p1_, const void *p2_)
{
  const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
  const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
  if (p1->hash < p2->hash)
    return -1;
  else if (p1->hash > p2->hash)
    return 1;
  return 0;
}

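/* Illustrative example of why the SCC machinery below is needed: for
   the mutually recursive pair

     struct A { struct B *b; };
     struct B { struct A *a; };

   hashing A recurses into B and vice versa.  Both types end up in one
   SCC whose member hashes are mixed in sorted, visit-order independent
   fashion, so two translation units compute identical hashes no matter
   which struct their DFS entered first.  */
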
/* Returning a hash value for gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles which messes up the
   required property that we need to compute the same hash value
   for structurally equivalent types.  To avoid this we have to
   hash all types in a cycle (the SCC) in a commutative way.  The
   easiest way is to not mix in the hashes of the SCC members at
   all.  To make this work we have to delay setting the hash
   values of the SCC until it is complete.  */

static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
                            VEC(tree, heap) **sccstack,
                            struct pointer_map_t *sccstate,
                            struct obstack *sccstate_obstack)
{
  hashval_t v;
  void **slot;
  struct sccs *state;

  /* Not visited during this DFS walk.  */
  gcc_checking_assert (!pointer_map_contains (sccstate, type));
  state = XOBNEW (sccstate_obstack, struct sccs);
  *pointer_map_insert (sccstate, type) = state;

  VEC_safe_push (tree, heap, *sccstack, type);
  state->dfsnum = next_dfs_num++;
  state->low = state->dfsnum;
  state->on_sccstack = true;

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_name (TYPE_NAME (type), 0);
  if (TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_CONTEXT (TYPE_NAME (type))
      && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
    v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
               sccstack, sccstate, sccstate_obstack);
  v = iterative_hash_hashval_t (TREE_CODE (type), v);
  v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);

  /* Do not hash the types size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_MODE (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to.  */
  if (POINTER_TYPE_P (type))
    v = visit (TREE_TYPE (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* For integer types hash the types min/max values and the string flag.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      /* OMP lowering can introduce error_mark_node in place of
         random local decls in types.  */
      if (TYPE_MIN_VALUE (type) != error_mark_node)
        v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
      if (TYPE_MAX_VALUE (type) != error_mark_node)
        v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = visit (TYPE_DOMAIN (type), state, v,
                 sccstack, sccstate, sccstate_obstack);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = visit (TREE_TYPE (type), state, v,
               sccstack, sccstate, sccstate_obstack);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
        v = visit (TYPE_METHOD_BASETYPE (type), state, v,
                   sccstack, sccstate, sccstate_obstack);

      /* Check result and argument types.  */
      v = visit (TREE_TYPE (type), state, v,
                 sccstack, sccstate, sccstate_obstack);
      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
        {
          v = visit (TREE_VALUE (p), state, v,
                     sccstack, sccstate, sccstate_obstack);
          na++;
        }

      v = iterative_hash_hashval_t (na, v);
    }

  if (TREE_CODE (type) == RECORD_TYPE
      || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
        {
          v = iterative_hash_name (DECL_NAME (f), v);
          v = visit (TREE_TYPE (f), state, v,
                     sccstack, sccstate, sccstate_obstack);
          nf++;
        }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Record hash for us.  */
  state->u.hash = v;

  /* See if we found an SCC.  */
  if (state->low == state->dfsnum)
    {
      tree x;
      struct tree_int_map *m;

      /* Pop off the SCC and set its hash values.  */
      x = VEC_pop (tree, *sccstack);
      /* Optimize SCC size one.  */
      if (x == type)
        {
          state->on_sccstack = false;
          m = ggc_alloc_cleared_tree_int_map ();
          m->base.from = x;
          m->to = v;
          slot = htab_find_slot (type_hash_cache, m, INSERT);
          gcc_assert (!*slot);
          *slot = (void *) m;
        }
      else
        {
          struct sccs *cstate;
          unsigned first, i, size, j;
          struct type_hash_pair *pairs;
          /* Pop off the SCC and build an array of type, hash pairs.  */
          first = VEC_length (tree, *sccstack) - 1;
          while (VEC_index (tree, *sccstack, first) != type)
            --first;
          size = VEC_length (tree, *sccstack) - first + 1;
          pairs = XALLOCAVEC (struct type_hash_pair, size);
          i = 0;
          cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
          cstate->on_sccstack = false;
          pairs[i].type = x;
          pairs[i].hash = cstate->u.hash;
          do
            {
              x = VEC_pop (tree, *sccstack);
              cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
              cstate->on_sccstack = false;
              ++i;
              pairs[i].type = x;
              pairs[i].hash = cstate->u.hash;
            }
          while (x != type);
          gcc_assert (i + 1 == size);
          /* Sort the arrays of type, hash pairs so that when we mix in
             all members of the SCC the hash value becomes independent on
             the order we visited the SCC.  Disregard hashes equal to
             the hash of the type we mix into because we cannot guarantee
             a stable sort for those across different TUs.  */
          qsort (pairs, size, sizeof (struct type_hash_pair),
                 type_hash_pair_compare);
          for (i = 0; i < size; ++i)
            {
              hashval_t hash;
              m = ggc_alloc_cleared_tree_int_map ();
              m->base.from = pairs[i].type;
              hash = pairs[i].hash;
              /* Skip same hashes.  */
              for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
                ;
              for (; j < size; ++j)
                hash = iterative_hash_hashval_t (pairs[j].hash, hash);
              for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
                hash = iterative_hash_hashval_t (pairs[j].hash, hash);
              m->to = hash;
              if (pairs[i].type == type)
                v = hash;
              slot = htab_find_slot (type_hash_cache, m, INSERT);
              gcc_assert (!*slot);
              *slot = (void *) m;
            }
        }
    }

  return iterative_hash_hashval_t (v, val);
}

/* Returns a hash value for P (assumed to be a type).  The hash value
   is computed using some distinguishing features of the type.  Note
   that we cannot use pointer hashing here as we may be dealing with
   two distinct instances of the same type.

   This function should produce the same hash value for two compatible
   types according to gimple_types_compatible_p.  */

static hashval_t
gimple_type_hash (const void *p)
{
  const_tree t = (const_tree) p;
  VEC(tree, heap) *sccstack = NULL;
  struct pointer_map_t *sccstate;
  struct obstack sccstate_obstack;
  hashval_t val;
  void **slot;
  struct tree_int_map m;

  if (type_hash_cache == NULL)
    type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
                                       tree_int_map_eq, NULL);

  m.base.from = CONST_CAST_TREE (t);
  if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);

  /* Perform a DFS walk and pre-hash all reachable types.  */
  next_dfs_num = 1;
  sccstate = pointer_map_create ();
  gcc_obstack_init (&sccstate_obstack);
  val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
                                    &sccstack, sccstate, &sccstate_obstack);
  VEC_free (tree, heap, sccstack);
  pointer_map_destroy (sccstate);
  obstack_free (&sccstate_obstack, NULL);

  return val;
}

/* Returning a hash value for gimple type TYPE combined with VAL.

   The hash value returned is equal for types considered compatible
   by gimple_canonical_types_compatible_p.  */

static hashval_t
iterative_hash_canonical_type (tree type, hashval_t val)
{
  hashval_t v;
  void **slot;
  struct tree_int_map *mp, m;

  m.base.from = type;
  if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
      && *slot)
    return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);

  /* Combine a few common features of types so that types are grouped into
     smaller sets; when searching for existing matching types to merge,
     only existing types having the same features as the new type will be
     checked.  */
  v = iterative_hash_hashval_t (TREE_CODE (type), 0);
  v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
  v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
  v = iterative_hash_hashval_t (TYPE_MODE (type), v);

  /* Incorporate common features of numerical types.  */
  if (INTEGRAL_TYPE_P (type)
      || SCALAR_FLOAT_TYPE_P (type)
      || FIXED_POINT_TYPE_P (type)
      || TREE_CODE (type) == VECTOR_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == OFFSET_TYPE
      || POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
      v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
    }

  /* For pointer and reference types, fold in information about the type
     pointed to but do not recurse to the pointed-to type.  */
  if (POINTER_TYPE_P (type))
    {
      v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
      v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
      v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
      v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
    }

  /* For integer types hash the string flag and whether the type is
     a sizetype.  */
  if (TREE_CODE (type) == INTEGER_TYPE)
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
    }

  /* For array types hash their domain and the string flag.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      && TYPE_DOMAIN (type))
    {
      v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
      v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
    }

  /* Recurse for aggregates with a single element type.  */
  if (TREE_CODE (type) == ARRAY_TYPE
      || TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    v = iterative_hash_canonical_type (TREE_TYPE (type), v);

  /* Incorporate function return and argument types.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      unsigned na;
      tree p;

      /* For method types also incorporate their parent class.  */
      if (TREE_CODE (type) == METHOD_TYPE)
        v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);

      v = iterative_hash_canonical_type (TREE_TYPE (type), v);

      for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
        {
          v = iterative_hash_canonical_type (TREE_VALUE (p), v);
          na++;
        }

      v = iterative_hash_hashval_t (na, v);
    }

  if (TREE_CODE (type) == RECORD_TYPE
      || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      unsigned nf;
      tree f;

      for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
        if (TREE_CODE (f) == FIELD_DECL)
          {
            v = iterative_hash_canonical_type (TREE_TYPE (f), v);
            nf++;
          }

      v = iterative_hash_hashval_t (nf, v);
    }

  /* Cache the just computed hash value.  */
  mp = ggc_alloc_cleared_tree_int_map ();
  mp->base.from = type;
  mp->to = v;
  *slot = (void *) mp;

  return iterative_hash_hashval_t (v, val);
}

/* Returns a hash value for P (assumed to be a type).  */

static hashval_t
gimple_canonical_type_hash (const void *p)
{
  if (canonical_type_hash_cache == NULL)
    canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
                                                 tree_int_map_eq, NULL);

  return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
}

/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_types_compatible_p (CONST_CAST_TREE (t1),
                                    CONST_CAST_TREE (t2));
}

/* Worker for gimple_register_type.
   Register type T in the global type table gimple_types.
   When REGISTERING_MV is false first recurse for the main variant of T.  */

static tree
gimple_register_type_1 (tree t, bool registering_mv)
{
  void **slot;
  gimple_type_leader_entry *leader;

  /* If we registered this type before return the cached result.  */
  leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
  if (leader->type == t)
    return leader->leader;

  /* Always register the main variant first.  This is important so we
     pick up the non-typedef variants as canonical, otherwise we'll end
     up taking typedef ids for structure tags during comparison.
     It also makes sure that main variants will be merged to main variants.
     As we are operating on a possibly partially fixed up type graph
     do not bother to recurse more than once, otherwise we may end up
     walking in circles.
     If we are registering a main variant it will either remain its
     own main variant or it will be merged to something else in which
     case we do not care for the main variant leader.  */
  if (!registering_mv
      && TYPE_MAIN_VARIANT (t) != t)
    gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);

  /* See if we already have an equivalent type registered.  */
  slot = htab_find_slot (gimple_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);
      leader->type = t;
      leader->leader = new_type;
      return new_type;
    }

  /* If not, insert it to the cache and the hash.  */
  leader->type = t;
  leader->leader = t;
  *slot = (void *) t;
  return t;
}

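/* Note on the leader cache (illustrative): gimple_type_leader is
   direct-mapped, so two types whose TYPE_UIDs collide modulo
   GIMPLE_TYPE_LEADER_SIZE simply evict each other, e.g. UIDs 5 and
   16386 both map to slot 5 (16386 % 16381 == 5).  Correctness never
   depends on the cache; a miss just falls through to the hash table
   lookup above.  */
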
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.  */

tree
gimple_register_type (tree t)
{
  gcc_assert (TYPE_P (t));

  if (!gimple_type_leader)
    gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
				(GIMPLE_TYPE_LEADER_SIZE);

  if (gimple_types == NULL)
    gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

  return gimple_register_type_1 (t, false);
}
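
/* Illustrative sketch (hypothetical helper, not part of this file or of the
   LTO reader API): a stream reader that wants each freshly read-in type
   merged with previously seen compatible types would funnel every entry
   through gimple_register_type and keep the returned leader.  */
#if 0
static void
merge_streamed_types (tree *types, unsigned n)
{
  unsigned i;

  /* Replace each type with its leader; all later references should
     use the merged type so that compatible types become pointer-equal.  */
  for (i = 0; i < n; ++i)
    types[i] = gimple_register_type (types[i]);
}
#endif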
/* The TYPE_CANONICAL merging machinery.  It should closely resemble
   the middle-end types_compatible_p function.  It needs to avoid
   claiming types are different for types that should be treated
   the same with respect to TBAA.  Canonical types are also used
   for IL consistency checks via the useless_type_conversion_p
   predicate which does not handle all type kinds itself but falls
   back to pointer-comparison of TYPE_CANONICAL for aggregates
   for example.  */

/* Return true iff T1 and T2 are structurally identical as far as
   TBAA is concerned.  */

static bool
gimple_canonical_types_compatible_p (tree t1, tree t2)
{
  /* Before starting to set up the SCC machinery handle simple cases.  */

  /* Check first for the obvious case of pointer identity.  */
  if (t1 == t2)
    return true;

  /* Check that we have two types to compare.  */
  if (t1 == NULL_TREE || t2 == NULL_TREE)
    return false;

  /* If the types have been previously registered and found equal
     they still are.  */
  if (TYPE_CANONICAL (t1)
      && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
    return true;

  /* Can't be the same type if the types don't have the same code.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return false;

  if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
    return false;

  /* Qualifiers do not matter for canonical type comparison purposes.  */

  /* Void types and nullptr types are always the same.  */
  if (TREE_CODE (t1) == VOID_TYPE
      || TREE_CODE (t1) == NULLPTR_TYPE)
    return true;

  /* Can't be the same type if they have different alignment, or mode.  */
  if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
      || TYPE_MODE (t1) != TYPE_MODE (t2))
    return false;

  /* Non-aggregate types can be handled cheaply.  */
  if (INTEGRAL_TYPE_P (t1)
      || SCALAR_FLOAT_TYPE_P (t1)
      || FIXED_POINT_TYPE_P (t1)
      || TREE_CODE (t1) == VECTOR_TYPE
      || TREE_CODE (t1) == COMPLEX_TYPE
      || TREE_CODE (t1) == OFFSET_TYPE
      || POINTER_TYPE_P (t1))
    {
      /* Can't be the same type if they have different sign or precision.  */
      if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
	  || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
	return false;

      if (TREE_CODE (t1) == INTEGER_TYPE
	  && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
	      || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
	return false;

      /* For canonical type comparisons we do not want to build SCCs
	 so we cannot compare pointed-to types.  But we can, for now,
	 require the same pointed-to type kind and match what
	 useless_type_conversion_p would do.  */
      if (POINTER_TYPE_P (t1))
	{
	  /* If the two pointers have different ref-all attributes,
	     they can't be the same type.  */
	  if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
	    return false;

	  if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
	      != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
	    return false;

	  if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
	    return false;

	  if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
	    return false;
	}

      /* Tail-recurse to components.  */
      if (TREE_CODE (t1) == VECTOR_TYPE
	  || TREE_CODE (t1) == COMPLEX_TYPE)
	return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
						    TREE_TYPE (t2));

      return true;
    }

  /* If their attributes are not the same they can't be the same type.  */
  if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
    return false;

  /* Do type-specific comparisons.  */
  switch (TREE_CODE (t1))
    {
    case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
	  || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
	  || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
	return false;
      else
	{
	  tree i1 = TYPE_DOMAIN (t1);
	  tree i2 = TYPE_DOMAIN (t2);

	  /* For an incomplete external array, the type domain can be
	     NULL_TREE.  Check this condition also.  */
	  if (i1 == NULL_TREE && i2 == NULL_TREE)
	    return true;
	  else if (i1 == NULL_TREE || i2 == NULL_TREE)
	    return false;
	  /* If for a complete array type the possibly gimplified sizes
	     are different the types are different.  */
	  else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
		   || (TYPE_SIZE (i1)
		       && TYPE_SIZE (i2)
		       && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
	    return false;
	  else
	    {
	      tree min1 = TYPE_MIN_VALUE (i1);
	      tree min2 = TYPE_MIN_VALUE (i2);
	      tree max1 = TYPE_MAX_VALUE (i1);
	      tree max2 = TYPE_MAX_VALUE (i2);

	      /* The minimum/maximum values have to be the same.  */
	      if ((min1 == min2
		   || (min1 && min2
		       && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
			    && TREE_CODE (min2) == PLACEHOLDER_EXPR)
			   || operand_equal_p (min1, min2, 0))))
		  && (max1 == max2
		      || (max1 && max2
			  && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
			       && TREE_CODE (max2) == PLACEHOLDER_EXPR)
			      || operand_equal_p (max1, max2, 0)))))
		return true;
	      else
		return false;
	    }
	}

    case METHOD_TYPE:
      /* Method types should belong to the same class.  */
      if (!gimple_canonical_types_compatible_p
	     (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
	return false;

      /* Fallthru.  */

    case FUNCTION_TYPE:
      /* Function types are the same if the return type and arguments types
	 are the same.  */
      if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
	return false;

      if (!comp_type_attributes (t1, t2))
	return false;

      if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
	return true;
      else
	{
	  tree parms1, parms2;

	  for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
	       parms1 && parms2;
	       parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
	    {
	      if (!gimple_canonical_types_compatible_p
		     (TREE_VALUE (parms1), TREE_VALUE (parms2)))
		return false;
	    }

	  if (parms1 || parms2)
	    return false;

	  return true;
	}

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f1, f2;

	/* For aggregate types, all the fields must be the same.  */
	for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
	     f1 || f2;
	     f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
	  {
	    /* Skip non-fields.  */
	    while (f1 && TREE_CODE (f1) != FIELD_DECL)
	      f1 = TREE_CHAIN (f1);
	    while (f2 && TREE_CODE (f2) != FIELD_DECL)
	      f2 = TREE_CHAIN (f2);
	    if (!f1 || !f2)
	      break;
	    /* The fields must have the same name, offset and type.  */
	    if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
		|| !gimple_compare_field_offset (f1, f2)
		|| !gimple_canonical_types_compatible_p
		     (TREE_TYPE (f1), TREE_TYPE (f2)))
	      return false;
	  }

	/* If one aggregate has more fields than the other, they
	   are not the same.  */
	if (f1 || f2)
	  return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Returns nonzero if P1 and P2 are equal.  */

static int
gimple_canonical_type_eq (const void *p1, const void *p2)
{
  const_tree t1 = (const_tree) p1;
  const_tree t2 = (const_tree) p2;
  return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
					      CONST_CAST_TREE (t2));
}
/* Register type T in the global type table gimple_types.
   If another type T', compatible with T, already existed in
   gimple_types then return T', otherwise return T.  This is used by
   LTO to merge identical types read from different TUs.

   ??? This merging does not exactly match how the tree.c middle-end
   functions will assign TYPE_CANONICAL when new types are created
   during optimization (which at least happens for pointer and array
   types).  */

tree
gimple_register_canonical_type (tree t)
{
  void **slot;

  gcc_assert (TYPE_P (t));

  if (TYPE_CANONICAL (t))
    return TYPE_CANONICAL (t);

  if (gimple_canonical_types == NULL)
    gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
					      gimple_canonical_type_eq, 0);

  slot = htab_find_slot (gimple_canonical_types, t, INSERT);
  if (*slot
      && *(tree *)slot != t)
    {
      tree new_type = (tree) *((tree *) slot);

      TYPE_CANONICAL (t) = new_type;
      t = new_type;
    }
  else
    {
      TYPE_CANONICAL (t) = t;
      *slot = (void *) t;
    }

  return t;
}
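
/* Illustrative sketch (hypothetical helper, not part of this file): once
   types have been funnelled through gimple_register_canonical_type, a
   TBAA-style "are these two types the same for aliasing" query reduces
   to a pointer comparison of the returned canonical types.  */
#if 0
static bool
canonically_same_type_p (tree t1, tree t2)
{
  /* Each call either returns an existing compatible canonical type or
     installs the argument as its own canonical type.  */
  return (gimple_register_canonical_type (t1)
	  == gimple_register_canonical_type (t2));
}
#endif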
/* Show statistics on references to the global type table gimple_types.  */

void
print_gimple_types_stats (void)
{
  if (gimple_types)
    fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_types),
	     (long) htab_elements (gimple_types),
	     (long) gimple_types->searches,
	     (long) gimple_types->collisions,
	     htab_collisions (gimple_types));
  else
    fprintf (stderr, "GIMPLE type table is empty\n");
  if (type_hash_cache)
    fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (type_hash_cache),
	     (long) htab_elements (type_hash_cache),
	     (long) type_hash_cache->searches,
	     (long) type_hash_cache->collisions,
	     htab_collisions (type_hash_cache));
  else
    fprintf (stderr, "GIMPLE type hash table is empty\n");
  if (gimple_canonical_types)
    fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (gimple_canonical_types),
	     (long) htab_elements (gimple_canonical_types),
	     (long) gimple_canonical_types->searches,
	     (long) gimple_canonical_types->collisions,
	     htab_collisions (gimple_canonical_types));
  else
    fprintf (stderr, "GIMPLE canonical type table is empty\n");
  if (canonical_type_hash_cache)
    fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
	     "%ld searches, %ld collisions (ratio: %f)\n",
	     (long) htab_size (canonical_type_hash_cache),
	     (long) htab_elements (canonical_type_hash_cache),
	     (long) canonical_type_hash_cache->searches,
	     (long) canonical_type_hash_cache->collisions,
	     htab_collisions (canonical_type_hash_cache));
  else
    fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
}
/* Free the gimple type hashtables used for LTO type merging.  */

void
free_gimple_type_tables (void)
{
  /* Last chance to print stats for the tables.  */
  if (flag_lto_report)
    print_gimple_types_stats ();

  if (gimple_types)
    {
      htab_delete (gimple_types);
      gimple_types = NULL;
    }
  if (gimple_canonical_types)
    {
      htab_delete (gimple_canonical_types);
      gimple_canonical_types = NULL;
    }
  if (type_hash_cache)
    {
      htab_delete (type_hash_cache);
      type_hash_cache = NULL;
    }
  if (canonical_type_hash_cache)
    {
      htab_delete (canonical_type_hash_cache);
      canonical_type_hash_cache = NULL;
    }
  if (type_pair_cache)
    {
      free (type_pair_cache);
      type_pair_cache = NULL;
    }
  gimple_type_leader = NULL;
}
/* Return a type the same as TYPE except unsigned or
   signed according to UNSIGNEDP.  */

static tree
gimple_signed_or_unsigned_type (bool unsignedp, tree type)
{
  tree type1;

  type1 = TYPE_MAIN_VARIANT (type);
  if (type1 == signed_char_type_node
      || type1 == char_type_node
      || type1 == unsigned_char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (type1 == integer_type_node || type1 == unsigned_type_node)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (type1 == long_long_integer_type_node
      || type1 == long_long_unsigned_type_node)
    return unsignedp
	   ? long_long_unsigned_type_node
	   : long_long_integer_type_node;
  if (int128_integer_type_node && (type1 == int128_integer_type_node
				   || type1 == int128_unsigned_type_node))
    return unsignedp
	   ? int128_unsigned_type_node
	   : int128_integer_type_node;
#if HOST_BITS_PER_WIDE_INT >= 64
  if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#define GIMPLE_FIXED_TYPES(NAME)	    \
  if (type1 == short_ ## NAME ## _type_node \
      || type1 == unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_short_ ## NAME ## _type_node \
		     : short_ ## NAME ## _type_node; \
  if (type1 == NAME ## _type_node \
      || type1 == unsigned_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_ ## NAME ## _type_node \
		     : NAME ## _type_node; \
  if (type1 == long_ ## NAME ## _type_node \
      || type1 == unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_ ## NAME ## _type_node \
		     : long_ ## NAME ## _type_node; \
  if (type1 == long_long_ ## NAME ## _type_node \
      || type1 == unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
		     : long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES(NAME) \
  if (type1 == NAME ## _type_node \
      || type1 == u ## NAME ## _type_node) \
    return unsignedp ? u ## NAME ## _type_node \
		     : NAME ## _type_node;

#define GIMPLE_FIXED_TYPES_SAT(NAME) \
  if (type1 == sat_ ## short_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
		     : sat_ ## short_ ## NAME ## _type_node; \
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
		     : sat_ ## long_ ## NAME ## _type_node; \
  if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
      || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
    return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
		     : sat_ ## long_long_ ## NAME ## _type_node;

#define GIMPLE_FIXED_MODE_TYPES_SAT(NAME)	\
  if (type1 == sat_ ## NAME ## _type_node \
      || type1 == sat_ ## u ## NAME ## _type_node) \
    return unsignedp ? sat_ ## u ## NAME ## _type_node \
		     : sat_ ## NAME ## _type_node;

  GIMPLE_FIXED_TYPES (fract);
  GIMPLE_FIXED_TYPES_SAT (fract);
  GIMPLE_FIXED_TYPES (accum);
  GIMPLE_FIXED_TYPES_SAT (accum);

  GIMPLE_FIXED_MODE_TYPES (qq);
  GIMPLE_FIXED_MODE_TYPES (hq);
  GIMPLE_FIXED_MODE_TYPES (sq);
  GIMPLE_FIXED_MODE_TYPES (dq);
  GIMPLE_FIXED_MODE_TYPES (tq);
  GIMPLE_FIXED_MODE_TYPES_SAT (qq);
  GIMPLE_FIXED_MODE_TYPES_SAT (hq);
  GIMPLE_FIXED_MODE_TYPES_SAT (sq);
  GIMPLE_FIXED_MODE_TYPES_SAT (dq);
  GIMPLE_FIXED_MODE_TYPES_SAT (tq);
  GIMPLE_FIXED_MODE_TYPES (ha);
  GIMPLE_FIXED_MODE_TYPES (sa);
  GIMPLE_FIXED_MODE_TYPES (da);
  GIMPLE_FIXED_MODE_TYPES (ta);
  GIMPLE_FIXED_MODE_TYPES_SAT (ha);
  GIMPLE_FIXED_MODE_TYPES_SAT (sa);
  GIMPLE_FIXED_MODE_TYPES_SAT (da);
  GIMPLE_FIXED_MODE_TYPES_SAT (ta);

  /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
     the precision; they have precision set to match their range, but
     may use a wider mode to match an ABI.  If we change modes, we may
     wind up with bad conversions.  For INTEGER_TYPEs in C, must check
     the precision as well, so as to yield correct results for
     bit-field types.  C++ does not have these separate bit-field
     types, and producing a signed or unsigned variant of an
     ENUMERAL_TYPE may cause other problems as well.  */
  if (!INTEGRAL_TYPE_P (type)
      || TYPE_UNSIGNED (type) == unsignedp)
    return type;

#define TYPE_OK(node)							    \
  (TYPE_MODE (type) == TYPE_MODE (node)				    \
   && TYPE_PRECISION (type) == TYPE_PRECISION (node))
  if (TYPE_OK (signed_char_type_node))
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (TYPE_OK (integer_type_node))
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (TYPE_OK (short_integer_type_node))
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (TYPE_OK (long_integer_type_node))
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (TYPE_OK (long_long_integer_type_node))
    return (unsignedp
	    ? long_long_unsigned_type_node
	    : long_long_integer_type_node);
  if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
    return (unsignedp
	    ? int128_unsigned_type_node
	    : int128_integer_type_node);

#if HOST_BITS_PER_WIDE_INT >= 64
  if (TYPE_OK (intTI_type_node))
    return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
#endif
  if (TYPE_OK (intDI_type_node))
    return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
  if (TYPE_OK (intSI_type_node))
    return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
  if (TYPE_OK (intHI_type_node))
    return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
  if (TYPE_OK (intQI_type_node))
    return unsignedp ? unsigned_intQI_type_node : intQI_type_node;

#undef GIMPLE_FIXED_TYPES
#undef GIMPLE_FIXED_MODE_TYPES
#undef GIMPLE_FIXED_TYPES_SAT
#undef GIMPLE_FIXED_MODE_TYPES_SAT
#undef TYPE_OK

  return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
}
/* Return an unsigned type the same as TYPE in other respects.  */

tree
gimple_unsigned_type (tree type)
{
  return gimple_signed_or_unsigned_type (true, type);
}


/* Return a signed type the same as TYPE in other respects.  */

tree
gimple_signed_type (tree type)
{
  return gimple_signed_or_unsigned_type (false, type);
}
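
/* Illustrative sketch (hypothetical debug helper, not part of this file):
   for a typical signed integral TYPE, switching to the unsigned variant
   and back should preserve the precision while flipping signedness; both
   calls resolve through gimple_signed_or_unsigned_type above.  */
#if 0
static void
check_sign_roundtrip (tree type)
{
  tree u = gimple_unsigned_type (type);
  tree s = gimple_signed_type (u);

  /* The variants keep the precision of the original type.  */
  gcc_assert (TYPE_PRECISION (u) == TYPE_PRECISION (type));
  gcc_assert (TYPE_PRECISION (s) == TYPE_PRECISION (type));
}
#endif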
/* Return the type-based alias set for T, which may be an expression
   or a type.  Return -1 if we don't do anything special.  */

alias_set_type
gimple_get_alias_set (tree t)
{
  tree u;

  /* Permit type-punning when accessing a union, provided the access
     is directly through the union.  For example, this code does not
     permit taking the address of a union member and then storing
     through it.  Even the type-punning allowed here is a GCC
     extension, albeit a common and useful one; the C standard says
     that such accesses have implementation-defined behavior.  */
  for (u = t;
       TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
       u = TREE_OPERAND (u, 0))
    if (TREE_CODE (u) == COMPONENT_REF
	&& TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
      return 0;

  /* That's all the expressions we handle specially.  */
  if (!TYPE_P (t))
    return -1;

  /* For convenience, follow the C standard when dealing with
     character types.  Any object may be accessed via an lvalue that
     has character type.  */
  if (t == char_type_node
      || t == signed_char_type_node
      || t == unsigned_char_type_node)
    return 0;

  /* Allow aliasing between signed and unsigned variants of the same
     type.  We treat the signed variant as canonical.  */
  if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
    {
      tree t1 = gimple_signed_type (t);

      /* t1 == t can happen for boolean nodes which are always unsigned.  */
      if (t1 != t)
	return get_alias_set (t1);
    }

  return -1;
}
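
/* Illustrative example (plain C, not part of this file) of the union
   type-punning rule implemented above: the first access goes directly
   through the union and is given alias set 0; the second takes the
   address of a member first, so it keeps the member type's alias set.  */
#if 0
union u { int i; float f; };

static float
punned_read_ok (union u *p)
{
  return p->f;		/* COMPONENT_REF of a UNION_TYPE: alias set 0.  */
}

static float
punned_read_not_special (union u *p)
{
  float *q = &p->f;	/* The member address escapes the union...  */
  return *q;		/* ...so this load is not handled specially.  */
}
#endif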
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned num_stores;
  unsigned num_loads;
};

/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
    {
      if (wi_p->is_lhs)
	count_p->num_stores++;
      else
	count_p->num_loads++;
    }

  return NULL_TREE;
}
/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  */

void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_RETURN
      || gimple_code (stmt) == GIMPLE_ASM
      || is_gimple_call (stmt))
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_stores = 0;
      count.num_loads = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *num_stores_p = count.num_stores;
      *num_loads_p = count.num_loads;
    }

  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
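
/* Illustrative sketch (hypothetical predicate, not part of this file):
   use the counters above to decide whether PTR is only read through,
   never stored through, in STMT.  */
#if 0
static bool
ptr_only_loaded_p (tree ptr, gimple stmt)
{
  unsigned num_uses, num_loads, num_stores;

  count_uses_and_derefs (ptr, stmt, &num_uses, &num_loads, &num_stores);
  return num_loads > 0 && num_stores == 0;
}
#endif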
/* From a tree operand OP return the base of a load or store operation
   or NULL_TREE if OP is not a load or a store.  */

static tree
get_base_loadstore (tree op)
{
  while (handled_component_p (op))
    op = TREE_OPERAND (op, 0);
  if (DECL_P (op)
      || INDIRECT_REF_P (op)
      || TREE_CODE (op) == MEM_REF
      || TREE_CODE (op) == TARGET_MEM_REF)
    return op;
  return NULL_TREE;
}
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
   passing the STMT, the base of the operand and DATA to it.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */

bool
walk_stmt_load_store_addr_ops (gimple stmt, void *data,
			       bool (*visit_load)(gimple, tree, void *),
			       bool (*visit_store)(gimple, tree, void *),
			       bool (*visit_addr)(gimple, tree, void *))
{
  bool ret = false;
  unsigned i;
  if (gimple_assign_single_p (stmt))
    {
      tree lhs, rhs;
      if (visit_store)
	{
	  lhs = get_base_loadstore (gimple_assign_lhs (stmt));
	  if (lhs)
	    ret |= visit_store (stmt, lhs, data);
	}
      rhs = gimple_assign_rhs1 (stmt);
      while (handled_component_p (rhs))
	rhs = TREE_OPERAND (rhs, 0);
      if (visit_addr)
	{
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	  else if (TREE_CODE (rhs) == TARGET_MEM_REF
		   && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
	  else if (TREE_CODE (rhs) == OBJ_TYPE_REF
		   && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
						   0), data);
	  else if (TREE_CODE (rhs) == CONSTRUCTOR)
	    {
	      unsigned int ix;
	      tree val;

	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
		if (TREE_CODE (val) == ADDR_EXPR)
		  ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
		else if (TREE_CODE (val) == OBJ_TYPE_REF
			 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
		  ret |= visit_addr (stmt,
				     TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
						   0), data);
	    }
	  lhs = gimple_assign_lhs (stmt);
	  if (TREE_CODE (lhs) == TARGET_MEM_REF
	      && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
	}
      if (visit_load)
	{
	  rhs = get_base_loadstore (rhs);
	  if (rhs)
	    ret |= visit_load (stmt, rhs, data);
	}
    }
  else if (visit_addr
	   && (is_gimple_assign (stmt)
	       || gimple_code (stmt) == GIMPLE_COND))
    {
      for (i = 0; i < gimple_num_ops (stmt); ++i)
	{
	  tree op = gimple_op (stmt, i);
	  if (op == NULL_TREE)
	    ;
	  else if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
	     tree with two operands.  */
	  else if (i == 1 && COMPARISON_CLASS_P (op))
	    {
	      if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
						       0), data);
	      if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
		ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
						       0), data);
	    }
	}
    }
  else if (is_gimple_call (stmt))
    {
      if (visit_store)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs)
	    {
	      lhs = get_base_loadstore (lhs);
	      if (lhs)
		ret |= visit_store (stmt, lhs, data);
	    }
	}
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree rhs = gimple_call_arg (stmt, i);
	    if (visit_addr
		&& TREE_CODE (rhs) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
	    else if (visit_load)
	      {
		rhs = get_base_loadstore (rhs);
		if (rhs)
		  ret |= visit_load (stmt, rhs, data);
	      }
	  }
      if (visit_addr
	  && gimple_call_chain (stmt)
	  && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
	ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
			   data);
      if (visit_addr
	  && gimple_call_return_slot_opt_p (stmt)
	  && gimple_call_lhs (stmt) != NULL_TREE
	  && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
	ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    {
      unsigned noutputs;
      const char *constraint;
      const char **oconstraints;
      bool allows_mem, allows_reg, is_inout;
      noutputs = gimple_asm_noutputs (stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);
      if (visit_store || visit_addr)
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = get_base_loadstore (TREE_VALUE (link));
	    if (op && visit_store)
	      ret |= visit_store (stmt, op, data);
	    if (visit_addr)
	      {
		constraint = TREE_STRING_POINTER
		    (TREE_VALUE (TREE_PURPOSE (link)));
		oconstraints[i] = constraint;
		parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
					 &allows_reg, &is_inout);
		if (op && !allows_reg && allows_mem)
		  ret |= visit_addr (stmt, op, data);
	      }
	  }
      if (visit_load || visit_addr)
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (visit_addr
		&& TREE_CODE (op) == ADDR_EXPR)
	      ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	    else if (visit_load || visit_addr)
	      {
		op = get_base_loadstore (op);
		if (op)
		  {
		    if (visit_load)
		      ret |= visit_load (stmt, op, data);
		    if (visit_addr)
		      {
			constraint = TREE_STRING_POINTER
			    (TREE_VALUE (TREE_PURPOSE (link)));
			parse_input_constraint (&constraint, 0, 0, noutputs,
						0, oconstraints,
						&allows_mem, &allows_reg);
			if (!allows_reg && allows_mem)
			  ret |= visit_addr (stmt, op, data);
		      }
		  }
	      }
	  }
    }
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree op = gimple_return_retval (stmt);
      if (op)
	{
	  if (visit_addr
	      && TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  else if (visit_load)
	    {
	      op = get_base_loadstore (op);
	      if (op)
		ret |= visit_load (stmt, op, data);
	    }
	}
    }
  else if (visit_addr
	   && gimple_code (stmt) == GIMPLE_PHI)
    {
      for (i = 0; i < gimple_phi_num_args (stmt); ++i)
	{
	  tree op = PHI_ARG_DEF (stmt, i);
	  if (TREE_CODE (op) == ADDR_EXPR)
	    ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	}
    }

  return ret;
}
/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
			  bool (*visit_load)(gimple, tree, void *),
			  bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
					visit_load, visit_store, NULL);
}
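
/* Illustrative sketch (hypothetical callbacks, not part of this file):
   count the memory loads and stores in a statement by threading a small
   counter structure through the DATA pointer of the walker above.  */
#if 0
struct ls_count { unsigned loads, stores; };

static bool
count_load_cb (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
	       void *data)
{
  ((struct ls_count *) data)->loads++;
  return false;		/* Keep walking; nothing interesting to report.  */
}

static bool
count_store_cb (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
		void *data)
{
  ((struct ls_count *) data)->stores++;
  return false;
}

static void
count_loads_stores (gimple stmt, struct ls_count *c)
{
  c->loads = c->stores = 0;
  walk_stmt_load_store_ops (stmt, c, count_load_cb, count_store_cb);
}
#endif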
/* Helper for gimple_ior_addresses_taken.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
			      tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}
/* Set the bit for the uid of all decls that have their address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Returns true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
					gimple_ior_addresses_taken_1);
}
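
/* Illustrative sketch (hypothetical helper, not part of this file): OR the
   address-taken decls of every statement in a basic block into one bitmap,
   using the standard gimple statement iterators.  */
#if 0
static void
collect_addresses_taken_in_bb (basic_block bb, bitmap addresses_taken)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));
}
#endif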
/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
	{
	  dmgl_opts = DMGL_VERBOSE
		      | DMGL_ANSI
		      | DMGL_GNU_V3
		      | DMGL_RET_POSTFIX;
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    dmgl_opts |= DMGL_PARAMS;
	}

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
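
/* Illustrative sketch (hypothetical debug helper, not part of this file):
   dump the demangled name of DECL to stderr, falling back to "<unnamed>"
   for anonymous decls.  */
#if 0
static void
debug_decl_name (tree decl)
{
  const char *name = gimple_decl_printable_name (decl, 2);

  fprintf (stderr, "%s\n", name ? name : "<unnamed>");
}
#endif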
/* Return true when STMT is a call to the builtin function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
	  && (fndecl = gimple_call_fndecl (stmt)) != NULL
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fndecl) == code);
}
/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
	return true;
    }

  return false;
}
5527 #include "gt-gimple.h"