/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"
/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */
/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */
/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}
/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
			    unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}
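
/* Usage sketch (editor's illustration, not part of the original file;
   SEQ is a hypothetical gimple_seq):

     gimple ret = gimple_build_return (NULL_TREE);
     gimple_seq_add_stmt (&seq, ret);

   builds a plain "return;".  A non-NULL RETVAL must already be a valid
   GIMPLE operand, e.g. an SSA name or an invariant.  */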
/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_vec and
   gimple_build_call_from_tree.  Build the basic components of a
   GIMPLE_CALL statement to function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
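
/* Usage sketch (editor's illustration, not part of the original file;
   FNDECL, ARG0, ARG1 and LHS are hypothetical trees already in valid
   GIMPLE form):

     gimple call = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (call, lhs);

   The variadic form and gimple_build_call_vec produce the same tuple;
   the vector form is preferred when the argument count is only known
   at run time.  */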
/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}
/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
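
/* Worked example (editor's note): for EXPR "a + b", TREE_CODE is
   PLUS_EXPR, whose rhs class is GIMPLE_BINARY_RHS, so *SUBCODE_P
   becomes PLUS_EXPR, *OP1_P = a, *OP2_P = b and *OP3_P = NULL_TREE.
   For a bare SSA name or constant the class is GIMPLE_SINGLE_RHS and
   the expression itself becomes *OP1_P.  */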
/* Build a GIMPLE_ASSIGN statement.

   LHS is the lvalue of the assignment.
   RHS is the value being assigned; it can be a single operand or a
   unary, binary or ternary expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
					    PASS_MEM_STAT);
}
/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
				   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1, 2 or 3 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}
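
/* Usage sketch (editor's illustration; LHS, A and B are hypothetical
   GIMPLE operands):

     gimple stmt = gimple_build_assign (lhs, rhs_expr);

   builds "lhs = rhs_expr" by flattening RHS_EXPR through
   extract_ops_from_tree_1, so e.g. "a + b" becomes a tuple with
   subcode PLUS_EXPR and operands {lhs, a, b}.  */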
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);

  return gimple_seq_last_stmt (*seq_p);
}
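
/* Usage sketch (editor's illustration; TYPE, X, A and B are
   hypothetical):

     gimple_seq seq = NULL;
     gimple last = gimplify_assign (x, build2 (PLUS_EXPR, type, a, b),
                                    &seq);

   Unlike gimple_build_assign, this gimplifies the operands first, so
   SEQ may end up holding several statements; LAST is the final
   assignment to X.  */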
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
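
/* Usage sketch (editor's illustration; A and B are hypothetical
   GIMPLE operands):

     gimple cond = gimple_build_cond (LT_EXPR, a, b,
                                      NULL_TREE, NULL_TREE);

   builds "if (a < b)".  Once a function is in CFG form the labels are
   normally NULL and the branch targets are the outgoing edges of the
   basic block holding the GIMPLE_COND.  */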
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
			       tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}

/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}
/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}

/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}

/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}
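
/* Usage sketch (editor's illustration; BODY is a hypothetical
   gimple_seq and VARS a DECL_CHAIN-linked list of local decls):

     gimple bind = gimple_build_bind (vars, body, NULL_TREE);

   wraps BODY in a scope declaring VARS; a NULL BLOCK is permitted for
   compiler-generated scopes.  */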
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of labels (for "asm goto").  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
		    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}
/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
		      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  VEC_length (tree, inputs),
			  VEC_length (tree, outputs),
			  VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}
/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}
/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}

/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}
/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}
/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}
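
/* Usage sketch (editor's illustration; IDX is a hypothetical GIMPLE
   operand and C0, C1, DFLT hypothetical CASE_LABEL_EXPR trees):

     VEC(tree, heap) *cases = VEC_alloc (tree, heap, 2);
     gimple sw;

     VEC_safe_push (tree, heap, cases, c0);
     VEC_safe_push (tree, heap, cases, c1);
     sw = gimple_build_switch_vec (idx, dflt, cases);
     VEC_free (tree, heap, cases);

   Each label operand, including DFLT, is a CASE_LABEL_EXPR carrying
   the case value range and the destination LABEL_DECL.  */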
/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}
/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}
/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}
/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}
/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}
/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}
/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
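
/* Usage sketch (editor's illustration; STMT1 and STMT2 are
   hypothetical gimples):

     gimple_seq seq = NULL;

     gimple_seq_add_stmt (&seq, stmt1);
     gimple_seq_add_stmt (&seq, stmt2);

   The sequence is allocated on first use.  Passing a NULL GS is a
   no-op, which lets callers chain optional statements without testing
   them first.  */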
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}

/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}

/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}

/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}
/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;
	  return gsi_stmt (gsi);
	}
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
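
/* Usage sketch (editor's illustration): a statement callback that
   stops the walk at the first GIMPLE_CALL.  FIND_FIRST_CALL is a
   hypothetical helper, not part of this file:

     static tree
     find_first_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                      struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         {
           *handled_ops_p = true;
           return error_mark_node;
         }
       return NULL_TREE;
     }

   Any non-NULL tree returned from the callback stops the walk, so
   "stmt = walk_gimple_seq (seq, find_first_call, NULL, &wi);" yields
   the first call statement, or NULL if there is none.  */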
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
			       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really a LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || !gimple_assign_single_p (stmt);
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS has more than 1 operand, it is not appropriate
	     for the memory.  */
	  wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
			 || !gimple_assign_single_p (stmt);
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}

/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a Gimple body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}
/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}
/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}
/* Return the "fn spec" string for call STMT.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}
/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}
/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
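
/* Worked example (editor's note): character 0 of a fnspec string
   describes the return value and character 1 + N describes argument N.
   For the string "1w", gimple_call_return_flags yields
   ERF_RETURNS_ARG | 0 (the call returns its first argument), and
   gimple_call_arg_flags (stmt, 0) yields EAF_NOESCAPE.  */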
/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
          && is_gimple_val (gimple_op (gs, 1)));
}

/* Return true if GS is an SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}
/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
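
/* Usage sketch (editor's illustration): folding "x = a * 1" into
   "x = a" in place, where GSI points at the assignment and A is its
   first RHS operand:

     gimple_assign_set_rhs_with_ops_1 (&gsi, TREE_CODE (a), a,
                                       NULL_TREE, NULL_TREE);
     update_stmt (gsi_stmt (gsi));

   Because the tuple can be reallocated, callers must re-fetch the
   statement from GSI instead of holding on to the old pointer.  */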
2120 /* Return the LHS of a statement that performs an assignment,
2121 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2122 for a call to a function that returns no value, or for a
2123 statement other than an assignment or a call. */
2125 tree
2126 gimple_get_lhs (const_gimple stmt)
2128 enum gimple_code code = gimple_code (stmt);
2130 if (code == GIMPLE_ASSIGN)
2131 return gimple_assign_lhs (stmt);
2132 else if (code == GIMPLE_CALL)
2133 return gimple_call_lhs (stmt);
2134 else
2135 return NULL_TREE;
2139 /* Set the LHS of a statement that performs an assignment,
2140 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2142 void
2143 gimple_set_lhs (gimple stmt, tree lhs)
2145 enum gimple_code code = gimple_code (stmt);
2147 if (code == GIMPLE_ASSIGN)
2148 gimple_assign_set_lhs (stmt, lhs);
2149 else if (code == GIMPLE_CALL)
2150 gimple_call_set_lhs (stmt, lhs);
2151 else
2152 gcc_unreachable ();
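/* A minimal sketch (hypothetical names): transplanting the LHS from
   one assignment or call onto another without checking which kind of
   statement is involved.

     tree lhs = gimple_get_lhs (old_stmt);
     if (lhs)
       gimple_set_lhs (new_stmt, lhs);  */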
2155 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2156 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2157 expression with a different value.
2159 This will update any annotations (say debug bind stmts) referring
2160 to the original LHS, so that they use the RHS instead. This is
2161 done even if NLHS and LHS are the same, for it is understood that
2162 the RHS will be modified afterwards, and NLHS will not be assigned
2163 an equivalent value.
2165 Adjusting any non-annotation uses of the LHS, if needed, is a
2166 responsibility of the caller.
2168 The effect of this call should be pretty much the same as that of
2169 inserting a copy of STMT before STMT, and then removing the
2171 original stmt, at which time gsi_remove() would have updated the
2171 annotations, but using this function saves all the inserting,
2172 copying and removing. */
2174 void
2175 gimple_replace_lhs (gimple stmt, tree nlhs)
2177 if (MAY_HAVE_DEBUG_STMTS)
2179 tree lhs = gimple_get_lhs (stmt);
2181 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2183 insert_debug_temp_for_var_def (NULL, lhs);
2186 gimple_set_lhs (stmt, nlhs);
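/* Illustrative sketch (VAR, NEW_RHS and STMT are hypothetical): give
   STMT a fresh temporary LHS before replacing its RHS, letting debug
   annotations capture the old value.

     tree tmp = make_ssa_name (var, stmt);
     gimple_replace_lhs (stmt, tmp);
     gimple_assign_set_rhs1 (stmt, new_rhs);
     update_stmt (stmt);  */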
2189 /* Return a deep copy of statement STMT. All the operands from STMT
2190 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2191 and VUSE operand arrays are set to empty in the new copy. */
2193 gimple
2194 gimple_copy (gimple stmt)
2196 enum gimple_code code = gimple_code (stmt);
2197 unsigned num_ops = gimple_num_ops (stmt);
2198 gimple copy = gimple_alloc (code, num_ops);
2199 unsigned i;
2201 /* Shallow copy all the fields from STMT. */
2202 memcpy (copy, stmt, gimple_size (code));
2204 /* If STMT has sub-statements, deep-copy them as well. */
2205 if (gimple_has_substatements (stmt))
2207 gimple_seq new_seq;
2208 tree t;
2210 switch (gimple_code (stmt))
2212 case GIMPLE_BIND:
2213 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2214 gimple_bind_set_body (copy, new_seq);
2215 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2216 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2217 break;
2219 case GIMPLE_CATCH:
2220 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2221 gimple_catch_set_handler (copy, new_seq);
2222 t = unshare_expr (gimple_catch_types (stmt));
2223 gimple_catch_set_types (copy, t);
2224 break;
2226 case GIMPLE_EH_FILTER:
2227 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2228 gimple_eh_filter_set_failure (copy, new_seq);
2229 t = unshare_expr (gimple_eh_filter_types (stmt));
2230 gimple_eh_filter_set_types (copy, t);
2231 break;
2233 case GIMPLE_TRY:
2234 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2235 gimple_try_set_eval (copy, new_seq);
2236 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2237 gimple_try_set_cleanup (copy, new_seq);
2238 break;
2240 case GIMPLE_OMP_FOR:
2241 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2242 gimple_omp_for_set_pre_body (copy, new_seq);
2243 t = unshare_expr (gimple_omp_for_clauses (stmt));
2244 gimple_omp_for_set_clauses (copy, t);
2245 copy->gimple_omp_for.iter
2246 = ggc_alloc_vec_gimple_omp_for_iter
2247 (gimple_omp_for_collapse (stmt));
2248 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2250 gimple_omp_for_set_cond (copy, i,
2251 gimple_omp_for_cond (stmt, i));
2252 gimple_omp_for_set_index (copy, i,
2253 gimple_omp_for_index (stmt, i));
2254 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2255 gimple_omp_for_set_initial (copy, i, t);
2256 t = unshare_expr (gimple_omp_for_final (stmt, i));
2257 gimple_omp_for_set_final (copy, i, t);
2258 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2259 gimple_omp_for_set_incr (copy, i, t);
2261 goto copy_omp_body;
2263 case GIMPLE_OMP_PARALLEL:
2264 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2265 gimple_omp_parallel_set_clauses (copy, t);
2266 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2267 gimple_omp_parallel_set_child_fn (copy, t);
2268 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2269 gimple_omp_parallel_set_data_arg (copy, t);
2270 goto copy_omp_body;
2272 case GIMPLE_OMP_TASK:
2273 t = unshare_expr (gimple_omp_task_clauses (stmt));
2274 gimple_omp_task_set_clauses (copy, t);
2275 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2276 gimple_omp_task_set_child_fn (copy, t);
2277 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2278 gimple_omp_task_set_data_arg (copy, t);
2279 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2280 gimple_omp_task_set_copy_fn (copy, t);
2281 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2282 gimple_omp_task_set_arg_size (copy, t);
2283 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2284 gimple_omp_task_set_arg_align (copy, t);
2285 goto copy_omp_body;
2287 case GIMPLE_OMP_CRITICAL:
2288 t = unshare_expr (gimple_omp_critical_name (stmt));
2289 gimple_omp_critical_set_name (copy, t);
2290 goto copy_omp_body;
2292 case GIMPLE_OMP_SECTIONS:
2293 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2294 gimple_omp_sections_set_clauses (copy, t);
2295 t = unshare_expr (gimple_omp_sections_control (stmt));
2296 gimple_omp_sections_set_control (copy, t);
2297 /* FALLTHRU */
2299 case GIMPLE_OMP_SINGLE:
2300 case GIMPLE_OMP_SECTION:
2301 case GIMPLE_OMP_MASTER:
2302 case GIMPLE_OMP_ORDERED:
2303 copy_omp_body:
2304 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2305 gimple_omp_set_body (copy, new_seq);
2306 break;
2308 case GIMPLE_WITH_CLEANUP_EXPR:
2309 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2310 gimple_wce_set_cleanup (copy, new_seq);
2311 break;
2313 default:
2314 gcc_unreachable ();
2318 /* Make copy of operands. */
2319 if (num_ops > 0)
2321 for (i = 0; i < num_ops; i++)
2322 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2324 /* Clear out SSA operand vectors on COPY. */
2325 if (gimple_has_ops (stmt))
2327 gimple_set_def_ops (copy, NULL);
2328 gimple_set_use_ops (copy, NULL);
2331 if (gimple_has_mem_ops (stmt))
2333 gimple_set_vdef (copy, gimple_vdef (stmt));
2334 gimple_set_vuse (copy, gimple_vuse (stmt));
2337 /* SSA operands need to be updated. */
2338 gimple_set_modified (copy, true);
2341 return copy;
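/* Example (a hypothetical sketch): duplicating a statement before
   inserting the copy into another block.  The copy has empty SSA
   operand vectors and is marked modified, so the operand scanner must
   be run on it (and fresh SSA definitions created for any LHS) before
   the copy is valid in an SSA function.

     gimple dup = gimple_copy (stmt);
     gsi_insert_before (&gsi, dup, GSI_SAME_STMT);
     update_stmt (dup);  */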
2345 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
2346 a MODIFIED field. */
2348 void
2349 gimple_set_modified (gimple s, bool modifiedp)
2351 if (gimple_has_ops (s))
2352 s->gsbase.modified = (unsigned) modifiedp;
2356 /* Return true if statement S has side-effects. We consider a
2357 statement to have side effects if:
2359 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2360 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2362 bool
2363 gimple_has_side_effects (const_gimple s)
2365 unsigned i;
2367 if (is_gimple_debug (s))
2368 return false;
2370 /* We don't have to scan the arguments to check for
2371 volatile operands; at present, though, we still scan
2372 them to check for TREE_SIDE_EFFECTS. */
2373 if (gimple_has_volatile_ops (s))
2374 return true;
2376 if (gimple_code (s) == GIMPLE_ASM
2377 && gimple_asm_volatile_p (s))
2378 return true;
2380 if (is_gimple_call (s))
2382 unsigned nargs = gimple_call_num_args (s);
2383 tree fn;
2385 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2386 return true;
2387 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2388 /* An infinite loop is considered a side effect. */
2389 return true;
2391 if (gimple_call_lhs (s)
2392 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2394 gcc_checking_assert (gimple_has_volatile_ops (s));
2395 return true;
2398 fn = gimple_call_fn (s);
2399 if (fn && TREE_SIDE_EFFECTS (fn))
2400 return true;
2402 for (i = 0; i < nargs; i++)
2403 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2405 gcc_checking_assert (gimple_has_volatile_ops (s));
2406 return true;
2409 return false;
2411 else
2413 for (i = 0; i < gimple_num_ops (s); i++)
2415 tree op = gimple_op (s, i);
2416 if (op && TREE_SIDE_EFFECTS (op))
2418 gcc_checking_assert (gimple_has_volatile_ops (s));
2419 return true;
2424 return false;
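/* Typical guard (illustrative, not from the original file): a dead
   code elimination style transform would only delete statements that
   neither have side effects nor define memory.

     if (!gimple_has_side_effects (stmt) && !gimple_vdef (stmt))
       gsi_remove (&gsi, true);  */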
2427 /* Return true if the RHS of statement S has side effects.
2428 We may use it to determine if it is admissible to replace
2429 an assignment or call with a copy of a previously-computed
2430 value. In such cases, side-effects due to the LHS are
2431 preserved. */
2433 bool
2434 gimple_rhs_has_side_effects (const_gimple s)
2436 unsigned i;
2438 if (is_gimple_call (s))
2440 unsigned nargs = gimple_call_num_args (s);
2441 tree fn;
2443 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2444 return true;
2446 /* We cannot use gimple_has_volatile_ops here,
2447 because we must ignore a volatile LHS. */
2448 fn = gimple_call_fn (s);
2449 if (fn && (TREE_SIDE_EFFECTS (fn) || TREE_THIS_VOLATILE (fn)))
2451 gcc_assert (gimple_has_volatile_ops (s));
2452 return true;
2455 for (i = 0; i < nargs; i++)
2456 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2457 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2458 return true;
2460 return false;
2462 else if (is_gimple_assign (s))
2464 /* Skip the first operand, the LHS. */
2465 for (i = 1; i < gimple_num_ops (s); i++)
2466 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2467 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2469 gcc_assert (gimple_has_volatile_ops (s));
2470 return true;
2473 else if (is_gimple_debug (s))
2474 return false;
2475 else
2477 /* For statements without an LHS, examine all arguments. */
2478 for (i = 0; i < gimple_num_ops (s); i++)
2479 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2480 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2482 gcc_assert (gimple_has_volatile_ops (s));
2483 return true;
2487 return false;
2490 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2491 Return true if S can trap. When INCLUDE_MEM is true, check whether
2492 the memory operations could trap. When INCLUDE_STORES is true and
2493 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2495 bool
2496 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2498 tree t, div = NULL_TREE;
2499 enum tree_code op;
2501 if (include_mem)
2503 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2505 for (i = start; i < gimple_num_ops (s); i++)
2506 if (tree_could_trap_p (gimple_op (s, i)))
2507 return true;
2510 switch (gimple_code (s))
2512 case GIMPLE_ASM:
2513 return gimple_asm_volatile_p (s);
2515 case GIMPLE_CALL:
2516 t = gimple_call_fndecl (s);
2517 /* Assume that calls to weak functions may trap. */
2518 if (!t || !DECL_P (t) || DECL_WEAK (t))
2519 return true;
2520 return false;
2522 case GIMPLE_ASSIGN:
2523 t = gimple_expr_type (s);
2524 op = gimple_assign_rhs_code (s);
2525 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2526 div = gimple_assign_rhs2 (s);
2527 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2528 (INTEGRAL_TYPE_P (t)
2529 && TYPE_OVERFLOW_TRAPS (t)),
2530 div));
2532 default:
2533 break;
2536 return false;
2539 /* Return true if statement S can trap. */
2541 bool
2542 gimple_could_trap_p (gimple s)
2544 return gimple_could_trap_p_1 (s, true, true);
2547 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2549 bool
2550 gimple_assign_rhs_could_trap_p (gimple s)
2552 gcc_assert (is_gimple_assign (s));
2553 return gimple_could_trap_p_1 (s, true, false);
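/* Illustrative use (hypothetical): a code motion pass would refuse to
   hoist a statement past a guarding condition when it may trap,

     if (gimple_could_trap_p (stmt))
       return false;

   whereas replacing only the RHS of an assignment needs just the
   weaker gimple_assign_rhs_could_trap_p check.  */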
2557 /* Print debugging information for gimple stmts generated. */
2559 void
2560 dump_gimple_statistics (void)
2562 #ifdef GATHER_STATISTICS
2563 int i, total_tuples = 0, total_bytes = 0;
2565 fprintf (stderr, "\nGIMPLE statements\n");
2566 fprintf (stderr, "Kind Stmts Bytes\n");
2567 fprintf (stderr, "---------------------------------------\n");
2568 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2570 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2571 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2572 total_tuples += gimple_alloc_counts[i];
2573 total_bytes += gimple_alloc_sizes[i];
2575 fprintf (stderr, "---------------------------------------\n");
2576 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2577 fprintf (stderr, "---------------------------------------\n");
2578 #else
2579 fprintf (stderr, "No gimple statistics\n");
2580 #endif
2584 /* Return the number of operands needed on the RHS of a GIMPLE
2585 assignment for an expression with tree code CODE. */
2587 unsigned
2588 get_gimple_rhs_num_ops (enum tree_code code)
2590 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2592 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2593 return 1;
2594 else if (rhs_class == GIMPLE_BINARY_RHS)
2595 return 2;
2596 else if (rhs_class == GIMPLE_TERNARY_RHS)
2597 return 3;
2598 else
2599 gcc_unreachable ();
2602 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2603 (unsigned char) \
2604 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2605 : ((TYPE) == tcc_binary \
2606 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2607 : ((TYPE) == tcc_constant \
2608 || (TYPE) == tcc_declaration \
2609 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2610 : ((SYM) == TRUTH_AND_EXPR \
2611 || (SYM) == TRUTH_OR_EXPR \
2612 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2613 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2614 : ((SYM) == WIDEN_MULT_PLUS_EXPR \
2615 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2616 || (SYM) == DOT_PROD_EXPR \
2617 || (SYM) == REALIGN_LOAD_EXPR \
2618 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2619 : ((SYM) == COND_EXPR \
2620 || (SYM) == CONSTRUCTOR \
2621 || (SYM) == OBJ_TYPE_REF \
2622 || (SYM) == ASSERT_EXPR \
2623 || (SYM) == ADDR_EXPR \
2624 || (SYM) == WITH_SIZE_EXPR \
2625 || (SYM) == SSA_NAME \
2626 || (SYM) == VEC_COND_EXPR) ? GIMPLE_SINGLE_RHS \
2627 : GIMPLE_INVALID_RHS),
2628 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2630 const unsigned char gimple_rhs_class_table[] = {
2631 #include "all-tree.def"
2634 #undef DEFTREECODE
2635 #undef END_OF_BASE_TREE_CODES
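/* Worked examples of the classification above (informative):
   PLUS_EXPR is tcc_binary, so get_gimple_rhs_class yields
   GIMPLE_BINARY_RHS and get_gimple_rhs_num_ops returns 2; SSA_NAME is
   special-cased to GIMPLE_SINGLE_RHS (1 operand); FMA_EXPR is
   special-cased to GIMPLE_TERNARY_RHS (3 operands).  */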
2637 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2639 /* Validation of GIMPLE expressions. */
2641 /* Returns true iff T is a valid RHS for an assignment to a renamed
2642 user -- or front-end generated artificial -- variable. */
2644 bool
2645 is_gimple_reg_rhs (tree t)
2647 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2650 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2651 LHS, or for a call argument. */
2653 bool
2654 is_gimple_mem_rhs (tree t)
2656 /* If we're dealing with a renamable type, either source or dest must be
2657 a renamed variable. */
2658 if (is_gimple_reg_type (TREE_TYPE (t)))
2659 return is_gimple_val (t);
2660 else
2661 return is_gimple_val (t) || is_gimple_lvalue (t);
2664 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2666 bool
2667 is_gimple_lvalue (tree t)
2669 return (is_gimple_addressable (t)
2670 || TREE_CODE (t) == WITH_SIZE_EXPR
2671 /* These are complex lvalues, but don't have addresses, so they
2672 go here. */
2673 || TREE_CODE (t) == BIT_FIELD_REF);
2676 /* Return true if T is a GIMPLE condition. */
2678 bool
2679 is_gimple_condexpr (tree t)
2681 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2682 && !tree_could_throw_p (t)
2683 && is_gimple_val (TREE_OPERAND (t, 0))
2684 && is_gimple_val (TREE_OPERAND (t, 1))));
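/* For example (informative): "a_1 < b_2" and the bare value "c_3" are
   valid GIMPLE conditions, while "a_1 < b_2 + 1" is not, because the
   operand "b_2 + 1" is not a gimple value.  */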
2687 /* Return true if T is something whose address can be taken. */
2689 bool
2690 is_gimple_addressable (tree t)
2692 return (is_gimple_id (t) || handled_component_p (t)
2693 || TREE_CODE (t) == MEM_REF);
2696 /* Return true if T is a valid gimple constant. */
2698 bool
2699 is_gimple_constant (const_tree t)
2701 switch (TREE_CODE (t))
2703 case INTEGER_CST:
2704 case REAL_CST:
2705 case FIXED_CST:
2706 case STRING_CST:
2707 case COMPLEX_CST:
2708 case VECTOR_CST:
2709 return true;
2711 /* Vector constant constructors are gimple invariant. */
2712 case CONSTRUCTOR:
2713 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2714 return TREE_CONSTANT (t);
2715 else
2716 return false;
2718 default:
2719 return false;
2723 /* Return true if T is a gimple address. */
2725 bool
2726 is_gimple_address (const_tree t)
2728 tree op;
2730 if (TREE_CODE (t) != ADDR_EXPR)
2731 return false;
2733 op = TREE_OPERAND (t, 0);
2734 while (handled_component_p (op))
2736 if ((TREE_CODE (op) == ARRAY_REF
2737 || TREE_CODE (op) == ARRAY_RANGE_REF)
2738 && !is_gimple_val (TREE_OPERAND (op, 1)))
2739 return false;
2741 op = TREE_OPERAND (op, 0);
2744 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2745 return true;
2747 switch (TREE_CODE (op))
2749 case PARM_DECL:
2750 case RESULT_DECL:
2751 case LABEL_DECL:
2752 case FUNCTION_DECL:
2753 case VAR_DECL:
2754 case CONST_DECL:
2755 return true;
2757 default:
2758 return false;
2762 /* Strip out all handled components that produce invariant
2763 offsets. */
2765 static const_tree
2766 strip_invariant_refs (const_tree op)
2768 while (handled_component_p (op))
2770 switch (TREE_CODE (op))
2772 case ARRAY_REF:
2773 case ARRAY_RANGE_REF:
2774 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2775 || TREE_OPERAND (op, 2) != NULL_TREE
2776 || TREE_OPERAND (op, 3) != NULL_TREE)
2777 return NULL;
2778 break;
2780 case COMPONENT_REF:
2781 if (TREE_OPERAND (op, 2) != NULL_TREE)
2782 return NULL;
2783 break;
2785 default:;
2787 op = TREE_OPERAND (op, 0);
2790 return op;
2793 /* Return true if T is a gimple invariant address. */
2795 bool
2796 is_gimple_invariant_address (const_tree t)
2798 const_tree op;
2800 if (TREE_CODE (t) != ADDR_EXPR)
2801 return false;
2803 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2804 if (!op)
2805 return false;
2807 if (TREE_CODE (op) == MEM_REF)
2809 const_tree op0 = TREE_OPERAND (op, 0);
2810 return (TREE_CODE (op0) == ADDR_EXPR
2811 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2812 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2815 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
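/* For instance (informative): "&global_var.field" is a gimple
   invariant address; "&local_array[i_1]" is not, because the index is
   not constant.  "&auto_var" is invariant within its function but is
   rejected by the IPA variant below, since the stack slot differs
   between frames.  */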
2818 /* Return true if T is a gimple invariant address at IPA level
2819 (so addresses of variables on stack are not allowed). */
2821 bool
2822 is_gimple_ip_invariant_address (const_tree t)
2824 const_tree op;
2826 if (TREE_CODE (t) != ADDR_EXPR)
2827 return false;
2829 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2831 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2834 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2835 form of function invariant. */
2837 bool
2838 is_gimple_min_invariant (const_tree t)
2840 if (TREE_CODE (t) == ADDR_EXPR)
2841 return is_gimple_invariant_address (t);
2843 return is_gimple_constant (t);
2846 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2847 form of gimple minimal invariant. */
2849 bool
2850 is_gimple_ip_invariant (const_tree t)
2852 if (TREE_CODE (t) == ADDR_EXPR)
2853 return is_gimple_ip_invariant_address (t);
2855 return is_gimple_constant (t);
2858 /* Return true if T looks like a valid GIMPLE statement. */
2860 bool
2861 is_gimple_stmt (tree t)
2863 const enum tree_code code = TREE_CODE (t);
2865 switch (code)
2867 case NOP_EXPR:
2868 /* The only valid NOP_EXPR is the empty statement. */
2869 return IS_EMPTY_STMT (t);
2871 case BIND_EXPR:
2872 case COND_EXPR:
2873 /* These are only valid if they're void. */
2874 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2876 case SWITCH_EXPR:
2877 case GOTO_EXPR:
2878 case RETURN_EXPR:
2879 case LABEL_EXPR:
2880 case CASE_LABEL_EXPR:
2881 case TRY_CATCH_EXPR:
2882 case TRY_FINALLY_EXPR:
2883 case EH_FILTER_EXPR:
2884 case CATCH_EXPR:
2885 case ASM_EXPR:
2886 case STATEMENT_LIST:
2887 case OMP_PARALLEL:
2888 case OMP_FOR:
2889 case OMP_SECTIONS:
2890 case OMP_SECTION:
2891 case OMP_SINGLE:
2892 case OMP_MASTER:
2893 case OMP_ORDERED:
2894 case OMP_CRITICAL:
2895 case OMP_TASK:
2896 /* These are always void. */
2897 return true;
2899 case CALL_EXPR:
2900 case MODIFY_EXPR:
2901 case PREDICT_EXPR:
2902 /* These are valid regardless of their type. */
2903 return true;
2905 default:
2906 return false;
2910 /* Return true if T is a variable. */
2912 bool
2913 is_gimple_variable (tree t)
2915 return (TREE_CODE (t) == VAR_DECL
2916 || TREE_CODE (t) == PARM_DECL
2917 || TREE_CODE (t) == RESULT_DECL
2918 || TREE_CODE (t) == SSA_NAME);
2921 /* Return true if T is a GIMPLE identifier (something with an address). */
2923 bool
2924 is_gimple_id (tree t)
2926 return (is_gimple_variable (t)
2927 || TREE_CODE (t) == FUNCTION_DECL
2928 || TREE_CODE (t) == LABEL_DECL
2929 || TREE_CODE (t) == CONST_DECL
2930 /* Allow string constants, since they are addressable. */
2931 || TREE_CODE (t) == STRING_CST);
2934 /* Return true if TYPE is a suitable type for a scalar register variable. */
2936 bool
2937 is_gimple_reg_type (tree type)
2939 return !AGGREGATE_TYPE_P (type);
2942 /* Return true if T is a non-aggregate register variable. */
2944 bool
2945 is_gimple_reg (tree t)
2947 if (TREE_CODE (t) == SSA_NAME)
2948 t = SSA_NAME_VAR (t);
2950 if (!is_gimple_variable (t))
2951 return false;
2953 if (!is_gimple_reg_type (TREE_TYPE (t)))
2954 return false;
2956 /* A volatile decl is not acceptable because we can't reuse it as
2957 needed. We need to copy it into a temp first. */
2958 if (TREE_THIS_VOLATILE (t))
2959 return false;
2961 /* We define "registers" as things that can be renamed as needed,
2962 which with our infrastructure does not apply to memory. */
2963 if (needs_to_live_in_memory (t))
2964 return false;
2966 /* Hard register variables are an interesting case. For those that
2967 are call-clobbered, we don't know where all the calls are, since
2968 we don't (want to) take into account which operations will turn
2969 into libcalls at the rtl level. For those that are call-saved,
2970 we don't currently model the fact that calls may in fact change
2971 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2972 level, and so miss variable changes that they might imply. All around,
2973 it seems safest to not do too much optimization with these at the
2974 tree level at all. We'll have to rely on the rtl optimizers to
2975 clean this up, as there we've got all the appropriate bits exposed. */
2976 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2977 return false;
2979 /* Complex and vector values must have been put into SSA-like form.
2980 That is, no assignments to the individual components. */
2981 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2982 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2983 return DECL_GIMPLE_REG_P (t);
2985 return true;
2989 /* Return true if T is a GIMPLE variable whose address is not needed. */
2991 bool
2992 is_gimple_non_addressable (tree t)
2994 if (TREE_CODE (t) == SSA_NAME)
2995 t = SSA_NAME_VAR (t);
2997 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
3000 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
3002 bool
3003 is_gimple_val (tree t)
3005 /* Make loads from volatiles and memory vars explicit. */
3006 if (is_gimple_variable (t)
3007 && is_gimple_reg_type (TREE_TYPE (t))
3008 && !is_gimple_reg (t))
3009 return false;
3011 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
3014 /* Similarly, but accept hard registers as inputs to asm statements. */
3016 bool
3017 is_gimple_asm_val (tree t)
3019 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
3020 return true;
3022 return is_gimple_val (t);
3025 /* Return true if T is a GIMPLE minimal lvalue. */
3027 bool
3028 is_gimple_min_lval (tree t)
3030 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
3031 return false;
3032 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
3035 /* Return true if T is a valid function operand of a CALL_EXPR. */
3037 bool
3038 is_gimple_call_addr (tree t)
3040 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3043 /* Return true if T is a valid address operand of a MEM_REF. */
3045 bool
3046 is_gimple_mem_ref_addr (tree t)
3048 return (is_gimple_reg (t)
3049 || TREE_CODE (t) == INTEGER_CST
3050 || (TREE_CODE (t) == ADDR_EXPR
3051 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3052 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
3055 /* If T makes a function call, return the corresponding CALL_EXPR operand.
3056 Otherwise, return NULL_TREE. */
3058 tree
3059 get_call_expr_in (tree t)
3061 if (TREE_CODE (t) == MODIFY_EXPR)
3062 t = TREE_OPERAND (t, 1);
3063 if (TREE_CODE (t) == WITH_SIZE_EXPR)
3064 t = TREE_OPERAND (t, 0);
3065 if (TREE_CODE (t) == CALL_EXPR)
3066 return t;
3067 return NULL_TREE;
3071 /* Given a memory reference expression T, return its base address.
3072 The base address of a memory reference expression is the main
3073 object being referenced. For instance, the base address for
3074 'array[i].fld[j]' is 'array'. You can think of this as stripping
3075 away the offset part from a memory address.
3077 This function calls handled_component_p to strip away all the inner
3078 parts of the memory reference until it reaches the base object. */
3080 tree
3081 get_base_address (tree t)
3083 while (handled_component_p (t))
3084 t = TREE_OPERAND (t, 0);
3086 if ((TREE_CODE (t) == MEM_REF
3087 || TREE_CODE (t) == TARGET_MEM_REF)
3088 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3089 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3091 if (TREE_CODE (t) == SSA_NAME
3092 || DECL_P (t)
3093 || TREE_CODE (t) == STRING_CST
3094 || TREE_CODE (t) == CONSTRUCTOR
3095 || INDIRECT_REF_P (t)
3096 || TREE_CODE (t) == MEM_REF
3097 || TREE_CODE (t) == TARGET_MEM_REF)
3098 return t;
3099 else
3100 return NULL_TREE;
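/* Examples (informative): for 'array[i].fld[j]' the base address is
   'array'; for a MEM_REF through the SSA pointer p_1 the MEM_REF
   itself is returned; for 'MEM[&a + 4]' the wrapped ADDR_EXPR is
   looked through and the decl 'a' is returned.  */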
3103 void
3104 recalculate_side_effects (tree t)
3106 enum tree_code code = TREE_CODE (t);
3107 int len = TREE_OPERAND_LENGTH (t);
3108 int i;
3110 switch (TREE_CODE_CLASS (code))
3112 case tcc_expression:
3113 switch (code)
3115 case INIT_EXPR:
3116 case MODIFY_EXPR:
3117 case VA_ARG_EXPR:
3118 case PREDECREMENT_EXPR:
3119 case PREINCREMENT_EXPR:
3120 case POSTDECREMENT_EXPR:
3121 case POSTINCREMENT_EXPR:
3122 /* All of these have side-effects, no matter what their
3123 operands are. */
3124 return;
3126 default:
3127 break;
3129 /* Fall through. */
3131 case tcc_comparison: /* a comparison expression */
3132 case tcc_unary: /* a unary arithmetic expression */
3133 case tcc_binary: /* a binary arithmetic expression */
3134 case tcc_reference: /* a reference */
3135 case tcc_vl_exp: /* a function call */
3136 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3137 for (i = 0; i < len; ++i)
3139 tree op = TREE_OPERAND (t, i);
3140 if (op && TREE_SIDE_EFFECTS (op))
3141 TREE_SIDE_EFFECTS (t) = 1;
3143 break;
3145 case tcc_constant:
3146 /* No side-effects. */
3147 return;
3149 default:
3150 gcc_unreachable ();
3154 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3155 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
3156 we failed to create one. */
3158 tree
3159 canonicalize_cond_expr_cond (tree t)
3161 /* Strip conversions around boolean operations. */
3162 if (CONVERT_EXPR_P (t)
3163 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
3164 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3165 == BOOLEAN_TYPE))
3166 t = TREE_OPERAND (t, 0);
3168 /* For !x use x == 0. */
3169 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3171 tree top0 = TREE_OPERAND (t, 0);
3172 t = build2 (EQ_EXPR, TREE_TYPE (t),
3173 top0, build_int_cst (TREE_TYPE (top0), 0));
3175 /* For cmp ? 1 : 0 use cmp. */
3176 else if (TREE_CODE (t) == COND_EXPR
3177 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3178 && integer_onep (TREE_OPERAND (t, 1))
3179 && integer_zerop (TREE_OPERAND (t, 2)))
3181 tree top0 = TREE_OPERAND (t, 0);
3182 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3183 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3186 if (is_gimple_condexpr (t))
3187 return t;
3189 return NULL_TREE;
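/* Examples of these canonicalizations (informative): "!x_1" becomes
   "x_1 == 0", and "a_1 < b_2 ? 1 : 0" becomes "a_1 < b_2".  If the
   result still fails is_gimple_condexpr, NULL_TREE is returned
   instead.  */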
3192 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3193 the positions marked by the set ARGS_TO_SKIP. */
3195 gimple
3196 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3198 int i;
3199 int nargs = gimple_call_num_args (stmt);
3200 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3201 gimple new_stmt;
3203 for (i = 0; i < nargs; i++)
3204 if (!bitmap_bit_p (args_to_skip, i))
3205 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3207 if (gimple_call_internal_p (stmt))
3208 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3209 vargs);
3210 else
3211 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3212 VEC_free (tree, heap, vargs);
3213 if (gimple_call_lhs (stmt))
3214 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3216 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3217 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3219 gimple_set_block (new_stmt, gimple_block (stmt));
3220 if (gimple_has_location (stmt))
3221 gimple_set_location (new_stmt, gimple_location (stmt));
3222 gimple_call_copy_flags (new_stmt, stmt);
3223 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3225 gimple_set_modified (new_stmt, true);
3227 return new_stmt;
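/* Usage sketch (hypothetical): dropping argument 1 of a call, e.g.
   when cloning a function whose second parameter turned out to be
   unused.

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     new_call = gimple_call_copy_skip_args (call_stmt, skip);
     gsi_replace (&gsi, new_call, false);
     BITMAP_FREE (skip);  */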
3231 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3233 static hashval_t gimple_type_hash (const void *);
3235 /* Structure used to maintain a cache of some type pairs compared by
3236 gimple_types_compatible_p when comparing aggregate types. There are
3237 three possible values for SAME_P:
3239 -2: The pair (T1, T2) has just been inserted in the table.
3240 0: T1 and T2 are different types.
3241 1: T1 and T2 are the same type.
3243 The two elements in the SAME_P array are indexed by the comparison
3244 mode gtc_mode. */
3246 struct type_pair_d
3248 unsigned int uid1;
3249 unsigned int uid2;
3250 signed char same_p[2];
3252 typedef struct type_pair_d *type_pair_t;
3253 DEF_VEC_P(type_pair_t);
3254 DEF_VEC_ALLOC_P(type_pair_t,heap);
3256 #define GIMPLE_TYPE_PAIR_SIZE 16381
3257 struct type_pair_d *type_pair_cache;
3260 /* Lookup the pair of types T1 and T2 in the type-pair cache. Insert
3261 a new entry if none existed. */
3263 static inline type_pair_t
3264 lookup_type_pair (tree t1, tree t2)
3266 unsigned int index;
3267 unsigned int uid1, uid2;
3269 if (type_pair_cache == NULL)
3270 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3272 if (TYPE_UID (t1) < TYPE_UID (t2))
3274 uid1 = TYPE_UID (t1);
3275 uid2 = TYPE_UID (t2);
3277 else
3279 uid1 = TYPE_UID (t2);
3280 uid2 = TYPE_UID (t1);
3282 gcc_checking_assert (uid1 != uid2);
3284 /* iterative_hash_hashval_t implies a function call; we know
3285 that UIDs are in a limited range. */
3286 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3287 % GIMPLE_TYPE_PAIR_SIZE);
3288 if (type_pair_cache [index].uid1 == uid1
3289 && type_pair_cache [index].uid2 == uid2)
3290 return &type_pair_cache[index];
3292 type_pair_cache [index].uid1 = uid1;
3293 type_pair_cache [index].uid2 = uid2;
3294 type_pair_cache [index].same_p[0] = -2;
3295 type_pair_cache [index].same_p[1] = -2;
3297 return &type_pair_cache[index];
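/* Informative example: with 64-bit HOST_WIDE_INT, the pair of UIDs
   (7, 42) maps to slot ((7 << 32) + 42) % GIMPLE_TYPE_PAIR_SIZE in
   either argument order, since the smaller UID always comes first.
   A colliding pair simply evicts the previous entry; the cache is
   lossy by design.  */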
3300 /* Per pointer state for the SCC finding. The on_sccstack flag
3301 is not strictly required; it is true when there is no hash value
3302 recorded for the type and false otherwise. But querying that
3303 is slower. */
3305 struct sccs
3307 unsigned int dfsnum;
3308 unsigned int low;
3309 bool on_sccstack;
3310 union {
3311 hashval_t hash;
3312 signed char same_p;
3313 } u;
3316 static unsigned int next_dfs_num;
3317 static unsigned int gtc_next_dfs_num;
3320 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3322 typedef struct GTY(()) gimple_type_leader_entry_s {
3323 tree type;
3324 tree leader;
3325 } gimple_type_leader_entry;
3327 #define GIMPLE_TYPE_LEADER_SIZE 16381
3328 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3329 gimple_type_leader_entry *gimple_type_leader;
3331 /* Lookup an existing leader for T and return it or NULL_TREE, if
3332 there is none in the cache. */
3334 static inline tree
3335 gimple_lookup_type_leader (tree t)
3337 gimple_type_leader_entry *leader;
3339 if (!gimple_type_leader)
3340 return NULL_TREE;
3342 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3343 if (leader->type != t)
3344 return NULL_TREE;
3346 return leader->leader;
3349 /* Return true if T1 and T2 have the same name. Identifiers are
3350 compared with pointer equality rather than string comparison, and
3351 two anonymous types are considered to have the same name. */
3353 static bool
3354 compare_type_names_p (tree t1, tree t2)
3356 tree name1 = TYPE_NAME (t1);
3357 tree name2 = TYPE_NAME (t2);
3359 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3360 name1 = DECL_NAME (name1);
3361 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3363 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3364 name2 = DECL_NAME (name2);
3365 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3367 /* Identifiers can be compared with pointer equality rather
3368 than a string comparison. */
3369 if (name1 == name2)
3370 return true;
3372 return false;
3375 /* Return true if the field decls F1 and F2 are at the same offset.
3377 This is intended to be used on GIMPLE types only. */
3379 bool
3380 gimple_compare_field_offset (tree f1, tree f2)
3382 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3384 tree offset1 = DECL_FIELD_OFFSET (f1);
3385 tree offset2 = DECL_FIELD_OFFSET (f2);
3386 return ((offset1 == offset2
3387 /* Once gimplification is done, self-referential offsets are
3388 instantiated as operand #2 of the COMPONENT_REF built for
3389 each access and reset. Therefore, they are not relevant
3390 anymore and fields are interchangeable provided that they
3391 represent the same access. */
3392 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3393 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3394 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3395 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3396 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3397 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3398 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3399 || operand_equal_p (offset1, offset2, 0))
3400 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3401 DECL_FIELD_BIT_OFFSET (f2)));
3404 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3405 should be, so handle differing ones specially by decomposing
3406 the offset into a byte and bit offset manually. */
3407 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3408 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3410 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3411 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3412 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3413 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3414 + bit_offset1 / BITS_PER_UNIT);
3415 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3416 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3417 + bit_offset2 / BITS_PER_UNIT);
3418 if (byte_offset1 != byte_offset2)
3419 return false;
3420 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3423 return false;
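/* Worked example for the fallback path (informative): a field with
   DECL_FIELD_OFFSET 4 and DECL_FIELD_BIT_OFFSET 18 decomposes into
   byte offset 4 + 18/8 = 6 and bit offset 18 % 8 = 2, and therefore
   compares equal to a field recorded as byte offset 6, bit offset 2
   under a different DECL_OFFSET_ALIGN.  */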
3426 static bool
3427 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3428 VEC(type_pair_t, heap) **,
3429 struct pointer_map_t *, struct obstack *);
3431 /* DFS visit the edge from the caller's type pair with state *STATE to
3432 the pair T1, T2.
3433 Update the merging status if it is not part of the SCC containing the
3434 caller's pair and return it.
3435 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3437 static bool
3438 gtc_visit (tree t1, tree t2,
3439 struct sccs *state,
3440 VEC(type_pair_t, heap) **sccstack,
3441 struct pointer_map_t *sccstate,
3442 struct obstack *sccstate_obstack)
3444 struct sccs *cstate = NULL;
3445 type_pair_t p;
3446 void **slot;
3447 tree leader1, leader2;
3449 /* Check first for the obvious case of pointer identity. */
3450 if (t1 == t2)
3451 return true;
3453 /* Check that we have two types to compare. */
3454 if (t1 == NULL_TREE || t2 == NULL_TREE)
3455 return false;
3457 /* Can't be the same type if the types don't have the same code. */
3458 if (TREE_CODE (t1) != TREE_CODE (t2))
3459 return false;
3461 /* Can't be the same type if they have different CV qualifiers. */
3462 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3463 return false;
3465 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3466 return false;
3468 /* Void types and nullptr types are always the same. */
3469 if (TREE_CODE (t1) == VOID_TYPE
3470 || TREE_CODE (t1) == NULLPTR_TYPE)
3471 return true;
3473 /* Can't be the same type if they have different alignment or mode. */
3474 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3475 || TYPE_MODE (t1) != TYPE_MODE (t2))
3476 return false;
3478 /* Do some simple checks before doing three hashtable queries. */
3479 if (INTEGRAL_TYPE_P (t1)
3480 || SCALAR_FLOAT_TYPE_P (t1)
3481 || FIXED_POINT_TYPE_P (t1)
3482 || TREE_CODE (t1) == VECTOR_TYPE
3483 || TREE_CODE (t1) == COMPLEX_TYPE
3484 || TREE_CODE (t1) == OFFSET_TYPE
3485 || POINTER_TYPE_P (t1))
3487 /* Can't be the same type if they have different sign or precision. */
3488 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3489 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3490 return false;
3492 if (TREE_CODE (t1) == INTEGER_TYPE
3493 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3494 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3495 return false;
3497 /* That's all we need to check for float and fixed-point types. */
3498 if (SCALAR_FLOAT_TYPE_P (t1)
3499 || FIXED_POINT_TYPE_P (t1))
3500 return true;
3502 /* For other types fall thru to more complex checks. */
3505 /* If the types have been previously registered and found equal
3506 they still are. */
3507 leader1 = gimple_lookup_type_leader (t1);
3508 leader2 = gimple_lookup_type_leader (t2);
3509 if (leader1 == t2
3510 || t1 == leader2
3511 || (leader1 && leader1 == leader2))
3512 return true;
3514 /* If the hash values of t1 and t2 are different the types can't
3515 possibly be the same. This helps keep the type-pair hashtable
3516 small, tracking comparisons only for hash collisions. */
3517 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3518 return false;
3520 /* Allocate a new cache entry for this comparison. */
3521 p = lookup_type_pair (t1, t2);
3522 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3524 /* We have already decided whether T1 and T2 are the
3525 same, return the cached result. */
3526 return p->same_p[GTC_MERGE] == 1;
3529 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3530 cstate = (struct sccs *)*slot;
3531 /* Not yet visited. DFS recurse. */
3532 if (!cstate)
3534 gimple_types_compatible_p_1 (t1, t2, p,
3535 sccstack, sccstate, sccstate_obstack);
3536 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3537 state->low = MIN (state->low, cstate->low);
3539 /* If the type is still on the SCC stack adjust the parent's low. */
3540 if (cstate->dfsnum < state->dfsnum
3541 && cstate->on_sccstack)
3542 state->low = MIN (cstate->dfsnum, state->low);
3544 /* Return the current lattice value. We start with an equality
3545 assumption so types part of a SCC will be optimistically
3546 treated equal unless proven otherwise. */
3547 return cstate->u.same_p;
3550 /* Worker for gimple_types_compatible_p.
3551 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3553 static bool
3554 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3555 VEC(type_pair_t, heap) **sccstack,
3556 struct pointer_map_t *sccstate,
3557 struct obstack *sccstate_obstack)
3559 struct sccs *state;
3561 gcc_assert (p->same_p[GTC_MERGE] == -2);
3563 state = XOBNEW (sccstate_obstack, struct sccs);
3564 *pointer_map_insert (sccstate, p) = state;
3566 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3567 state->dfsnum = gtc_next_dfs_num++;
3568 state->low = state->dfsnum;
3569 state->on_sccstack = true;
3570 /* Start with an equality assumption. As we DFS recurse into child
3571 SCCs this assumption may get revisited. */
3572 state->u.same_p = 1;
3574 /* The struct tags shall compare equal. */
3575 if (!compare_type_names_p (t1, t2))
3576 goto different_types;
3578 /* If their attributes are not the same they can't be the same type. */
3579 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3580 goto different_types;
3582 /* Do type-specific comparisons. */
3583 switch (TREE_CODE (t1))
3585 case VECTOR_TYPE:
3586 case COMPLEX_TYPE:
3587 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3588 state, sccstack, sccstate, sccstate_obstack))
3589 goto different_types;
3590 goto same_types;
3592 case ARRAY_TYPE:
3593 /* Array types are the same if the element types are the same and
3594 the number of elements is the same. */
3595 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3596 state, sccstack, sccstate, sccstate_obstack)
3597 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3598 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3599 goto different_types;
3600 else
3602 tree i1 = TYPE_DOMAIN (t1);
3603 tree i2 = TYPE_DOMAIN (t2);
3605 /* For an incomplete external array, the type domain can be
3606 NULL_TREE. Check this condition also. */
3607 if (i1 == NULL_TREE && i2 == NULL_TREE)
3608 goto same_types;
3609 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3610 goto different_types;
3611 /* If, for a complete array type, the possibly gimplified sizes
3612 differ, the types are different. */
3613 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3614 || (TYPE_SIZE (i1)
3615 && TYPE_SIZE (i2)
3616 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3617 goto different_types;
3618 else
3620 tree min1 = TYPE_MIN_VALUE (i1);
3621 tree min2 = TYPE_MIN_VALUE (i2);
3622 tree max1 = TYPE_MAX_VALUE (i1);
3623 tree max2 = TYPE_MAX_VALUE (i2);
3625 /* The minimum/maximum values have to be the same. */
3626 if ((min1 == min2
3627 || (min1 && min2
3628 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3629 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3630 || operand_equal_p (min1, min2, 0))))
3631 && (max1 == max2
3632 || (max1 && max2
3633 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3634 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3635 || operand_equal_p (max1, max2, 0)))))
3636 goto same_types;
3637 else
3638 goto different_types;
3642 case METHOD_TYPE:
3643 /* Method types should belong to the same class. */
3644 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3645 state, sccstack, sccstate, sccstate_obstack))
3646 goto different_types;
3648 /* Fallthru */
3650 case FUNCTION_TYPE:
3651 /* Function types are the same if the return type and argument types
3652 are the same. */
3653 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3654 state, sccstack, sccstate, sccstate_obstack))
3655 goto different_types;
3657 if (!comp_type_attributes (t1, t2))
3658 goto different_types;
3660 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3661 goto same_types;
3662 else
3664 tree parms1, parms2;
3666 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3667 parms1 && parms2;
3668 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3670 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3671 state, sccstack, sccstate, sccstate_obstack))
3672 goto different_types;
3675 if (parms1 || parms2)
3676 goto different_types;
3678 goto same_types;
3681 case OFFSET_TYPE:
3683 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3684 state, sccstack, sccstate, sccstate_obstack)
3685 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3686 TYPE_OFFSET_BASETYPE (t2),
3687 state, sccstack, sccstate, sccstate_obstack))
3688 goto different_types;
3690 goto same_types;
3693 case POINTER_TYPE:
3694 case REFERENCE_TYPE:
3696 /* If the two pointers have different ref-all attributes,
3697 they can't be the same type. */
3698 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3699 goto different_types;
3701 /* Otherwise, pointer and reference types are the same if the
3702 pointed-to types are the same. */
3703 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3704 state, sccstack, sccstate, sccstate_obstack))
3705 goto same_types;
3707 goto different_types;
3710 case INTEGER_TYPE:
3711 case BOOLEAN_TYPE:
3713 tree min1 = TYPE_MIN_VALUE (t1);
3714 tree max1 = TYPE_MAX_VALUE (t1);
3715 tree min2 = TYPE_MIN_VALUE (t2);
3716 tree max2 = TYPE_MAX_VALUE (t2);
3717 bool min_equal_p = false;
3718 bool max_equal_p = false;
3720 /* If either type has a minimum value, the other type must
3721 have the same. */
3722 if (min1 == NULL_TREE && min2 == NULL_TREE)
3723 min_equal_p = true;
3724 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3725 min_equal_p = true;
3727 /* Likewise, if either type has a maximum value, the other
3728 type must have the same. */
3729 if (max1 == NULL_TREE && max2 == NULL_TREE)
3730 max_equal_p = true;
3731 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3732 max_equal_p = true;
3734 if (!min_equal_p || !max_equal_p)
3735 goto different_types;
3737 goto same_types;
3740 case ENUMERAL_TYPE:
3742 /* FIXME lto, we cannot check bounds on enumeral types because
3743 different front ends will produce different values.
3744 In C, enumeral types are integers, while in C++ each element
3745 will have its own symbolic value. We should decide how enums
3746 are to be represented in GIMPLE and have each front end lower
3747 to that. */
3748 tree v1, v2;
3750 /* For enumeral types, all the values must be the same. */
3751 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3752 goto same_types;
3754 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3755 v1 && v2;
3756 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3758 tree c1 = TREE_VALUE (v1);
3759 tree c2 = TREE_VALUE (v2);
3761 if (TREE_CODE (c1) == CONST_DECL)
3762 c1 = DECL_INITIAL (c1);
3764 if (TREE_CODE (c2) == CONST_DECL)
3765 c2 = DECL_INITIAL (c2);
3767 if (tree_int_cst_equal (c1, c2) != 1)
3768 goto different_types;
3770 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3771 goto different_types;
3774 /* If one enumeration has more values than the other, they
3775 are not the same. */
3776 if (v1 || v2)
3777 goto different_types;
3779 goto same_types;
3782 case RECORD_TYPE:
3783 case UNION_TYPE:
3784 case QUAL_UNION_TYPE:
3786 tree f1, f2;
3788 /* For aggregate types, all the fields must be the same. */
3789 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3790 f1 && f2;
3791 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3793 /* Different field kinds are not compatible. */
3794 if (TREE_CODE (f1) != TREE_CODE (f2))
3795 goto different_types;
3796 /* Field decls must have the same name and offset. */
3797 if (TREE_CODE (f1) == FIELD_DECL
3798 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3799 || !gimple_compare_field_offset (f1, f2)))
3800 goto different_types;
3801 /* All entities should have the same name and type. */
3802 if (DECL_NAME (f1) != DECL_NAME (f2)
3803 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
3804 state, sccstack, sccstate, sccstate_obstack))
3805 goto different_types;
3808 /* If one aggregate has more fields than the other, they
3809 are not the same. */
3810 if (f1 || f2)
3811 goto different_types;
3813 goto same_types;
3816 default:
3817 gcc_unreachable ();
3820 /* Common exit path for types that are not compatible. */
3821 different_types:
3822 state->u.same_p = 0;
3823 goto pop;
3825 /* Common exit path for types that are compatible. */
3826 same_types:
3827 gcc_assert (state->u.same_p == 1);
3829 pop:
3830 if (state->low == state->dfsnum)
3832 type_pair_t x;
3834 /* Pop off the SCC and set its cache values to the final
3835 comparison result. */
3838 struct sccs *cstate;
3839 x = VEC_pop (type_pair_t, *sccstack);
3840 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3841 cstate->on_sccstack = false;
3842 x->same_p[GTC_MERGE] = state->u.same_p;
3844 while (x != p);
3847 return state->u.same_p;
3850 /* Return true iff T1 and T2 are structurally identical, i.e.
3851 compatible for merging purposes; an incomplete type and a
3852 complete type are considered different. */
3854 static bool
3855 gimple_types_compatible_p (tree t1, tree t2)
3857 VEC(type_pair_t, heap) *sccstack = NULL;
3858 struct pointer_map_t *sccstate;
3859 struct obstack sccstate_obstack;
3860 type_pair_t p = NULL;
3861 bool res;
3862 tree leader1, leader2;
3864 /* Before starting to set up the SCC machinery handle simple cases. */
3866 /* Check first for the obvious case of pointer identity. */
3867 if (t1 == t2)
3868 return true;
3870 /* Check that we have two types to compare. */
3871 if (t1 == NULL_TREE || t2 == NULL_TREE)
3872 return false;
3874 /* Can't be the same type if the types don't have the same code. */
3875 if (TREE_CODE (t1) != TREE_CODE (t2))
3876 return false;
3878 /* Can't be the same type if they have different CV qualifiers. */
3879 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3880 return false;
3882 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3883 return false;
3885 /* Void types and nullptr types are always the same. */
3886 if (TREE_CODE (t1) == VOID_TYPE
3887 || TREE_CODE (t1) == NULLPTR_TYPE)
3888 return true;
3890 /* Can't be the same type if they have different alignment or mode. */
3891 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3892 || TYPE_MODE (t1) != TYPE_MODE (t2))
3893 return false;
3895 /* Do some simple checks before doing three hashtable queries. */
3896 if (INTEGRAL_TYPE_P (t1)
3897 || SCALAR_FLOAT_TYPE_P (t1)
3898 || FIXED_POINT_TYPE_P (t1)
3899 || TREE_CODE (t1) == VECTOR_TYPE
3900 || TREE_CODE (t1) == COMPLEX_TYPE
3901 || TREE_CODE (t1) == OFFSET_TYPE
3902 || POINTER_TYPE_P (t1))
3904 /* Can't be the same type if they have different sign or precision. */
3905 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3906 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3907 return false;
3909 if (TREE_CODE (t1) == INTEGER_TYPE
3910 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3911 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3912 return false;
3914 /* That's all we need to check for float and fixed-point types. */
3915 if (SCALAR_FLOAT_TYPE_P (t1)
3916 || FIXED_POINT_TYPE_P (t1))
3917 return true;
3919 /* For other types fall thru to more complex checks. */
3922 /* If the types have been previously registered and found equal
3923 they still are. */
3924 leader1 = gimple_lookup_type_leader (t1);
3925 leader2 = gimple_lookup_type_leader (t2);
3926 if (leader1 == t2
3927 || t1 == leader2
3928 || (leader1 && leader1 == leader2))
3929 return true;
3931 /* If the hash values of t1 and t2 are different the types can't
3932 possibly be the same. This helps keep the type-pair hashtable
3933 small, tracking comparisons only for hash collisions. */
3934 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3935 return false;
3937 /* If we've visited this type pair before (in the case of aggregates
3938 with self-referential types), and we made a decision, return it. */
3939 p = lookup_type_pair (t1, t2);
3940 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3942 /* We have already decided whether T1 and T2 are the
3943 same, return the cached result. */
3944 return p->same_p[GTC_MERGE] == 1;
3947 /* Now set up the SCC machinery for the comparison. */
3948 gtc_next_dfs_num = 1;
3949 sccstate = pointer_map_create ();
3950 gcc_obstack_init (&sccstate_obstack);
3951 res = gimple_types_compatible_p_1 (t1, t2, p,
3952 &sccstack, sccstate, &sccstate_obstack);
3953 VEC_free (type_pair_t, heap, sccstack);
3954 pointer_map_destroy (sccstate);
3955 obstack_free (&sccstate_obstack, NULL);
3957 return res;
3961 static hashval_t
3962 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3963 struct pointer_map_t *, struct obstack *);
3965 /* DFS visit the edge from the caller's type with state *STATE to T.
3966 Update the caller's type hash V with the hash for T if it is not part
3967 of the SCC containing the caller's type and return it.
3968 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3970 static hashval_t
3971 visit (tree t, struct sccs *state, hashval_t v,
3972 VEC (tree, heap) **sccstack,
3973 struct pointer_map_t *sccstate,
3974 struct obstack *sccstate_obstack)
3976 struct sccs *cstate = NULL;
3977 struct tree_int_map m;
3978 void **slot;
3980 /* If there is a hash value recorded for this type then it can't
3981 possibly be part of our parent SCC. Simply mix in its hash. */
3982 m.base.from = t;
3983 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
3984 && *slot)
3985 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
3987 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3988 cstate = (struct sccs *)*slot;
3989 if (!cstate)
3991 hashval_t tem;
3992 /* Not yet visited. DFS recurse. */
3993 tem = iterative_hash_gimple_type (t, v,
3994 sccstack, sccstate, sccstate_obstack);
3995 if (!cstate)
3996 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
3997 state->low = MIN (state->low, cstate->low);
3998 /* If the type is no longer on the SCC stack and thus is not part
3999 of the parents SCC mix in its hash value. Otherwise we will
4000 ignore the type for hashing purposes and return the unaltered
4001 hash value. */
4002 if (!cstate->on_sccstack)
4003 return tem;
4005 if (cstate->dfsnum < state->dfsnum
4006 && cstate->on_sccstack)
4007 state->low = MIN (cstate->dfsnum, state->low);
4009 /* We are part of our parent's SCC; skip this type during hashing
4010 and return the unaltered hash value. */
4011 return v;
4014 /* Hash NAME with the previous hash value V and return it. */
4016 static hashval_t
4017 iterative_hash_name (tree name, hashval_t v)
4019 if (!name)
4020 return v;
4021 if (TREE_CODE (name) == TYPE_DECL)
4022 name = DECL_NAME (name);
4023 if (!name)
4024 return v;
4025 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
4026 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4029 /* A type, hashvalue pair for sorting SCC members. */
4031 struct type_hash_pair {
4032 tree type;
4033 hashval_t hash;
4036 /* Compare two type, hashvalue pairs. */
4038 static int
4039 type_hash_pair_compare (const void *p1_, const void *p2_)
4041 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
4042 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
4043 if (p1->hash < p2->hash)
4044 return -1;
4045 else if (p1->hash > p2->hash)
4046 return 1;
4047 return 0;
4050 /* Return a hash value for gimple type TYPE combined with VAL.
4051 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4053 To hash a type we end up hashing in types that are reachable.
4054 Through pointers we can end up with cycles, which mess up the
4055 required property that we need to compute the same hash value
4056 for structurally equivalent types. To avoid this we have to
4057 hash all types in a cycle (the SCC) in a commutative way. The
4058 easiest way is to not mix in the hashes of the SCC members at
4059 all. To make this work we have to delay setting the hash
4060 values of the SCC until it is complete. */
4062 static hashval_t
4063 iterative_hash_gimple_type (tree type, hashval_t val,
4064 VEC(tree, heap) **sccstack,
4065 struct pointer_map_t *sccstate,
4066 struct obstack *sccstate_obstack)
4068 hashval_t v;
4069 void **slot;
4070 struct sccs *state;
4072 /* Not visited during this DFS walk. */
4073 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4074 state = XOBNEW (sccstate_obstack, struct sccs);
4075 *pointer_map_insert (sccstate, type) = state;
4077 VEC_safe_push (tree, heap, *sccstack, type);
4078 state->dfsnum = next_dfs_num++;
4079 state->low = state->dfsnum;
4080 state->on_sccstack = true;
4082 /* Combine a few common features of types so that types are grouped into
4083 smaller sets; when searching for existing matching types to merge,
4084 only existing types having the same features as the new type will be
4085 checked. */
4086 v = iterative_hash_name (TYPE_NAME (type), 0);
4087 v = iterative_hash_hashval_t (TREE_CODE (type), v);
4088 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4089 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4091 /* Do not hash the type's size as this will cause differences in
4092 hash values for the complete vs. the incomplete type variant. */
4094 /* Incorporate common features of numerical types. */
4095 if (INTEGRAL_TYPE_P (type)
4096 || SCALAR_FLOAT_TYPE_P (type)
4097 || FIXED_POINT_TYPE_P (type))
4099 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4100 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4101 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4104 /* For pointer and reference types, fold in information about the type
4105 pointed to. */
4106 if (POINTER_TYPE_P (type))
4107 v = visit (TREE_TYPE (type), state, v,
4108 sccstack, sccstate, sccstate_obstack);
4110 /* For integer types hash the type's min/max values and the string flag. */
4111 if (TREE_CODE (type) == INTEGER_TYPE)
4113 /* OMP lowering can introduce error_mark_node in place of
4114 random local decls in types. */
4115 if (TYPE_MIN_VALUE (type) != error_mark_node)
4116 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4117 if (TYPE_MAX_VALUE (type) != error_mark_node)
4118 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4119 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4122 /* For array types hash their domain and the string flag. */
4123 if (TREE_CODE (type) == ARRAY_TYPE
4124 && TYPE_DOMAIN (type))
4126 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4127 v = visit (TYPE_DOMAIN (type), state, v,
4128 sccstack, sccstate, sccstate_obstack);
4131 /* Recurse for aggregates with a single element type. */
4132 if (TREE_CODE (type) == ARRAY_TYPE
4133 || TREE_CODE (type) == COMPLEX_TYPE
4134 || TREE_CODE (type) == VECTOR_TYPE)
4135 v = visit (TREE_TYPE (type), state, v,
4136 sccstack, sccstate, sccstate_obstack);
4138 /* Incorporate function return and argument types. */
4139 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4141 unsigned na;
4142 tree p;
4144 /* For method types also incorporate their parent class. */
4145 if (TREE_CODE (type) == METHOD_TYPE)
4146 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4147 sccstack, sccstate, sccstate_obstack);
4149 /* Check result and argument types. */
4150 v = visit (TREE_TYPE (type), state, v,
4151 sccstack, sccstate, sccstate_obstack);
4152 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4154 v = visit (TREE_VALUE (p), state, v,
4155 sccstack, sccstate, sccstate_obstack);
4156 na++;
4159 v = iterative_hash_hashval_t (na, v);
4162 if (TREE_CODE (type) == RECORD_TYPE
4163 || TREE_CODE (type) == UNION_TYPE
4164 || TREE_CODE (type) == QUAL_UNION_TYPE)
4166 unsigned nf;
4167 tree f;
4169 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4171 v = iterative_hash_name (DECL_NAME (f), v);
4172 v = visit (TREE_TYPE (f), state, v,
4173 sccstack, sccstate, sccstate_obstack);
4174 nf++;
4177 v = iterative_hash_hashval_t (nf, v);
4180 /* Record hash for us. */
4181 state->u.hash = v;
4183 /* See if we found an SCC. */
4184 if (state->low == state->dfsnum)
4186 tree x;
4187 struct tree_int_map *m;
4189 /* Pop off the SCC and set its hash values. */
4190 x = VEC_pop (tree, *sccstack);
4191 /* Optimize SCC size one. */
4192 if (x == type)
4194 state->on_sccstack = false;
4195 m = ggc_alloc_cleared_tree_int_map ();
4196 m->base.from = x;
4197 m->to = v;
4198 slot = htab_find_slot (type_hash_cache, m, INSERT);
4199 gcc_assert (!*slot);
4200 *slot = (void *) m;
4202 else
4204 struct sccs *cstate;
4205 unsigned first, i, size, j;
4206 struct type_hash_pair *pairs;
4207 /* Pop off the SCC and build an array of type, hash pairs. */
4208 first = VEC_length (tree, *sccstack) - 1;
4209 while (VEC_index (tree, *sccstack, first) != type)
4210 --first;
4211 size = VEC_length (tree, *sccstack) - first + 1;
4212 pairs = XALLOCAVEC (struct type_hash_pair, size);
4213 i = 0;
4214 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4215 cstate->on_sccstack = false;
4216 pairs[i].type = x;
4217 pairs[i].hash = cstate->u.hash;
4220 x = VEC_pop (tree, *sccstack);
4221 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4222 cstate->on_sccstack = false;
4223 ++i;
4224 pairs[i].type = x;
4225 pairs[i].hash = cstate->u.hash;
4227 while (x != type);
4228 gcc_assert (i + 1 == size);
4229 /* Sort the array of type, hash pairs so that when we mix in
4230 all members of the SCC the hash value becomes independent of
4231 the order in which we visited the SCC. Disregard hashes equal to
4232 the hash of the type we mix into because we cannot guarantee
4233 a stable sort for those across different TUs. */
4234 qsort (pairs, size, sizeof (struct type_hash_pair),
4235 type_hash_pair_compare);
4236 for (i = 0; i < size; ++i)
4238 hashval_t hash;
4239 m = ggc_alloc_cleared_tree_int_map ();
4240 m->base.from = pairs[i].type;
4241 hash = pairs[i].hash;
4242 /* Skip same hashes. */
4243 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4245 for (; j < size; ++j)
4246 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4247 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4248 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4249 m->to = hash;
4250 if (pairs[i].type == type)
4251 v = hash;
4252 slot = htab_find_slot (type_hash_cache, m, INSERT);
4253 gcc_assert (!*slot);
4254 *slot = (void *) m;
4259 return iterative_hash_hashval_t (v, val);
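/* For illustration only (hypothetical input): the mutually recursive
   types

     struct A { struct B *b; };
     struct B { struct A *a; };

   form a single SCC through their pointer members.  Whether the DFS
   enters the cycle at A or at B, the sort-and-mix step above assigns
   each member the same hash value either way, so structurally
   equivalent copies of the cycle coming from other TUs hash
   identically.  */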
4263 /* Returns a hash value for P (assumed to be a type). The hash value
4264 is computed using some distinguishing features of the type. Note
4265 that we cannot use pointer hashing here as we may be dealing with
4266 two distinct instances of the same type.
4268 This function should produce the same hash value for two compatible
4269 types according to gimple_types_compatible_p. */
4271 static hashval_t
4272 gimple_type_hash (const void *p)
4274 const_tree t = (const_tree) p;
4275 VEC(tree, heap) *sccstack = NULL;
4276 struct pointer_map_t *sccstate;
4277 struct obstack sccstate_obstack;
4278 hashval_t val;
4279 void **slot;
4280 struct tree_int_map m;
4282 if (type_hash_cache == NULL)
4283 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4284 tree_int_map_eq, NULL);
4286 m.base.from = CONST_CAST_TREE (t);
4287 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4288 && *slot)
4289 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4291 /* Perform a DFS walk and pre-hash all reachable types. */
4292 next_dfs_num = 1;
4293 sccstate = pointer_map_create ();
4294 gcc_obstack_init (&sccstate_obstack);
4295 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4296 &sccstack, sccstate, &sccstate_obstack);
4297 VEC_free (tree, heap, sccstack);
4298 pointer_map_destroy (sccstate);
4299 obstack_free (&sccstate_obstack, NULL);
4301 return val;
4304 /* Return a hash value for gimple type TYPE combined with VAL.
4306 The hash value returned is equal for types considered compatible
4307 by gimple_canonical_types_compatible_p. */
4309 static hashval_t
4310 iterative_hash_canonical_type (tree type, hashval_t val)
4312 hashval_t v;
4313 void **slot;
4314 struct tree_int_map *mp, m;
4316 m.base.from = type;
4317 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
4318 && *slot)
4319 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
4321 /* Combine a few common features of types so that types are grouped into
4322 smaller sets; when searching for existing matching types to merge,
4323 only existing types having the same features as the new type will be
4324 checked. */
4325 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4326 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4327 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4328 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4330 /* Incorporate common features of numerical types. */
4331 if (INTEGRAL_TYPE_P (type)
4332 || SCALAR_FLOAT_TYPE_P (type)
4333 || FIXED_POINT_TYPE_P (type)
4334 || TREE_CODE (type) == VECTOR_TYPE
4335 || TREE_CODE (type) == COMPLEX_TYPE
4336 || TREE_CODE (type) == OFFSET_TYPE
4337 || POINTER_TYPE_P (type))
4339 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4340 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4343 /* For pointer and reference types, fold in information about the type
4344 pointed to but do not recurse to the pointed-to type. */
4345 if (POINTER_TYPE_P (type))
4347 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
4348 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4349 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
4350 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4353 /* For integer types hash the types min/max values and the string flag. */
4354 if (TREE_CODE (type) == INTEGER_TYPE)
4356 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4357 v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
4360 /* For array types hash their domain and the string flag. */
4361 if (TREE_CODE (type) == ARRAY_TYPE
4362 && TYPE_DOMAIN (type))
4364 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4365 v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
4368 /* Recurse for aggregates with a single element type. */
4369 if (TREE_CODE (type) == ARRAY_TYPE
4370 || TREE_CODE (type) == COMPLEX_TYPE
4371 || TREE_CODE (type) == VECTOR_TYPE)
4372 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4374 /* Incorporate function return and argument types. */
4375 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4377 unsigned na;
4378 tree p;
4380 /* For method types also incorporate their parent class. */
4381 if (TREE_CODE (type) == METHOD_TYPE)
4382 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4384 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4386 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4388 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
4389 na++;
4392 v = iterative_hash_hashval_t (na, v);
4395 if (TREE_CODE (type) == RECORD_TYPE
4396 || TREE_CODE (type) == UNION_TYPE
4397 || TREE_CODE (type) == QUAL_UNION_TYPE)
4399 unsigned nf;
4400 tree f;
4402 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4403 if (TREE_CODE (f) == FIELD_DECL)
4405 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4406 nf++;
4409 v = iterative_hash_hashval_t (nf, v);
4412 /* Cache the just computed hash value. */
4413 mp = ggc_alloc_cleared_tree_int_map ();
4414 mp->base.from = type;
4415 mp->to = v;
4416 *slot = (void *) mp;
4418 return iterative_hash_hashval_t (v, val);
4421 static hashval_t
4422 gimple_canonical_type_hash (const void *p)
4424 if (canonical_type_hash_cache == NULL)
4425 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4426 tree_int_map_eq, NULL);
4428 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
4432 /* Returns nonzero if P1 and P2 are equal. */
4434 static int
4435 gimple_type_eq (const void *p1, const void *p2)
4437 const_tree t1 = (const_tree) p1;
4438 const_tree t2 = (const_tree) p2;
4439 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4440 CONST_CAST_TREE (t2));
4444 /* Worker for gimple_register_type.
4445 Register type T in the global type table gimple_types.
4446 When REGISTERING_MV is false, first recurse on the main variant of T. */
4448 static tree
4449 gimple_register_type_1 (tree t, bool registering_mv)
4451 void **slot;
4452 gimple_type_leader_entry *leader;
4454 /* If we registered this type before return the cached result. */
4455 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4456 if (leader->type == t)
4457 return leader->leader;
4459 /* Always register the main variant first. This is important so we
4460 pick up the non-typedef variants as canonical; otherwise we'll end
4461 up taking typedef ids for structure tags during comparison.
4462 It also makes sure that main variants will be merged to main variants.
4463 As we are operating on a possibly partially fixed-up type graph,
4464 do not bother to recurse more than once; otherwise we may end up
4465 walking in circles.
4466 If we are registering a main variant it will either remain its
4467 own main variant or it will be merged to something else, in which
4468 case we do not care about the main variant leader. */
4469 if (!registering_mv
4470 && TYPE_MAIN_VARIANT (t) != t)
4471 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
4473 /* See if we already have an equivalent type registered. */
4474 slot = htab_find_slot (gimple_types, t, INSERT);
4475 if (*slot
4476 && *(tree *)slot != t)
4478 tree new_type = (tree) *((tree *) slot);
4479 leader->type = t;
4480 leader->leader = new_type;
4481 return new_type;
4484 /* If not, insert it into the cache and the hash table. */
4485 leader->type = t;
4486 leader->leader = t;
4487 *slot = (void *) t;
4488 return t;
4491 /* Register type T in the global type table gimple_types.
4492 If another type T', compatible with T, already existed in
4493 gimple_types then return T', otherwise return T. This is used by
4494 LTO to merge identical types read from different TUs. */
4496 tree
4497 gimple_register_type (tree t)
4499 gcc_assert (TYPE_P (t));
4501 if (!gimple_type_leader)
4502 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4503 (GIMPLE_TYPE_LEADER_SIZE);
4505 if (gimple_types == NULL)
4506 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4508 return gimple_register_type_1 (t, false);
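/* A minimal caller sketch, for illustration only (merge_read_type is
   a hypothetical name, not part of this file): an LTO reader would
   funnel each streamed-in type through the table so that compatible
   types from different TUs collapse to a single leader.  */
#if 0
static tree
merge_read_type (tree t)
{
  /* Returns either T itself or a previously registered compatible
     type; further code should only use the returned leader.  */
  return gimple_register_type (t);
}
#endif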
4511 /* The TYPE_CANONICAL merging machinery. It should closely resemble
4512 the middle-end types_compatible_p function. It needs to avoid
4513 claiming types are different for types that should be treated
4514 the same with respect to TBAA. Canonical types are also used
4515 for IL consistency checks via the useless_type_conversion_p
4516 predicate which does not handle all type kinds itself but falls
4517 back to pointer-comparison of TYPE_CANONICAL for aggregates
4518 for example. */
4520 /* Return true iff T1 and T2 are structurally identical as far as
4521 TBAA is concerned. */
4523 static bool
4524 gimple_canonical_types_compatible_p (tree t1, tree t2)
4526 /* Before starting to set up the SCC machinery handle simple cases. */
4528 /* Check first for the obvious case of pointer identity. */
4529 if (t1 == t2)
4530 return true;
4532 /* Check that we have two types to compare. */
4533 if (t1 == NULL_TREE || t2 == NULL_TREE)
4534 return false;
4536 /* If the types have been previously registered and found equal
4537 they still are. */
4538 if (TYPE_CANONICAL (t1)
4539 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4540 return true;
4542 /* Can't be the same type if the types don't have the same code. */
4543 if (TREE_CODE (t1) != TREE_CODE (t2))
4544 return false;
4546 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
4547 return false;
4549 /* Qualifiers do not matter for canonical type comparison purposes. */
4551 /* Void types and nullptr types are always the same. */
4552 if (TREE_CODE (t1) == VOID_TYPE
4553 || TREE_CODE (t1) == NULLPTR_TYPE)
4554 return true;
4556 /* Can't be the same type if they have different alignment, or mode. */
4557 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4558 || TYPE_MODE (t1) != TYPE_MODE (t2))
4559 return false;
4561 /* Non-aggregate types can be handled cheaply. */
4562 if (INTEGRAL_TYPE_P (t1)
4563 || SCALAR_FLOAT_TYPE_P (t1)
4564 || FIXED_POINT_TYPE_P (t1)
4565 || TREE_CODE (t1) == VECTOR_TYPE
4566 || TREE_CODE (t1) == COMPLEX_TYPE
4567 || TREE_CODE (t1) == OFFSET_TYPE
4568 || POINTER_TYPE_P (t1))
4570 /* Can't be the same type if they have different sign or precision. */
4571 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
4572 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4573 return false;
4575 if (TREE_CODE (t1) == INTEGER_TYPE
4576 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
4577 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
4578 return false;
4580 /* For canonical type comparisons we do not want to build SCCs
4581 so we cannot compare pointed-to types. But we can, for now,
4582 require the same pointed-to type kind and match what
4583 useless_type_conversion_p would do. */
4584 if (POINTER_TYPE_P (t1))
4586 /* If the two pointers have different ref-all attributes,
4587 they can't be the same type. */
4588 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4589 return false;
4591 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4592 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4593 return false;
4595 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4596 return false;
4598 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4599 return false;
4602 /* Tail-recurse to components. */
4603 if (TREE_CODE (t1) == VECTOR_TYPE
4604 || TREE_CODE (t1) == COMPLEX_TYPE)
4605 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4606 TREE_TYPE (t2));
4608 return true;
4611 /* If their attributes are not the same they can't be the same type. */
4612 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
4613 return false;
4615 /* Do type-specific comparisons. */
4616 switch (TREE_CODE (t1))
4618 case ARRAY_TYPE:
4619 /* Array types are the same if the element types are the same and
4620 the number of elements is the same. */
4621 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4622 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4623 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
4624 return false;
4625 else
4627 tree i1 = TYPE_DOMAIN (t1);
4628 tree i2 = TYPE_DOMAIN (t2);
4630 /* For an incomplete external array, the type domain can be
4631 NULL_TREE. Check this condition also. */
4632 if (i1 == NULL_TREE && i2 == NULL_TREE)
4633 return true;
4634 else if (i1 == NULL_TREE || i2 == NULL_TREE)
4635 return false;
4636 /* If for a complete array type the possibly gimplified sizes
4637 are different, the types are different. */
4638 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
4639 || (TYPE_SIZE (i1)
4640 && TYPE_SIZE (i2)
4641 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
4642 return false;
4643 else
4645 tree min1 = TYPE_MIN_VALUE (i1);
4646 tree min2 = TYPE_MIN_VALUE (i2);
4647 tree max1 = TYPE_MAX_VALUE (i1);
4648 tree max2 = TYPE_MAX_VALUE (i2);
4650 /* The minimum/maximum values have to be the same. */
4651 if ((min1 == min2
4652 || (min1 && min2
4653 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4654 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4655 || operand_equal_p (min1, min2, 0))))
4656 && (max1 == max2
4657 || (max1 && max2
4658 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4659 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4660 || operand_equal_p (max1, max2, 0)))))
4661 return true;
4662 else
4663 return false;
4667 case METHOD_TYPE:
4668 /* Method types should belong to the same class. */
4669 if (!gimple_canonical_types_compatible_p
4670 (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
4671 return false;
4673 /* Fallthru */
4675 case FUNCTION_TYPE:
4676 /* Function types are the same if the return type and argument types
4677 are the same. */
4678 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
4679 return false;
4681 if (!comp_type_attributes (t1, t2))
4682 return false;
4684 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
4685 return true;
4686 else
4688 tree parms1, parms2;
4690 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4691 parms1 && parms2;
4692 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4694 if (!gimple_canonical_types_compatible_p
4695 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
4696 return false;
4699 if (parms1 || parms2)
4700 return false;
4702 return true;
4705 case RECORD_TYPE:
4706 case UNION_TYPE:
4707 case QUAL_UNION_TYPE:
4709 tree f1, f2;
4711 /* For aggregate types, all the fields must be the same. */
4712 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4713 f1 && f2;
4714 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4716 /* Skip non-fields. */
4717 while (f1 && TREE_CODE (f1) != FIELD_DECL)
4718 f1 = TREE_CHAIN (f1);
4719 while (f2 && TREE_CODE (f2) != FIELD_DECL)
4720 f2 = TREE_CHAIN (f2);
4721 if (!f1 || !f2)
4722 break;
4723 /* The fields must have matching non-addressability, offset and type. */
4724 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4725 || !gimple_compare_field_offset (f1, f2)
4726 || !gimple_canonical_types_compatible_p
4727 (TREE_TYPE (f1), TREE_TYPE (f2)))
4728 return false;
4731 /* If one aggregate has more fields than the other, they
4732 are not the same. */
4733 if (f1 || f2)
4734 return false;
4736 return true;
4739 default:
4740 gcc_unreachable ();
4745 /* Returns nonzero if P1 and P2 are equal. */
4747 static int
4748 gimple_canonical_type_eq (const void *p1, const void *p2)
4750 const_tree t1 = (const_tree) p1;
4751 const_tree t2 = (const_tree) p2;
4752 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
4753 CONST_CAST_TREE (t2));
4756 /* Register type T in the global canonical type table gimple_canonical_types.
4757 If another type T', compatible with T, already existed in
4758 gimple_canonical_types then return T', otherwise return T. This is used by
4759 LTO to merge identical types read from different TUs.
4761 ??? This merging does not exactly match how the tree.c middle-end
4762 functions will assign TYPE_CANONICAL when new types are created
4763 during optimization (which at least happens for pointer and array
4764 types). */
4766 tree
4767 gimple_register_canonical_type (tree t)
4769 void **slot;
4771 gcc_assert (TYPE_P (t));
4773 if (TYPE_CANONICAL (t))
4774 return TYPE_CANONICAL (t);
4776 if (gimple_canonical_types == NULL)
4777 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4778 gimple_canonical_type_eq, 0);
4780 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4781 if (*slot
4782 && *(tree *)slot != t)
4784 tree new_type = (tree) *((tree *) slot);
4786 TYPE_CANONICAL (t) = new_type;
4787 t = new_type;
4789 else
4791 TYPE_CANONICAL (t) = t;
4792 *slot = (void *) t;
4795 return t;
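/* A sketch for illustration only (canonicalize_pair and its arguments
   are hypothetical): for two structurally identical types built by a
   streamer, registration makes TYPE_CANONICAL pointer-comparable,
   which is what the TBAA machinery relies on.  */
#if 0
static void
canonicalize_pair (tree t1, tree t2)
{
  tree c1 = gimple_register_canonical_type (t1);
  tree c2 = gimple_register_canonical_type (t2);
  /* Holds whenever t1 and t2 are compatible according to
     gimple_canonical_types_compatible_p.  */
  gcc_checking_assert (c1 == c2);
}
#endif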
4799 /* Show statistics on references to the global GIMPLE type tables. */
4801 void
4802 print_gimple_types_stats (void)
4804 if (gimple_types)
4805 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4806 "%ld searches, %ld collisions (ratio: %f)\n",
4807 (long) htab_size (gimple_types),
4808 (long) htab_elements (gimple_types),
4809 (long) gimple_types->searches,
4810 (long) gimple_types->collisions,
4811 htab_collisions (gimple_types));
4812 else
4813 fprintf (stderr, "GIMPLE type table is empty\n");
4814 if (type_hash_cache)
4815 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4816 "%ld searches, %ld collisions (ratio: %f)\n",
4817 (long) htab_size (type_hash_cache),
4818 (long) htab_elements (type_hash_cache),
4819 (long) type_hash_cache->searches,
4820 (long) type_hash_cache->collisions,
4821 htab_collisions (type_hash_cache));
4822 else
4823 fprintf (stderr, "GIMPLE type hash table is empty\n");
4824 if (gimple_canonical_types)
4825 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4826 "%ld searches, %ld collisions (ratio: %f)\n",
4827 (long) htab_size (gimple_canonical_types),
4828 (long) htab_elements (gimple_canonical_types),
4829 (long) gimple_canonical_types->searches,
4830 (long) gimple_canonical_types->collisions,
4831 htab_collisions (gimple_canonical_types));
4832 else
4833 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4834 if (canonical_type_hash_cache)
4835 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4836 "%ld searches, %ld collisions (ratio: %f)\n",
4837 (long) htab_size (canonical_type_hash_cache),
4838 (long) htab_elements (canonical_type_hash_cache),
4839 (long) canonical_type_hash_cache->searches,
4840 (long) canonical_type_hash_cache->collisions,
4841 htab_collisions (canonical_type_hash_cache));
4842 else
4843 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4846 /* Free the gimple type hashtables used for LTO type merging. */
4848 void
4849 free_gimple_type_tables (void)
4851 /* Last chance to print stats for the tables. */
4852 if (flag_lto_report)
4853 print_gimple_types_stats ();
4855 if (gimple_types)
4857 htab_delete (gimple_types);
4858 gimple_types = NULL;
4860 if (gimple_canonical_types)
4862 htab_delete (gimple_canonical_types);
4863 gimple_canonical_types = NULL;
4865 if (type_hash_cache)
4867 htab_delete (type_hash_cache);
4868 type_hash_cache = NULL;
4870 if (canonical_type_hash_cache)
4872 htab_delete (canonical_type_hash_cache);
4873 canonical_type_hash_cache = NULL;
4875 if (type_pair_cache)
4877 free (type_pair_cache);
4878 type_pair_cache = NULL;
4880 gimple_type_leader = NULL;
4884 /* Return a type the same as TYPE except unsigned or
4885 signed according to UNSIGNEDP. */
4887 static tree
4888 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4890 tree type1;
4892 type1 = TYPE_MAIN_VARIANT (type);
4893 if (type1 == signed_char_type_node
4894 || type1 == char_type_node
4895 || type1 == unsigned_char_type_node)
4896 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4897 if (type1 == integer_type_node || type1 == unsigned_type_node)
4898 return unsignedp ? unsigned_type_node : integer_type_node;
4899 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4900 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4901 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4902 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4903 if (type1 == long_long_integer_type_node
4904 || type1 == long_long_unsigned_type_node)
4905 return unsignedp
4906 ? long_long_unsigned_type_node
4907 : long_long_integer_type_node;
4908 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
4909 return unsignedp
4910 ? int128_unsigned_type_node
4911 : int128_integer_type_node;
4912 #if HOST_BITS_PER_WIDE_INT >= 64
4913 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4914 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4915 #endif
4916 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4917 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4918 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4919 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4920 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4921 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4922 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4923 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4925 #define GIMPLE_FIXED_TYPES(NAME) \
4926 if (type1 == short_ ## NAME ## _type_node \
4927 || type1 == unsigned_short_ ## NAME ## _type_node) \
4928 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4929 : short_ ## NAME ## _type_node; \
4930 if (type1 == NAME ## _type_node \
4931 || type1 == unsigned_ ## NAME ## _type_node) \
4932 return unsignedp ? unsigned_ ## NAME ## _type_node \
4933 : NAME ## _type_node; \
4934 if (type1 == long_ ## NAME ## _type_node \
4935 || type1 == unsigned_long_ ## NAME ## _type_node) \
4936 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4937 : long_ ## NAME ## _type_node; \
4938 if (type1 == long_long_ ## NAME ## _type_node \
4939 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4940 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4941 : long_long_ ## NAME ## _type_node;
4943 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4944 if (type1 == NAME ## _type_node \
4945 || type1 == u ## NAME ## _type_node) \
4946 return unsignedp ? u ## NAME ## _type_node \
4947 : NAME ## _type_node;
4949 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4950 if (type1 == sat_ ## short_ ## NAME ## _type_node \
4951 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4952 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4953 : sat_ ## short_ ## NAME ## _type_node; \
4954 if (type1 == sat_ ## NAME ## _type_node \
4955 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4956 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4957 : sat_ ## NAME ## _type_node; \
4958 if (type1 == sat_ ## long_ ## NAME ## _type_node \
4959 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4960 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4961 : sat_ ## long_ ## NAME ## _type_node; \
4962 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4963 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4964 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4965 : sat_ ## long_long_ ## NAME ## _type_node;
4967 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
4968 if (type1 == sat_ ## NAME ## _type_node \
4969 || type1 == sat_ ## u ## NAME ## _type_node) \
4970 return unsignedp ? sat_ ## u ## NAME ## _type_node \
4971 : sat_ ## NAME ## _type_node;
4973 GIMPLE_FIXED_TYPES (fract);
4974 GIMPLE_FIXED_TYPES_SAT (fract);
4975 GIMPLE_FIXED_TYPES (accum);
4976 GIMPLE_FIXED_TYPES_SAT (accum);
4978 GIMPLE_FIXED_MODE_TYPES (qq);
4979 GIMPLE_FIXED_MODE_TYPES (hq);
4980 GIMPLE_FIXED_MODE_TYPES (sq);
4981 GIMPLE_FIXED_MODE_TYPES (dq);
4982 GIMPLE_FIXED_MODE_TYPES (tq);
4983 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
4984 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
4985 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
4986 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
4987 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
4988 GIMPLE_FIXED_MODE_TYPES (ha);
4989 GIMPLE_FIXED_MODE_TYPES (sa);
4990 GIMPLE_FIXED_MODE_TYPES (da);
4991 GIMPLE_FIXED_MODE_TYPES (ta);
4992 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
4993 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
4994 GIMPLE_FIXED_MODE_TYPES_SAT (da);
4995 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
4997 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
4998 the precision; they have precision set to match their range, but
4999 may use a wider mode to match an ABI. If we change modes, we may
5000 wind up with bad conversions. For INTEGER_TYPEs in C, must check
5001 the precision as well, so as to yield correct results for
5002 bit-field types. C++ does not have these separate bit-field
5003 types, and producing a signed or unsigned variant of an
5004 ENUMERAL_TYPE may cause other problems as well. */
5005 if (!INTEGRAL_TYPE_P (type)
5006 || TYPE_UNSIGNED (type) == unsignedp)
5007 return type;
5009 #define TYPE_OK(node) \
5010 (TYPE_MODE (type) == TYPE_MODE (node) \
5011 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
5012 if (TYPE_OK (signed_char_type_node))
5013 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
5014 if (TYPE_OK (integer_type_node))
5015 return unsignedp ? unsigned_type_node : integer_type_node;
5016 if (TYPE_OK (short_integer_type_node))
5017 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
5018 if (TYPE_OK (long_integer_type_node))
5019 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
5020 if (TYPE_OK (long_long_integer_type_node))
5021 return (unsignedp
5022 ? long_long_unsigned_type_node
5023 : long_long_integer_type_node);
5024 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
5025 return (unsignedp
5026 ? int128_unsigned_type_node
5027 : int128_integer_type_node);
5029 #if HOST_BITS_PER_WIDE_INT >= 64
5030 if (TYPE_OK (intTI_type_node))
5031 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
5032 #endif
5033 if (TYPE_OK (intDI_type_node))
5034 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
5035 if (TYPE_OK (intSI_type_node))
5036 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
5037 if (TYPE_OK (intHI_type_node))
5038 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
5039 if (TYPE_OK (intQI_type_node))
5040 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
5042 #undef GIMPLE_FIXED_TYPES
5043 #undef GIMPLE_FIXED_MODE_TYPES
5044 #undef GIMPLE_FIXED_TYPES_SAT
5045 #undef GIMPLE_FIXED_MODE_TYPES_SAT
5046 #undef TYPE_OK
5048 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
5052 /* Return an unsigned type the same as TYPE in other respects. */
5054 tree
5055 gimple_unsigned_type (tree type)
5057 return gimple_signed_or_unsigned_type (true, type);
5061 /* Return a signed type the same as TYPE in other respects. */
5063 tree
5064 gimple_signed_type (tree type)
5066 return gimple_signed_or_unsigned_type (false, type);
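/* A small sketch, for illustration only (signedness_example is a
   hypothetical name): the mapping preserves precision and mode, and
   round-trips on the standard integer type nodes.  */
#if 0
static void
signedness_example (void)
{
  tree u = gimple_unsigned_type (integer_type_node);
  tree s = gimple_signed_type (u);
  /* U is unsigned_type_node, and S is integer_type_node again.  */
  gcc_checking_assert (u == unsigned_type_node && s == integer_type_node);
}
#endif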
5070 /* Return the type-based alias set for T, which may be an expression
5071 or a type. Return -1 if we don't do anything special. */
5073 alias_set_type
5074 gimple_get_alias_set (tree t)
5076 tree u;
5078 /* Permit type-punning when accessing a union, provided the access
5079 is directly through the union. For example, this code does not
5080 permit taking the address of a union member and then storing
5081 through it. Even the type-punning allowed here is a GCC
5082 extension, albeit a common and useful one; the C standard says
5083 that such accesses have implementation-defined behavior. */
5084 for (u = t;
5085 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
5086 u = TREE_OPERAND (u, 0))
5087 if (TREE_CODE (u) == COMPONENT_REF
5088 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
5089 return 0;
5091 /* That's all the expressions we handle specially. */
5092 if (!TYPE_P (t))
5093 return -1;
5095 /* For convenience, follow the C standard when dealing with
5096 character types. Any object may be accessed via an lvalue that
5097 has character type. */
5098 if (t == char_type_node
5099 || t == signed_char_type_node
5100 || t == unsigned_char_type_node)
5101 return 0;
5103 /* Allow aliasing between signed and unsigned variants of the same
5104 type. We treat the signed variant as canonical. */
5105 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
5107 tree t1 = gimple_signed_type (t);
5109 /* t1 == t can happen for boolean nodes which are always unsigned. */
5110 if (t1 != t)
5111 return get_alias_set (t1);
5114 return -1;
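/* For illustration only (hypothetical C input): in

     union u { int i; float f; } x;
     x.f = 1.0f;
     return x.i;

   both member accesses are COMPONENT_REFs whose base has UNION_TYPE,
   so the loop above returns alias set 0 and the type-punning is
   allowed.  Storing through a separately taken address of a member
   would not be recognized by this case.  */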
5118 /* Data structure used to count the number of dereferences to PTR
5119 inside an expression. */
5120 struct count_ptr_d
5122 tree ptr;
5123 unsigned num_stores;
5124 unsigned num_loads;
5127 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
5128 MEM_REF nodes whose base is the pointer passed in DATA. */
5130 static tree
5131 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
5133 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
5134 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
5136 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
5137 pointer 'ptr' is *not* dereferenced; it is simply used to compute
5138 the address of 'fld' as 'ptr + offsetof(fld)'. */
5139 if (TREE_CODE (*tp) == ADDR_EXPR)
5141 *walk_subtrees = 0;
5142 return NULL_TREE;
5145 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
5147 if (wi_p->is_lhs)
5148 count_p->num_stores++;
5149 else
5150 count_p->num_loads++;
5153 return NULL_TREE;
5156 /* Count the number of direct and indirect uses for pointer PTR in
5157 statement STMT. The number of direct uses is stored in
5158 *NUM_USES_P. Indirect references are counted separately depending
5159 on whether they are store or load operations. The counts are
5160 stored in *NUM_STORES_P and *NUM_LOADS_P. */
5162 void
5163 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
5164 unsigned *num_loads_p, unsigned *num_stores_p)
5166 ssa_op_iter i;
5167 tree use;
5169 *num_uses_p = 0;
5170 *num_loads_p = 0;
5171 *num_stores_p = 0;
5173 /* Find out the total number of uses of PTR in STMT. */
5174 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
5175 if (use == ptr)
5176 (*num_uses_p)++;
5178 /* Now count the number of indirect references to PTR. This is
5179 truly awful, but we don't have much choice. There are no parent
5180 pointers inside MEM_REFs, so an expression like
5181 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
5182 find all the indirect and direct uses of x_1 inside. The only
5183 shortcut we can take is the fact that GIMPLE only allows
5184 MEM_REFs inside the expressions below. */
5185 if (is_gimple_assign (stmt)
5186 || gimple_code (stmt) == GIMPLE_RETURN
5187 || gimple_code (stmt) == GIMPLE_ASM
5188 || is_gimple_call (stmt))
5190 struct walk_stmt_info wi;
5191 struct count_ptr_d count;
5193 count.ptr = ptr;
5194 count.num_stores = 0;
5195 count.num_loads = 0;
5197 memset (&wi, 0, sizeof (wi));
5198 wi.info = &count;
5199 walk_gimple_op (stmt, count_ptr_derefs, &wi);
5201 *num_stores_p = count.num_stores;
5202 *num_loads_p = count.num_loads;
5205 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
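/* A usage sketch, for illustration only (STMT and the SSA name X_1
   are hypothetical): for a statement like *x_1 = foo (x_1, *x_1)
   this yields num_uses == 3, num_loads == 1 and num_stores == 1.  */
#if 0
unsigned num_uses, num_loads, num_stores;
count_uses_and_derefs (x_1, stmt, &num_uses, &num_loads, &num_stores);
#endif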
5208 /* From a tree operand OP return the base of a load or store operation
5209 or NULL_TREE if OP is not a load or a store. */
5211 static tree
5212 get_base_loadstore (tree op)
5214 while (handled_component_p (op))
5215 op = TREE_OPERAND (op, 0);
5216 if (DECL_P (op)
5217 || INDIRECT_REF_P (op)
5218 || TREE_CODE (op) == MEM_REF
5219 || TREE_CODE (op) == TARGET_MEM_REF)
5220 return op;
5221 return NULL_TREE;
5224 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5225 VISIT_ADDR, if non-NULL, on load, store and address-taken operands,
5226 passing STMT, the base of the operand and DATA to each. The base
5227 will be either a decl, an indirect reference (including TARGET_MEM_REF)
5228 or the argument of an address expression.
5229 Returns the OR of the results of these callbacks. */
5231 bool
5232 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5233 bool (*visit_load)(gimple, tree, void *),
5234 bool (*visit_store)(gimple, tree, void *),
5235 bool (*visit_addr)(gimple, tree, void *))
5237 bool ret = false;
5238 unsigned i;
5239 if (gimple_assign_single_p (stmt))
5241 tree lhs, rhs;
5242 if (visit_store)
5244 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5245 if (lhs)
5246 ret |= visit_store (stmt, lhs, data);
5248 rhs = gimple_assign_rhs1 (stmt);
5249 while (handled_component_p (rhs))
5250 rhs = TREE_OPERAND (rhs, 0);
5251 if (visit_addr)
5253 if (TREE_CODE (rhs) == ADDR_EXPR)
5254 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5255 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5256 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5257 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5258 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5259 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5260 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5261 0), data);
5262 else if (TREE_CODE (rhs) == CONSTRUCTOR)
5264 unsigned int ix;
5265 tree val;
5267 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5268 if (TREE_CODE (val) == ADDR_EXPR)
5269 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5270 else if (TREE_CODE (val) == OBJ_TYPE_REF
5271 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5272 ret |= visit_addr (stmt,
5273 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5274 0), data);
5276 lhs = gimple_assign_lhs (stmt);
5277 if (TREE_CODE (lhs) == TARGET_MEM_REF
5278 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5279 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
5281 if (visit_load)
5283 rhs = get_base_loadstore (rhs);
5284 if (rhs)
5285 ret |= visit_load (stmt, rhs, data);
5288 else if (visit_addr
5289 && (is_gimple_assign (stmt)
5290 || gimple_code (stmt) == GIMPLE_COND))
5292 for (i = 0; i < gimple_num_ops (stmt); ++i)
5293 if (gimple_op (stmt, i)
5294 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
5295 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
5297 else if (is_gimple_call (stmt))
5299 if (visit_store)
5301 tree lhs = gimple_call_lhs (stmt);
5302 if (lhs)
5304 lhs = get_base_loadstore (lhs);
5305 if (lhs)
5306 ret |= visit_store (stmt, lhs, data);
5309 if (visit_load || visit_addr)
5310 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5312 tree rhs = gimple_call_arg (stmt, i);
5313 if (visit_addr
5314 && TREE_CODE (rhs) == ADDR_EXPR)
5315 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5316 else if (visit_load)
5318 rhs = get_base_loadstore (rhs);
5319 if (rhs)
5320 ret |= visit_load (stmt, rhs, data);
5323 if (visit_addr
5324 && gimple_call_chain (stmt)
5325 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5326 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5327 data);
5328 if (visit_addr
5329 && gimple_call_return_slot_opt_p (stmt)
5330 && gimple_call_lhs (stmt) != NULL_TREE
5331 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
5332 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
5334 else if (gimple_code (stmt) == GIMPLE_ASM)
5336 unsigned noutputs;
5337 const char *constraint;
5338 const char **oconstraints;
5339 bool allows_mem, allows_reg, is_inout;
5340 noutputs = gimple_asm_noutputs (stmt);
5341 oconstraints = XALLOCAVEC (const char *, noutputs);
5342 if (visit_store || visit_addr)
5343 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5345 tree link = gimple_asm_output_op (stmt, i);
5346 tree op = get_base_loadstore (TREE_VALUE (link));
5347 if (op && visit_store)
5348 ret |= visit_store (stmt, op, data);
5349 if (visit_addr)
5351 constraint = TREE_STRING_POINTER
5352 (TREE_VALUE (TREE_PURPOSE (link)));
5353 oconstraints[i] = constraint;
5354 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5355 &allows_reg, &is_inout);
5356 if (op && !allows_reg && allows_mem)
5357 ret |= visit_addr (stmt, op, data);
5360 if (visit_load || visit_addr)
5361 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5363 tree link = gimple_asm_input_op (stmt, i);
5364 tree op = TREE_VALUE (link);
5365 if (visit_addr
5366 && TREE_CODE (op) == ADDR_EXPR)
5367 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5368 else if (visit_load || visit_addr)
5370 op = get_base_loadstore (op);
5371 if (op)
5373 if (visit_load)
5374 ret |= visit_load (stmt, op, data);
5375 if (visit_addr)
5377 constraint = TREE_STRING_POINTER
5378 (TREE_VALUE (TREE_PURPOSE (link)));
5379 parse_input_constraint (&constraint, 0, 0, noutputs,
5380 0, oconstraints,
5381 &allows_mem, &allows_reg);
5382 if (!allows_reg && allows_mem)
5383 ret |= visit_addr (stmt, op, data);
5389 else if (gimple_code (stmt) == GIMPLE_RETURN)
5391 tree op = gimple_return_retval (stmt);
5392 if (op)
5394 if (visit_addr
5395 && TREE_CODE (op) == ADDR_EXPR)
5396 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5397 else if (visit_load)
5399 op = get_base_loadstore (op);
5400 if (op)
5401 ret |= visit_load (stmt, op, data);
5405 else if (visit_addr
5406 && gimple_code (stmt) == GIMPLE_PHI)
5408 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5410 tree op = PHI_ARG_DEF (stmt, i);
5411 if (TREE_CODE (op) == ADDR_EXPR)
5412 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5416 return ret;
5419 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5420 should make a faster clone for this case. */
5422 bool
5423 walk_stmt_load_store_ops (gimple stmt, void *data,
5424 bool (*visit_load)(gimple, tree, void *),
5425 bool (*visit_store)(gimple, tree, void *))
5427 return walk_stmt_load_store_addr_ops (stmt, data,
5428 visit_load, visit_store, NULL);
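/* A callback sketch, for illustration only (count_loads_cb and
   count_loads are hypothetical names): counting the loads in a
   statement by passing a visit_load callback and a NULL
   visit_store.  */
#if 0
static bool
count_loads_cb (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
		void *data)
{
  ++*(unsigned *) data;
  /* Returning false keeps the OR of the callback results false.  */
  return false;
}

static unsigned
count_loads (gimple stmt)
{
  unsigned n = 0;
  walk_stmt_load_store_ops (stmt, &n, count_loads_cb, NULL);
  return n;
}
#endif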
5431 /* Helper for gimple_ior_addresses_taken. */
5433 static bool
5434 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5435 tree addr, void *data)
5437 bitmap addresses_taken = (bitmap)data;
5438 addr = get_base_address (addr);
5439 if (addr
5440 && DECL_P (addr))
5442 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5443 return true;
5445 return false;
5448 /* Set the bits for the UIDs of all decls that have their address taken
5449 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
5450 were any in this stmt. */
5452 bool
5453 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5455 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5456 gimple_ior_addresses_taken_1);
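/* A sketch for illustration only (BB is a hypothetical basic block):
   collecting the UIDs of every address-taken decl in a block.  */
#if 0
bitmap taken = BITMAP_ALLOC (NULL);
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
  gimple_ior_addresses_taken (taken, gsi_stmt (gsi));
#endif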
5460 /* Return a printable name for symbol DECL. */
5462 const char *
5463 gimple_decl_printable_name (tree decl, int verbosity)
5465 if (!DECL_NAME (decl))
5466 return NULL;
5468 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5470 const char *str, *mangled_str;
5471 int dmgl_opts = DMGL_NO_OPTS;
5473 if (verbosity >= 2)
5475 dmgl_opts = DMGL_VERBOSE
5476 | DMGL_ANSI
5477 | DMGL_GNU_V3
5478 | DMGL_RET_POSTFIX;
5479 if (TREE_CODE (decl) == FUNCTION_DECL)
5480 dmgl_opts |= DMGL_PARAMS;
5483 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5484 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5485 return (str) ? str : mangled_str;
5488 return IDENTIFIER_POINTER (DECL_NAME (decl));
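/* For illustration: for a C++ FUNCTION_DECL whose assembler name is a
   mangled string such as _Z3addii, a verbosity of 2 or more enables
   DMGL_VERBOSE and DMGL_PARAMS so the demangled parameter list is
   printed as well; lower verbosities demangle with no extra options.
   Decls without an assembler name fall back to DECL_NAME.  */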
5491 /* Return true when STMT is a call to the normal built-in function CODE. */
5493 bool
5494 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5496 tree fndecl;
5497 return (is_gimple_call (stmt)
5498 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5499 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5500 && DECL_FUNCTION_CODE (fndecl) == code);
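/* A usage sketch, for illustration only (STMT is hypothetical):
   recognizing a memcpy call before inspecting its arguments.  */
#if 0
if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
  {
    tree dest = gimple_call_arg (stmt, 0);
    tree src = gimple_call_arg (stmt, 1);
    tree len = gimple_call_arg (stmt, 2);
    /* Analyze or transform the copy here.  */
  }
#endif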
5503 /* Return true if STMT clobbers memory. STMT is required to be a
5504 GIMPLE_ASM. */
5506 bool
5507 gimple_asm_clobbers_memory_p (const_gimple stmt)
5509 unsigned i;
5511 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5513 tree op = gimple_asm_clobber_op (stmt, i);
5514 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5515 return true;
5518 return false;
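/* For illustration: the GIMPLE_ASM built for

     __asm__ __volatile__ ("" : : : "memory");

   carries a single clobber operand whose string is "memory", so the
   predicate above returns true for it.  */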
5520 #include "gt-gimple.h"