/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global canonical type table.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}

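/* Illustrative note (not part of the original file): because the 1
   element tree array at the end of the tuple is already counted in
   gimple_size, a statement with NUM_OPS == 3 allocates
   gimple_size (code) + 2 * sizeof (tree) bytes.  */
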
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}

/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
			    unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}

/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

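/* Example usage (an illustrative sketch, not part of the original file):
   building `tmp = __builtin_memcpy (dst, src, len)'.  DST, SRC, LEN and
   TMP are assumed to be pre-built GIMPLE operands.

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     gimple call = gimple_build_call (fn, 3, dst, src, len);
     gimple_call_set_lhs (call, tmp);  */
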
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}

/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}

/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}

/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}

/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}

/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side, which can be unary, binary or ternary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
				       PASS_MEM_STAT);
}

/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
			      tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
				  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
			      tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
				       PASS_MEM_STAT);
}

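/* Example usage (an illustrative sketch, not part of the original file):
   building `x = a + b' directly from operands, where X, A and B are
   assumed to be pre-built SSA names or DECLs.  The two-operand overload
   above supplies NULL_TREE for OP3.

     gimple stmt = gimple_build_assign_with_ops (PLUS_EXPR, x, a, b);  */
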
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

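/* Example usage (an illustrative sketch, not part of the original file):
   appending `x = y + 1' to a fresh sequence.  X and Y are assumed to be
   pre-built DECLs, and a gimplification context is assumed to be active,
   as gimplify_and_add requires one.

     gimple_seq seq = NULL;
     tree rhs = build2 (PLUS_EXPR, TREE_TYPE (y), y,
			build_int_cst (TREE_TYPE (y), 1));
     gimple stmt = gimplify_assign (x, rhs, &seq);  */
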
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}

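/* Example usage (an illustrative sketch, not part of the original file):
   building `if (a < b) goto then_lab; else goto else_lab', where the
   operands and the two LABEL_DECLs are assumed to exist already.  In
   CFG form the labels are typically NULL and the edges carry the
   targets instead.

     gimple cond = gimple_build_cond (LT_EXPR, a, b, then_lab, else_lab);  */
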
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
			       tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}

/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}

/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}

/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
		    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
		      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
		      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  VEC_length (tree, inputs),
			  VEC_length (tree, outputs),
			  VEC_length (tree, clobbers),
			  VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}

/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gcc_checking_assert (default_label);
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + 1 + nlabels);
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, nlabels = VEC_length (tree, args);

  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + 1, VEC_index (tree, args, i));

  return p;
}

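/* Example usage (an illustrative sketch, not part of the original file):
   a switch on INDEX with one case label plus a default.  CASE0 and DEFLT
   are assumed to be CASE_LABEL_EXPRs built elsewhere (e.g. with
   build_case_label).

     VEC(tree, heap) *labels = NULL;
     VEC_safe_push (tree, heap, labels, case0);
     gimple sw = gimple_build_switch (index, deflt, labels);  */
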
/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}

/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}

/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}

/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}

/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}

/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */

/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;
  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}

/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}

/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}

/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}

/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
		     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}

/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}

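/* Example usage (an illustrative sketch, not part of the original file):
   counting call statements in a sequence.  The names count_calls_1 and
   count_calls are hypothetical.

     static tree
     count_calls_1 (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		    struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
	 ++*(unsigned *) wi->info;
       *handled_ops_p = true;   // operands need not be scanned
       return NULL_TREE;        // NULL keeps the walk going
     }

     static unsigned
     count_calls (gimple_seq seq)
     {
       struct walk_stmt_info wi;
       unsigned n = 0;
       memset (&wi, 0, sizeof (wi));
       wi.info = &n;
       walk_gimple_seq (seq, count_calls_1, NULL, &wi);
       return n;
     }  */
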
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
			       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really a LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}

/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}

/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
				 wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
				 callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}

/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}

/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

/* Return the "fn spec" string for call STMT.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}

/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}

/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}

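/* Illustrative example (not part of the original file): for a call whose
   function type carries the attribute fn spec ("1W"), the first character
   makes gimple_call_return_flags return ERF_RETURNS_ARG | 0 (the call
   returns its first argument), while gimple_call_arg_flags (stmt, 0)
   reads the 'W' and returns EAF_DIRECT | EAF_NOESCAPE.  */
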
/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && is_gimple_val (gimple_op (gs, 1)));
}

/* Return true if GS is a SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}

/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
	  && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
	      || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
	  && gimple_assign_rhs1 (gs) != error_mark_node
	  && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
	      == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

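/* Illustrative example (not part of the original file): an assignment
   like `i = (const int) j' is a unary nop, since the conversion does not
   change the mode, whereas `c = (char) j' from an int J is not, because
   the modes of the LHS and RHS1 differ.  */
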
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}

/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}

/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}

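/* Example usage (an illustrative sketch, not part of the original file):
   rewriting the RHS of the assignment at GSI to `a + b' in place; the
   statement is reallocated by the function above if it lacks operand
   slots.

     gimple_assign_set_rhs_with_ops_1 (&gsi, PLUS_EXPR, a, b, NULL_TREE);
     update_stmt (gsi_stmt (gsi));  */
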
/* Return the LHS of a statement that performs an assignment,
   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
   for a call to a function that returns no value, or for a
   statement other than an assignment or a call.  */

tree
gimple_get_lhs (const_gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);

  if (code == GIMPLE_ASSIGN)
    return gimple_assign_lhs (stmt);
  else if (code == GIMPLE_CALL)
    return gimple_call_lhs (stmt);
  else
    return NULL_TREE;
}

2148 /* Set the LHS of a statement that performs an assignment,
2149 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2151 void
2152 gimple_set_lhs (gimple stmt, tree lhs)
2154 enum gimple_code code = gimple_code (stmt);
2156 if (code == GIMPLE_ASSIGN)
2157 gimple_assign_set_lhs (stmt, lhs);
2158 else if (code == GIMPLE_CALL)
2159 gimple_call_set_lhs (stmt, lhs);
2160 else
2161 gcc_unreachable ();
2164 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2165 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2166 expression with a different value.
2168 This will update any annotations (say debug bind stmts) referring
2169 to the original LHS, so that they use the RHS instead. This is
2170 done even if NLHS and LHS are the same, for it is understood that
2171 the RHS will be modified afterwards, and NLHS will not be assigned
2172 an equivalent value.
2174 Adjusting any non-annotation uses of the LHS, if needed, is a
2175 responsibility of the caller.
2177 The effect of this call should be pretty much the same as that of
2178 inserting a copy of STMT before STMT, and then removing the
2179 original stmt, at which time gsi_remove() would have updated the
2180 annotations, but using this function saves all the inserting,
2181 copying and removing. */
2183 void
2184 gimple_replace_lhs (gimple stmt, tree nlhs)
2186 if (MAY_HAVE_DEBUG_STMTS)
2188 tree lhs = gimple_get_lhs (stmt);
2190 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2192 insert_debug_temp_for_var_def (NULL, lhs);
2195 gimple_set_lhs (stmt, nlhs);
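/* Usage sketch (hypothetical): before rewriting 'x_1 = a_2 + b_3'
   into the different value 'x_1 = a_2 * b_3', detach the debug
   annotations from x_1 first:

     gimple_replace_lhs (stmt, gimple_assign_lhs (stmt));
     gimple_assign_set_rhs_code (stmt, MULT_EXPR);
     update_stmt (stmt);  */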
2198 /* Return a deep copy of statement STMT. All the operands from STMT
2199 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2200 and VUSE operand arrays are set to empty in the new copy. The new
2201 copy isn't part of any sequence. */
2203 gimple
2204 gimple_copy (gimple stmt)
2206 enum gimple_code code = gimple_code (stmt);
2207 unsigned num_ops = gimple_num_ops (stmt);
2208 gimple copy = gimple_alloc (code, num_ops);
2209 unsigned i;
2211 /* Shallow copy all the fields from STMT. */
2212 memcpy (copy, stmt, gimple_size (code));
2213 gimple_init_singleton (copy);
2215 /* If STMT has sub-statements, deep-copy them as well. */
2216 if (gimple_has_substatements (stmt))
2218 gimple_seq new_seq;
2219 tree t;
2221 switch (gimple_code (stmt))
2223 case GIMPLE_BIND:
2224 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2225 gimple_bind_set_body (copy, new_seq);
2226 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2227 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2228 break;
2230 case GIMPLE_CATCH:
2231 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2232 gimple_catch_set_handler (copy, new_seq);
2233 t = unshare_expr (gimple_catch_types (stmt));
2234 gimple_catch_set_types (copy, t);
2235 break;
2237 case GIMPLE_EH_FILTER:
2238 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2239 gimple_eh_filter_set_failure (copy, new_seq);
2240 t = unshare_expr (gimple_eh_filter_types (stmt));
2241 gimple_eh_filter_set_types (copy, t);
2242 break;
2244 case GIMPLE_EH_ELSE:
2245 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2246 gimple_eh_else_set_n_body (copy, new_seq);
2247 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2248 gimple_eh_else_set_e_body (copy, new_seq);
2249 break;
2251 case GIMPLE_TRY:
2252 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2253 gimple_try_set_eval (copy, new_seq);
2254 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2255 gimple_try_set_cleanup (copy, new_seq);
2256 break;
2258 case GIMPLE_OMP_FOR:
2259 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2260 gimple_omp_for_set_pre_body (copy, new_seq);
2261 t = unshare_expr (gimple_omp_for_clauses (stmt));
2262 gimple_omp_for_set_clauses (copy, t);
2263 copy->gimple_omp_for.iter
2264 = ggc_alloc_vec_gimple_omp_for_iter
2265 (gimple_omp_for_collapse (stmt));
2266 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2268 gimple_omp_for_set_cond (copy, i,
2269 gimple_omp_for_cond (stmt, i));
2270 gimple_omp_for_set_index (copy, i,
2271 gimple_omp_for_index (stmt, i));
2272 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2273 gimple_omp_for_set_initial (copy, i, t);
2274 t = unshare_expr (gimple_omp_for_final (stmt, i));
2275 gimple_omp_for_set_final (copy, i, t);
2276 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2277 gimple_omp_for_set_incr (copy, i, t);
2279 goto copy_omp_body;
2281 case GIMPLE_OMP_PARALLEL:
2282 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2283 gimple_omp_parallel_set_clauses (copy, t);
2284 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2285 gimple_omp_parallel_set_child_fn (copy, t);
2286 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2287 gimple_omp_parallel_set_data_arg (copy, t);
2288 goto copy_omp_body;
2290 case GIMPLE_OMP_TASK:
2291 t = unshare_expr (gimple_omp_task_clauses (stmt));
2292 gimple_omp_task_set_clauses (copy, t);
2293 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2294 gimple_omp_task_set_child_fn (copy, t);
2295 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2296 gimple_omp_task_set_data_arg (copy, t);
2297 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2298 gimple_omp_task_set_copy_fn (copy, t);
2299 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2300 gimple_omp_task_set_arg_size (copy, t);
2301 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2302 gimple_omp_task_set_arg_align (copy, t);
2303 goto copy_omp_body;
2305 case GIMPLE_OMP_CRITICAL:
2306 t = unshare_expr (gimple_omp_critical_name (stmt));
2307 gimple_omp_critical_set_name (copy, t);
2308 goto copy_omp_body;
2310 case GIMPLE_OMP_SECTIONS:
2311 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2312 gimple_omp_sections_set_clauses (copy, t);
2313 t = unshare_expr (gimple_omp_sections_control (stmt));
2314 gimple_omp_sections_set_control (copy, t);
2315 /* FALLTHRU */
2317 case GIMPLE_OMP_SINGLE:
2318 case GIMPLE_OMP_SECTION:
2319 case GIMPLE_OMP_MASTER:
2320 case GIMPLE_OMP_ORDERED:
2321 copy_omp_body:
2322 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2323 gimple_omp_set_body (copy, new_seq);
2324 break;
2326 case GIMPLE_TRANSACTION:
2327 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2328 gimple_transaction_set_body (copy, new_seq);
2329 break;
2331 case GIMPLE_WITH_CLEANUP_EXPR:
2332 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2333 gimple_wce_set_cleanup (copy, new_seq);
2334 break;
2336 default:
2337 gcc_unreachable ();
2341 /* Make copy of operands. */
2342 if (num_ops > 0)
2344 for (i = 0; i < num_ops; i++)
2345 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2347 /* Clear out SSA operand vectors on COPY. */
2348 if (gimple_has_ops (stmt))
2350 gimple_set_def_ops (copy, NULL);
2351 gimple_set_use_ops (copy, NULL);
2354 if (gimple_has_mem_ops (stmt))
2356 gimple_set_vdef (copy, gimple_vdef (stmt));
2357 gimple_set_vuse (copy, gimple_vuse (stmt));
2360 /* SSA operands need to be updated. */
2361 gimple_set_modified (copy, true);
2364 return copy;
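/* E.g. (sketch): duplicate STMT and insert the copy before it; since
   the copy is marked modified, update_stmt recomputes its operands:

     gimple dup = gimple_copy (stmt);
     gsi_insert_before (&gsi, dup, GSI_SAME_STMT);
     update_stmt (dup);  */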
2368 /* Return true if statement S has side-effects. We consider a
2369 statement to have side effects if:
2371 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2372 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2374 bool
2375 gimple_has_side_effects (const_gimple s)
2377 if (is_gimple_debug (s))
2378 return false;
2380 /* We don't have to scan the operands to check for
2381 volatile ones; gimple_has_volatile_ops summarizes
2382 that for the whole statement. */
2383 if (gimple_has_volatile_ops (s))
2384 return true;
2386 if (gimple_code (s) == GIMPLE_ASM
2387 && gimple_asm_volatile_p (s))
2388 return true;
2390 if (is_gimple_call (s))
2392 int flags = gimple_call_flags (s);
2394 /* An infinite loop is considered a side effect. */
2395 if (!(flags & (ECF_CONST | ECF_PURE))
2396 || (flags & ECF_LOOPING_CONST_OR_PURE))
2397 return true;
2399 return false;
2402 return false;
2405 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2406 Return true if S can trap. When INCLUDE_MEM is true, check whether
2407 the memory operations could trap. When INCLUDE_STORES is true and
2408 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2410 bool
2411 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2413 tree t, div = NULL_TREE;
2414 enum tree_code op;
2416 if (include_mem)
2418 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2420 for (i = start; i < gimple_num_ops (s); i++)
2421 if (tree_could_trap_p (gimple_op (s, i)))
2422 return true;
2425 switch (gimple_code (s))
2427 case GIMPLE_ASM:
2428 return gimple_asm_volatile_p (s);
2430 case GIMPLE_CALL:
2431 t = gimple_call_fndecl (s);
2432 /* Assume that calls to weak functions may trap. */
2433 if (!t || !DECL_P (t) || DECL_WEAK (t))
2434 return true;
2435 return false;
2437 case GIMPLE_ASSIGN:
2438 t = gimple_expr_type (s);
2439 op = gimple_assign_rhs_code (s);
2440 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2441 div = gimple_assign_rhs2 (s);
2442 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2443 (INTEGRAL_TYPE_P (t)
2444 && TYPE_OVERFLOW_TRAPS (t)),
2445 div));
2447 default:
2448 break;
2451 return false;
2454 /* Return true if statement S can trap. */
2456 bool
2457 gimple_could_trap_p (gimple s)
2459 return gimple_could_trap_p_1 (s, true, true);
2462 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2464 bool
2465 gimple_assign_rhs_could_trap_p (gimple s)
2467 gcc_assert (is_gimple_assign (s));
2468 return gimple_could_trap_p_1 (s, true, false);
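/* Together these predicates gate dead-code removal.  A sketch,
   assuming STMT writes no memory and its LHS has no remaining uses:

     if (!gimple_has_side_effects (stmt)
         && !gimple_could_trap_p (stmt))
       gsi_remove (&gsi, true);  */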
2472 /* Print debugging information for gimple stmts generated. */
2474 void
2475 dump_gimple_statistics (void)
2477 int i, total_tuples = 0, total_bytes = 0;
2479 if (! GATHER_STATISTICS)
2481 fprintf (stderr, "No gimple statistics\n");
2482 return;
2485 fprintf (stderr, "\nGIMPLE statements\n");
2486 fprintf (stderr, "Kind Stmts Bytes\n");
2487 fprintf (stderr, "---------------------------------------\n");
2488 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2490 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2491 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2492 total_tuples += gimple_alloc_counts[i];
2493 total_bytes += gimple_alloc_sizes[i];
2495 fprintf (stderr, "---------------------------------------\n");
2496 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2497 fprintf (stderr, "---------------------------------------\n");
2501 /* Return the number of operands needed on the RHS of a GIMPLE
2502 assignment for an expression with tree code CODE. */
2504 unsigned
2505 get_gimple_rhs_num_ops (enum tree_code code)
2507 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2509 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2510 return 1;
2511 else if (rhs_class == GIMPLE_BINARY_RHS)
2512 return 2;
2513 else if (rhs_class == GIMPLE_TERNARY_RHS)
2514 return 3;
2515 else
2516 gcc_unreachable ();
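/* For example, PLUS_EXPR is GIMPLE_BINARY_RHS and needs 2 operand
   slots, COND_EXPR is GIMPLE_TERNARY_RHS and needs 3, and SSA_NAME is
   GIMPLE_SINGLE_RHS and needs 1 (see the table below).  */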
2519 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2520 (unsigned char) \
2521 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2522 : ((TYPE) == tcc_binary \
2523 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2524 : ((TYPE) == tcc_constant \
2525 || (TYPE) == tcc_declaration \
2526 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2527 : ((SYM) == TRUTH_AND_EXPR \
2528 || (SYM) == TRUTH_OR_EXPR \
2529 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2530 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2531 : ((SYM) == COND_EXPR \
2532 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2533 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2534 || (SYM) == DOT_PROD_EXPR \
2535 || (SYM) == REALIGN_LOAD_EXPR \
2536 || (SYM) == VEC_COND_EXPR \
2537 || (SYM) == VEC_PERM_EXPR \
2538 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2539 : ((SYM) == CONSTRUCTOR \
2540 || (SYM) == OBJ_TYPE_REF \
2541 || (SYM) == ASSERT_EXPR \
2542 || (SYM) == ADDR_EXPR \
2543 || (SYM) == WITH_SIZE_EXPR \
2544 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2545 : GIMPLE_INVALID_RHS),
2546 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2548 const unsigned char gimple_rhs_class_table[] = {
2549 #include "all-tree.def"
2552 #undef DEFTREECODE
2553 #undef END_OF_BASE_TREE_CODES
2555 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2557 /* Validation of GIMPLE expressions. */
2559 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2561 bool
2562 is_gimple_lvalue (tree t)
2564 return (is_gimple_addressable (t)
2565 || TREE_CODE (t) == WITH_SIZE_EXPR
2566 /* These are complex lvalues, but don't have addresses, so they
2567 go here. */
2568 || TREE_CODE (t) == BIT_FIELD_REF);
2571 /* Return true if T is a GIMPLE condition. */
2573 bool
2574 is_gimple_condexpr (tree t)
2576 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2577 && !tree_could_throw_p (t)
2578 && is_gimple_val (TREE_OPERAND (t, 0))
2579 && is_gimple_val (TREE_OPERAND (t, 1))));
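/* E.g. 'a_1 < b_2' with GIMPLE value operands qualifies, as does the
   bare value 'c_3'; 'a_1 < b_2 + 1' does not, because both comparison
   operands must themselves be GIMPLE values.  */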
2582 /* Return true if T is something whose address can be taken. */
2584 bool
2585 is_gimple_addressable (tree t)
2587 return (is_gimple_id (t) || handled_component_p (t)
2588 || TREE_CODE (t) == MEM_REF);
2591 /* Return true if T is a valid gimple constant. */
2593 bool
2594 is_gimple_constant (const_tree t)
2596 switch (TREE_CODE (t))
2598 case INTEGER_CST:
2599 case REAL_CST:
2600 case FIXED_CST:
2601 case STRING_CST:
2602 case COMPLEX_CST:
2603 case VECTOR_CST:
2604 return true;
2606 /* Vector constant constructors are gimple invariant. */
2607 case CONSTRUCTOR:
2608 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2609 return TREE_CONSTANT (t);
2610 else
2611 return false;
2613 default:
2614 return false;
2618 /* Return true if T is a gimple address. */
2620 bool
2621 is_gimple_address (const_tree t)
2623 tree op;
2625 if (TREE_CODE (t) != ADDR_EXPR)
2626 return false;
2628 op = TREE_OPERAND (t, 0);
2629 while (handled_component_p (op))
2631 if ((TREE_CODE (op) == ARRAY_REF
2632 || TREE_CODE (op) == ARRAY_RANGE_REF)
2633 && !is_gimple_val (TREE_OPERAND (op, 1)))
2634 return false;
2636 op = TREE_OPERAND (op, 0);
2639 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2640 return true;
2642 switch (TREE_CODE (op))
2644 case PARM_DECL:
2645 case RESULT_DECL:
2646 case LABEL_DECL:
2647 case FUNCTION_DECL:
2648 case VAR_DECL:
2649 case CONST_DECL:
2650 return true;
2652 default:
2653 return false;
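/* E.g. '&a', '&a.f' and '&a[i_1]' (with i_1 a GIMPLE value) are
   gimple addresses, while '&a[i_1 + 1]' is not, because the array
   index is not a GIMPLE value.  */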
2657 /* Return true if T is a gimple invariant address. */
2659 bool
2660 is_gimple_invariant_address (const_tree t)
2662 const_tree op;
2664 if (TREE_CODE (t) != ADDR_EXPR)
2665 return false;
2667 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2668 if (!op)
2669 return false;
2671 if (TREE_CODE (op) == MEM_REF)
2673 const_tree op0 = TREE_OPERAND (op, 0);
2674 return (TREE_CODE (op0) == ADDR_EXPR
2675 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2676 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2679 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2682 /* Return true if T is a gimple invariant address at IPA level
2683 (so addresses of variables on stack are not allowed). */
2685 bool
2686 is_gimple_ip_invariant_address (const_tree t)
2688 const_tree op;
2690 if (TREE_CODE (t) != ADDR_EXPR)
2691 return false;
2693 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2694 if (!op)
2695 return false;
2697 if (TREE_CODE (op) == MEM_REF)
2699 const_tree op0 = TREE_OPERAND (op, 0);
2700 return (TREE_CODE (op0) == ADDR_EXPR
2701 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2702 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2705 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2708 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2709 form of function invariant. */
2711 bool
2712 is_gimple_min_invariant (const_tree t)
2714 if (TREE_CODE (t) == ADDR_EXPR)
2715 return is_gimple_invariant_address (t);
2717 return is_gimple_constant (t);
2720 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2721 form of gimple minimal invariant. */
2723 bool
2724 is_gimple_ip_invariant (const_tree t)
2726 if (TREE_CODE (t) == ADDR_EXPR)
2727 return is_gimple_ip_invariant_address (t);
2729 return is_gimple_constant (t);
2732 /* Return true if T is a variable. */
2734 bool
2735 is_gimple_variable (tree t)
2737 return (TREE_CODE (t) == VAR_DECL
2738 || TREE_CODE (t) == PARM_DECL
2739 || TREE_CODE (t) == RESULT_DECL
2740 || TREE_CODE (t) == SSA_NAME);
2743 /* Return true if T is a GIMPLE identifier (something with an address). */
2745 bool
2746 is_gimple_id (tree t)
2748 return (is_gimple_variable (t)
2749 || TREE_CODE (t) == FUNCTION_DECL
2750 || TREE_CODE (t) == LABEL_DECL
2751 || TREE_CODE (t) == CONST_DECL
2752 /* Allow string constants, since they are addressable. */
2753 || TREE_CODE (t) == STRING_CST);
2756 /* Return true if T is a non-aggregate register variable. */
2758 bool
2759 is_gimple_reg (tree t)
2761 if (virtual_operand_p (t))
2762 return false;
2764 if (TREE_CODE (t) == SSA_NAME)
2765 return true;
2767 if (!is_gimple_variable (t))
2768 return false;
2770 if (!is_gimple_reg_type (TREE_TYPE (t)))
2771 return false;
2773 /* A volatile decl is not acceptable because we can't reuse it as
2774 needed. We need to copy it into a temp first. */
2775 if (TREE_THIS_VOLATILE (t))
2776 return false;
2778 /* We define "registers" as things that can be renamed as needed,
2779 which with our infrastructure does not apply to memory. */
2780 if (needs_to_live_in_memory (t))
2781 return false;
2783 /* Hard register variables are an interesting case. For those that
2784 are call-clobbered, we don't know where all the calls are, since
2785 we don't (want to) take into account which operations will turn
2786 into libcalls at the rtl level. For those that are call-saved,
2787 we don't currently model the fact that calls may in fact change
2788 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2789 level, and so miss variable changes they might imply. All around,
2790 it seems safest to not do too much optimization with these at the
2791 tree level at all. We'll have to rely on the rtl optimizers to
2792 clean this up, as there we've got all the appropriate bits exposed. */
2793 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2794 return false;
2796 /* Complex and vector values must have been put into SSA-like form.
2797 That is, no assignments to the individual components. */
2798 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2799 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2800 return DECL_GIMPLE_REG_P (t);
2802 return true;
2806 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2808 bool
2809 is_gimple_val (tree t)
2811 /* Make loads from volatiles and memory vars explicit. */
2812 if (is_gimple_variable (t)
2813 && is_gimple_reg_type (TREE_TYPE (t))
2814 && !is_gimple_reg (t))
2815 return false;
2817 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2820 /* Similarly, but accept hard registers as inputs to asm statements. */
2822 bool
2823 is_gimple_asm_val (tree t)
2825 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2826 return true;
2828 return is_gimple_val (t);
2831 /* Return true if T is a GIMPLE minimal lvalue. */
2833 bool
2834 is_gimple_min_lval (tree t)
2836 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2837 return false;
2838 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2841 /* Return true if T is a valid function operand of a CALL_EXPR. */
2843 bool
2844 is_gimple_call_addr (tree t)
2846 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2849 /* Return true if T is a valid address operand of a MEM_REF. */
2851 bool
2852 is_gimple_mem_ref_addr (tree t)
2854 return (is_gimple_reg (t)
2855 || TREE_CODE (t) == INTEGER_CST
2856 || (TREE_CODE (t) == ADDR_EXPR
2857 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2858 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2862 /* Given a memory reference expression T, return its base address.
2863 The base address of a memory reference expression is the main
2864 object being referenced. For instance, the base address for
2865 'array[i].fld[j]' is 'array'. You can think of this as stripping
2866 away the offset part from a memory address.
2868 This function calls handled_component_p to strip away all the inner
2869 parts of the memory reference until it reaches the base object. */
2871 tree
2872 get_base_address (tree t)
2874 while (handled_component_p (t))
2875 t = TREE_OPERAND (t, 0);
2877 if ((TREE_CODE (t) == MEM_REF
2878 || TREE_CODE (t) == TARGET_MEM_REF)
2879 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2880 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2882 /* ??? Either the alias oracle or all callers need to properly deal
2883 with WITH_SIZE_EXPRs before we can look through those. */
2884 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2885 return NULL_TREE;
2887 return t;
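/* E.g. the base of 'MEM[(&a)].f.g' is 'a' once the ADDR_EXPR is
   looked through, whereas for 'MEM[p_1].f' the base is the MEM_REF
   itself, since nothing more can be stripped from an SSA pointer.  */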
2890 void
2891 recalculate_side_effects (tree t)
2893 enum tree_code code = TREE_CODE (t);
2894 int len = TREE_OPERAND_LENGTH (t);
2895 int i;
2897 switch (TREE_CODE_CLASS (code))
2899 case tcc_expression:
2900 switch (code)
2902 case INIT_EXPR:
2903 case MODIFY_EXPR:
2904 case VA_ARG_EXPR:
2905 case PREDECREMENT_EXPR:
2906 case PREINCREMENT_EXPR:
2907 case POSTDECREMENT_EXPR:
2908 case POSTINCREMENT_EXPR:
2909 /* All of these have side-effects, no matter what their
2910 operands are. */
2911 return;
2913 default:
2914 break;
2916 /* Fall through. */
2918 case tcc_comparison: /* a comparison expression */
2919 case tcc_unary: /* a unary arithmetic expression */
2920 case tcc_binary: /* a binary arithmetic expression */
2921 case tcc_reference: /* a reference */
2922 case tcc_vl_exp: /* a function call */
2923 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2924 for (i = 0; i < len; ++i)
2926 tree op = TREE_OPERAND (t, i);
2927 if (op && TREE_SIDE_EFFECTS (op))
2928 TREE_SIDE_EFFECTS (t) = 1;
2930 break;
2932 case tcc_constant:
2933 /* No side-effects. */
2934 return;
2936 default:
2937 gcc_unreachable ();
2941 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2942 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2943 we failed to create one. */
2945 tree
2946 canonicalize_cond_expr_cond (tree t)
2948 /* Strip conversions around boolean operations. */
2949 if (CONVERT_EXPR_P (t)
2950 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2951 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2952 == BOOLEAN_TYPE))
2953 t = TREE_OPERAND (t, 0);
2955 /* For !x use x == 0. */
2956 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2958 tree top0 = TREE_OPERAND (t, 0);
2959 t = build2 (EQ_EXPR, TREE_TYPE (t),
2960 top0, build_int_cst (TREE_TYPE (top0), 0));
2962 /* For cmp ? 1 : 0 use cmp. */
2963 else if (TREE_CODE (t) == COND_EXPR
2964 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2965 && integer_onep (TREE_OPERAND (t, 1))
2966 && integer_zerop (TREE_OPERAND (t, 2)))
2968 tree top0 = TREE_OPERAND (t, 0);
2969 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2970 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2973 if (is_gimple_condexpr (t))
2974 return t;
2976 return NULL_TREE;
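/* Usage sketch: when propagating a folded expression into a
   GIMPLE_COND, accept the result only if a valid form came back:

     tree cond = canonicalize_cond_expr_cond (folded);
     if (cond)
       gimple_cond_set_condition_from_tree (stmt, cond);  */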
2979 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2980 the positions marked by the set ARGS_TO_SKIP. */
2982 gimple
2983 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
2985 int i;
2986 int nargs = gimple_call_num_args (stmt);
2987 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
2988 gimple new_stmt;
2990 for (i = 0; i < nargs; i++)
2991 if (!bitmap_bit_p (args_to_skip, i))
2992 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
2994 if (gimple_call_internal_p (stmt))
2995 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2996 vargs);
2997 else
2998 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2999 VEC_free (tree, heap, vargs);
3000 if (gimple_call_lhs (stmt))
3001 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3003 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3004 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3006 gimple_set_block (new_stmt, gimple_block (stmt));
3007 if (gimple_has_location (stmt))
3008 gimple_set_location (new_stmt, gimple_location (stmt));
3009 gimple_call_copy_flags (new_stmt, stmt);
3010 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3012 gimple_set_modified (new_stmt, true);
3014 return new_stmt;
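/* Sketch: to rebuild 'foo (a, b, c)' without its second argument, set
   bit 1 in a bitmap and copy the call:

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     new_call = gimple_call_copy_skip_args (call, skip);
     BITMAP_FREE (skip);  */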
3019 /* Return true if the field decls F1 and F2 are at the same offset.
3021 This is intended to be used on GIMPLE types only. */
3023 bool
3024 gimple_compare_field_offset (tree f1, tree f2)
3026 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3028 tree offset1 = DECL_FIELD_OFFSET (f1);
3029 tree offset2 = DECL_FIELD_OFFSET (f2);
3030 return ((offset1 == offset2
3031 /* Once gimplification is done, self-referential offsets are
3032 instantiated as operand #2 of the COMPONENT_REF built for
3033 each access and reset. Therefore, they are not relevant
3034 anymore and fields are interchangeable provided that they
3035 represent the same access. */
3036 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3037 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3038 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3039 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3040 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3041 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3042 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3043 || operand_equal_p (offset1, offset2, 0))
3044 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3045 DECL_FIELD_BIT_OFFSET (f2)));
3048 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3049 should be, so handle differing ones specially by decomposing
3050 the offset into a byte and bit offset manually. */
3051 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3052 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3054 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3055 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3056 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3057 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3058 + bit_offset1 / BITS_PER_UNIT);
3059 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3060 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3061 + bit_offset2 / BITS_PER_UNIT);
3062 if (byte_offset1 != byte_offset2)
3063 return false;
3064 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3067 return false;
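/* Worked example for the manual path: a field with DECL_FIELD_OFFSET
   4 and DECL_FIELD_BIT_OFFSET 10 decomposes (with BITS_PER_UNIT == 8)
   into byte offset 4 + 10 / 8 == 5 and residual bit offset
   10 % 8 == 2.  */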
3070 /* Return a hash value for gimple type TYPE combined with VAL.
3072 The hash value returned is equal for types considered compatible
3073 by gimple_canonical_types_compatible_p. */
3075 static hashval_t
3076 iterative_hash_canonical_type (tree type, hashval_t val)
3078 hashval_t v;
3079 void **slot;
3080 struct tree_int_map *mp, m;
3082 m.base.from = type;
3083 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
3084 && *slot)
3085 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
3087 /* Combine a few common features of types so that types are grouped into
3088 smaller sets; when searching for existing matching types to merge,
3089 only existing types having the same features as the new type will be
3090 checked. */
3091 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
3092 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
3093 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
3094 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
3096 /* Incorporate common features of numerical types. */
3097 if (INTEGRAL_TYPE_P (type)
3098 || SCALAR_FLOAT_TYPE_P (type)
3099 || FIXED_POINT_TYPE_P (type)
3100 || TREE_CODE (type) == VECTOR_TYPE
3101 || TREE_CODE (type) == COMPLEX_TYPE
3102 || TREE_CODE (type) == OFFSET_TYPE
3103 || POINTER_TYPE_P (type))
3105 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
3106 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3109 /* For pointer and reference types, fold in information about the type
3110 pointed to but do not recurse to the pointed-to type. */
3111 if (POINTER_TYPE_P (type))
3113 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
3114 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
3115 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
3116 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
3119 /* For integer types hash only the string flag. */
3120 if (TREE_CODE (type) == INTEGER_TYPE)
3121 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3123 /* For array types hash the domain bounds and the string flag. */
3124 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3126 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3127 /* OMP lowering can introduce error_mark_node in place of
3128 random local decls in types. */
3129 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
3130 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
3131 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
3132 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
3135 /* Recurse for aggregates with a single element type. */
3136 if (TREE_CODE (type) == ARRAY_TYPE
3137 || TREE_CODE (type) == COMPLEX_TYPE
3138 || TREE_CODE (type) == VECTOR_TYPE)
3139 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
3141 /* Incorporate function return and argument types. */
3142 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
3144 unsigned na;
3145 tree p;
3147 /* For method types also incorporate their parent class. */
3148 if (TREE_CODE (type) == METHOD_TYPE)
3149 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
3151 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
3153 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
3155 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
3156 na++;
3159 v = iterative_hash_hashval_t (na, v);
3162 if (RECORD_OR_UNION_TYPE_P (type))
3164 unsigned nf;
3165 tree f;
3167 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
3168 if (TREE_CODE (f) == FIELD_DECL)
3170 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
3171 nf++;
3174 v = iterative_hash_hashval_t (nf, v);
3177 /* Cache the just computed hash value. */
3178 mp = ggc_alloc_cleared_tree_int_map ();
3179 mp->base.from = type;
3180 mp->to = v;
3181 *slot = (void *) mp;
3183 return iterative_hash_hashval_t (v, val);
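/* Consequently two types that gimple_canonical_types_compatible_p
   deems compatible hash identically and land in the same bucket of
   gimple_canonical_types.  */

/* Hash function for the canonical type table.  P is a type; the hash
   cache is created lazily on first use.  */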
3186 static hashval_t
3187 gimple_canonical_type_hash (const void *p)
3189 if (canonical_type_hash_cache == NULL)
3190 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
3191 tree_int_map_eq, NULL);
3193 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
3199 /* The TYPE_CANONICAL merging machinery. It should closely resemble
3200 the middle-end types_compatible_p function. It needs to avoid
3201 claiming types are different for types that should be treated
3202 the same with respect to TBAA. Canonical types are also used
3203 for IL consistency checks via the useless_type_conversion_p
3204 predicate which does not handle all type kinds itself but falls
3205 back to pointer-comparison of TYPE_CANONICAL for aggregates
3206 for example. */
3208 /* Return true iff T1 and T2 are structurally identical for what
3209 TBAA is concerned. */
3211 static bool
3212 gimple_canonical_types_compatible_p (tree t1, tree t2)
3214 /* Before starting to set up the SCC machinery handle simple cases. */
3216 /* Check first for the obvious case of pointer identity. */
3217 if (t1 == t2)
3218 return true;
3220 /* Check that we have two types to compare. */
3221 if (t1 == NULL_TREE || t2 == NULL_TREE)
3222 return false;
3224 /* If the types have been previously registered and found equal
3225 they still are. */
3226 if (TYPE_CANONICAL (t1)
3227 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3228 return true;
3230 /* Can't be the same type if the types don't have the same code. */
3231 if (TREE_CODE (t1) != TREE_CODE (t2))
3232 return false;
3234 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3235 return false;
3237 /* Qualifiers do not matter for canonical type comparison purposes. */
3239 /* Void types and nullptr types are always the same. */
3240 if (TREE_CODE (t1) == VOID_TYPE
3241 || TREE_CODE (t1) == NULLPTR_TYPE)
3242 return true;
3244 /* Can't be the same type if they have different alignment or mode. */
3245 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3246 || TYPE_MODE (t1) != TYPE_MODE (t2))
3247 return false;
3249 /* Non-aggregate types can be handled cheaply. */
3250 if (INTEGRAL_TYPE_P (t1)
3251 || SCALAR_FLOAT_TYPE_P (t1)
3252 || FIXED_POINT_TYPE_P (t1)
3253 || TREE_CODE (t1) == VECTOR_TYPE
3254 || TREE_CODE (t1) == COMPLEX_TYPE
3255 || TREE_CODE (t1) == OFFSET_TYPE
3256 || POINTER_TYPE_P (t1))
3258 /* Can't be the same type if they have different sign or precision. */
3259 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3260 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3261 return false;
3263 if (TREE_CODE (t1) == INTEGER_TYPE
3264 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
3265 return false;
3267 /* For canonical type comparisons we do not want to build SCCs
3268 so we cannot compare pointed-to types. But we can, for now,
3269 require the same pointed-to type kind and match what
3270 useless_type_conversion_p would do. */
3271 if (POINTER_TYPE_P (t1))
3273 /* If the two pointers have different ref-all attributes,
3274 they can't be the same type. */
3275 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3276 return false;
3278 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
3279 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
3280 return false;
3282 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
3283 return false;
3285 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
3286 return false;
3289 /* Tail-recurse to components. */
3290 if (TREE_CODE (t1) == VECTOR_TYPE
3291 || TREE_CODE (t1) == COMPLEX_TYPE)
3292 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
3293 TREE_TYPE (t2));
3295 return true;
3298 /* Do type-specific comparisons. */
3299 switch (TREE_CODE (t1))
3301 case ARRAY_TYPE:
3302 /* Array types are the same if the element types are the same and
3303 the number of elements is the same. */
3304 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3305 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3306 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3307 return false;
3308 else
3310 tree i1 = TYPE_DOMAIN (t1);
3311 tree i2 = TYPE_DOMAIN (t2);
3313 /* For an incomplete external array, the type domain can be
3314 NULL_TREE. Check this condition also. */
3315 if (i1 == NULL_TREE && i2 == NULL_TREE)
3316 return true;
3317 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3318 return false;
3319 else
3321 tree min1 = TYPE_MIN_VALUE (i1);
3322 tree min2 = TYPE_MIN_VALUE (i2);
3323 tree max1 = TYPE_MAX_VALUE (i1);
3324 tree max2 = TYPE_MAX_VALUE (i2);
3326 /* The minimum/maximum values have to be the same. */
3327 if ((min1 == min2
3328 || (min1 && min2
3329 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3330 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3331 || operand_equal_p (min1, min2, 0))))
3332 && (max1 == max2
3333 || (max1 && max2
3334 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3335 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3336 || operand_equal_p (max1, max2, 0)))))
3337 return true;
3338 else
3339 return false;
3343 case METHOD_TYPE:
3344 case FUNCTION_TYPE:
3345 /* Function types are the same if the return type and argument types
3346 are the same. */
3347 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3348 return false;
3350 if (!comp_type_attributes (t1, t2))
3351 return false;
3353 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3354 return true;
3355 else
3357 tree parms1, parms2;
3359 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3360 parms1 && parms2;
3361 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3363 if (!gimple_canonical_types_compatible_p
3364 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
3365 return false;
3368 if (parms1 || parms2)
3369 return false;
3371 return true;
3374 case RECORD_TYPE:
3375 case UNION_TYPE:
3376 case QUAL_UNION_TYPE:
3378 tree f1, f2;
3380 /* For aggregate types, all the fields must be the same. */
3381 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3382 f1 || f2;
3383 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3385 /* Skip non-fields. */
3386 while (f1 && TREE_CODE (f1) != FIELD_DECL)
3387 f1 = TREE_CHAIN (f1);
3388 while (f2 && TREE_CODE (f2) != FIELD_DECL)
3389 f2 = TREE_CHAIN (f2);
3390 if (!f1 || !f2)
3391 break;
3392 /* The fields must have the same name, offset and type. */
3393 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3394 || !gimple_compare_field_offset (f1, f2)
3395 || !gimple_canonical_types_compatible_p
3396 (TREE_TYPE (f1), TREE_TYPE (f2)))
3397 return false;
3400 /* If one aggregate has more fields than the other, they
3401 are not the same. */
3402 if (f1 || f2)
3403 return false;
3405 return true;
3408 default:
3409 gcc_unreachable ();
3414 /* Returns nonzero if P1 and P2 are equal. */
3416 static int
3417 gimple_canonical_type_eq (const void *p1, const void *p2)
3419 const_tree t1 = (const_tree) p1;
3420 const_tree t2 = (const_tree) p2;
3421 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
3422 CONST_CAST_TREE (t2));
3425 /* Register type T in the global canonical type table
3426 gimple_canonical_types. If another type T', compatible with T,
3427 already existed there then return T', otherwise return T. This is used by
3428 LTO to merge identical types read from different TUs.
3430 ??? This merging does not exactly match how the tree.c middle-end
3431 functions will assign TYPE_CANONICAL when new types are created
3432 during optimization (which at least happens for pointer and array
3433 types). */
3435 tree
3436 gimple_register_canonical_type (tree t)
3438 void **slot;
3440 gcc_assert (TYPE_P (t));
3442 if (TYPE_CANONICAL (t))
3443 return TYPE_CANONICAL (t);
3445 if (gimple_canonical_types == NULL)
3446 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
3447 gimple_canonical_type_eq, 0);
3449 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
3450 if (*slot
3451 && *(tree *)slot != t)
3453 tree new_type = (tree) *((tree *) slot);
3455 TYPE_CANONICAL (t) = new_type;
3456 t = new_type;
3458 else
3460 TYPE_CANONICAL (t) = t;
3461 *slot = (void *) t;
3464 return t;
3468 /* Show statistics on references to the global canonical type tables. */
3470 void
3471 print_gimple_types_stats (const char *pfx)
3473 if (gimple_canonical_types)
3474 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3475 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3476 (long) htab_size (gimple_canonical_types),
3477 (long) htab_elements (gimple_canonical_types),
3478 (long) gimple_canonical_types->searches,
3479 (long) gimple_canonical_types->collisions,
3480 htab_collisions (gimple_canonical_types));
3481 else
3482 fprintf (stderr, "[%s] GIMPLE canonical type table is empty\n", pfx);
3483 if (canonical_type_hash_cache)
3484 fprintf (stderr, "[%s] GIMPLE canonical type hash table: size %ld, "
3485 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3486 (long) htab_size (canonical_type_hash_cache),
3487 (long) htab_elements (canonical_type_hash_cache),
3488 (long) canonical_type_hash_cache->searches,
3489 (long) canonical_type_hash_cache->collisions,
3490 htab_collisions (canonical_type_hash_cache));
3491 else
3492 fprintf (stderr, "[%s] GIMPLE canonical type hash table is empty\n", pfx);
3495 /* Free the gimple type hashtables used for LTO type merging. */
3497 void
3498 free_gimple_type_tables (void)
3500 if (gimple_canonical_types)
3502 htab_delete (gimple_canonical_types);
3503 gimple_canonical_types = NULL;
3505 if (canonical_type_hash_cache)
3507 htab_delete (canonical_type_hash_cache);
3508 canonical_type_hash_cache = NULL;
3513 /* Return a type the same as TYPE except unsigned or
3514 signed according to UNSIGNEDP. */
3516 static tree
3517 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
3519 tree type1;
3521 type1 = TYPE_MAIN_VARIANT (type);
3522 if (type1 == signed_char_type_node
3523 || type1 == char_type_node
3524 || type1 == unsigned_char_type_node)
3525 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3526 if (type1 == integer_type_node || type1 == unsigned_type_node)
3527 return unsignedp ? unsigned_type_node : integer_type_node;
3528 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
3529 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3530 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
3531 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3532 if (type1 == long_long_integer_type_node
3533 || type1 == long_long_unsigned_type_node)
3534 return unsignedp
3535 ? long_long_unsigned_type_node
3536 : long_long_integer_type_node;
3537 if (int128_integer_type_node
     && (type1 == int128_integer_type_node
         || type1 == int128_unsigned_type_node))
3538 return unsignedp
3539 ? int128_unsigned_type_node
3540 : int128_integer_type_node;
3541 #if HOST_BITS_PER_WIDE_INT >= 64
3542 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
3543 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3544 #endif
3545 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
3546 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3547 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
3548 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3549 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
3550 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3551 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
3552 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3554 #define GIMPLE_FIXED_TYPES(NAME) \
3555 if (type1 == short_ ## NAME ## _type_node \
3556 || type1 == unsigned_short_ ## NAME ## _type_node) \
3557 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
3558 : short_ ## NAME ## _type_node; \
3559 if (type1 == NAME ## _type_node \
3560 || type1 == unsigned_ ## NAME ## _type_node) \
3561 return unsignedp ? unsigned_ ## NAME ## _type_node \
3562 : NAME ## _type_node; \
3563 if (type1 == long_ ## NAME ## _type_node \
3564 || type1 == unsigned_long_ ## NAME ## _type_node) \
3565 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
3566 : long_ ## NAME ## _type_node; \
3567 if (type1 == long_long_ ## NAME ## _type_node \
3568 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
3569 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
3570 : long_long_ ## NAME ## _type_node;
3572 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
3573 if (type1 == NAME ## _type_node \
3574 || type1 == u ## NAME ## _type_node) \
3575 return unsignedp ? u ## NAME ## _type_node \
3576 : NAME ## _type_node;
3578 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
3579 if (type1 == sat_ ## short_ ## NAME ## _type_node \
3580 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
3581 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
3582 : sat_ ## short_ ## NAME ## _type_node; \
3583 if (type1 == sat_ ## NAME ## _type_node \
3584 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
3585 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
3586 : sat_ ## NAME ## _type_node; \
3587 if (type1 == sat_ ## long_ ## NAME ## _type_node \
3588 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
3589 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
3590 : sat_ ## long_ ## NAME ## _type_node; \
3591 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
3592 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
3593 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
3594 : sat_ ## long_long_ ## NAME ## _type_node;
3596 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
3597 if (type1 == sat_ ## NAME ## _type_node \
3598 || type1 == sat_ ## u ## NAME ## _type_node) \
3599 return unsignedp ? sat_ ## u ## NAME ## _type_node \
3600 : sat_ ## NAME ## _type_node;
3602 GIMPLE_FIXED_TYPES (fract);
3603 GIMPLE_FIXED_TYPES_SAT (fract);
3604 GIMPLE_FIXED_TYPES (accum);
3605 GIMPLE_FIXED_TYPES_SAT (accum);
3607 GIMPLE_FIXED_MODE_TYPES (qq);
3608 GIMPLE_FIXED_MODE_TYPES (hq);
3609 GIMPLE_FIXED_MODE_TYPES (sq);
3610 GIMPLE_FIXED_MODE_TYPES (dq);
3611 GIMPLE_FIXED_MODE_TYPES (tq);
3612 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
3613 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
3614 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
3615 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
3616 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
3617 GIMPLE_FIXED_MODE_TYPES (ha);
3618 GIMPLE_FIXED_MODE_TYPES (sa);
3619 GIMPLE_FIXED_MODE_TYPES (da);
3620 GIMPLE_FIXED_MODE_TYPES (ta);
3621 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
3622 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
3623 GIMPLE_FIXED_MODE_TYPES_SAT (da);
3624 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
3626 /* For ENUMERAL_TYPEs in C++, we must check the mode of the types, not
3627 the precision; they have precision set to match their range, but
3628 may use a wider mode to match an ABI. If we change modes, we may
3629 wind up with bad conversions. For INTEGER_TYPEs in C, we must check
3630 the precision as well, so as to yield correct results for
3631 bit-field types. C++ does not have these separate bit-field
3632 types, and producing a signed or unsigned variant of an
3633 ENUMERAL_TYPE may cause other problems as well. */
3634 if (!INTEGRAL_TYPE_P (type)
3635 || TYPE_UNSIGNED (type) == unsignedp)
3636 return type;
3638 #define TYPE_OK(node) \
3639 (TYPE_MODE (type) == TYPE_MODE (node) \
3640 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
3641 if (TYPE_OK (signed_char_type_node))
3642 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3643 if (TYPE_OK (integer_type_node))
3644 return unsignedp ? unsigned_type_node : integer_type_node;
3645 if (TYPE_OK (short_integer_type_node))
3646 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3647 if (TYPE_OK (long_integer_type_node))
3648 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3649 if (TYPE_OK (long_long_integer_type_node))
3650 return (unsignedp
3651 ? long_long_unsigned_type_node
3652 : long_long_integer_type_node);
3653 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
3654 return (unsignedp
3655 ? int128_unsigned_type_node
3656 : int128_integer_type_node);
3658 #if HOST_BITS_PER_WIDE_INT >= 64
3659 if (TYPE_OK (intTI_type_node))
3660 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3661 #endif
3662 if (TYPE_OK (intDI_type_node))
3663 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3664 if (TYPE_OK (intSI_type_node))
3665 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3666 if (TYPE_OK (intHI_type_node))
3667 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3668 if (TYPE_OK (intQI_type_node))
3669 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3671 #undef GIMPLE_FIXED_TYPES
3672 #undef GIMPLE_FIXED_MODE_TYPES
3673 #undef GIMPLE_FIXED_TYPES_SAT
3674 #undef GIMPLE_FIXED_MODE_TYPES_SAT
3675 #undef TYPE_OK
3677 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
3681 /* Return an unsigned type the same as TYPE in other respects. */
3683 tree
3684 gimple_unsigned_type (tree type)
3686 return gimple_signed_or_unsigned_type (true, type);
3690 /* Return a signed type the same as TYPE in other respects. */
3692 tree
3693 gimple_signed_type (tree type)
3695 return gimple_signed_or_unsigned_type (false, type);
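/* E.g. gimple_unsigned_type (integer_type_node) yields
   unsigned_type_node, and gimple_signed_type (unsigned_char_type_node)
   yields signed_char_type_node.  */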
3699 /* Return the type-based alias set for T, which may be an expression
3700 or a type. Return -1 if we don't do anything special. */
3702 alias_set_type
3703 gimple_get_alias_set (tree t)
3705 tree u;
3707 /* Permit type-punning when accessing a union, provided the access
3708 is directly through the union. For example, this code does not
3709 permit taking the address of a union member and then storing
3710 through it. Even the type-punning allowed here is a GCC
3711 extension, albeit a common and useful one; the C standard says
3712 that such accesses have implementation-defined behavior. */
3713 for (u = t;
3714 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3715 u = TREE_OPERAND (u, 0))
3716 if (TREE_CODE (u) == COMPONENT_REF
3717 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3718 return 0;
3720 /* That's all the expressions we handle specially. */
3721 if (!TYPE_P (t))
3722 return -1;
3724 /* For convenience, follow the C standard when dealing with
3725 character types. Any object may be accessed via an lvalue that
3726 has character type. */
3727 if (t == char_type_node
3728 || t == signed_char_type_node
3729 || t == unsigned_char_type_node)
3730 return 0;
3732 /* Allow aliasing between signed and unsigned variants of the same
3733 type. We treat the signed variant as canonical. */
3734 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
3736 tree t1 = gimple_signed_type (t);
3738 /* t1 == t can happen for boolean nodes which are always unsigned. */
3739 if (t1 != t)
3740 return get_alias_set (t1);
3743 return -1;
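/* E.g. an access of type 'unsigned int' is given the alias set of
   'int' (the signed variant is canonical), and any access of character
   type yields alias set 0, which conflicts with everything.  */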
3747 /* Data structure used to count the number of dereferences to PTR
3748 inside an expression. */
3749 struct count_ptr_d
3751 tree ptr;
3752 unsigned num_stores;
3753 unsigned num_loads;
3756 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
3757 MEM_REF nodes dereferencing the pointer passed in DATA. */
3759 static tree
3760 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
3762 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
3763 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
3765 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
3766 pointer 'ptr' is *not* dereferenced, it is simply used to compute
3767 the address of 'fld' as 'ptr + offsetof(fld)'. */
3768 if (TREE_CODE (*tp) == ADDR_EXPR)
3770 *walk_subtrees = 0;
3771 return NULL_TREE;
3774 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
3776 if (wi_p->is_lhs)
3777 count_p->num_stores++;
3778 else
3779 count_p->num_loads++;
3782 return NULL_TREE;
3785 /* Count the number of direct and indirect uses for pointer PTR in
3786 statement STMT. The number of direct uses is stored in
3787 *NUM_USES_P. Indirect references are counted separately depending
3788 on whether they are store or load operations. The counts are
3789 stored in *NUM_STORES_P and *NUM_LOADS_P. */
3791 void
3792 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
3793 unsigned *num_loads_p, unsigned *num_stores_p)
3795 ssa_op_iter i;
3796 tree use;
3798 *num_uses_p = 0;
3799 *num_loads_p = 0;
3800 *num_stores_p = 0;
3802 /* Find out the total number of uses of PTR in STMT. */
3803 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
3804 if (use == ptr)
3805 (*num_uses_p)++;
3807 /* Now count the number of indirect references to PTR. This is
3808 truly awful, but we don't have much choice. There are no parent
3809 pointers inside memory references, so an expression like
3810 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
3811 find all the indirect and direct uses of x_1 inside. The only
3812 shortcut we can take is the fact that GIMPLE only allows
3813 memory dereferences inside the expressions below. */
3814 if (is_gimple_assign (stmt)
3815 || gimple_code (stmt) == GIMPLE_RETURN
3816 || gimple_code (stmt) == GIMPLE_ASM
3817 || is_gimple_call (stmt))
3819 struct walk_stmt_info wi;
3820 struct count_ptr_d count;
3822 count.ptr = ptr;
3823 count.num_stores = 0;
3824 count.num_loads = 0;
3826 memset (&wi, 0, sizeof (wi));
3827 wi.info = &count;
3828 walk_gimple_op (stmt, count_ptr_derefs, &wi);
3830 *num_stores_p = count.num_stores;
3831 *num_loads_p = count.num_loads;
3834 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
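/* Worked example: for '*x_1 = foo (x_1, *x_1)' this computes
   *NUM_USES_P == 3 (each appearance of x_1), *NUM_STORES_P == 1 (the
   store through the LHS) and *NUM_LOADS_P == 1 (the load in the
   argument list).  */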
3837 /* From a tree operand OP return the base of a load or store operation
3838 or NULL_TREE if OP is not a load or a store. */
3840 static tree
3841 get_base_loadstore (tree op)
3843 while (handled_component_p (op))
3844 op = TREE_OPERAND (op, 0);
3845 if (DECL_P (op)
3846 || INDIRECT_REF_P (op)
3847 || TREE_CODE (op) == MEM_REF
3848 || TREE_CODE (op) == TARGET_MEM_REF)
3849 return op;
3850 return NULL_TREE;
3853 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3854 VISIT_ADDR if non-NULL on loads, stores and address-taken operands
3855 passing the STMT, the base of the operand and DATA to it. The base
3856 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3857 or the argument of an address expression.
3858 Returns the results of these callbacks or'ed. */
3860 bool
3861 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3862 bool (*visit_load)(gimple, tree, void *),
3863 bool (*visit_store)(gimple, tree, void *),
3864 bool (*visit_addr)(gimple, tree, void *))
3866 bool ret = false;
3867 unsigned i;
3868 if (gimple_assign_single_p (stmt))
3870 tree lhs, rhs;
3871 if (visit_store)
3873 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3874 if (lhs)
3875 ret |= visit_store (stmt, lhs, data);
3877 rhs = gimple_assign_rhs1 (stmt);
3878 while (handled_component_p (rhs))
3879 rhs = TREE_OPERAND (rhs, 0);
3880 if (visit_addr)
3882 if (TREE_CODE (rhs) == ADDR_EXPR)
3883 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3884 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3885 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3886 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3887 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3888 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3889 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3890 0), data);
3891 else if (TREE_CODE (rhs) == CONSTRUCTOR)
3893 unsigned int ix;
3894 tree val;
3896 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
3897 if (TREE_CODE (val) == ADDR_EXPR)
3898 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
3899 else if (TREE_CODE (val) == OBJ_TYPE_REF
3900 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
3901 ret |= visit_addr (stmt,
3902 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
3903 0), data);
3905 lhs = gimple_assign_lhs (stmt);
3906 if (TREE_CODE (lhs) == TARGET_MEM_REF
3907 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3908 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3910 if (visit_load)
3912 rhs = get_base_loadstore (rhs);
3913 if (rhs)
3914 ret |= visit_load (stmt, rhs, data);
3917 else if (visit_addr
3918 && (is_gimple_assign (stmt)
3919 || gimple_code (stmt) == GIMPLE_COND))
3921 for (i = 0; i < gimple_num_ops (stmt); ++i)
3923 tree op = gimple_op (stmt, i);
3924 if (op == NULL_TREE)
3926 else if (TREE_CODE (op) == ADDR_EXPR)
3927 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3928 /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a comparison
3929 tree with two operands. */
3930 else if (i == 1 && COMPARISON_CLASS_P (op))
3932 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
3933 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
3934 0), data);
3935 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
3936 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
3937 0), data);
3941 else if (is_gimple_call (stmt))
3943 if (visit_store)
3945 tree lhs = gimple_call_lhs (stmt);
3946 if (lhs)
3948 lhs = get_base_loadstore (lhs);
3949 if (lhs)
3950 ret |= visit_store (stmt, lhs, data);
3953 if (visit_load || visit_addr)
3954 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3956 tree rhs = gimple_call_arg (stmt, i);
3957 if (visit_addr
3958 && TREE_CODE (rhs) == ADDR_EXPR)
3959 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3960 else if (visit_load)
3962 rhs = get_base_loadstore (rhs);
3963 if (rhs)
3964 ret |= visit_load (stmt, rhs, data);
3967 if (visit_addr
3968 && gimple_call_chain (stmt)
3969 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3970 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3971 data);
3972 if (visit_addr
3973 && gimple_call_return_slot_opt_p (stmt)
3974 && gimple_call_lhs (stmt) != NULL_TREE
3975 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3976 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
3978 else if (gimple_code (stmt) == GIMPLE_ASM)
3980 unsigned noutputs;
3981 const char *constraint;
3982 const char **oconstraints;
3983 bool allows_mem, allows_reg, is_inout;
3984 noutputs = gimple_asm_noutputs (stmt);
3985 oconstraints = XALLOCAVEC (const char *, noutputs);
3986 if (visit_store || visit_addr)
3987 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3989 tree link = gimple_asm_output_op (stmt, i);
3990 tree op = get_base_loadstore (TREE_VALUE (link));
3991 if (op && visit_store)
3992 ret |= visit_store (stmt, op, data);
3993 if (visit_addr)
3995 constraint = TREE_STRING_POINTER
3996 (TREE_VALUE (TREE_PURPOSE (link)));
3997 oconstraints[i] = constraint;
3998 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3999 &allows_reg, &is_inout);
4000 if (op && !allows_reg && allows_mem)
4001 ret |= visit_addr (stmt, op, data);
4004 if (visit_load || visit_addr)
4005 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
4007 tree link = gimple_asm_input_op (stmt, i);
4008 tree op = TREE_VALUE (link);
4009 if (visit_addr
4010 && TREE_CODE (op) == ADDR_EXPR)
4011 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4012 else if (visit_load || visit_addr)
4014 op = get_base_loadstore (op);
4015 if (op)
4017 if (visit_load)
4018 ret |= visit_load (stmt, op, data);
4019 if (visit_addr)
4021 constraint = TREE_STRING_POINTER
4022 (TREE_VALUE (TREE_PURPOSE (link)));
4023 parse_input_constraint (&constraint, 0, 0, noutputs,
4024 0, oconstraints,
4025 &allows_mem, &allows_reg);
4026 if (!allows_reg && allows_mem)
4027 ret |= visit_addr (stmt, op, data);
4033 else if (gimple_code (stmt) == GIMPLE_RETURN)
4035 tree op = gimple_return_retval (stmt);
4036 if (op)
4038 if (visit_addr
4039 && TREE_CODE (op) == ADDR_EXPR)
4040 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4041 else if (visit_load)
4043 op = get_base_loadstore (op);
4044 if (op)
4045 ret |= visit_load (stmt, op, data);
4049 else if (visit_addr
4050 && gimple_code (stmt) == GIMPLE_PHI)
4052 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
4054 tree op = PHI_ARG_DEF (stmt, i);
4055 if (TREE_CODE (op) == ADDR_EXPR)
4056 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4060 return ret;
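
/* As a usage sketch (not part of this file): the walker is driven by
   small visitor callbacks matching the signatures above.  The helper
   names below are hypothetical.  */
#if 0
/* Count the load bases the walker reports for STMT.  DATA points at
   an unsigned counter; returning false lets the walk continue and
   keeps the accumulated return value false.  */
static bool
count_load_1 (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
              void *data)
{
  ++*(unsigned *) data;
  return false;
}

/* Return the number of loads walk_stmt_load_store_addr_ops finds
   in STMT.  */
static unsigned
count_loads (gimple stmt)
{
  unsigned n = 0;
  walk_stmt_load_store_addr_ops (stmt, &n, count_load_1, NULL, NULL);
  return n;
}
#endif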
/* Like walk_stmt_load_store_addr_ops but with NULL visit_addr.  IPA-CP
   should make a faster clone for this case.  */

bool
walk_stmt_load_store_ops (gimple stmt, void *data,
                          bool (*visit_load)(gimple, tree, void *),
                          bool (*visit_store)(gimple, tree, void *))
{
  return walk_stmt_load_store_addr_ops (stmt, data,
                                        visit_load, visit_store, NULL);
}
/* Helper for gimple_ior_addresses_taken: record in the bitmap pointed
   to by DATA the UID of the decl, if any, underlying ADDR.  */

static bool
gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
                              tree addr, void *data)
{
  bitmap addresses_taken = (bitmap) data;
  addr = get_base_address (addr);
  if (addr
      && DECL_P (addr))
    {
      bitmap_set_bit (addresses_taken, DECL_UID (addr));
      return true;
    }
  return false;
}
/* Set the bit for the UID of each decl that has its address taken
   in STMT in the ADDRESSES_TAKEN bitmap.  Return true if there
   were any in this stmt.  */

bool
gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
{
  return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
                                        gimple_ior_addresses_taken_1);
}
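
/* As an illustrative sketch (BB_ADDRESSES_TAKEN is a hypothetical
   helper, not an existing GCC function): a caller can accumulate the
   address-taken set over a whole basic block with the usual
   gimple_stmt_iterator idiom.  */
#if 0
static void
bb_addresses_taken (basic_block bb, bitmap addresses_taken)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));
}
#endif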
/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
        {
          dmgl_opts = DMGL_VERBOSE
                      | DMGL_ANSI
                      | DMGL_GNU_V3
                      | DMGL_RET_POSTFIX;
          if (TREE_CODE (decl) == FUNCTION_DECL)
            dmgl_opts |= DMGL_PARAMS;
        }

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}
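
/* A hypothetical debugging helper, shown only as a sketch of how the
   VERBOSITY argument is used: a value of 2 or more requests the
   verbose demangling selected above.  */
#if 0
static void
debug_print_decl_name (tree decl)
{
  const char *name = gimple_decl_printable_name (decl, 2);
  fprintf (stderr, "%s\n", name ? name : "<unnamed>");
}
#endif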
/* Return true when STMT is a call to the built-in function CODE.  */

bool
gimple_call_builtin_p (gimple stmt, enum built_in_function code)
{
  tree fndecl;
  return (is_gimple_call (stmt)
          && (fndecl = gimple_call_fndecl (stmt)) != NULL
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fndecl) == code);
}
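
/* For example, a pass that wants to recognize direct calls to the
   normal memcpy builtin could test as below; STMT_IS_MEMCPY_P is a
   hypothetical name, not an existing GCC helper.  */
#if 0
static bool
stmt_is_memcpy_p (gimple stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}
#endif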
/* Return true if STMT clobbers memory.  STMT is required to be a
   GIMPLE_ASM.  */

bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
{
  unsigned i;

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    {
      tree op = gimple_asm_clobber_op (stmt, i);
      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
        return true;
    }

  return false;
}
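
/* An asm that lists "memory" among its clobbers must be treated as a
   barrier by memory optimizations.  A caller typically guards the
   predicate with a check of the statement code, as in this
   hypothetical sketch.  */
#if 0
static bool
stmt_is_memory_barrier_p (gimple stmt)
{
  return (gimple_code (stmt) == GIMPLE_ASM
          && gimple_asm_clobbers_memory_p (stmt));
}
#endif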
#include "gt-gimple.h"