/* Gimple IR support functions.

   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"
/* Global canonical type table.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
	(HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)	GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter, can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}
/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}
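
/* Illustrative sketch (not part of the original file): because every
   tuple with operands ends in a one-element tree array, a statement
   with N operands needs the base structure plus N - 1 extra tree
   slots.  For example, a 3-operand GIMPLE_ASSIGN is allocated as

     size = gimple_size (GIMPLE_ASSIGN) + 2 * sizeof (tree);

   which covers the first operand inside the struct and two more
   operands laid out immediately after it.  */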
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the subcode
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
		            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}
/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}
/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}
/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, vec<tree> args)
{
  unsigned i;
  unsigned nargs = args.length ();
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, args[i]);

  return call;
}
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
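
/* Usage sketch (illustrative, not part of the original file): assuming
   FNDECL is a FUNCTION_DECL and A, B and LHS are valid gimple operands,
   build `lhs = fndecl (a, b)' and append it to a sequence:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, lhs);
     gimple_seq_add_stmt (&seq, call);
*/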
/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}
/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}
/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
{
  unsigned i, nargs;
  gimple call;

  nargs = args.length ();
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, args[i]);

  return call;
}
/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}
/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
			 tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
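
/* Illustrative examples (not part of the original file) of how the RHS
   classes above decompose expressions, assuming A, B and C are gimple
   values:

     a + b  (PLUS_EXPR)        GIMPLE_BINARY_RHS:   op1 = a, op2 = b
     (int) a  (NOP_EXPR)       GIMPLE_UNARY_RHS:    op1 = a
     FMA_EXPR <a, b, c>        GIMPLE_TERNARY_RHS:  op1 = a, op2 = b, op3 = c
     a  (SSA_NAME)             GIMPLE_SINGLE_RHS:   op1 = a  */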
/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
                                       PASS_MEM_STAT);
}
/* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
   OP1, OP2 and OP3.  If OP2 and OP3 are NULL then SUBCODE must be of
   class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
			      tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 to 3 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
  			          PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}

gimple
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
			      tree op2 MEM_STAT_DECL)
{
  return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
				       PASS_MEM_STAT);
}
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
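
/* Usage sketch (illustrative, not part of the original file): within a
   gimplification context, emit `dst = src * 2' even though the RHS is
   an arbitrary tree expression; gimplify_and_add flattens it:

     gimple_seq seq = NULL;
     tree two = build_int_cst (TREE_TYPE (src), 2);
     gimple g = gimplify_assign (dst,
				 build2 (MULT_EXPR, TREE_TYPE (src),
					 src, two),
				 &seq);
*/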
/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
		   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
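
/* Usage sketch (illustrative, not part of the original file): emit the
   lowered form of `if (a < b) goto t; else goto f;', assuming LABEL_T
   and LABEL_F are LABEL_DECLs:

     gimple cond = gimple_build_cond (LT_EXPR, a, b, label_t, label_f);
     gimple_seq_add_stmt (&seq, cond);
     gimple_seq_add_stmt (&seq, gimple_build_label (label_t));
*/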
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}
/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}
/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}
/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of label operands.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
			     ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}
/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.
   The input, output, clobber and label counts are taken from the
   lengths of these vectors.  */

gimple
gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
                      vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
		      vec<tree, va_gc> *labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
			  vec_safe_length (inputs),
			  vec_safe_length (outputs),
			  vec_safe_length (clobbers),
			  vec_safe_length (labels));

  for (i = 0; i < vec_safe_length (inputs); i++)
    gimple_asm_set_input_op (p, i, (*inputs)[i]);

  for (i = 0; i < vec_safe_length (outputs); i++)
    gimple_asm_set_output_op (p, i, (*outputs)[i]);

  for (i = 0; i < vec_safe_length (clobbers); i++)
    gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);

  for (i = 0; i < vec_safe_length (labels); i++)
    gimple_asm_set_label_op (p, i, (*labels)[i]);

  return p;
}
/* Build a GIMPLE_CATCH statement.

  TYPES are the catch types.
  HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}
/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}
/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
    		  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}
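
/* Usage sketch (illustrative, not part of the original file): wrap
   BODY_SEQ in a try/finally so CLEANUP_SEQ runs on every exit path:

     gimple try_stmt = gimple_build_try (body_seq, cleanup_seq,
					 GIMPLE_TRY_FINALLY);
     gimple_seq_add_stmt (&seq, try_stmt);
*/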
/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}
/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}
/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gcc_checking_assert (default_label);
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
				    1 + 1 + nlabels);
  gimple_switch_set_index (p, index);
  gimple_switch_set_default_label (p, default_label);
  return p;
}

/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch (tree index, tree default_label, vec<tree> args)
{
  unsigned i, nlabels = args.length ();

  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + 1, args[i]);

  return p;
}
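
/* Usage sketch (illustrative, not part of the original file): build
   `switch (idx) { case 0: goto l0; default: goto ld; }'.  The label
   operands are CASE_LABEL_EXPRs, created here with build_case_label;
   L0_DECL and LD_DECL are assumed to be existing LABEL_DECLs:

     tree c0 = build_case_label (integer_zero_node, NULL_TREE, l0_decl);
     tree cd = build_case_label (NULL_TREE, NULL_TREE, ld_decl);
     vec<tree> cases = vNULL;
     cases.safe_push (c0);
     gimple sw = gimple_build_switch (idx, cd, cases);
*/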
/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}
/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
				     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
					 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
					 PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    gimple_set_location (p, gimple_location (stmt));

  return p;
}
/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   KIND is the `for' variant.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
		      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  gimple_omp_for_set_kind (p, kind);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}
/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
			   tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}
/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
		       tree data_arg, tree copy_fn, tree arg_size,
		       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}
/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_TASKGROUP statement.

   BODY is the sequence of statements to be executed by the taskgroup
   construct.  */

gimple
gimple_build_omp_taskgroup (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be
   executed in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}
/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}
/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_TARGET statement.

   BODY is the sequence of statements that will be executed.
   KIND is the kind of target region.
   CLAUSES are any of the OMP target construct's clauses.  */

gimple
gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_target_set_clauses (p, clauses);
  gimple_omp_target_set_kind (p, kind);

  return p;
}
/* Build a GIMPLE_OMP_TEAMS statement.

   BODY is the sequence of statements that will be executed.
   CLAUSES are any of the OMP teams construct's clauses.  */

gimple
gimple_build_omp_teams (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_teams_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}
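
/* Illustrative sketch (not part of the original file): the lowering of
   `#pragma omp atomic' on `x = x + 1' brackets the update with a
   load/store pair, roughly:

     gimple_seq_add_stmt (&seq, gimple_build_omp_atomic_load (tmp, x));
     ... tmp2 = tmp + 1 ...
     gimple_seq_add_stmt (&seq, gimple_build_omp_atomic_store (tmp2));
*/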
/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICT is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;
  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}
/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
	&& !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}
/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}
/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
		     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;

	  return wi->removed_stmt ? NULL : gsi_stmt (gsi);
	}

      if (!wi->removed_stmt)
	gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}
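
/* Usage sketch (illustrative, not part of the original file): count the
   call statements in SEQ with a statement callback.  The callback
   signature matches walk_stmt_fn; WI->INFO carries user data:

     static tree
     count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		     struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
	 ++*(unsigned *) wi->info;
       *handled_ops_p = true;   (skip operand walking)
       return NULL_TREE;        (NULL means keep walking)
     }

     unsigned n = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &n;
     walk_gimple_seq (seq, count_calls_cb, NULL, &wi);
*/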
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
	                       &is_inout);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	{
	  wi->val_only = (allows_reg || !allows_mem);
	  /* Although input "m" is not really a LHS, we need an lvalue.  */
	  wi->is_lhs = !wi->val_only;
	}
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  return NULL_TREE;
}
/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	}

      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
          /* If the RHS is of a non-renamable type or is a register variable,
	     we may use a COMPONENT_REF on the LHS.  */
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  wi->val_only
	    = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
	      || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  if (wi)
	    wi->val_only
	      = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      if (gimple_call_lhs (stmt))
	{
	  if (wi)
	    {
	      wi->is_lhs = true;
	      wi->val_only
		= is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
	    }

	  ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
	  if (ret)
	    return ret;
	}

      if (wi)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	}
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
	  	       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
	  	       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TARGET:
      ret = walk_tree (gimple_omp_target_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TEAMS:
      ret = walk_tree (gimple_omp_teams_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_RETURN:
      ret = walk_tree (gimple_omp_return_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
	input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
	return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
				 callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
			         callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
			         callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
			         callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
	                         wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
	                         callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
		                 callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
			         callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
			         callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
			         callback_stmt, callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
	 with it, then it does not make sense for it to receive a
	 GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}
/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}
/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
	    && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
	    || (gimple_call_fndecl (c1)
		&& gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}
/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}
/* Return the "fn spec" string for call STMT.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}
/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}
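
/* Illustrative sketch (not part of the original file): for a function
   type carrying attribute "fn spec" with string ".R.", character 0
   describes the return value and character 1 + I describes argument I,
   so for a call STMT to such a function:

     gimple_call_arg_flags (stmt, 0)
       == (EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE)
     gimple_call_arg_flags (stmt, 1) == 0
*/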
/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && is_gimple_val (gimple_op (gs, 1)));
}


/* Return true if GS is a SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
	  && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}
/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  unsigned old_len = vec_safe_length (label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map, new_len);
	    }
	}

      (*label_to_block_map)[uid] = bb;
    }
}
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}
2160 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2161 operands OP1, OP2 and OP3.
2163 NOTE: The statement pointed-to by GSI may be reallocated if it
2164 did not have enough operand slots. */
2166 void
2167 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2168 tree op1, tree op2, tree op3)
2170 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2171 gimple stmt = gsi_stmt (*gsi);
2173 /* If the new CODE needs more operands, allocate a new statement. */
2174 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2176 tree lhs = gimple_assign_lhs (stmt);
2177 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2178 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2179 gimple_init_singleton (new_stmt);
2180 gsi_replace (gsi, new_stmt, true);
2181 stmt = new_stmt;
2183 /* The LHS needs to be reset as this also changes the SSA name
2184 on the LHS. */
2185 gimple_assign_set_lhs (stmt, lhs);
2188 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2189 gimple_set_subcode (stmt, code);
2190 gimple_assign_set_rhs1 (stmt, op1);
2191 if (new_rhs_ops > 1)
2192 gimple_assign_set_rhs2 (stmt, op2);
2193 if (new_rhs_ops > 2)
2194 gimple_assign_set_rhs3 (stmt, op3);
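/* An illustrative sketch (not compiled): a pass holding an iterator GSI
   on an assignment can switch its RHS to a binary PLUS_EXPR of two
   gimple values like so.  GSI, A and B are hypothetical names from the
   surrounding pass, and the two-operand wrapper
   gimple_assign_set_rhs_with_ops is the gimple.h convenience form of
   the function above.  Because the tuple may be reallocated, the
   statement must be re-fetched from GSI afterwards.  */
#if 0
gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, b);
stmt = gsi_stmt (gsi);	/* Re-fetch; the tuple may have moved.  */
update_stmt (stmt);
#endif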
2198 /* Return the LHS of a statement that performs an assignment,
2199 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2200 for a call to a function that returns no value, or for a
2201 statement other than an assignment or a call. */
2203 tree
2204 gimple_get_lhs (const_gimple stmt)
2206 enum gimple_code code = gimple_code (stmt);
2208 if (code == GIMPLE_ASSIGN)
2209 return gimple_assign_lhs (stmt);
2210 else if (code == GIMPLE_CALL)
2211 return gimple_call_lhs (stmt);
2212 else
2213 return NULL_TREE;
2217 /* Set the LHS of a statement that performs an assignment,
2218 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2220 void
2221 gimple_set_lhs (gimple stmt, tree lhs)
2223 enum gimple_code code = gimple_code (stmt);
2225 if (code == GIMPLE_ASSIGN)
2226 gimple_assign_set_lhs (stmt, lhs);
2227 else if (code == GIMPLE_CALL)
2228 gimple_call_set_lhs (stmt, lhs);
2229 else
2230 gcc_unreachable ();
2234 /* Return a deep copy of statement STMT. All the operands from STMT
2235 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2236 and VUSE operand arrays are set to empty in the new copy. The new
2237 copy isn't part of any sequence. */
2239 gimple
2240 gimple_copy (gimple stmt)
2242 enum gimple_code code = gimple_code (stmt);
2243 unsigned num_ops = gimple_num_ops (stmt);
2244 gimple copy = gimple_alloc (code, num_ops);
2245 unsigned i;
2247 /* Shallow copy all the fields from STMT. */
2248 memcpy (copy, stmt, gimple_size (code));
2249 gimple_init_singleton (copy);
2251 /* If STMT has sub-statements, deep-copy them as well. */
2252 if (gimple_has_substatements (stmt))
2254 gimple_seq new_seq;
2255 tree t;
2257 switch (gimple_code (stmt))
2259 case GIMPLE_BIND:
2260 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2261 gimple_bind_set_body (copy, new_seq);
2262 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2263 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2264 break;
2266 case GIMPLE_CATCH:
2267 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2268 gimple_catch_set_handler (copy, new_seq);
2269 t = unshare_expr (gimple_catch_types (stmt));
2270 gimple_catch_set_types (copy, t);
2271 break;
2273 case GIMPLE_EH_FILTER:
2274 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2275 gimple_eh_filter_set_failure (copy, new_seq);
2276 t = unshare_expr (gimple_eh_filter_types (stmt));
2277 gimple_eh_filter_set_types (copy, t);
2278 break;
2280 case GIMPLE_EH_ELSE:
2281 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2282 gimple_eh_else_set_n_body (copy, new_seq);
2283 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2284 gimple_eh_else_set_e_body (copy, new_seq);
2285 break;
2287 case GIMPLE_TRY:
2288 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2289 gimple_try_set_eval (copy, new_seq);
2290 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2291 gimple_try_set_cleanup (copy, new_seq);
2292 break;
2294 case GIMPLE_OMP_FOR:
2295 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2296 gimple_omp_for_set_pre_body (copy, new_seq);
2297 t = unshare_expr (gimple_omp_for_clauses (stmt));
2298 gimple_omp_for_set_clauses (copy, t);
2299 copy->gimple_omp_for.iter
2300 = ggc_alloc_vec_gimple_omp_for_iter
2301 (gimple_omp_for_collapse (stmt));
2302 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2304 gimple_omp_for_set_cond (copy, i,
2305 gimple_omp_for_cond (stmt, i));
2306 gimple_omp_for_set_index (copy, i,
2307 gimple_omp_for_index (stmt, i));
2308 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2309 gimple_omp_for_set_initial (copy, i, t);
2310 t = unshare_expr (gimple_omp_for_final (stmt, i));
2311 gimple_omp_for_set_final (copy, i, t);
2312 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2313 gimple_omp_for_set_incr (copy, i, t);
2315 goto copy_omp_body;
2317 case GIMPLE_OMP_PARALLEL:
2318 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2319 gimple_omp_parallel_set_clauses (copy, t);
2320 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2321 gimple_omp_parallel_set_child_fn (copy, t);
2322 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2323 gimple_omp_parallel_set_data_arg (copy, t);
2324 goto copy_omp_body;
2326 case GIMPLE_OMP_TASK:
2327 t = unshare_expr (gimple_omp_task_clauses (stmt));
2328 gimple_omp_task_set_clauses (copy, t);
2329 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2330 gimple_omp_task_set_child_fn (copy, t);
2331 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2332 gimple_omp_task_set_data_arg (copy, t);
2333 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2334 gimple_omp_task_set_copy_fn (copy, t);
2335 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2336 gimple_omp_task_set_arg_size (copy, t);
2337 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2338 gimple_omp_task_set_arg_align (copy, t);
2339 goto copy_omp_body;
2341 case GIMPLE_OMP_CRITICAL:
2342 t = unshare_expr (gimple_omp_critical_name (stmt));
2343 gimple_omp_critical_set_name (copy, t);
2344 goto copy_omp_body;
2346 case GIMPLE_OMP_SECTIONS:
2347 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2348 gimple_omp_sections_set_clauses (copy, t);
2349 t = unshare_expr (gimple_omp_sections_control (stmt));
2350 gimple_omp_sections_set_control (copy, t);
2351 /* FALLTHRU */
2353 case GIMPLE_OMP_SINGLE:
2354 case GIMPLE_OMP_TARGET:
2355 case GIMPLE_OMP_TEAMS:
2356 case GIMPLE_OMP_SECTION:
2357 case GIMPLE_OMP_MASTER:
2358 case GIMPLE_OMP_TASKGROUP:
2359 case GIMPLE_OMP_ORDERED:
2360 copy_omp_body:
2361 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2362 gimple_omp_set_body (copy, new_seq);
2363 break;
2365 case GIMPLE_TRANSACTION:
2366 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2367 gimple_transaction_set_body (copy, new_seq);
2368 break;
2370 case GIMPLE_WITH_CLEANUP_EXPR:
2371 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2372 gimple_wce_set_cleanup (copy, new_seq);
2373 break;
2375 default:
2376 gcc_unreachable ();
2380 /* Make copy of operands. */
2381 for (i = 0; i < num_ops; i++)
2382 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2384 if (gimple_has_mem_ops (stmt))
2386 gimple_set_vdef (copy, gimple_vdef (stmt));
2387 gimple_set_vuse (copy, gimple_vuse (stmt));
2390 /* Clear out SSA operand vectors on COPY. */
2391 if (gimple_has_ops (stmt))
2393 gimple_set_use_ops (copy, NULL);
2395 /* SSA operands need to be updated. */
2396 gimple_set_modified (copy, true);
2399 return copy;
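/* An illustrative sketch (not compiled): duplicating a statement and
   inserting the copy right after the original.  GSI is a hypothetical
   iterator pointing at STMT.  The copy starts out as a singleton with
   unshared operands and cleared SSA operand caches, and is marked
   modified so the operand scanner will revisit it.  */
#if 0
gimple dup = gimple_copy (stmt);
gsi_insert_after (&gsi, dup, GSI_SAME_STMT);
update_stmt (dup);
#endif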
2403 /* Return true if statement S has side-effects. We consider a
2404 statement to have side effects if:
2406 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2407 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2409 bool
2410 gimple_has_side_effects (const_gimple s)
2412 if (is_gimple_debug (s))
2413 return false;
2415 /* We don't have to scan the operands to check for
2416 volatile arguments; the gimple_has_volatile_ops flag
2417 already reflects any volatile operands. */
2418 if (gimple_has_volatile_ops (s))
2419 return true;
2421 if (gimple_code (s) == GIMPLE_ASM
2422 && gimple_asm_volatile_p (s))
2423 return true;
2425 if (is_gimple_call (s))
2427 int flags = gimple_call_flags (s);
2429 /* An infinite loop is considered a side effect. */
2430 if (!(flags & (ECF_CONST | ECF_PURE))
2431 || (flags & ECF_LOOPING_CONST_OR_PURE))
2432 return true;
2434 return false;
2437 return false;
2440 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2441 Return true if S can trap. When INCLUDE_MEM is true, check whether
2442 the memory operations could trap. When INCLUDE_STORES is true and
2443 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2445 bool
2446 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2448 tree t, div = NULL_TREE;
2449 enum tree_code op;
2451 if (include_mem)
2453 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2455 for (i = start; i < gimple_num_ops (s); i++)
2456 if (tree_could_trap_p (gimple_op (s, i)))
2457 return true;
2460 switch (gimple_code (s))
2462 case GIMPLE_ASM:
2463 return gimple_asm_volatile_p (s);
2465 case GIMPLE_CALL:
2466 t = gimple_call_fndecl (s);
2467 /* Assume that calls to weak functions may trap. */
2468 if (!t || !DECL_P (t) || DECL_WEAK (t))
2469 return true;
2470 return false;
2472 case GIMPLE_ASSIGN:
2473 t = gimple_expr_type (s);
2474 op = gimple_assign_rhs_code (s);
2475 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2476 div = gimple_assign_rhs2 (s);
2477 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2478 (INTEGRAL_TYPE_P (t)
2479 && TYPE_OVERFLOW_TRAPS (t)),
2480 div));
2482 default:
2483 break;
2486 return false;
2489 /* Return true if statement S can trap. */
2491 bool
2492 gimple_could_trap_p (gimple s)
2494 return gimple_could_trap_p_1 (s, true, true);
2497 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2499 bool
2500 gimple_assign_rhs_could_trap_p (gimple s)
2502 gcc_assert (is_gimple_assign (s));
2503 return gimple_could_trap_p_1 (s, true, false);
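/* An illustrative sketch (not compiled): a code motion pass would
   typically use the predicates above to guard a transformation,
   e.g. refusing to hoist a statement whose RHS may trap.  */
#if 0
if (gimple_assign_rhs_could_trap_p (stmt))
  return false;	/* E.g. a division whose divisor may be zero.  */
#endif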
2507 /* Print statistics for the GIMPLE statements generated. */
2509 void
2510 dump_gimple_statistics (void)
2512 int i, total_tuples = 0, total_bytes = 0;
2514 if (! GATHER_STATISTICS)
2516 fprintf (stderr, "No gimple statistics\n");
2517 return;
2520 fprintf (stderr, "\nGIMPLE statements\n");
2521 fprintf (stderr, "Kind Stmts Bytes\n");
2522 fprintf (stderr, "---------------------------------------\n");
2523 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2525 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2526 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2527 total_tuples += gimple_alloc_counts[i];
2528 total_bytes += gimple_alloc_sizes[i];
2530 fprintf (stderr, "---------------------------------------\n");
2531 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2532 fprintf (stderr, "---------------------------------------\n");
2536 /* Return the number of operands needed on the RHS of a GIMPLE
2537 assignment for an expression with tree code CODE. */
2539 unsigned
2540 get_gimple_rhs_num_ops (enum tree_code code)
2542 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2544 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2545 return 1;
2546 else if (rhs_class == GIMPLE_BINARY_RHS)
2547 return 2;
2548 else if (rhs_class == GIMPLE_TERNARY_RHS)
2549 return 3;
2550 else
2551 gcc_unreachable ();
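/* For example, PLUS_EXPR is in GIMPLE_BINARY_RHS and needs 2 operands,
   NEGATE_EXPR is in GIMPLE_UNARY_RHS and needs 1, COND_EXPR is in
   GIMPLE_TERNARY_RHS and needs 3, and SSA_NAME is in GIMPLE_SINGLE_RHS
   and needs 1, following gimple_rhs_class_table below.  */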
2554 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2555 (unsigned char) \
2556 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2557 : ((TYPE) == tcc_binary \
2558 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2559 : ((TYPE) == tcc_constant \
2560 || (TYPE) == tcc_declaration \
2561 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2562 : ((SYM) == TRUTH_AND_EXPR \
2563 || (SYM) == TRUTH_OR_EXPR \
2564 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2565 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2566 : ((SYM) == COND_EXPR \
2567 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2568 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2569 || (SYM) == DOT_PROD_EXPR \
2570 || (SYM) == REALIGN_LOAD_EXPR \
2571 || (SYM) == VEC_COND_EXPR \
2572 || (SYM) == VEC_PERM_EXPR \
2573 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2574 : ((SYM) == CONSTRUCTOR \
2575 || (SYM) == OBJ_TYPE_REF \
2576 || (SYM) == ASSERT_EXPR \
2577 || (SYM) == ADDR_EXPR \
2578 || (SYM) == WITH_SIZE_EXPR \
2579 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2580 : GIMPLE_INVALID_RHS),
2581 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2583 const unsigned char gimple_rhs_class_table[] = {
2584 #include "all-tree.def"
2587 #undef DEFTREECODE
2588 #undef END_OF_BASE_TREE_CODES
2590 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2592 /* Validation of GIMPLE expressions. */
2594 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2596 bool
2597 is_gimple_lvalue (tree t)
2599 return (is_gimple_addressable (t)
2600 || TREE_CODE (t) == WITH_SIZE_EXPR
2601 /* These are complex lvalues, but don't have addresses, so they
2602 go here. */
2603 || TREE_CODE (t) == BIT_FIELD_REF);
2606 /* Return true if T is a GIMPLE condition. */
2608 bool
2609 is_gimple_condexpr (tree t)
2611 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2612 && !tree_could_throw_p (t)
2613 && is_gimple_val (TREE_OPERAND (t, 0))
2614 && is_gimple_val (TREE_OPERAND (t, 1))));
2617 /* Return true if T is something whose address can be taken. */
2619 bool
2620 is_gimple_addressable (tree t)
2622 return (is_gimple_id (t) || handled_component_p (t)
2623 || TREE_CODE (t) == MEM_REF);
2626 /* Return true if T is a valid gimple constant. */
2628 bool
2629 is_gimple_constant (const_tree t)
2631 switch (TREE_CODE (t))
2633 case INTEGER_CST:
2634 case REAL_CST:
2635 case FIXED_CST:
2636 case STRING_CST:
2637 case COMPLEX_CST:
2638 case VECTOR_CST:
2639 return true;
2641 default:
2642 return false;
2646 /* Return true if T is a gimple address. */
2648 bool
2649 is_gimple_address (const_tree t)
2651 tree op;
2653 if (TREE_CODE (t) != ADDR_EXPR)
2654 return false;
2656 op = TREE_OPERAND (t, 0);
2657 while (handled_component_p (op))
2659 if ((TREE_CODE (op) == ARRAY_REF
2660 || TREE_CODE (op) == ARRAY_RANGE_REF)
2661 && !is_gimple_val (TREE_OPERAND (op, 1)))
2662 return false;
2664 op = TREE_OPERAND (op, 0);
2667 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2668 return true;
2670 switch (TREE_CODE (op))
2672 case PARM_DECL:
2673 case RESULT_DECL:
2674 case LABEL_DECL:
2675 case FUNCTION_DECL:
2676 case VAR_DECL:
2677 case CONST_DECL:
2678 return true;
2680 default:
2681 return false;
2685 /* Return true if T is a gimple invariant address. */
2687 bool
2688 is_gimple_invariant_address (const_tree t)
2690 const_tree op;
2692 if (TREE_CODE (t) != ADDR_EXPR)
2693 return false;
2695 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2696 if (!op)
2697 return false;
2699 if (TREE_CODE (op) == MEM_REF)
2701 const_tree op0 = TREE_OPERAND (op, 0);
2702 return (TREE_CODE (op0) == ADDR_EXPR
2703 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2704 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2707 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2710 /* Return true if T is a gimple invariant address at IPA level
2711 (so addresses of variables on stack are not allowed). */
2713 bool
2714 is_gimple_ip_invariant_address (const_tree t)
2716 const_tree op;
2718 if (TREE_CODE (t) != ADDR_EXPR)
2719 return false;
2721 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2722 if (!op)
2723 return false;
2725 if (TREE_CODE (op) == MEM_REF)
2727 const_tree op0 = TREE_OPERAND (op, 0);
2728 return (TREE_CODE (op0) == ADDR_EXPR
2729 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2730 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2733 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2736 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2737 form of function invariant. */
2739 bool
2740 is_gimple_min_invariant (const_tree t)
2742 if (TREE_CODE (t) == ADDR_EXPR)
2743 return is_gimple_invariant_address (t);
2745 return is_gimple_constant (t);
2748 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2749 form of gimple minimal invariant. */
2751 bool
2752 is_gimple_ip_invariant (const_tree t)
2754 if (TREE_CODE (t) == ADDR_EXPR)
2755 return is_gimple_ip_invariant_address (t);
2757 return is_gimple_constant (t);
2760 /* Return true if T is a variable. */
2762 bool
2763 is_gimple_variable (tree t)
2765 return (TREE_CODE (t) == VAR_DECL
2766 || TREE_CODE (t) == PARM_DECL
2767 || TREE_CODE (t) == RESULT_DECL
2768 || TREE_CODE (t) == SSA_NAME);
2771 /* Return true if T is a GIMPLE identifier (something with an address). */
2773 bool
2774 is_gimple_id (tree t)
2776 return (is_gimple_variable (t)
2777 || TREE_CODE (t) == FUNCTION_DECL
2778 || TREE_CODE (t) == LABEL_DECL
2779 || TREE_CODE (t) == CONST_DECL
2780 /* Allow string constants, since they are addressable. */
2781 || TREE_CODE (t) == STRING_CST);
2784 /* Return true if T is a non-aggregate register variable. */
2786 bool
2787 is_gimple_reg (tree t)
2789 if (virtual_operand_p (t))
2790 return false;
2792 if (TREE_CODE (t) == SSA_NAME)
2793 return true;
2795 if (!is_gimple_variable (t))
2796 return false;
2798 if (!is_gimple_reg_type (TREE_TYPE (t)))
2799 return false;
2801 /* A volatile decl is not acceptable because we can't reuse it as
2802 needed. We need to copy it into a temp first. */
2803 if (TREE_THIS_VOLATILE (t))
2804 return false;
2806 /* We define "registers" as things that can be renamed as needed,
2807 which with our infrastructure does not apply to memory. */
2808 if (needs_to_live_in_memory (t))
2809 return false;
2811 /* Hard register variables are an interesting case. For those that
2812 are call-clobbered, we don't know where all the calls are, since
2813 we don't (want to) take into account which operations will turn
2814 into libcalls at the rtl level. For those that are call-saved,
2815 we don't currently model the fact that calls may in fact change
2816 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2817 level, and so miss variable changes they might imply. All around,
2818 it seems safest to not do too much optimization with these at the
2819 tree level at all. We'll have to rely on the rtl optimizers to
2820 clean this up, as there we've got all the appropriate bits exposed. */
2821 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2822 return false;
2824 /* Complex and vector values must have been put into SSA-like form.
2825 That is, no assignments to the individual components. */
2826 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2827 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2828 return DECL_GIMPLE_REG_P (t);
2830 return true;
2834 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2836 bool
2837 is_gimple_val (tree t)
2839 /* Make loads from volatiles and memory vars explicit. */
2840 if (is_gimple_variable (t)
2841 && is_gimple_reg_type (TREE_TYPE (t))
2842 && !is_gimple_reg (t))
2843 return false;
2845 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2848 /* Similarly, but accept hard registers as inputs to asm statements. */
2850 bool
2851 is_gimple_asm_val (tree t)
2853 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2854 return true;
2856 return is_gimple_val (t);
2859 /* Return true if T is a GIMPLE minimal lvalue. */
2861 bool
2862 is_gimple_min_lval (tree t)
2864 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2865 return false;
2866 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2869 /* Return true if T is a valid function operand of a CALL_EXPR. */
2871 bool
2872 is_gimple_call_addr (tree t)
2874 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2877 /* Return true if T is a valid address operand of a MEM_REF. */
2879 bool
2880 is_gimple_mem_ref_addr (tree t)
2882 return (is_gimple_reg (t)
2883 || TREE_CODE (t) == INTEGER_CST
2884 || (TREE_CODE (t) == ADDR_EXPR
2885 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2886 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2890 /* Given a memory reference expression T, return its base address.
2891 The base address of a memory reference expression is the main
2892 object being referenced. For instance, the base address for
2893 'array[i].fld[j]' is 'array'. You can think of this as stripping
2894 away the offset part from a memory address.
2896 This function calls handled_component_p to strip away all the inner
2897 parts of the memory reference until it reaches the base object. */
2899 tree
2900 get_base_address (tree t)
2902 while (handled_component_p (t))
2903 t = TREE_OPERAND (t, 0);
2905 if ((TREE_CODE (t) == MEM_REF
2906 || TREE_CODE (t) == TARGET_MEM_REF)
2907 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2908 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2910 /* ??? Either the alias oracle or all callers need to properly deal
2911 with WITH_SIZE_EXPRs before we can look through those. */
2912 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2913 return NULL_TREE;
2915 return t;
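/* Recompute the TREE_SIDE_EFFECTS bit for expression T from its
   volatility and the side effects of its operands.  */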
2918 void
2919 recalculate_side_effects (tree t)
2921 enum tree_code code = TREE_CODE (t);
2922 int len = TREE_OPERAND_LENGTH (t);
2923 int i;
2925 switch (TREE_CODE_CLASS (code))
2927 case tcc_expression:
2928 switch (code)
2930 case INIT_EXPR:
2931 case MODIFY_EXPR:
2932 case VA_ARG_EXPR:
2933 case PREDECREMENT_EXPR:
2934 case PREINCREMENT_EXPR:
2935 case POSTDECREMENT_EXPR:
2936 case POSTINCREMENT_EXPR:
2937 /* All of these have side-effects, no matter what their
2938 operands are. */
2939 return;
2941 default:
2942 break;
2944 /* Fall through. */
2946 case tcc_comparison: /* a comparison expression */
2947 case tcc_unary: /* a unary arithmetic expression */
2948 case tcc_binary: /* a binary arithmetic expression */
2949 case tcc_reference: /* a reference */
2950 case tcc_vl_exp: /* a function call */
2951 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2952 for (i = 0; i < len; ++i)
2954 tree op = TREE_OPERAND (t, i);
2955 if (op && TREE_SIDE_EFFECTS (op))
2956 TREE_SIDE_EFFECTS (t) = 1;
2958 break;
2960 case tcc_constant:
2961 /* No side-effects. */
2962 return;
2964 default:
2965 gcc_unreachable ();
2969 /* Canonicalize a tree T for use as the condition of a COND_EXPR.
2970 Returns a canonicalized tree that is valid for a COND_EXPR, or
2971 NULL_TREE if we failed to create one. */
2973 tree
2974 canonicalize_cond_expr_cond (tree t)
2976 /* Strip conversions around boolean operations. */
2977 if (CONVERT_EXPR_P (t)
2978 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2979 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2980 == BOOLEAN_TYPE))
2981 t = TREE_OPERAND (t, 0);
2983 /* For !x use x == 0. */
2984 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2986 tree top0 = TREE_OPERAND (t, 0);
2987 t = build2 (EQ_EXPR, TREE_TYPE (t),
2988 top0, build_int_cst (TREE_TYPE (top0), 0));
2990 /* For cmp ? 1 : 0 use cmp. */
2991 else if (TREE_CODE (t) == COND_EXPR
2992 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2993 && integer_onep (TREE_OPERAND (t, 1))
2994 && integer_zerop (TREE_OPERAND (t, 2)))
2996 tree top0 = TREE_OPERAND (t, 0);
2997 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2998 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3000 /* For x ^ y use x != y. */
3001 else if (TREE_CODE (t) == BIT_XOR_EXPR)
3002 t = build2 (NE_EXPR, TREE_TYPE (t),
3003 TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
3005 if (is_gimple_condexpr (t))
3006 return t;
3008 return NULL_TREE;
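/* Illustrative examples of the canonicalizations above:
     (int) (a < b)  ->  a < b
     !x             ->  x == 0
     cmp ? 1 : 0    ->  cmp
     x ^ y          ->  x != y
   If the result is still not a valid GIMPLE condition, for example a
   plain arithmetic expression, NULL_TREE is returned.  */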
3011 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3012 the positions marked by the set ARGS_TO_SKIP. */
3014 gimple
3015 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3017 int i;
3018 int nargs = gimple_call_num_args (stmt);
3019 vec<tree> vargs;
3020 vargs.create (nargs);
3021 gimple new_stmt;
3023 for (i = 0; i < nargs; i++)
3024 if (!bitmap_bit_p (args_to_skip, i))
3025 vargs.quick_push (gimple_call_arg (stmt, i));
3027 if (gimple_call_internal_p (stmt))
3028 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3029 vargs);
3030 else
3031 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3032 vargs.release ();
3033 if (gimple_call_lhs (stmt))
3034 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3036 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3037 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3039 if (gimple_has_location (stmt))
3040 gimple_set_location (new_stmt, gimple_location (stmt));
3041 gimple_call_copy_flags (new_stmt, stmt);
3042 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3044 gimple_set_modified (new_stmt, true);
3046 return new_stmt;
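/* An illustrative sketch (not compiled): dropping the second argument
   (zero-based index 1) of a call, e.g. after IPA analysis proved it
   unused.  STMT and GSI are hypothetical names from the caller.  */
#if 0
bitmap args_to_skip = BITMAP_ALLOC (NULL);
bitmap_set_bit (args_to_skip, 1);
gimple new_stmt = gimple_call_copy_skip_args (stmt, args_to_skip);
gsi_replace (&gsi, new_stmt, false);
BITMAP_FREE (args_to_skip);
#endif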
3051 /* Return true if the field decls F1 and F2 are at the same offset.
3053 This is intended to be used on GIMPLE types only. */
3055 bool
3056 gimple_compare_field_offset (tree f1, tree f2)
3058 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3060 tree offset1 = DECL_FIELD_OFFSET (f1);
3061 tree offset2 = DECL_FIELD_OFFSET (f2);
3062 return ((offset1 == offset2
3063 /* Once gimplification is done, self-referential offsets are
3064 instantiated as operand #2 of the COMPONENT_REF built for
3065 each access and reset. Therefore, they are not relevant
3066 anymore and fields are interchangeable provided that they
3067 represent the same access. */
3068 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3069 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3070 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3071 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3072 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3073 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3074 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3075 || operand_equal_p (offset1, offset2, 0))
3076 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3077 DECL_FIELD_BIT_OFFSET (f2)));
3080 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3081 should be, so handle differing ones specially by decomposing
3082 the offset into a byte and bit offset manually. */
3083 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3084 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3086 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3087 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3088 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3089 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3090 + bit_offset1 / BITS_PER_UNIT);
3091 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3092 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3093 + bit_offset2 / BITS_PER_UNIT);
3094 if (byte_offset1 != byte_offset2)
3095 return false;
3096 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3099 return false;
3102 /* Return a hash value for gimple type TYPE, combined with VAL.
3104 The hash value returned is equal for types considered compatible
3105 by gimple_canonical_types_compatible_p. */
3107 static hashval_t
3108 iterative_hash_canonical_type (tree type, hashval_t val)
3110 hashval_t v;
3111 void **slot;
3112 struct tree_int_map *mp, m;
3114 m.base.from = type;
3115 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
3116 && *slot)
3117 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
3119 /* Combine a few common features of types so that types are grouped into
3120 smaller sets; when searching for existing matching types to merge,
3121 only existing types having the same features as the new type will be
3122 checked. */
3123 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
3124 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
3125 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
3126 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
3128 /* Incorporate common features of numerical types. */
3129 if (INTEGRAL_TYPE_P (type)
3130 || SCALAR_FLOAT_TYPE_P (type)
3131 || FIXED_POINT_TYPE_P (type)
3132 || TREE_CODE (type) == OFFSET_TYPE
3133 || POINTER_TYPE_P (type))
3135 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
3136 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3139 if (VECTOR_TYPE_P (type))
3141 v = iterative_hash_hashval_t (TYPE_VECTOR_SUBPARTS (type), v);
3142 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3145 if (TREE_CODE (type) == COMPLEX_TYPE)
3146 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3148 /* For pointer and reference types, fold in information about the type
3149 pointed to but do not recurse to the pointed-to type. */
3150 if (POINTER_TYPE_P (type))
3152 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
3153 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
3154 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
3155 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
3158 /* For integer types hash only the string flag. */
3159 if (TREE_CODE (type) == INTEGER_TYPE)
3160 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3162 /* For array types hash the domain bounds and the string flag. */
3163 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3165 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3166 /* OMP lowering can introduce error_mark_node in place of
3167 random local decls in types. */
3168 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
3169 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
3170 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
3171 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
3174 /* Recurse for aggregates with a single element type. */
3175 if (TREE_CODE (type) == ARRAY_TYPE
3176 || TREE_CODE (type) == COMPLEX_TYPE
3177 || TREE_CODE (type) == VECTOR_TYPE)
3178 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
3180 /* Incorporate function return and argument types. */
3181 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
3183 unsigned na;
3184 tree p;
3186 /* For method types also incorporate their parent class. */
3187 if (TREE_CODE (type) == METHOD_TYPE)
3188 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
3190 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
3192 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
3194 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
3195 na++;
3198 v = iterative_hash_hashval_t (na, v);
3201 if (RECORD_OR_UNION_TYPE_P (type))
3203 unsigned nf;
3204 tree f;
3206 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
3207 if (TREE_CODE (f) == FIELD_DECL)
3209 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
3210 nf++;
3213 v = iterative_hash_hashval_t (nf, v);
3216 /* Cache the just computed hash value. */
3217 mp = ggc_alloc_cleared_tree_int_map ();
3218 mp->base.from = type;
3219 mp->to = v;
3220 *slot = (void *) mp;
3222 return iterative_hash_hashval_t (v, val);
3225 static hashval_t
3226 gimple_canonical_type_hash (const void *p)
3228 if (canonical_type_hash_cache == NULL)
3229 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
3230 tree_int_map_eq, NULL);
3232 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
3238 /* The TYPE_CANONICAL merging machinery. It should closely resemble
3239 the middle-end types_compatible_p function. It needs to avoid
3240 claiming types are different for types that should be treated
3241 the same with respect to TBAA. Canonical types are also used
3242 for IL consistency checks via the useless_type_conversion_p
3243 predicate which does not handle all type kinds itself but falls
3244 back to pointer-comparison of TYPE_CANONICAL for aggregates
3245 for example. */
3247 /* Return true iff T1 and T2 are structurally identical as far as
3248 TBAA is concerned. */
3250 static bool
3251 gimple_canonical_types_compatible_p (tree t1, tree t2)
3253 /* Before starting to set up the SCC machinery handle simple cases. */
3255 /* Check first for the obvious case of pointer identity. */
3256 if (t1 == t2)
3257 return true;
3259 /* Check that we have two types to compare. */
3260 if (t1 == NULL_TREE || t2 == NULL_TREE)
3261 return false;
3263 /* If the types have been previously registered and found equal
3264 they still are. */
3265 if (TYPE_CANONICAL (t1)
3266 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3267 return true;
3269 /* Can't be the same type if the types don't have the same code. */
3270 if (TREE_CODE (t1) != TREE_CODE (t2))
3271 return false;
3273 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3274 return false;
3276 /* Qualifiers do not matter for canonical type comparison purposes. */
3278 /* Void types and nullptr types are always the same. */
3279 if (TREE_CODE (t1) == VOID_TYPE
3280 || TREE_CODE (t1) == NULLPTR_TYPE)
3281 return true;
3283 /* Can't be the same type if they have different alignment, or mode. */
3284 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3285 || TYPE_MODE (t1) != TYPE_MODE (t2))
3286 return false;
3288 /* Non-aggregate types can be handled cheaply. */
3289 if (INTEGRAL_TYPE_P (t1)
3290 || SCALAR_FLOAT_TYPE_P (t1)
3291 || FIXED_POINT_TYPE_P (t1)
3292 || TREE_CODE (t1) == VECTOR_TYPE
3293 || TREE_CODE (t1) == COMPLEX_TYPE
3294 || TREE_CODE (t1) == OFFSET_TYPE
3295 || POINTER_TYPE_P (t1))
3297 /* Can't be the same type if they have different sign or precision. */
3298 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3299 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3300 return false;
3302 if (TREE_CODE (t1) == INTEGER_TYPE
3303 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
3304 return false;
3306 /* For canonical type comparisons we do not want to build SCCs
3307 so we cannot compare pointed-to types. But we can, for now,
3308 require the same pointed-to type kind and match what
3309 useless_type_conversion_p would do. */
3310 if (POINTER_TYPE_P (t1))
3312 /* If the two pointers have different ref-all attributes,
3313 they can't be the same type. */
3314 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3315 return false;
3317 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
3318 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
3319 return false;
3321 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
3322 return false;
3324 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
3325 return false;
3328 /* Tail-recurse to components. */
3329 if (TREE_CODE (t1) == VECTOR_TYPE
3330 || TREE_CODE (t1) == COMPLEX_TYPE)
3331 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
3332 TREE_TYPE (t2));
3334 return true;
3337 /* Do type-specific comparisons. */
3338 switch (TREE_CODE (t1))
3340 case ARRAY_TYPE:
3341 /* Array types are the same if the element types are the same and
3342 the number of elements are the same. */
3343 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3344 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3345 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3346 return false;
3347 else
3349 tree i1 = TYPE_DOMAIN (t1);
3350 tree i2 = TYPE_DOMAIN (t2);
3352 /* For an incomplete external array, the type domain can be
3353 NULL_TREE. Check this condition also. */
3354 if (i1 == NULL_TREE && i2 == NULL_TREE)
3355 return true;
3356 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3357 return false;
3358 else
3360 tree min1 = TYPE_MIN_VALUE (i1);
3361 tree min2 = TYPE_MIN_VALUE (i2);
3362 tree max1 = TYPE_MAX_VALUE (i1);
3363 tree max2 = TYPE_MAX_VALUE (i2);
3365 /* The minimum/maximum values have to be the same. */
3366 if ((min1 == min2
3367 || (min1 && min2
3368 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3369 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3370 || operand_equal_p (min1, min2, 0))))
3371 && (max1 == max2
3372 || (max1 && max2
3373 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3374 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3375 || operand_equal_p (max1, max2, 0)))))
3376 return true;
3377 else
3378 return false;
3382 case METHOD_TYPE:
3383 case FUNCTION_TYPE:
3384 /* Function types are the same if the return type and arguments types
3385 are the same. */
3386 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3387 return false;
3389 if (!comp_type_attributes (t1, t2))
3390 return false;
3392 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3393 return true;
3394 else
3396 tree parms1, parms2;
3398 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3399 parms1 && parms2;
3400 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3402 if (!gimple_canonical_types_compatible_p
3403 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
3404 return false;
3407 if (parms1 || parms2)
3408 return false;
3410 return true;
3413 case RECORD_TYPE:
3414 case UNION_TYPE:
3415 case QUAL_UNION_TYPE:
3417 tree f1, f2;
3419 /* For aggregate types, all the fields must be the same. */
3420 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3421 f1 || f2;
3422 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3424 /* Skip non-fields. */
3425 while (f1 && TREE_CODE (f1) != FIELD_DECL)
3426 f1 = TREE_CHAIN (f1);
3427 while (f2 && TREE_CODE (f2) != FIELD_DECL)
3428 f2 = TREE_CHAIN (f2);
3429 if (!f1 || !f2)
3430 break;
3431 /* The fields must have the same name, offset and type. */
3432 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3433 || !gimple_compare_field_offset (f1, f2)
3434 || !gimple_canonical_types_compatible_p
3435 (TREE_TYPE (f1), TREE_TYPE (f2)))
3436 return false;
3439 /* If one aggregate has more fields than the other, they
3440 are not the same. */
3441 if (f1 || f2)
3442 return false;
3444 return true;
3447 default:
3448 gcc_unreachable ();
3453 /* Returns nonzero if P1 and P2 are equal. */
3455 static int
3456 gimple_canonical_type_eq (const void *p1, const void *p2)
3458 const_tree t1 = (const_tree) p1;
3459 const_tree t2 = (const_tree) p2;
3460 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
3461 CONST_CAST_TREE (t2));
3464 /* Register type T in the global canonical type table
3465 gimple_canonical_types. If another type T', compatible with T,
3466 already existed there then return T', otherwise return T. This is
3467 used by LTO to merge identical types read from different TUs.
3469 ??? This merging does not exactly match how the tree.c middle-end
3470 functions will assign TYPE_CANONICAL when new types are created
3471 during optimization (which at least happens for pointer and array
3472 types). */
3474 tree
3475 gimple_register_canonical_type (tree t)
3477 void **slot;
3479 gcc_assert (TYPE_P (t));
3481 if (TYPE_CANONICAL (t))
3482 return TYPE_CANONICAL (t);
3484 if (gimple_canonical_types == NULL)
3485 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
3486 gimple_canonical_type_eq, 0);
3488 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
3489 if (*slot
3490 && *(tree *)slot != t)
3492 tree new_type = (tree) *((tree *) slot);
3494 TYPE_CANONICAL (t) = new_type;
3495 t = new_type;
3497 else
3499 TYPE_CANONICAL (t) = t;
3500 *slot = (void *) t;
3503 return t;
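/* For example, two structurally identical RECORD_TYPEs read from
   different TUs compare equal under
   gimple_canonical_types_compatible_p, so the second registration
   returns the first type; both then share the same TYPE_CANONICAL and
   get_alias_set treats them alike for TBAA.  */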
3507 /* Show statistics on references to the global canonical type tables. */
3509 void
3510 print_gimple_types_stats (const char *pfx)
3512 if (gimple_canonical_types)
3513 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3514 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3515 (long) htab_size (gimple_canonical_types),
3516 (long) htab_elements (gimple_canonical_types),
3517 (long) gimple_canonical_types->searches,
3518 (long) gimple_canonical_types->collisions,
3519 htab_collisions (gimple_canonical_types));
3520 else
3521 fprintf (stderr, "[%s] GIMPLE canonical type table is empty\n", pfx);
3522 if (canonical_type_hash_cache)
3523 fprintf (stderr, "[%s] GIMPLE canonical type hash table: size %ld, "
3524 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3525 (long) htab_size (canonical_type_hash_cache),
3526 (long) htab_elements (canonical_type_hash_cache),
3527 (long) canonical_type_hash_cache->searches,
3528 (long) canonical_type_hash_cache->collisions,
3529 htab_collisions (canonical_type_hash_cache));
3530 else
3531 fprintf (stderr, "[%s] GIMPLE canonical type hash table is empty\n", pfx);
3534 /* Free the gimple type hashtables used for LTO type merging. */
3536 void
3537 free_gimple_type_tables (void)
3539 if (gimple_canonical_types)
3541 htab_delete (gimple_canonical_types);
3542 gimple_canonical_types = NULL;
3544 if (canonical_type_hash_cache)
3546 htab_delete (canonical_type_hash_cache);
3547 canonical_type_hash_cache = NULL;
3552 /* Return a type the same as TYPE except unsigned or
3553 signed according to UNSIGNEDP. */
3555 static tree
3556 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
3558 tree type1;
3560 type1 = TYPE_MAIN_VARIANT (type);
3561 if (type1 == signed_char_type_node
3562 || type1 == char_type_node
3563 || type1 == unsigned_char_type_node)
3564 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3565 if (type1 == integer_type_node || type1 == unsigned_type_node)
3566 return unsignedp ? unsigned_type_node : integer_type_node;
3567 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
3568 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3569 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
3570 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3571 if (type1 == long_long_integer_type_node
3572 || type1 == long_long_unsigned_type_node)
3573 return unsignedp
3574 ? long_long_unsigned_type_node
3575 : long_long_integer_type_node;
3576 if (int128_integer_type_node
      && (type1 == int128_integer_type_node
          || type1 == int128_unsigned_type_node))
3577 return unsignedp
3578 ? int128_unsigned_type_node
3579 : int128_integer_type_node;
3580 #if HOST_BITS_PER_WIDE_INT >= 64
3581 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
3582 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3583 #endif
3584 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
3585 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3586 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
3587 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3588 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
3589 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3590 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
3591 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3593 #define GIMPLE_FIXED_TYPES(NAME) \
3594 if (type1 == short_ ## NAME ## _type_node \
3595 || type1 == unsigned_short_ ## NAME ## _type_node) \
3596 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
3597 : short_ ## NAME ## _type_node; \
3598 if (type1 == NAME ## _type_node \
3599 || type1 == unsigned_ ## NAME ## _type_node) \
3600 return unsignedp ? unsigned_ ## NAME ## _type_node \
3601 : NAME ## _type_node; \
3602 if (type1 == long_ ## NAME ## _type_node \
3603 || type1 == unsigned_long_ ## NAME ## _type_node) \
3604 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
3605 : long_ ## NAME ## _type_node; \
3606 if (type1 == long_long_ ## NAME ## _type_node \
3607 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
3608 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
3609 : long_long_ ## NAME ## _type_node;
3611 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
3612 if (type1 == NAME ## _type_node \
3613 || type1 == u ## NAME ## _type_node) \
3614 return unsignedp ? u ## NAME ## _type_node \
3615 : NAME ## _type_node;
3617 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
3618 if (type1 == sat_ ## short_ ## NAME ## _type_node \
3619 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
3620 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
3621 : sat_ ## short_ ## NAME ## _type_node; \
3622 if (type1 == sat_ ## NAME ## _type_node \
3623 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
3624 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
3625 : sat_ ## NAME ## _type_node; \
3626 if (type1 == sat_ ## long_ ## NAME ## _type_node \
3627 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
3628 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
3629 : sat_ ## long_ ## NAME ## _type_node; \
3630 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
3631 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
3632 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
3633 : sat_ ## long_long_ ## NAME ## _type_node;
3635 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
3636 if (type1 == sat_ ## NAME ## _type_node \
3637 || type1 == sat_ ## u ## NAME ## _type_node) \
3638 return unsignedp ? sat_ ## u ## NAME ## _type_node \
3639 : sat_ ## NAME ## _type_node;
3641 GIMPLE_FIXED_TYPES (fract);
3642 GIMPLE_FIXED_TYPES_SAT (fract);
3643 GIMPLE_FIXED_TYPES (accum);
3644 GIMPLE_FIXED_TYPES_SAT (accum);
3646 GIMPLE_FIXED_MODE_TYPES (qq);
3647 GIMPLE_FIXED_MODE_TYPES (hq);
3648 GIMPLE_FIXED_MODE_TYPES (sq);
3649 GIMPLE_FIXED_MODE_TYPES (dq);
3650 GIMPLE_FIXED_MODE_TYPES (tq);
3651 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
3652 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
3653 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
3654 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
3655 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
3656 GIMPLE_FIXED_MODE_TYPES (ha);
3657 GIMPLE_FIXED_MODE_TYPES (sa);
3658 GIMPLE_FIXED_MODE_TYPES (da);
3659 GIMPLE_FIXED_MODE_TYPES (ta);
3660 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
3661 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
3662 GIMPLE_FIXED_MODE_TYPES_SAT (da);
3663 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
3665 /* For ENUMERAL_TYPEs in C++, we must check the mode of the types,
3666 not the precision; they have precision set to match their range,
3667 but may use a wider mode to match an ABI. If we change modes, we
3668 may wind up with bad conversions. For INTEGER_TYPEs in C, we must check
3669 the precision as well, so as to yield correct results for
3670 bit-field types. C++ does not have these separate bit-field
3671 types, and producing a signed or unsigned variant of an
3672 ENUMERAL_TYPE may cause other problems as well. */
3673 if (!INTEGRAL_TYPE_P (type)
3674 || TYPE_UNSIGNED (type) == unsignedp)
3675 return type;
3677 #define TYPE_OK(node) \
3678 (TYPE_MODE (type) == TYPE_MODE (node) \
3679 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
3680 if (TYPE_OK (signed_char_type_node))
3681 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3682 if (TYPE_OK (integer_type_node))
3683 return unsignedp ? unsigned_type_node : integer_type_node;
3684 if (TYPE_OK (short_integer_type_node))
3685 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3686 if (TYPE_OK (long_integer_type_node))
3687 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3688 if (TYPE_OK (long_long_integer_type_node))
3689 return (unsignedp
3690 ? long_long_unsigned_type_node
3691 : long_long_integer_type_node);
3692 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
3693 return (unsignedp
3694 ? int128_unsigned_type_node
3695 : int128_integer_type_node);
3697 #if HOST_BITS_PER_WIDE_INT >= 64
3698 if (TYPE_OK (intTI_type_node))
3699 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3700 #endif
3701 if (TYPE_OK (intDI_type_node))
3702 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3703 if (TYPE_OK (intSI_type_node))
3704 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3705 if (TYPE_OK (intHI_type_node))
3706 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3707 if (TYPE_OK (intQI_type_node))
3708 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3710 #undef GIMPLE_FIXED_TYPES
3711 #undef GIMPLE_FIXED_MODE_TYPES
3712 #undef GIMPLE_FIXED_TYPES_SAT
3713 #undef GIMPLE_FIXED_MODE_TYPES_SAT
3714 #undef TYPE_OK
3716 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
3720 /* Return an unsigned type the same as TYPE in other respects. */
3722 tree
3723 gimple_unsigned_type (tree type)
3725 return gimple_signed_or_unsigned_type (true, type);
3729 /* Return a signed type the same as TYPE in other respects. */
3731 tree
3732 gimple_signed_type (tree type)
3734 return gimple_signed_or_unsigned_type (false, type);
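/* An illustrative sketch (not compiled): a transformation that must
   avoid signed overflow can temporarily move an operand to the
   corresponding unsigned type.  OP is a hypothetical operand tree.  */
#if 0
tree utype = gimple_unsigned_type (TREE_TYPE (op));
tree uop = fold_convert (utype, op);
#endif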
3738 /* Return the typed-based alias set for T, which may be an expression
3739 or a type. Return -1 if we don't do anything special. */
3741 alias_set_type
3742 gimple_get_alias_set (tree t)
3744 tree u;
3746 /* Permit type-punning when accessing a union, provided the access
3747 is directly through the union. For example, this code does not
3748 permit taking the address of a union member and then storing
3749 through it. Even the type-punning allowed here is a GCC
3750 extension, albeit a common and useful one; the C standard says
3751 that such accesses have implementation-defined behavior. */
3752 for (u = t;
3753 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3754 u = TREE_OPERAND (u, 0))
3755 if (TREE_CODE (u) == COMPONENT_REF
3756 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3757 return 0;
3759 /* That's all the expressions we handle specially. */
3760 if (!TYPE_P (t))
3761 return -1;
3763 /* For convenience, follow the C standard when dealing with
3764 character types. Any object may be accessed via an lvalue that
3765 has character type. */
3766 if (t == char_type_node
3767 || t == signed_char_type_node
3768 || t == unsigned_char_type_node)
3769 return 0;
3771 /* Allow aliasing between signed and unsigned variants of the same
3772 type. We treat the signed variant as canonical. */
3773 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
3775 tree t1 = gimple_signed_type (t);
3777 /* t1 == t can happen for boolean nodes which are always unsigned. */
3778 if (t1 != t)
3779 return get_alias_set (t1);
3782 return -1;
3786 /* From a tree operand OP return the base of a load or store operation
3787 or NULL_TREE if OP is not a load or a store. */
3789 static tree
3790 get_base_loadstore (tree op)
3792 while (handled_component_p (op))
3793 op = TREE_OPERAND (op, 0);
3794 if (DECL_P (op)
3795 || INDIRECT_REF_P (op)
3796 || TREE_CODE (op) == MEM_REF
3797 || TREE_CODE (op) == TARGET_MEM_REF)
3798 return op;
3799 return NULL_TREE;
3802 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3803 VISIT_ADDR if non-NULL on loads, stores and address-taken operands
3804 passing the STMT, the base of the operand and DATA to it. The base
3805 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3806 or the argument of an address expression.
3807 Returns the results of these callbacks or'ed. */
3809 bool
3810 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3811 bool (*visit_load)(gimple, tree, void *),
3812 bool (*visit_store)(gimple, tree, void *),
3813 bool (*visit_addr)(gimple, tree, void *))
3815 bool ret = false;
3816 unsigned i;
3817 if (gimple_assign_single_p (stmt))
3819 tree lhs, rhs;
3820 if (visit_store)
3822 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3823 if (lhs)
3824 ret |= visit_store (stmt, lhs, data);
3826 rhs = gimple_assign_rhs1 (stmt);
3827 while (handled_component_p (rhs))
3828 rhs = TREE_OPERAND (rhs, 0);
3829 if (visit_addr)
3831 if (TREE_CODE (rhs) == ADDR_EXPR)
3832 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3833 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3834 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3835 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3836 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3837 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3838 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3839 0), data);
3840 else if (TREE_CODE (rhs) == CONSTRUCTOR)
3842 unsigned int ix;
3843 tree val;
3845 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
3846 if (TREE_CODE (val) == ADDR_EXPR)
3847 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
3848 else if (TREE_CODE (val) == OBJ_TYPE_REF
3849 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
3850 ret |= visit_addr (stmt,
3851 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
3852 0), data);
3854 lhs = gimple_assign_lhs (stmt);
3855 if (TREE_CODE (lhs) == TARGET_MEM_REF
3856 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3857 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3859 if (visit_load)
3861 rhs = get_base_loadstore (rhs);
3862 if (rhs)
3863 ret |= visit_load (stmt, rhs, data);
3866 else if (visit_addr
3867 && (is_gimple_assign (stmt)
3868 || gimple_code (stmt) == GIMPLE_COND))
3870 for (i = 0; i < gimple_num_ops (stmt); ++i)
3872 tree op = gimple_op (stmt, i);
3873 if (op == NULL_TREE)
3875 else if (TREE_CODE (op) == ADDR_EXPR)
3876 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3877 /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a
3878 comparison tree with two operands. */
3879 else if (i == 1 && COMPARISON_CLASS_P (op))
3881 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
3882 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
3883 0), data);
3884 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
3885 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
3886 0), data);
3890 else if (is_gimple_call (stmt))
3892 if (visit_store)
3894 tree lhs = gimple_call_lhs (stmt);
3895 if (lhs)
3897 lhs = get_base_loadstore (lhs);
3898 if (lhs)
3899 ret |= visit_store (stmt, lhs, data);
3902 if (visit_load || visit_addr)
3903 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3905 tree rhs = gimple_call_arg (stmt, i);
3906 if (visit_addr
3907 && TREE_CODE (rhs) == ADDR_EXPR)
3908 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3909 else if (visit_load)
3911 rhs = get_base_loadstore (rhs);
3912 if (rhs)
3913 ret |= visit_load (stmt, rhs, data);
3916 if (visit_addr
3917 && gimple_call_chain (stmt)
3918 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3919 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3920 data);
3921 if (visit_addr
3922 && gimple_call_return_slot_opt_p (stmt)
3923 && gimple_call_lhs (stmt) != NULL_TREE
3924 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3925 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
3927 else if (gimple_code (stmt) == GIMPLE_ASM)
3929 unsigned noutputs;
3930 const char *constraint;
3931 const char **oconstraints;
3932 bool allows_mem, allows_reg, is_inout;
3933 noutputs = gimple_asm_noutputs (stmt);
3934 oconstraints = XALLOCAVEC (const char *, noutputs);
3935 if (visit_store || visit_addr)
3936 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3938 tree link = gimple_asm_output_op (stmt, i);
3939 tree op = get_base_loadstore (TREE_VALUE (link));
3940 if (op && visit_store)
3941 ret |= visit_store (stmt, op, data);
3942 if (visit_addr)
3944 constraint = TREE_STRING_POINTER
3945 (TREE_VALUE (TREE_PURPOSE (link)));
3946 oconstraints[i] = constraint;
3947 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3948 &allows_reg, &is_inout);
3949 if (op && !allows_reg && allows_mem)
3950 ret |= visit_addr (stmt, op, data);
3953 if (visit_load || visit_addr)
3954 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3956 tree link = gimple_asm_input_op (stmt, i);
3957 tree op = TREE_VALUE (link);
3958 if (visit_addr
3959 && TREE_CODE (op) == ADDR_EXPR)
3960 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3961 else if (visit_load || visit_addr)
3963 op = get_base_loadstore (op);
3964 if (op)
3966 if (visit_load)
3967 ret |= visit_load (stmt, op, data);
3968 if (visit_addr)
3970 constraint = TREE_STRING_POINTER
3971 (TREE_VALUE (TREE_PURPOSE (link)));
3972 parse_input_constraint (&constraint, 0, 0, noutputs,
3973 0, oconstraints,
3974 &allows_mem, &allows_reg);
3975 if (!allows_reg && allows_mem)
3976 ret |= visit_addr (stmt, op, data);
3982 else if (gimple_code (stmt) == GIMPLE_RETURN)
3984 tree op = gimple_return_retval (stmt);
3985 if (op)
3987 if (visit_addr
3988 && TREE_CODE (op) == ADDR_EXPR)
3989 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3990 else if (visit_load)
3992 op = get_base_loadstore (op);
3993 if (op)
3994 ret |= visit_load (stmt, op, data);
3998 else if (visit_addr
3999 && gimple_code (stmt) == GIMPLE_PHI)
4001 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
4003 tree op = gimple_phi_arg_def (stmt, i);
4004 if (TREE_CODE (op) == ADDR_EXPR)
4005 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4008 else if (visit_addr
4009 && gimple_code (stmt) == GIMPLE_GOTO)
4011 tree op = gimple_goto_dest (stmt);
4012 if (TREE_CODE (op) == ADDR_EXPR)
4013 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4016 return ret;
4019 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
4020 should make a faster clone for this case. */
4022 bool
4023 walk_stmt_load_store_ops (gimple stmt, void *data,
4024 bool (*visit_load)(gimple, tree, void *),
4025 bool (*visit_store)(gimple, tree, void *))
4027 return walk_stmt_load_store_addr_ops (stmt, data,
4028 visit_load, visit_store, NULL);
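/* A minimal usage sketch, with STMT standing for the caller's
   statement and the helper name purely illustrative: count the load
   bases visited by passing a load callback and NULL for stores.

     static bool
     count_load (gimple stmt ATTRIBUTE_UNUSED, tree op ATTRIBUTE_UNUSED,
                 void *data)
     {
       ++*(unsigned *) data;
       return false;
     }

     unsigned nloads = 0;
     walk_stmt_load_store_ops (stmt, &nloads, count_load, NULL);

   The walker ORs the callbacks' return values into its own result,
   so returning false here keeps the overall result false.  */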
4031 /* Helper for gimple_ior_addresses_taken. */
4033 static bool
4034 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
4035 tree addr, void *data)
4037 bitmap addresses_taken = (bitmap)data;
4038 addr = get_base_address (addr);
4039 if (addr
4040 && DECL_P (addr))
4042 bitmap_set_bit (addresses_taken, DECL_UID (addr));
4043 return true;
4045 return false;
4048 /* Set the bit in the ADDRESSES_TAKEN bitmap for the UID of each
4049 decl whose address is taken in STMT. Return true if there
4050 were any in this stmt. */
4052 bool
4053 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
4055 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
4056 gimple_ior_addresses_taken_1);
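/* Illustrative use, assuming the caller owns the bitmap and has a
   gimple_seq SEQ to scan:

     gimple_stmt_iterator gsi;
     bitmap addresses_taken = BITMAP_ALLOC (NULL);
     for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
       gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));

   SEQ here is whatever statement sequence the caller is analyzing.  */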
4060 /* Return a printable name for symbol DECL. */
4062 const char *
4063 gimple_decl_printable_name (tree decl, int verbosity)
4065 if (!DECL_NAME (decl))
4066 return NULL;
4068 if (DECL_ASSEMBLER_NAME_SET_P (decl))
4070 const char *str, *mangled_str;
4071 int dmgl_opts = DMGL_NO_OPTS;
4073 if (verbosity >= 2)
4075 dmgl_opts = DMGL_VERBOSE
4076 | DMGL_ANSI
4077 | DMGL_GNU_V3
4078 | DMGL_RET_POSTFIX;
4079 if (TREE_CODE (decl) == FUNCTION_DECL)
4080 dmgl_opts |= DMGL_PARAMS;
4083 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4084 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
4085 return (str) ? str : mangled_str;
4088 return IDENTIFIER_POINTER (DECL_NAME (decl));
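/* For instance, a diagnostic client might ask for the fully
   demangled form of a C++ function (a sketch, DECL standing for the
   caller's FUNCTION_DECL):

     const char *name = gimple_decl_printable_name (decl, 2);

   Note the result may be NULL when DECL has no DECL_NAME, and with
   verbosity >= 2 the demangled name of a FUNCTION_DECL includes its
   parameter types.  */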
4091 /* Return TRUE iff STMT is a call to a built-in function. */
4093 bool
4094 is_gimple_builtin_call (gimple stmt)
4096 tree callee;
4098 if (is_gimple_call (stmt)
4099 && (callee = gimple_call_fndecl (stmt))
4100 && is_builtin_fn (callee)
4101 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
4102 return true;
4104 return false;
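/* Note the BUILT_IN_NORMAL check above: machine-specific
   (BUILT_IN_MD) and front-end (BUILT_IN_FRONTEND) builtins are
   rejected, so a guard like

     if (is_gimple_builtin_call (stmt))
       handle_builtin (stmt);

   only ever passes normal builtins to handle_builtin (both STMT and
   handle_builtin are placeholders for the caller's code).  */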
4107 /* Return true when STMT's arguments match those of FNDECL. */
4109 static bool
4110 validate_call (gimple stmt, tree fndecl)
4112 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4113 unsigned nargs = gimple_call_num_args (stmt);
4114 for (unsigned i = 0; i < nargs; ++i)
4116 /* Variadic args follow. */
4117 if (!targs)
4118 return true;
4119 tree arg = gimple_call_arg (stmt, i);
4120 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
4121 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
4123 else if (POINTER_TYPE_P (TREE_TYPE (arg))
4124 && POINTER_TYPE_P (TREE_VALUE (targs)))
4126 else if (TREE_CODE (TREE_TYPE (arg))
4127 != TREE_CODE (TREE_VALUE (targs)))
4128 return false;
4129 targs = TREE_CHAIN (targs);
4131 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
4132 return false;
4133 return true;
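/* For example, under the rules above a call passing a long where the
   prototype expects an int still validates (both are integral), as
   does one passing a char * where a void * is expected (both are
   pointers), while passing a double where an int is expected fails
   the TREE_CODE comparison.  */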
4136 /* Return true when STMT is a builtin call of class KLASS. */
4138 bool
4139 gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
4141 tree fndecl;
4142 if (is_gimple_call (stmt)
4143 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
4144 && DECL_BUILT_IN_CLASS (fndecl) == klass)
4145 return validate_call (stmt, fndecl);
4146 return false;
4149 /* Return true when STMT is a BUILT_IN_NORMAL builtin call to function CODE. */
4151 bool
4152 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
4154 tree fndecl;
4155 if (is_gimple_call (stmt)
4156 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
4157 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4158 && DECL_FUNCTION_CODE (fndecl) == code)
4159 return validate_call (stmt, fndecl);
4160 return false;
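/* A typical use, with STMT standing for the caller's statement:

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       {
         tree dest = gimple_call_arg (stmt, 0);
         tree src = gimple_call_arg (stmt, 1);
         tree len = gimple_call_arg (stmt, 2);
         ...
       }

   Unlike testing DECL_FUNCTION_CODE directly, this predicate also
   runs validate_call, so the three arguments above are known to be
   present and of the expected kinds.  */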
4163 /* Return true if STMT clobbers memory. STMT is required to be a
4164 GIMPLE_ASM. */
4166 bool
4167 gimple_asm_clobbers_memory_p (const_gimple stmt)
4169 unsigned i;
4171 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
4173 tree op = gimple_asm_clobber_op (stmt, i);
4174 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
4175 return true;
4178 return false;
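/* For instance, the classic compiler barrier

     __asm__ __volatile__ ("" : : : "memory");

   is recognized here, because its clobber list contains the string
   "memory".  */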
4182 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
4183 useless type conversion, otherwise return false.
4185 This function implicitly defines the middle-end type system. With
4186 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
4187 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
4188 the following invariants shall be fulfilled:
4190 1) useless_type_conversion_p is transitive.
4191 If a < b and b < c then a < c.
4193 2) useless_type_conversion_p is not symmetric.
4194 a < b does not imply a > b.
4196 3) Types define the available set of operations applicable to values.
4197 A type conversion is useless if the operations for the target type
4198 are a subset of the operations for the source type. For example
4199 casts to void* are useless, casts from void* are not (void* can't
4200 be dereferenced or offset, only copied, hence its set of operations
4201 is a strict subset of that of all other data pointer types). Casts
4202 to const T* are useless (can't be written to), casts from const T*
4203 to T* are not. */
4205 bool
4206 useless_type_conversion_p (tree outer_type, tree inner_type)
4208 /* Do the following before stripping toplevel qualifiers. */
4209 if (POINTER_TYPE_P (inner_type)
4210 && POINTER_TYPE_P (outer_type))
4212 int i_shared = upc_shared_type_p (TREE_TYPE (inner_type));
4213 int o_shared = upc_shared_type_p (TREE_TYPE (outer_type));
4215 /* Retain conversions from a UPC shared pointer to
4216 a regular C pointer. */
4217 if (!o_shared && i_shared)
4218 return false;
4220 /* Retain conversions between incompatible UPC shared pointers. */
4221 if (o_shared && i_shared
4222 && !lang_hooks.types_compatible_p (inner_type, outer_type))
4223 return false;
4225 /* Do not lose casts between pointers to different address spaces. */
4226 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
4227 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
4228 return false;
4231 /* From now on qualifiers on value types do not matter. */
4232 inner_type = TYPE_MAIN_VARIANT (inner_type);
4233 outer_type = TYPE_MAIN_VARIANT (outer_type);
4235 if (inner_type == outer_type)
4236 return true;
4238 /* If we know the canonical types, compare them. */
4239 if (TYPE_CANONICAL (inner_type)
4240 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
4241 return true;
4243 /* Changes in machine mode are never useless conversions unless we
4244 deal with aggregate types, in which case we defer to later checks. */
4245 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
4246 && !AGGREGATE_TYPE_P (inner_type))
4247 return false;
4249 /* If both the inner and outer types are integral types, then the
4250 conversion is not necessary if they have the same mode and
4251 signedness and precision, and both or neither are boolean. */
4252 if (INTEGRAL_TYPE_P (inner_type)
4253 && INTEGRAL_TYPE_P (outer_type))
4255 /* Preserve changes in signedness or precision. */
4256 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
4257 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
4258 return false;
4260 /* Preserve conversions to/from BOOLEAN_TYPE if types are not
4261 of precision one. */
4262 if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
4263 != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
4264 && TYPE_PRECISION (outer_type) != 1)
4265 return false;
4267 /* We don't need to preserve changes in the types' minimum or
4268 maximum values in general, as these do not generate code
4269 unless the types' precisions differ. */
4270 return true;
4273 /* Scalar floating point types with the same mode are compatible. */
4274 else if (SCALAR_FLOAT_TYPE_P (inner_type)
4275 && SCALAR_FLOAT_TYPE_P (outer_type))
4276 return true;
4278 /* Fixed point types with the same mode are compatible. */
4279 else if (FIXED_POINT_TYPE_P (inner_type)
4280 && FIXED_POINT_TYPE_P (outer_type))
4281 return true;
4283 /* We need to take special care recursing to pointed-to types. */
4284 else if (POINTER_TYPE_P (inner_type)
4285 && POINTER_TYPE_P (outer_type))
4287 /* Do not lose casts to function pointer types. */
4288 if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
4289 || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
4290 && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
4291 || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
4292 return false;
4294 /* We do not care for const qualification of the pointed-to types
4295 as const qualification has no semantic value to the middle-end. */
4297 /* Otherwise pointers/references are equivalent. */
4298 return true;
4301 /* Recurse for complex types. */
4302 else if (TREE_CODE (inner_type) == COMPLEX_TYPE
4303 && TREE_CODE (outer_type) == COMPLEX_TYPE)
4304 return useless_type_conversion_p (TREE_TYPE (outer_type),
4305 TREE_TYPE (inner_type));
4307 /* Recurse for vector types with the same number of subparts. */
4308 else if (TREE_CODE (inner_type) == VECTOR_TYPE
4309 && TREE_CODE (outer_type) == VECTOR_TYPE
4310 && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
4311 return useless_type_conversion_p (TREE_TYPE (outer_type),
4312 TREE_TYPE (inner_type));
4314 else if (TREE_CODE (inner_type) == ARRAY_TYPE
4315 && TREE_CODE (outer_type) == ARRAY_TYPE)
4317 /* Preserve string attributes. */
4318 if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
4319 return false;
4321 /* Conversions from array types with unknown extent to
4322 array types with known extent are not useless. */
4323 if (!TYPE_DOMAIN (inner_type)
4324 && TYPE_DOMAIN (outer_type))
4325 return false;
4327 /* Nor are conversions from array types with non-constant size to
4328 array types with constant size or to a different size. */
4329 if (TYPE_SIZE (outer_type)
4330 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
4331 && (!TYPE_SIZE (inner_type)
4332 || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
4333 || !tree_int_cst_equal (TYPE_SIZE (outer_type),
4334 TYPE_SIZE (inner_type))))
4335 return false;
4337 /* Check conversions between arrays with partially known extents.
4338 If the array min/max values are constant they have to match.
4339 Otherwise allow conversions to unknown and variable extents.
4340 In particular this declares conversions that may change the
4341 mode to BLKmode as useless. */
4342 if (TYPE_DOMAIN (inner_type)
4343 && TYPE_DOMAIN (outer_type)
4344 && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
4346 tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
4347 tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
4348 tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
4349 tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
4351 /* After gimplification a variable min/max value carries no
4352 additional information compared to a NULL value. All that
4353 matters has been lowered to be part of the IL. */
4354 if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
4355 inner_min = NULL_TREE;
4356 if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
4357 outer_min = NULL_TREE;
4358 if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
4359 inner_max = NULL_TREE;
4360 if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
4361 outer_max = NULL_TREE;
4363 /* Conversions from a constant extent to a NULL or variable extent
4364 are useless, but not the other way around. */
4365 if (outer_min
4366 && (!inner_min
4367 || !tree_int_cst_equal (inner_min, outer_min)))
4368 return false;
4369 if (outer_max
4370 && (!inner_max
4371 || !tree_int_cst_equal (inner_max, outer_max)))
4372 return false;
4375 /* Recurse on the element check. */
4376 return useless_type_conversion_p (TREE_TYPE (outer_type),
4377 TREE_TYPE (inner_type));
4380 else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
4381 || TREE_CODE (inner_type) == METHOD_TYPE)
4382 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
4384 tree outer_parm, inner_parm;
4386 /* If the return types are not compatible bail out. */
4387 if (!useless_type_conversion_p (TREE_TYPE (outer_type),
4388 TREE_TYPE (inner_type)))
4389 return false;
4391 /* Method types should belong to a compatible base class. */
4392 if (TREE_CODE (inner_type) == METHOD_TYPE
4393 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
4394 TYPE_METHOD_BASETYPE (inner_type)))
4395 return false;
4397 /* A conversion to an unprototyped argument list is ok. */
4398 if (!prototype_p (outer_type))
4399 return true;
4401 /* If the unqualified argument types are compatible the conversion
4402 is useless. */
4403 if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
4404 return true;
4406 for (outer_parm = TYPE_ARG_TYPES (outer_type),
4407 inner_parm = TYPE_ARG_TYPES (inner_type);
4408 outer_parm && inner_parm;
4409 outer_parm = TREE_CHAIN (outer_parm),
4410 inner_parm = TREE_CHAIN (inner_parm))
4411 if (!useless_type_conversion_p
4412 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
4413 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
4414 return false;
4416 /* If there is a mismatch in the number of arguments the functions
4417 are not compatible. */
4418 if (outer_parm || inner_parm)
4419 return false;
4421 /* Defer to the target if necessary. */
4422 if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
4423 return comp_type_attributes (outer_type, inner_type) != 0;
4425 return true;
4428 /* For aggregates we rely on TYPE_CANONICAL exclusively and require
4429 explicit conversions for types that need to be structurally
4430 compared. */
4431 else if (AGGREGATE_TYPE_P (inner_type)
4432 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
4433 return false;
4435 return false;
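/* A few consequences of the rules above, as a sketch using the
   global type nodes:

     useless_type_conversion_p (unsigned_type_node, integer_type_node)
       => false, the signedness differs;
     useless_type_conversion_p (const_ptr_type_node, ptr_type_node)
       => true, qualification of the pointed-to type does not matter;

   while converting a data pointer to a function pointer type is never
   useless, per the FUNCTION_TYPE/METHOD_TYPE check above.  */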
4438 /* Return true if no conversion is required from either of TYPE1
4439 and TYPE2 to the other. Otherwise return false. */
4441 bool
4442 types_compatible_p (tree type1, tree type2)
4444 return (type1 == type2
4445 || (useless_type_conversion_p (type1, type2)
4446 && useless_type_conversion_p (type2, type1)));
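/* Since useless_type_conversion_p strips qualifiers via
   TYPE_MAIN_VARIANT, int and const int are compatible here, whereas
   int and unsigned int are not: the signedness check fails in both
   directions.  */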
4449 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
4451 void
4452 dump_decl_set (FILE *file, bitmap set)
4454 if (set)
4456 bitmap_iterator bi;
4457 unsigned i;
4459 fprintf (file, "{ ");
4461 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
4463 fprintf (file, "D.%u", i);
4464 fprintf (file, " ");
4467 fprintf (file, "}");
4469 else
4470 fprintf (file, "NIL");
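/* The output looks like "{ D.1024 D.1025 }" for a two-element set
   (the numbers are the set bits, typically DECL_UIDs) or "NIL" when
   SET is NULL.  */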
4473 /* Given SSA_NAMEs NAME1 and NAME2, return true if they are candidates for
4474 coalescing together, false otherwise.
4476 This must stay consistent with var_map_base_init in tree-ssa-live.c. */
4478 bool
4479 gimple_can_coalesce_p (tree name1, tree name2)
4481 /* First check the SSA_NAME's associated DECL. We only want to
4482 coalesce if they have the same DECL or both have no associated DECL. */
4483 tree var1 = SSA_NAME_VAR (name1);
4484 tree var2 = SSA_NAME_VAR (name2);
4485 var1 = (var1 && (!VAR_P (var1) || !DECL_IGNORED_P (var1))) ? var1 : NULL_TREE;
4486 var2 = (var2 && (!VAR_P (var2) || !DECL_IGNORED_P (var2))) ? var2 : NULL_TREE;
4487 if (var1 != var2)
4488 return false;
4490 /* Now check the types. If the types are the same, then we should
4491 try to coalesce V1 and V2. */
4492 tree t1 = TREE_TYPE (name1);
4493 tree t2 = TREE_TYPE (name2);
4494 if (t1 == t2)
4495 return true;
4497 /* If the types are not the same, check for a canonical type match. This
4498 (for example) allows coalescing when the types are fundamentally the
4499 same, but just have different names.
4501 Note pointer types with different address spaces may have the same
4502 canonical type. Those are rejected for coalescing by the
4503 types_compatible_p check. */
4504 if (TYPE_CANONICAL (t1)
4505 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2)
4506 && types_compatible_p (t1, t2))
4507 return true;
4509 return false;
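/* A sketch of the intended use during out-of-SSA, NAME1 and NAME2
   being two SSA names the caller would like to place in the same
   partition:

     if (gimple_can_coalesce_p (name1, name2))
       attempt_coalesce (name1, name2);

   attempt_coalesce stands in for the caller's coalescing routine.  */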
4512 /* Return true when CALL is a call stmt that definitely doesn't
4513 free any memory or otherwise make it unavailable. */
4514 bool
4515 nonfreeing_call_p (gimple call)
4517 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
4518 && gimple_call_flags (call) & ECF_LEAF)
4519 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
4521 /* Just in case these become ECF_LEAF in the future. */
4522 case BUILT_IN_FREE:
4523 case BUILT_IN_TM_FREE:
4524 case BUILT_IN_REALLOC:
4525 case BUILT_IN_STACK_RESTORE:
4526 return false;
4527 default:
4528 return true;
4531 return false;
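/* A sketch of a typical client, CALL being a call statement seen
   while tracking the state of allocated memory:

     if (!nonfreeing_call_p (call))
       invalidate_tracked_memory ();

   invalidate_tracked_memory is a placeholder; only calls that may
   free memory need to invalidate such state.  */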
4534 #include "gt-gimple.h"