Enable dumping of alias graphs.
[official-gcc/Ramakrishna.git] / gcc / gimple.c
blob9223aaa8824372cf54ef0b980a6176088515608a
1 /* Gimple IR support functions.
3 Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "ggc.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "gimple.h"
31 #include "toplev.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "value-prof.h"
35 #include "flags.h"
36 #include "demangle.h"
/* Human-readable name for each GIMPLE statement code, generated by
   expanding every DEFGSCODE entry in gimple.def to its NAME string.  */
#define DEFGSCODE(SYM, NAME, STRUCT)	NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE
/* All the tuples have their operand vector at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the 1
   element tree array at the end (see gimple_ops).  Expanded once per
   DEFGSCODE entry in gimple.def, keeping this table indexed by code.  */
#define DEFGSCODE(SYM, NAME, STRUCT) (sizeof (STRUCT) - sizeof (tree)),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gimple.def"
};
#undef DEFGSCODE
#ifdef GATHER_STATISTICS
/* Gimple stats: number of tuples and total bytes allocated, indexed
   by gimple_alloc_kind.  Updated in gimple_alloc_stat.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};
#endif /* GATHER_STATISTICS */
/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  Freed sequences are chained
   through their next_free field; GTY ((deletable)) lets the collector
   discard the whole cache at GC time.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
80 /* Gimple tuple constructors.
81 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
82 be passed a NULL to start with an empty sequence. */
84 /* Set the code for statement G to CODE. */
86 static inline void
87 gimple_set_code (gimple g, enum gimple_code code)
89 g->gsbase.code = code;
/* Return the GSS_* structure identifier used to lay out a GIMPLE
   statement with code CODE (see gsstruct.def).  Aborts on codes with
   no mapping.  */

static enum gimple_statement_structure_enum
gss_for_code (enum gimple_code code)
{
  switch (code)
    {
    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
    case GIMPLE_RETURN: return GSS_WITH_MEM_OPS;
    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_LABEL:
    case GIMPLE_DEBUG:
    case GIMPLE_SWITCH: return GSS_WITH_OPS;
    case GIMPLE_ASM: return GSS_ASM;
    case GIMPLE_BIND: return GSS_BIND;
    case GIMPLE_CATCH: return GSS_CATCH;
    case GIMPLE_EH_FILTER: return GSS_EH_FILTER;
    case GIMPLE_NOP: return GSS_BASE;
    case GIMPLE_PHI: return GSS_PHI;
    case GIMPLE_RESX: return GSS_RESX;
    case GIMPLE_TRY: return GSS_TRY;
    case GIMPLE_WITH_CLEANUP_EXPR: return GSS_WCE;
    case GIMPLE_OMP_CRITICAL: return GSS_OMP_CRITICAL;
    case GIMPLE_OMP_FOR: return GSS_OMP_FOR;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION: return GSS_OMP;
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_SECTIONS_SWITCH: return GSS_BASE;
    case GIMPLE_OMP_CONTINUE: return GSS_OMP_CONTINUE;
    case GIMPLE_OMP_PARALLEL: return GSS_OMP_PARALLEL;
    case GIMPLE_OMP_TASK: return GSS_OMP_TASK;
    case GIMPLE_OMP_SECTIONS: return GSS_OMP_SECTIONS;
    case GIMPLE_OMP_SINGLE: return GSS_OMP_SINGLE;
    case GIMPLE_OMP_ATOMIC_LOAD: return GSS_OMP_ATOMIC_LOAD;
    case GIMPLE_OMP_ATOMIC_STORE: return GSS_OMP_ATOMIC_STORE;
    case GIMPLE_PREDICT: return GSS_BASE;
    default: gcc_unreachable ();
    }
}
/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE (excluding the variable-length trailing operand vector;
   see gimple_alloc_stat for that adjustment).  */

static size_t
gimple_size (enum gimple_code code)
{
  enum gimple_statement_structure_enum gss = gss_for_code (code);

  /* The two generic operand layouts cover most codes; dispatch on the
     structure kind first, then fall back to a per-code switch.  */
  if (gss == GSS_WITH_OPS)
    return sizeof (struct gimple_statement_with_ops);
  else if (gss == GSS_WITH_MEM_OPS)
    return sizeof (struct gimple_statement_with_memory_ops);

  switch (code)
    {
    case GIMPLE_ASM:
      return sizeof (struct gimple_statement_asm);
    case GIMPLE_NOP:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_BIND:
      return sizeof (struct gimple_statement_bind);
    case GIMPLE_CATCH:
      return sizeof (struct gimple_statement_catch);
    case GIMPLE_EH_FILTER:
      return sizeof (struct gimple_statement_eh_filter);
    case GIMPLE_TRY:
      return sizeof (struct gimple_statement_try);
    case GIMPLE_RESX:
      return sizeof (struct gimple_statement_resx);
    case GIMPLE_OMP_CRITICAL:
      return sizeof (struct gimple_statement_omp_critical);
    case GIMPLE_OMP_FOR:
      return sizeof (struct gimple_statement_omp_for);
    case GIMPLE_OMP_PARALLEL:
      return sizeof (struct gimple_statement_omp_parallel);
    case GIMPLE_OMP_TASK:
      return sizeof (struct gimple_statement_omp_task);
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
      return sizeof (struct gimple_statement_omp);
    case GIMPLE_OMP_RETURN:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_CONTINUE:
      return sizeof (struct gimple_statement_omp_continue);
    case GIMPLE_OMP_SECTIONS:
      return sizeof (struct gimple_statement_omp_sections);
    case GIMPLE_OMP_SECTIONS_SWITCH:
      return sizeof (struct gimple_statement_base);
    case GIMPLE_OMP_SINGLE:
      return sizeof (struct gimple_statement_omp_single);
    case GIMPLE_OMP_ATOMIC_LOAD:
      return sizeof (struct gimple_statement_omp_atomic_load);
    case GIMPLE_OMP_ATOMIC_STORE:
      return sizeof (struct gimple_statement_omp_atomic_store);
    case GIMPLE_WITH_CLEANUP_EXPR:
      return sizeof (struct gimple_statement_wce);
    case GIMPLE_PREDICT:
      return sizeof (struct gimple_statement_base);
    default:
      break;
    }

  gcc_unreachable ();
}
204 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
205 operands. */
207 #define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
208 static gimple
209 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
211 size_t size;
212 gimple stmt;
214 size = gimple_size (code);
215 if (num_ops > 0)
216 size += sizeof (tree) * (num_ops - 1);
218 #ifdef GATHER_STATISTICS
220 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
221 gimple_alloc_counts[(int) kind]++;
222 gimple_alloc_sizes[(int) kind] += size;
224 #endif
226 stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
227 gimple_set_code (stmt, code);
228 gimple_set_num_ops (stmt, num_ops);
230 /* Do not call gimple_set_modified here as it has other side
231 effects and this tuple is still not completely built. */
232 stmt->gsbase.modified = 1;
234 return stmt;
237 /* Set SUBCODE to be the code of the expression computed by statement G. */
239 static inline void
240 gimple_set_subcode (gimple g, unsigned subcode)
242 /* We only have 16 bits for the RHS code. Assert that we are not
243 overflowing it. */
244 gcc_assert (subcode < (1 << 16));
245 g->gsbase.subcode = subcode;
250 /* Build a tuple with operands. CODE is the statement to build (which
251 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
252 for the new tuple. NUM_OPS is the number of operands to allocate. */
254 #define gimple_build_with_ops(c, s, n) \
255 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
257 static gimple
258 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
259 unsigned num_ops MEM_STAT_DECL)
261 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
262 gimple_set_subcode (s, subcode);
264 return s;
268 /* Build a GIMPLE_RETURN statement returning RETVAL. */
270 gimple
271 gimple_build_return (tree retval)
273 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
274 if (retval)
275 gimple_return_set_retval (s, retval);
276 return s;
279 /* Helper for gimple_build_call, gimple_build_call_vec and
280 gimple_build_call_from_tree. Build the basic components of a
281 GIMPLE_CALL statement to function FN with NARGS arguments. */
283 static inline gimple
284 gimple_build_call_1 (tree fn, unsigned nargs)
286 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
287 if (TREE_CODE (fn) == FUNCTION_DECL)
288 fn = build_fold_addr_expr (fn);
289 gimple_set_op (s, 1, fn);
290 return s;
294 /* Build a GIMPLE_CALL statement to function FN with the arguments
295 specified in vector ARGS. */
297 gimple
298 gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
300 unsigned i;
301 unsigned nargs = VEC_length (tree, args);
302 gimple call = gimple_build_call_1 (fn, nargs);
304 for (i = 0; i < nargs; i++)
305 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
307 return call;
311 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
312 arguments. The ... are the arguments. */
314 gimple
315 gimple_build_call (tree fn, unsigned nargs, ...)
317 va_list ap;
318 gimple call;
319 unsigned i;
321 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
323 call = gimple_build_call_1 (fn, nargs);
325 va_start (ap, nargs);
326 for (i = 0; i < nargs; i++)
327 gimple_call_set_arg (call, i, va_arg (ap, tree));
328 va_end (ap);
330 return call;
334 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
335 assumed to be in GIMPLE form already. Minimal checking is done of
336 this fact. */
338 gimple
339 gimple_build_call_from_tree (tree t)
341 unsigned i, nargs;
342 gimple call;
343 tree fndecl = get_callee_fndecl (t);
345 gcc_assert (TREE_CODE (t) == CALL_EXPR);
347 nargs = call_expr_nargs (t);
348 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
350 for (i = 0; i < nargs; i++)
351 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
353 gimple_set_block (call, TREE_BLOCK (t));
355 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
356 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
357 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
358 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
359 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
360 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
361 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
362 gimple_set_no_warning (call, TREE_NO_WARNING (t));
364 return call;
368 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
369 *OP1_P and *OP2_P respectively. */
371 void
372 extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
373 tree *op2_p)
375 enum gimple_rhs_class grhs_class;
377 *subcode_p = TREE_CODE (expr);
378 grhs_class = get_gimple_rhs_class (*subcode_p);
380 if (grhs_class == GIMPLE_BINARY_RHS)
382 *op1_p = TREE_OPERAND (expr, 0);
383 *op2_p = TREE_OPERAND (expr, 1);
385 else if (grhs_class == GIMPLE_UNARY_RHS)
387 *op1_p = TREE_OPERAND (expr, 0);
388 *op2_p = NULL_TREE;
390 else if (grhs_class == GIMPLE_SINGLE_RHS)
392 *op1_p = expr;
393 *op2_p = NULL_TREE;
395 else
396 gcc_unreachable ();
400 /* Build a GIMPLE_ASSIGN statement.
402 LHS of the assignment.
403 RHS of the assignment which can be unary or binary. */
405 gimple
406 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
408 enum tree_code subcode;
409 tree op1, op2;
411 extract_ops_from_tree (rhs, &subcode, &op1, &op2);
412 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
413 PASS_MEM_STAT);
417 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
418 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
419 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
421 gimple
422 gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
423 tree op2 MEM_STAT_DECL)
425 unsigned num_ops;
426 gimple p;
428 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
429 code). */
430 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
432 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
433 PASS_MEM_STAT);
434 gimple_assign_set_lhs (p, lhs);
435 gimple_assign_set_rhs1 (p, op1);
436 if (op2)
438 gcc_assert (num_ops > 2);
439 gimple_assign_set_rhs2 (p, op2);
442 return p;
446 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
448 DST/SRC are the destination and source respectively. You can pass
449 ungimplified trees in DST or SRC, in which case they will be
450 converted to a gimple operand if necessary.
452 This function returns the newly created GIMPLE_ASSIGN tuple. */
454 gimple
455 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
457 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
458 gimplify_and_add (t, seq_p);
459 ggc_free (t);
460 return gimple_seq_last_stmt (*seq_p);
464 /* Build a GIMPLE_COND statement.
466 PRED is the condition used to compare LHS and the RHS.
467 T_LABEL is the label to jump to if the condition is true.
468 F_LABEL is the label to jump to otherwise. */
470 gimple
471 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
472 tree t_label, tree f_label)
474 gimple p;
476 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
477 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
478 gimple_cond_set_lhs (p, lhs);
479 gimple_cond_set_rhs (p, rhs);
480 gimple_cond_set_true_label (p, t_label);
481 gimple_cond_set_false_label (p, f_label);
482 return p;
/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.
   Conditions that are not already binary comparisons are canonicalized:
   'if (!VAL)' becomes VAL == 0 and 'if (VAL)' becomes VAL != 0, with the
   zero constant converted to VAL's type at COND's location.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  location_t loc = EXPR_LOCATION (cond);
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
	      || TREE_CODE (cond) == TRUTH_NOT_EXPR
	      || is_gimple_min_invariant (cond)
	      || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
  /* Canonicalize conditionals of the form 'if (VAL)'  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = fold_convert_loc (loc, TREE_TYPE (*lhs_p), integer_zero_node);
    }
}
517 /* Build a GIMPLE_COND statement from the conditional expression tree
518 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
520 gimple
521 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
523 enum tree_code code;
524 tree lhs, rhs;
526 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
527 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
530 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
531 boolean expression tree COND. */
533 void
534 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
536 enum tree_code code;
537 tree lhs, rhs;
539 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
540 gimple_cond_set_condition (stmt, code, lhs, rhs);
543 /* Build a GIMPLE_LABEL statement for LABEL. */
545 gimple
546 gimple_build_label (tree label)
548 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
549 gimple_label_set_label (p, label);
550 return p;
553 /* Build a GIMPLE_GOTO statement to label DEST. */
555 gimple
556 gimple_build_goto (tree dest)
558 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
559 gimple_goto_set_dest (p, dest);
560 return p;
564 /* Build a GIMPLE_NOP statement. */
566 gimple
567 gimple_build_nop (void)
569 return gimple_alloc (GIMPLE_NOP, 0);
573 /* Build a GIMPLE_BIND statement.
574 VARS are the variables in BODY.
575 BLOCK is the containing block. */
577 gimple
578 gimple_build_bind (tree vars, gimple_seq body, tree block)
580 gimple p = gimple_alloc (GIMPLE_BIND, 0);
581 gimple_bind_set_vars (p, vars);
582 if (body)
583 gimple_bind_set_body (p, body);
584 if (block)
585 gimple_bind_set_block (p, block);
586 return p;
589 /* Helper function to set the simple fields of a asm stmt.
591 STRING is a pointer to a string that is the asm blocks assembly code.
592 NINPUT is the number of register inputs.
593 NOUTPUT is the number of register outputs.
594 NCLOBBERS is the number of clobbered registers.
597 static inline gimple
598 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
599 unsigned nclobbers)
601 gimple p;
602 int size = strlen (string);
604 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
605 ninputs + noutputs + nclobbers);
607 p->gimple_asm.ni = ninputs;
608 p->gimple_asm.no = noutputs;
609 p->gimple_asm.nc = nclobbers;
610 p->gimple_asm.string = ggc_alloc_string (string, size);
612 #ifdef GATHER_STATISTICS
613 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
614 #endif
616 return p;
619 /* Build a GIMPLE_ASM statement.
621 STRING is the assembly code.
622 NINPUT is the number of register inputs.
623 NOUTPUT is the number of register outputs.
624 NCLOBBERS is the number of clobbered registers.
625 INPUTS is a vector of the input register parameters.
626 OUTPUTS is a vector of the output register parameters.
627 CLOBBERS is a vector of the clobbered register parameters. */
629 gimple
630 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
631 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers)
633 gimple p;
634 unsigned i;
636 p = gimple_build_asm_1 (string,
637 VEC_length (tree, inputs),
638 VEC_length (tree, outputs),
639 VEC_length (tree, clobbers));
641 for (i = 0; i < VEC_length (tree, inputs); i++)
642 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
644 for (i = 0; i < VEC_length (tree, outputs); i++)
645 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
647 for (i = 0; i < VEC_length (tree, clobbers); i++)
648 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
650 return p;
653 /* Build a GIMPLE_ASM statement.
655 STRING is the assembly code.
656 NINPUT is the number of register inputs.
657 NOUTPUT is the number of register outputs.
658 NCLOBBERS is the number of clobbered registers.
659 ... are trees for each input, output and clobbered register. */
661 gimple
662 gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs,
663 unsigned nclobbers, ...)
665 gimple p;
666 unsigned i;
667 va_list ap;
669 p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);
671 va_start (ap, nclobbers);
673 for (i = 0; i < ninputs; i++)
674 gimple_asm_set_input_op (p, i, va_arg (ap, tree));
676 for (i = 0; i < noutputs; i++)
677 gimple_asm_set_output_op (p, i, va_arg (ap, tree));
679 for (i = 0; i < nclobbers; i++)
680 gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));
682 va_end (ap);
684 return p;
687 /* Build a GIMPLE_CATCH statement.
689 TYPES are the catch types.
690 HANDLER is the exception handler. */
692 gimple
693 gimple_build_catch (tree types, gimple_seq handler)
695 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
696 gimple_catch_set_types (p, types);
697 if (handler)
698 gimple_catch_set_handler (p, handler);
700 return p;
703 /* Build a GIMPLE_EH_FILTER statement.
705 TYPES are the filter's types.
706 FAILURE is the filter's failure action. */
708 gimple
709 gimple_build_eh_filter (tree types, gimple_seq failure)
711 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
712 gimple_eh_filter_set_types (p, types);
713 if (failure)
714 gimple_eh_filter_set_failure (p, failure);
716 return p;
719 /* Build a GIMPLE_TRY statement.
721 EVAL is the expression to evaluate.
722 CLEANUP is the cleanup expression.
723 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
724 whether this is a try/catch or a try/finally respectively. */
726 gimple
727 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
728 enum gimple_try_flags kind)
730 gimple p;
732 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
733 p = gimple_alloc (GIMPLE_TRY, 0);
734 gimple_set_subcode (p, kind);
735 if (eval)
736 gimple_try_set_eval (p, eval);
737 if (cleanup)
738 gimple_try_set_cleanup (p, cleanup);
740 return p;
743 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
745 CLEANUP is the cleanup expression. */
747 gimple
748 gimple_build_wce (gimple_seq cleanup)
750 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
751 if (cleanup)
752 gimple_wce_set_cleanup (p, cleanup);
754 return p;
758 /* Build a GIMPLE_RESX statement.
760 REGION is the region number from which this resx causes control flow to
761 leave. */
763 gimple
764 gimple_build_resx (int region)
766 gimple p = gimple_alloc (GIMPLE_RESX, 0);
767 gimple_resx_set_region (p, region);
768 return p;
772 /* The helper for constructing a gimple switch statement.
773 INDEX is the switch's index.
774 NLABELS is the number of labels in the switch excluding the default.
775 DEFAULT_LABEL is the default label for the switch statement. */
777 static inline gimple
778 gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label)
780 /* nlabels + 1 default label + 1 index. */
781 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
782 nlabels + 1 + 1);
783 gimple_switch_set_index (p, index);
784 gimple_switch_set_default_label (p, default_label);
785 return p;
789 /* Build a GIMPLE_SWITCH statement.
791 INDEX is the switch's index.
792 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
793 ... are the labels excluding the default. */
795 gimple
796 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
798 va_list al;
799 unsigned i;
800 gimple p;
802 p = gimple_build_switch_1 (nlabels, index, default_label);
804 /* Store the rest of the labels. */
805 va_start (al, default_label);
806 for (i = 1; i <= nlabels; i++)
807 gimple_switch_set_label (p, i, va_arg (al, tree));
808 va_end (al);
810 return p;
814 /* Build a GIMPLE_SWITCH statement.
816 INDEX is the switch's index.
817 DEFAULT_LABEL is the default label
818 ARGS is a vector of labels excluding the default. */
820 gimple
821 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
823 unsigned i;
824 unsigned nlabels = VEC_length (tree, args);
825 gimple p = gimple_build_switch_1 (nlabels, index, default_label);
827 /* Put labels in labels[1 - (nlabels + 1)].
828 Default label is in labels[0]. */
829 for (i = 1; i <= nlabels; i++)
830 gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1));
832 return p;
836 /* Build a new GIMPLE_DEBUG_BIND statement.
838 VAR is bound to VALUE; block and location are taken from STMT. */
840 gimple
841 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
843 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
844 (unsigned)GIMPLE_DEBUG_BIND, 2
845 PASS_MEM_STAT);
847 gimple_debug_bind_set_var (p, var);
848 gimple_debug_bind_set_value (p, value);
849 if (stmt)
851 gimple_set_block (p, gimple_block (stmt));
852 gimple_set_location (p, gimple_location (stmt));
855 return p;
859 /* Build a GIMPLE_OMP_CRITICAL statement.
861 BODY is the sequence of statements for which only one thread can execute.
862 NAME is optional identifier for this critical block. */
864 gimple
865 gimple_build_omp_critical (gimple_seq body, tree name)
867 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
868 gimple_omp_critical_set_name (p, name);
869 if (body)
870 gimple_omp_set_body (p, body);
872 return p;
875 /* Build a GIMPLE_OMP_FOR statement.
877 BODY is sequence of statements inside the for loop.
878 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
879 lastprivate, reductions, ordered, schedule, and nowait.
880 COLLAPSE is the collapse count.
881 PRE_BODY is the sequence of statements that are loop invariant. */
883 gimple
884 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
885 gimple_seq pre_body)
887 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
888 if (body)
889 gimple_omp_set_body (p, body);
890 gimple_omp_for_set_clauses (p, clauses);
891 p->gimple_omp_for.collapse = collapse;
892 p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
893 if (pre_body)
894 gimple_omp_for_set_pre_body (p, pre_body);
896 return p;
900 /* Build a GIMPLE_OMP_PARALLEL statement.
902 BODY is sequence of statements which are executed in parallel.
903 CLAUSES, are the OMP parallel construct's clauses.
904 CHILD_FN is the function created for the parallel threads to execute.
905 DATA_ARG are the shared data argument(s). */
907 gimple
908 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
909 tree data_arg)
911 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
912 if (body)
913 gimple_omp_set_body (p, body);
914 gimple_omp_parallel_set_clauses (p, clauses);
915 gimple_omp_parallel_set_child_fn (p, child_fn);
916 gimple_omp_parallel_set_data_arg (p, data_arg);
918 return p;
922 /* Build a GIMPLE_OMP_TASK statement.
924 BODY is sequence of statements which are executed by the explicit task.
925 CLAUSES, are the OMP parallel construct's clauses.
926 CHILD_FN is the function created for the parallel threads to execute.
927 DATA_ARG are the shared data argument(s).
928 COPY_FN is the optional function for firstprivate initialization.
929 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
931 gimple
932 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
933 tree data_arg, tree copy_fn, tree arg_size,
934 tree arg_align)
936 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
937 if (body)
938 gimple_omp_set_body (p, body);
939 gimple_omp_task_set_clauses (p, clauses);
940 gimple_omp_task_set_child_fn (p, child_fn);
941 gimple_omp_task_set_data_arg (p, data_arg);
942 gimple_omp_task_set_copy_fn (p, copy_fn);
943 gimple_omp_task_set_arg_size (p, arg_size);
944 gimple_omp_task_set_arg_align (p, arg_align);
946 return p;
950 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
952 BODY is the sequence of statements in the section. */
954 gimple
955 gimple_build_omp_section (gimple_seq body)
957 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
958 if (body)
959 gimple_omp_set_body (p, body);
961 return p;
965 /* Build a GIMPLE_OMP_MASTER statement.
967 BODY is the sequence of statements to be executed by just the master. */
969 gimple
970 gimple_build_omp_master (gimple_seq body)
972 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
973 if (body)
974 gimple_omp_set_body (p, body);
976 return p;
980 /* Build a GIMPLE_OMP_CONTINUE statement.
982 CONTROL_DEF is the definition of the control variable.
983 CONTROL_USE is the use of the control variable. */
985 gimple
986 gimple_build_omp_continue (tree control_def, tree control_use)
988 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
989 gimple_omp_continue_set_control_def (p, control_def);
990 gimple_omp_continue_set_control_use (p, control_use);
991 return p;
994 /* Build a GIMPLE_OMP_ORDERED statement.
996 BODY is the sequence of statements inside a loop that will executed in
997 sequence. */
999 gimple
1000 gimple_build_omp_ordered (gimple_seq body)
1002 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1003 if (body)
1004 gimple_omp_set_body (p, body);
1006 return p;
1010 /* Build a GIMPLE_OMP_RETURN statement.
1011 WAIT_P is true if this is a non-waiting return. */
1013 gimple
1014 gimple_build_omp_return (bool wait_p)
1016 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1017 if (wait_p)
1018 gimple_omp_return_set_nowait (p);
1020 return p;
1024 /* Build a GIMPLE_OMP_SECTIONS statement.
1026 BODY is a sequence of section statements.
1027 CLAUSES are any of the OMP sections contsruct's clauses: private,
1028 firstprivate, lastprivate, reduction, and nowait. */
1030 gimple
1031 gimple_build_omp_sections (gimple_seq body, tree clauses)
1033 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1034 if (body)
1035 gimple_omp_set_body (p, body);
1036 gimple_omp_sections_set_clauses (p, clauses);
1038 return p;
1042 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1044 gimple
1045 gimple_build_omp_sections_switch (void)
1047 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1051 /* Build a GIMPLE_OMP_SINGLE statement.
1053 BODY is the sequence of statements that will be executed once.
1054 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1055 copyprivate, nowait. */
1057 gimple
1058 gimple_build_omp_single (gimple_seq body, tree clauses)
1060 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1061 if (body)
1062 gimple_omp_set_body (p, body);
1063 gimple_omp_single_set_clauses (p, clauses);
1065 return p;
1069 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1071 gimple
1072 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1074 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1075 gimple_omp_atomic_load_set_lhs (p, lhs);
1076 gimple_omp_atomic_load_set_rhs (p, rhs);
1077 return p;
1080 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1082 VAL is the value we are storing. */
1084 gimple
1085 gimple_build_omp_atomic_store (tree val)
1087 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1088 gimple_omp_atomic_store_set_val (p, val);
1089 return p;
1092 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1093 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1095 gimple
1096 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1098 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1099 /* Ensure all the predictors fit into the lower bits of the subcode. */
1100 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1101 gimple_predict_set_predictor (p, predictor);
1102 gimple_predict_set_outcome (p, outcome);
1103 return p;
1106 /* Return which gimple structure is used by T. The enums here are defined
1107 in gsstruct.def. */
1109 enum gimple_statement_structure_enum
1110 gimple_statement_structure (gimple gs)
1112 return gss_for_code (gimple_code (gs));
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  GS is the offending
   statement; FILE/LINE/FUNCTION locate the failed check; CODE and
   SUBCODE are the expected statement code and subcode.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  tree_code_name[subcode],
		  gimple_code_name[gimple_code (gs)],
		  /* Subcode 0 has no meaningful tree code name.  */
		  gs->gsbase.subcode > 0
		    ? tree_code_name[gs->gsbase.subcode]
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      /* Pop the head of the free list and clear it for reuse.  */
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}
/* Return SEQ to the free pool of GIMPLE sequences.  SEQ must be empty
   (callers are expected to unlink all statements first).  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}
1182 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1183 *SEQ_P is NULL, a new sequence is allocated. */
1185 void
1186 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1188 gimple_stmt_iterator si;
1190 if (gs == NULL)
1191 return;
1193 if (*seq_p == NULL)
1194 *seq_p = gimple_seq_alloc ();
1196 si = gsi_last (*seq_p);
1197 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1201 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1202 NULL, a new sequence is allocated. */
1204 void
1205 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1207 gimple_stmt_iterator si;
1209 if (src == NULL)
1210 return;
1212 if (*dst_p == NULL)
1213 *dst_p = gimple_seq_alloc ();
1215 si = gsi_last (*dst_p);
1216 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1220 /* Helper function of empty_body_p. Return true if STMT is an empty
1221 statement. */
1223 static bool
1224 empty_stmt_p (gimple stmt)
1226 if (gimple_code (stmt) == GIMPLE_NOP)
1227 return true;
1228 if (gimple_code (stmt) == GIMPLE_BIND)
1229 return empty_body_p (gimple_bind_body (stmt));
1230 return false;
1234 /* Return true if BODY contains nothing but empty statements. */
1236 bool
1237 empty_body_p (gimple_seq body)
1239 gimple_stmt_iterator i;
1241 if (gimple_seq_empty_p (body))
1242 return true;
1243 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1244 if (!empty_stmt_p (gsi_stmt (i))
1245 && !is_gimple_debug (gsi_stmt (i)))
1246 return false;
1248 return true;
1252 /* Perform a deep copy of sequence SRC and return the result. */
1254 gimple_seq
1255 gimple_seq_copy (gimple_seq src)
1257 gimple_stmt_iterator gsi;
1258 gimple_seq new_seq = gimple_seq_alloc ();
1259 gimple stmt;
1261 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1263 stmt = gimple_copy (gsi_stmt (gsi));
1264 gimple_seq_add_stmt (&new_seq, stmt);
1267 return new_seq;
/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
   value is stored in WI->CALLBACK_RESULT and the statement that
   produced the value is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
		 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
	{
	  /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
	     to hold it.  */
	  gcc_assert (wi);
	  wi->callback_result = ret;
	  return gsi_stmt (gsi);
	}
    }

  /* Clear any stale result from a previous walk before reporting that
     nothing stopped this one.  */
  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.
   CALLBACK_OP is invoked on each output and input operand via walk_tree;
   WI (if non-NULL) has its val_only/is_lhs flags updated per operand
   according to the parsed asm constraints, and both flags are reset
   before returning.  Returns the first non-NULL callback result, or
   NULL_TREE.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
		 struct walk_stmt_info *wi)
{
  tree ret;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  /* Stack-allocate the output-constraint strings; input constraint
     parsing below needs to see all of them.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Outputs are stores, hence LHS operands.  */
  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      tree op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
			       &is_inout);
      /* A memory-only operand must be walked as an address, not a value.  */
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);
      if (wi)
	wi->val_only = (allows_reg || !allows_mem);

      /* Although input "m" is not really a LHS, we need a lvalue.  */
      if (wi)
	wi->is_lhs = !wi->val_only;
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
	return ret;
    }

  /* Restore the walker state for subsequent operands.  */
  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  return NULL_TREE;
}
/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

	walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

inline tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
		struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  A formal temporary LHS may use a
	 COMPONENT_REF RHS.  */
      if (wi)
	wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
		       || !gimple_assign_single_p (stmt);

      /* Operand 0 is the LHS, so the RHS operands start at 1.  */
      for (i = 1; i < gimple_num_ops (stmt); i++)
	{
	  ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
	 may use a COMPONENT_REF on the LHS.  */
      if (wi)
	{
	  /* If the RHS has more than 1 operand, it is not appropriate
	     for the memory.  */
	  wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
			 || !gimple_assign_single_p (stmt);
	  wi->is_lhs = true;
	}

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
	return ret;

      /* Reset the walker flags for whoever walks the next operand.  */
      if (wi)
	{
	  wi->val_only = true;
	  wi->is_lhs = false;
	}
      break;

    case GIMPLE_CALL:
      if (wi)
	wi->is_lhs = false;

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
	{
	  ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
			   pset);
	  if (ret)
	    return ret;
	}

      /* The call LHS, if any, is the only store in the statement.  */
      if (wi)
	wi->is_lhs = true;

      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
      if (ret)
	return ret;

      if (wi)
	wi->is_lhs = false;
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_ASM:
      /* Asm operands need constraint-aware handling; see the helper.  */
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
		       callback_op, wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      /* Walk index/initial/final/increment for every collapsed loop.  */
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	{
	  ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	  ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
			   wi, pset);
	  if (ret)
	    return ret;
	}
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
		       pset);
      if (ret)
	return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
		       wi, pset);
      if (ret)
	return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
	/* For any other tuple with a generic operand vector, walk every
	   operand in order.  */
	enum gimple_statement_structure_enum gss;
	gss = gimple_statement_structure (stmt);
	if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
	      if (ret)
		return ret;
	    }
      }
      break;
    }

  return NULL_TREE;
}
/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
		  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    wi->gsi = *gsi;

  /* Optionally track source locations while walking.  */
  if (wi && wi->want_locations && gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
	return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
	 a value to return.  */
      gcc_assert (tree_ret == NULL);

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
	return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.
     A non-NULL result from walk_gimple_seq means a sub-walk stopped;
     the actual value lives in WI->CALLBACK_RESULT.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
			     wi);
      if (ret)
	return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
			     callback_op, wi);
      if (ret)
	return wi->callback_result;
      break;

    default:
      /* All codes with substatements must be handled above.  */
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}
1760 /* Set sequence SEQ to be the GIMPLE body for function FN. */
1762 void
1763 gimple_set_body (tree fndecl, gimple_seq seq)
1765 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1766 if (fn == NULL)
1768 /* If FNDECL still does not have a function structure associated
1769 with it, then it does not make sense for it to receive a
1770 GIMPLE body. */
1771 gcc_assert (seq == NULL);
1773 else
1774 fn->gimple_body = seq;
1778 /* Return the body of GIMPLE statements for function FN. */
1780 gimple_seq
1781 gimple_body (tree fndecl)
1783 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1784 return fn ? fn->gimple_body : NULL;
1787 /* Return true when FNDECL has Gimple body either in unlowered
1788 or CFG form. */
1789 bool
1790 gimple_has_body_p (tree fndecl)
1792 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1793 return (gimple_body (fndecl) || (fn && fn->cfg));
/* Detect flags (ECF_*) from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);
  tree t;

  if (decl)
    /* Direct call: take the flags from the callee declaration.  */
    flags = flags_from_decl_or_type (decl);
  else
    {
      /* Indirect call: derive the flags from the pointed-to function
	 type, if the callee expression has pointer type at all.  */
      t = TREE_TYPE (gimple_call_fn (stmt));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}
1821 /* Return true if GS is a copy assignment. */
1823 bool
1824 gimple_assign_copy_p (gimple gs)
1826 return gimple_code (gs) == GIMPLE_ASSIGN
1827 && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1828 == GIMPLE_SINGLE_RHS
1829 && is_gimple_val (gimple_op (gs, 1));
1833 /* Return true if GS is a SSA_NAME copy assignment. */
1835 bool
1836 gimple_assign_ssa_name_copy_p (gimple gs)
1838 return (gimple_code (gs) == GIMPLE_ASSIGN
1839 && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1840 == GIMPLE_SINGLE_RHS)
1841 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1842 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1846 /* Return true if GS is an assignment with a singleton RHS, i.e.,
1847 there is no operator associated with the assignment itself.
1848 Unlike gimple_assign_copy_p, this predicate returns true for
1849 any RHS operand, including those that perform an operation
1850 and do not have the semantics of a copy, such as COND_EXPR. */
1852 bool
1853 gimple_assign_single_p (gimple gs)
1855 return (gimple_code (gs) == GIMPLE_ASSIGN
1856 && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
1857 == GIMPLE_SINGLE_RHS);
/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (gimple_code (gs) == GIMPLE_ASSIGN
	  /* Only conversions and NON_LVALUE_EXPR can be value-preserving.  */
	  && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
	      || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
	  && gimple_assign_rhs1 (gs) != error_mark_node
	  /* A conversion is a nop only when it does not change the mode.  */
	  && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
	      == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->gsbase.bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* First time this label is placed in a block: assign it a
	     fresh UID and grow the map if the UID falls past the end.  */
	  unsigned old_len = VEC_length (basic_block, label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      /* Grow by ~1.5x to amortize repeated insertions.  */
	      unsigned new_len = 3 * uid / 2 + 1;

	      VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
				     new_len);
	    }
	}

      VEC_replace (basic_block, label_to_block_map, uid, bb);
    }
}
/* Fold the expression computed by STMT.  If the expression can be
   folded, return the folded result, otherwise return NULL.  STMT is
   not modified.  */

tree
gimple_fold (const_gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      /* Fold the predicate as a boolean binary expression.  */
      return fold_binary_loc (loc, gimple_cond_code (stmt),
			      boolean_type_node,
			      gimple_cond_lhs (stmt),
			      gimple_cond_rhs (stmt));

    case GIMPLE_ASSIGN:
      /* Dispatch on the shape of the RHS.  */
      switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
	{
	case GIMPLE_UNARY_RHS:
	  return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt));
	case GIMPLE_BINARY_RHS:
	  return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));
	case GIMPLE_SINGLE_RHS:
	  return fold (gimple_assign_rhs1 (stmt));
	default:;
	}
      break;

    case GIMPLE_SWITCH:
      return gimple_switch_index (stmt);

    case GIMPLE_CALL:
      /* Calls are not folded here.  */
      return NULL_TREE;

    default:
      break;
    }

  /* Reached only for statement codes this function does not handle.  */
  gcc_unreachable ();
}
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2;

  /* Decompose EXPR into an RHS code and up to two flat operands, then
     delegate the (possibly reallocating) update.  */
  extract_ops_from_tree (expr, &subcode, &op1, &op2);
  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
}
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1 and OP2.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
				tree op1, tree op2)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      /* Copy the old header/fields over the freshly allocated tuple,
	 then splice the new tuple into the sequence in place.  */
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
}
2030 /* Return the LHS of a statement that performs an assignment,
2031 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2032 for a call to a function that returns no value, or for a
2033 statement other than an assignment or a call. */
2035 tree
2036 gimple_get_lhs (const_gimple stmt)
2038 enum gimple_code code = gimple_code (stmt);
2040 if (code == GIMPLE_ASSIGN)
2041 return gimple_assign_lhs (stmt);
2042 else if (code == GIMPLE_CALL)
2043 return gimple_call_lhs (stmt);
2044 else
2045 return NULL_TREE;
2049 /* Set the LHS of a statement that performs an assignment,
2050 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2052 void
2053 gimple_set_lhs (gimple stmt, tree lhs)
2055 enum gimple_code code = gimple_code (stmt);
2057 if (code == GIMPLE_ASSIGN)
2058 gimple_assign_set_lhs (stmt, lhs);
2059 else if (code == GIMPLE_CALL)
2060 gimple_call_set_lhs (stmt, lhs);
2061 else
2062 gcc_unreachable();
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));

  /* If STMT has sub-statements, deep-copy them as well.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  /* The shallow memcpy above shared the iteration vector;
	     allocate a fresh one before overwriting its entries.  */
	  copy->gimple_omp_for.iter
	    = GGC_NEWVEC (struct gimple_omp_for_iter,
			  gimple_omp_for_collapse (stmt));
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU  */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  /* All OMP directive copies share this body duplication.  */
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  if (num_ops > 0)
    {
      for (i = 0; i < num_ops; i++)
	gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

      /* Clear out SSA operand vectors on COPY.  */
      if (gimple_has_ops (stmt))
	{
	  gimple_set_def_ops (copy, NULL);
	  gimple_set_use_ops (copy, NULL);
	}

      if (gimple_has_mem_ops (stmt))
	{
	  gimple_set_vdef (copy, gimple_vdef (stmt));
	  gimple_set_vuse (copy, gimple_vuse (stmt));
	}

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
   a MODIFIED field.  Statements without operands are left untouched.  */

void
gimple_set_modified (gimple s, bool modifiedp)
{
  if (gimple_has_ops (s))
    {
      s->gsbase.modified = (unsigned) modifiedp;

      /* Modified noreturn calls are recorded so later passes can
	 re-examine them (e.g. to split blocks after them).  */
      if (modifiedp
	  && cfun->gimple_df
	  && is_gimple_call (s)
	  && gimple_call_noreturn_p (s))
	VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
    }
}
/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  unsigned i;

  /* Debug statements never have side effects.  */
  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
	return true;
      else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
	/* An infinite loop is considered a side effect.  */
	return true;

      /* The asserts below check the invariant that any operand with
	 TREE_SIDE_EFFECTS should already have forced the statement's
	 volatile-ops flag on.  */
      if (gimple_call_lhs (s)
	  && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
	{
	  gcc_assert (gimple_has_volatile_ops (s));
	  return true;
	}

      if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
	return true;

      for (i = 0; i < nargs; i++)
	if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }

      return false;
    }
  else
    {
      for (i = 0; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }

  return false;
}
/* Return true if the RHS of statement S has side effects.
   We may use it to determine if it is admissible to replace
   an assignment or call with a copy of a previously-computed
   value.  In such cases, side-effects due to the LHS are
   preserved.  */

bool
gimple_rhs_has_side_effects (const_gimple s)
{
  unsigned i;

  if (is_gimple_call (s))
    {
      unsigned nargs = gimple_call_num_args (s);

      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
	return true;

      /* We cannot use gimple_has_volatile_ops here,
	 because we must ignore a volatile LHS.  */
      if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
	  || TREE_THIS_VOLATILE (gimple_call_fn (s)))
	{
	  gcc_assert (gimple_has_volatile_ops (s));
	  return true;
	}

      for (i = 0; i < nargs; i++)
	if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
	    || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
	  return true;

      return false;
    }
  else if (is_gimple_assign (s))
    {
      /* Skip the first operand, the LHS.  */
      for (i = 1; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
	    || TREE_THIS_VOLATILE (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }
  else if (is_gimple_debug (s))
    return false;
  else
    {
      /* For statements without an LHS, examine all arguments.  */
      for (i = 0; i < gimple_num_ops (s); i++)
	if (TREE_SIDE_EFFECTS (gimple_op (s, i))
	    || TREE_THIS_VOLATILE (gimple_op (s, i)))
	  {
	    gcc_assert (gimple_has_volatile_ops (s));
	    return true;
	  }
    }

  return false;
}
2366 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2367 Return true if S can trap. If INCLUDE_LHS is true and S is a
2368 GIMPLE_ASSIGN, the LHS of the assignment is also checked.
2369 Otherwise, only the RHS of the assignment is checked. */
2371 static bool
2372 gimple_could_trap_p_1 (gimple s, bool include_lhs)
2374 unsigned i, start;
2375 tree t, div = NULL_TREE;
2376 enum tree_code op;
2378 start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;
2380 for (i = start; i < gimple_num_ops (s); i++)
2381 if (tree_could_trap_p (gimple_op (s, i)))
2382 return true;
2384 switch (gimple_code (s))
2386 case GIMPLE_ASM:
2387 return gimple_asm_volatile_p (s);
2389 case GIMPLE_CALL:
2390 t = gimple_call_fndecl (s);
2391 /* Assume that calls to weak functions may trap. */
2392 if (!t || !DECL_P (t) || DECL_WEAK (t))
2393 return true;
2394 return false;
2396 case GIMPLE_ASSIGN:
2397 t = gimple_expr_type (s);
2398 op = gimple_assign_rhs_code (s);
2399 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2400 div = gimple_assign_rhs2 (s);
2401 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2402 (INTEGRAL_TYPE_P (t)
2403 && TYPE_OVERFLOW_TRAPS (t)),
2404 div));
2406 default:
2407 break;
2410 return false;
2415 /* Return true if statement S can trap. */
2417 bool
2418 gimple_could_trap_p (gimple s)
2420 return gimple_could_trap_p_1 (s, true);
2424 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2426 bool
2427 gimple_assign_rhs_could_trap_p (gimple s)
2429 gcc_assert (is_gimple_assign (s));
2430 return gimple_could_trap_p_1 (s, false);
/* Print debugging information for gimple stmts generated.  */

void
dump_gimple_statistics (void)
{
#ifdef GATHER_STATISTICS
  int kind;
  int total_tuples = 0;
  int total_bytes = 0;

  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind Stmts Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (kind = 0; kind < (int) gimple_alloc_kind_all; ++kind)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[kind],
	       gimple_alloc_counts[kind], gimple_alloc_sizes[kind]);
      total_tuples += gimple_alloc_counts[kind];
      total_bytes += gimple_alloc_sizes[kind];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
#else
  fprintf (stderr, "No gimple statistics\n");
#endif
}
2461 /* Return the number of operands needed on the RHS of a GIMPLE
2462 assignment for an expression with tree code CODE. */
2464 unsigned
2465 get_gimple_rhs_num_ops (enum tree_code code)
2467 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2469 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2470 return 1;
2471 else if (rhs_class == GIMPLE_BINARY_RHS)
2472 return 2;
2473 else
2474 gcc_unreachable ();
2477 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2478 (unsigned char) \
2479 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2480 : ((TYPE) == tcc_binary \
2481 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2482 : ((TYPE) == tcc_constant \
2483 || (TYPE) == tcc_declaration \
2484 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2485 : ((SYM) == TRUTH_AND_EXPR \
2486 || (SYM) == TRUTH_OR_EXPR \
2487 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2488 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2489 : ((SYM) == COND_EXPR \
2490 || (SYM) == CONSTRUCTOR \
2491 || (SYM) == OBJ_TYPE_REF \
2492 || (SYM) == ASSERT_EXPR \
2493 || (SYM) == ADDR_EXPR \
2494 || (SYM) == WITH_SIZE_EXPR \
2495 || (SYM) == EXC_PTR_EXPR \
2496 || (SYM) == SSA_NAME \
2497 || (SYM) == FILTER_EXPR \
2498 || (SYM) == POLYNOMIAL_CHREC \
2499 || (SYM) == DOT_PROD_EXPR \
2500 || (SYM) == VEC_COND_EXPR \
2501 || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS \
2502 : GIMPLE_INVALID_RHS),
2503 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2505 const unsigned char gimple_rhs_class_table[] = {
2506 #include "all-tree.def"
2509 #undef DEFTREECODE
2510 #undef END_OF_BASE_TREE_CODES
2512 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2514 /* Validation of GIMPLE expressions. */
2516 /* Return true if OP is an acceptable tree node to be used as a GIMPLE
2517 operand. */
2519 bool
2520 is_gimple_operand (const_tree op)
2522 return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
2525 /* Returns true iff T is a valid RHS for an assignment to a renamed
2526 user -- or front-end generated artificial -- variable. */
2528 bool
2529 is_gimple_reg_rhs (tree t)
2531 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2534 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2535 LHS, or for a call argument. */
2537 bool
2538 is_gimple_mem_rhs (tree t)
2540 /* If we're dealing with a renamable type, either source or dest must be
2541 a renamed variable. */
2542 if (is_gimple_reg_type (TREE_TYPE (t)))
2543 return is_gimple_val (t);
2544 else
2545 return is_gimple_val (t) || is_gimple_lvalue (t);
2548 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2550 bool
2551 is_gimple_lvalue (tree t)
2553 return (is_gimple_addressable (t)
2554 || TREE_CODE (t) == WITH_SIZE_EXPR
2555 /* These are complex lvalues, but don't have addresses, so they
2556 go here. */
2557 || TREE_CODE (t) == BIT_FIELD_REF);
2560 /* Return true if T is a GIMPLE condition. */
2562 bool
2563 is_gimple_condexpr (tree t)
2565 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2566 && !tree_could_trap_p (t)
2567 && is_gimple_val (TREE_OPERAND (t, 0))
2568 && is_gimple_val (TREE_OPERAND (t, 1))));
2571 /* Return true if T is something whose address can be taken. */
2573 bool
2574 is_gimple_addressable (tree t)
2576 return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
2579 /* Return true if T is a valid gimple constant. */
2581 bool
2582 is_gimple_constant (const_tree t)
2584 switch (TREE_CODE (t))
2586 case INTEGER_CST:
2587 case REAL_CST:
2588 case FIXED_CST:
2589 case STRING_CST:
2590 case COMPLEX_CST:
2591 case VECTOR_CST:
2592 return true;
2594 /* Vector constant constructors are gimple invariant. */
2595 case CONSTRUCTOR:
2596 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2597 return TREE_CONSTANT (t);
2598 else
2599 return false;
2601 default:
2602 return false;
2606 /* Return true if T is a gimple address. */
2608 bool
2609 is_gimple_address (const_tree t)
2611 tree op;
2613 if (TREE_CODE (t) != ADDR_EXPR)
2614 return false;
2616 op = TREE_OPERAND (t, 0);
2617 while (handled_component_p (op))
2619 if ((TREE_CODE (op) == ARRAY_REF
2620 || TREE_CODE (op) == ARRAY_RANGE_REF)
2621 && !is_gimple_val (TREE_OPERAND (op, 1)))
2622 return false;
2624 op = TREE_OPERAND (op, 0);
2627 if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
2628 return true;
2630 switch (TREE_CODE (op))
2632 case PARM_DECL:
2633 case RESULT_DECL:
2634 case LABEL_DECL:
2635 case FUNCTION_DECL:
2636 case VAR_DECL:
2637 case CONST_DECL:
2638 return true;
2640 default:
2641 return false;
2645 /* Strip out all handled components that produce invariant
2646 offsets. */
2648 static const_tree
2649 strip_invariant_refs (const_tree op)
2651 while (handled_component_p (op))
2653 switch (TREE_CODE (op))
2655 case ARRAY_REF:
2656 case ARRAY_RANGE_REF:
2657 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2658 || TREE_OPERAND (op, 2) != NULL_TREE
2659 || TREE_OPERAND (op, 3) != NULL_TREE)
2660 return NULL;
2661 break;
2663 case COMPONENT_REF:
2664 if (TREE_OPERAND (op, 2) != NULL_TREE)
2665 return NULL;
2666 break;
2668 default:;
2670 op = TREE_OPERAND (op, 0);
2673 return op;
2676 /* Return true if T is a gimple invariant address. */
2678 bool
2679 is_gimple_invariant_address (const_tree t)
2681 const_tree op;
2683 if (TREE_CODE (t) != ADDR_EXPR)
2684 return false;
2686 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2688 return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
2691 /* Return true if T is a gimple invariant address at IPA level
2692 (so addresses of variables on stack are not allowed). */
2694 bool
2695 is_gimple_ip_invariant_address (const_tree t)
2697 const_tree op;
2699 if (TREE_CODE (t) != ADDR_EXPR)
2700 return false;
2702 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2704 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2707 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2708 form of function invariant. */
2710 bool
2711 is_gimple_min_invariant (const_tree t)
2713 if (TREE_CODE (t) == ADDR_EXPR)
2714 return is_gimple_invariant_address (t);
2716 return is_gimple_constant (t);
2719 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2720 form of gimple minimal invariant. */
2722 bool
2723 is_gimple_ip_invariant (const_tree t)
2725 if (TREE_CODE (t) == ADDR_EXPR)
2726 return is_gimple_ip_invariant_address (t);
2728 return is_gimple_constant (t);
2731 /* Return true if T looks like a valid GIMPLE statement. */
2733 bool
2734 is_gimple_stmt (tree t)
2736 const enum tree_code code = TREE_CODE (t);
2738 switch (code)
2740 case NOP_EXPR:
2741 /* The only valid NOP_EXPR is the empty statement. */
2742 return IS_EMPTY_STMT (t);
2744 case BIND_EXPR:
2745 case COND_EXPR:
2746 /* These are only valid if they're void. */
2747 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2749 case SWITCH_EXPR:
2750 case GOTO_EXPR:
2751 case RETURN_EXPR:
2752 case LABEL_EXPR:
2753 case CASE_LABEL_EXPR:
2754 case TRY_CATCH_EXPR:
2755 case TRY_FINALLY_EXPR:
2756 case EH_FILTER_EXPR:
2757 case CATCH_EXPR:
2758 case ASM_EXPR:
2759 case RESX_EXPR:
2760 case STATEMENT_LIST:
2761 case OMP_PARALLEL:
2762 case OMP_FOR:
2763 case OMP_SECTIONS:
2764 case OMP_SECTION:
2765 case OMP_SINGLE:
2766 case OMP_MASTER:
2767 case OMP_ORDERED:
2768 case OMP_CRITICAL:
2769 case OMP_TASK:
2770 /* These are always void. */
2771 return true;
2773 case CALL_EXPR:
2774 case MODIFY_EXPR:
2775 case PREDICT_EXPR:
2776 /* These are valid regardless of their type. */
2777 return true;
2779 default:
2780 return false;
2784 /* Return true if T is a variable. */
2786 bool
2787 is_gimple_variable (tree t)
2789 return (TREE_CODE (t) == VAR_DECL
2790 || TREE_CODE (t) == PARM_DECL
2791 || TREE_CODE (t) == RESULT_DECL
2792 || TREE_CODE (t) == SSA_NAME);
2795 /* Return true if T is a GIMPLE identifier (something with an address). */
2797 bool
2798 is_gimple_id (tree t)
2800 return (is_gimple_variable (t)
2801 || TREE_CODE (t) == FUNCTION_DECL
2802 || TREE_CODE (t) == LABEL_DECL
2803 || TREE_CODE (t) == CONST_DECL
2804 /* Allow string constants, since they are addressable. */
2805 || TREE_CODE (t) == STRING_CST);
2808 /* Return true if TYPE is a suitable type for a scalar register variable. */
2810 bool
2811 is_gimple_reg_type (tree type)
2813 return !AGGREGATE_TYPE_P (type);
2816 /* Return true if T is a non-aggregate register variable. */
2818 bool
2819 is_gimple_reg (tree t)
2821 if (TREE_CODE (t) == SSA_NAME)
2822 t = SSA_NAME_VAR (t);
2824 if (!is_gimple_variable (t))
2825 return false;
2827 if (!is_gimple_reg_type (TREE_TYPE (t)))
2828 return false;
2830 /* A volatile decl is not acceptable because we can't reuse it as
2831 needed. We need to copy it into a temp first. */
2832 if (TREE_THIS_VOLATILE (t))
2833 return false;
2835 /* We define "registers" as things that can be renamed as needed,
2836 which with our infrastructure does not apply to memory. */
2837 if (needs_to_live_in_memory (t))
2838 return false;
2840 /* Hard register variables are an interesting case. For those that
2841 are call-clobbered, we don't know where all the calls are, since
2842 we don't (want to) take into account which operations will turn
2843 into libcalls at the rtl level. For those that are call-saved,
2844 we don't currently model the fact that calls may in fact change
2845 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2846 level, and so miss variable changes that might imply. All around,
2847 it seems safest to not do too much optimization with these at the
2848 tree level at all. We'll have to rely on the rtl optimizers to
2849 clean this up, as there we've got all the appropriate bits exposed. */
2850 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2851 return false;
2853 /* Complex and vector values must have been put into SSA-like form.
2854 That is, no assignments to the individual components. */
2855 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2856 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2857 return DECL_GIMPLE_REG_P (t);
2859 return true;
2863 /* Return true if T is a GIMPLE variable whose address is not needed. */
2865 bool
2866 is_gimple_non_addressable (tree t)
2868 if (TREE_CODE (t) == SSA_NAME)
2869 t = SSA_NAME_VAR (t);
2871 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2874 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2876 bool
2877 is_gimple_val (tree t)
2879 /* Make loads from volatiles and memory vars explicit. */
2880 if (is_gimple_variable (t)
2881 && is_gimple_reg_type (TREE_TYPE (t))
2882 && !is_gimple_reg (t))
2883 return false;
2885 /* FIXME make these decls. That can happen only when we expose the
2886 entire landing-pad construct at the tree level. */
2887 if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
2888 return true;
2890 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2893 /* Similarly, but accept hard registers as inputs to asm statements. */
2895 bool
2896 is_gimple_asm_val (tree t)
2898 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2899 return true;
2901 return is_gimple_val (t);
2904 /* Return true if T is a GIMPLE minimal lvalue. */
2906 bool
2907 is_gimple_min_lval (tree t)
2909 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2910 return false;
2911 return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
2914 /* Return true if T is a typecast operation. */
2916 bool
2917 is_gimple_cast (tree t)
2919 return (CONVERT_EXPR_P (t)
2920 || TREE_CODE (t) == FIX_TRUNC_EXPR);
2923 /* Return true if T is a valid function operand of a CALL_EXPR. */
2925 bool
2926 is_gimple_call_addr (tree t)
2928 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2931 /* If T makes a function call, return the corresponding CALL_EXPR operand.
2932 Otherwise, return NULL_TREE. */
2934 tree
2935 get_call_expr_in (tree t)
2937 if (TREE_CODE (t) == MODIFY_EXPR)
2938 t = TREE_OPERAND (t, 1);
2939 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2940 t = TREE_OPERAND (t, 0);
2941 if (TREE_CODE (t) == CALL_EXPR)
2942 return t;
2943 return NULL_TREE;
2947 /* Given a memory reference expression T, return its base address.
2948 The base address of a memory reference expression is the main
2949 object being referenced. For instance, the base address for
2950 'array[i].fld[j]' is 'array'. You can think of this as stripping
2951 away the offset part from a memory address.
2953 This function calls handled_component_p to strip away all the inner
2954 parts of the memory reference until it reaches the base object. */
2956 tree
2957 get_base_address (tree t)
2959 while (handled_component_p (t))
2960 t = TREE_OPERAND (t, 0);
2962 if (SSA_VAR_P (t)
2963 || TREE_CODE (t) == STRING_CST
2964 || TREE_CODE (t) == CONSTRUCTOR
2965 || INDIRECT_REF_P (t))
2966 return t;
2967 else
2968 return NULL_TREE;
2971 void
2972 recalculate_side_effects (tree t)
2974 enum tree_code code = TREE_CODE (t);
2975 int len = TREE_OPERAND_LENGTH (t);
2976 int i;
2978 switch (TREE_CODE_CLASS (code))
2980 case tcc_expression:
2981 switch (code)
2983 case INIT_EXPR:
2984 case MODIFY_EXPR:
2985 case VA_ARG_EXPR:
2986 case PREDECREMENT_EXPR:
2987 case PREINCREMENT_EXPR:
2988 case POSTDECREMENT_EXPR:
2989 case POSTINCREMENT_EXPR:
2990 /* All of these have side-effects, no matter what their
2991 operands are. */
2992 return;
2994 default:
2995 break;
2997 /* Fall through. */
2999 case tcc_comparison: /* a comparison expression */
3000 case tcc_unary: /* a unary arithmetic expression */
3001 case tcc_binary: /* a binary arithmetic expression */
3002 case tcc_reference: /* a reference */
3003 case tcc_vl_exp: /* a function call */
3004 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3005 for (i = 0; i < len; ++i)
3007 tree op = TREE_OPERAND (t, i);
3008 if (op && TREE_SIDE_EFFECTS (op))
3009 TREE_SIDE_EFFECTS (t) = 1;
3011 break;
3013 case tcc_constant:
3014 /* No side-effects. */
3015 return;
3017 default:
3018 gcc_unreachable ();
3022 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3023 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
3024 we failed to create one. */
3026 tree
3027 canonicalize_cond_expr_cond (tree t)
3029 /* For (bool)x use x != 0. */
3030 if (TREE_CODE (t) == NOP_EXPR
3031 && TREE_TYPE (t) == boolean_type_node)
3033 tree top0 = TREE_OPERAND (t, 0);
3034 t = build2 (NE_EXPR, TREE_TYPE (t),
3035 top0, build_int_cst (TREE_TYPE (top0), 0));
3037 /* For !x use x == 0. */
3038 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3040 tree top0 = TREE_OPERAND (t, 0);
3041 t = build2 (EQ_EXPR, TREE_TYPE (t),
3042 top0, build_int_cst (TREE_TYPE (top0), 0));
3044 /* For cmp ? 1 : 0 use cmp. */
3045 else if (TREE_CODE (t) == COND_EXPR
3046 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3047 && integer_onep (TREE_OPERAND (t, 1))
3048 && integer_zerop (TREE_OPERAND (t, 2)))
3050 tree top0 = TREE_OPERAND (t, 0);
3051 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3052 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3055 if (is_gimple_condexpr (t))
3056 return t;
3058 return NULL_TREE;
3061 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3062 the positions marked by the set ARGS_TO_SKIP. */
3064 gimple
3065 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3067 int i;
3068 tree fn = gimple_call_fn (stmt);
3069 int nargs = gimple_call_num_args (stmt);
3070 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3071 gimple new_stmt;
3073 for (i = 0; i < nargs; i++)
3074 if (!bitmap_bit_p (args_to_skip, i))
3075 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3077 new_stmt = gimple_build_call_vec (fn, vargs);
3078 VEC_free (tree, heap, vargs);
3079 if (gimple_call_lhs (stmt))
3080 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3082 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3083 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3085 gimple_set_block (new_stmt, gimple_block (stmt));
3086 if (gimple_has_location (stmt))
3087 gimple_set_location (new_stmt, gimple_location (stmt));
3089 /* Carry all the flags to the new GIMPLE_CALL. */
3090 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3091 gimple_call_set_tail (new_stmt, gimple_call_tail_p (stmt));
3092 gimple_call_set_cannot_inline (new_stmt, gimple_call_cannot_inline_p (stmt));
3093 gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
3094 gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
3095 gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
3097 gimple_set_modified (new_stmt, true);
3099 return new_stmt;
3103 /* Data structure used to count the number of dereferences to PTR
3104 inside an expression. */
3105 struct count_ptr_d
3107 tree ptr;
3108 unsigned num_stores;
3109 unsigned num_loads;
3112 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
3113 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
3115 static tree
3116 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
3118 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
3119 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
3121 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
3122 pointer 'ptr' is *not* dereferenced, it is simply used to compute
3123 the address of 'fld' as 'ptr + offsetof(fld)'. */
3124 if (TREE_CODE (*tp) == ADDR_EXPR)
3126 *walk_subtrees = 0;
3127 return NULL_TREE;
3130 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
3132 if (wi_p->is_lhs)
3133 count_p->num_stores++;
3134 else
3135 count_p->num_loads++;
3138 return NULL_TREE;
3141 /* Count the number of direct and indirect uses for pointer PTR in
3142 statement STMT. The number of direct uses is stored in
3143 *NUM_USES_P. Indirect references are counted separately depending
3144 on whether they are store or load operations. The counts are
3145 stored in *NUM_STORES_P and *NUM_LOADS_P. */
3147 void
3148 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
3149 unsigned *num_loads_p, unsigned *num_stores_p)
3151 ssa_op_iter i;
3152 tree use;
3154 *num_uses_p = 0;
3155 *num_loads_p = 0;
3156 *num_stores_p = 0;
3158 /* Find out the total number of uses of PTR in STMT. */
3159 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
3160 if (use == ptr)
3161 (*num_uses_p)++;
3163 /* Now count the number of indirect references to PTR. This is
3164 truly awful, but we don't have much choice. There are no parent
3165 pointers inside INDIRECT_REFs, so an expression like
3166 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
3167 find all the indirect and direct uses of x_1 inside. The only
3168 shortcut we can take is the fact that GIMPLE only allows
3169 INDIRECT_REFs inside the expressions below. */
3170 if (is_gimple_assign (stmt)
3171 || gimple_code (stmt) == GIMPLE_RETURN
3172 || gimple_code (stmt) == GIMPLE_ASM
3173 || is_gimple_call (stmt))
3175 struct walk_stmt_info wi;
3176 struct count_ptr_d count;
3178 count.ptr = ptr;
3179 count.num_stores = 0;
3180 count.num_loads = 0;
3182 memset (&wi, 0, sizeof (wi));
3183 wi.info = &count;
3184 walk_gimple_op (stmt, count_ptr_derefs, &wi);
3186 *num_stores_p = count.num_stores;
3187 *num_loads_p = count.num_loads;
3190 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
3193 /* From a tree operand OP return the base of a load or store operation
3194 or NULL_TREE if OP is not a load or a store. */
3196 static tree
3197 get_base_loadstore (tree op)
3199 while (handled_component_p (op))
3200 op = TREE_OPERAND (op, 0);
3201 if (DECL_P (op)
3202 || INDIRECT_REF_P (op)
3203 || TREE_CODE (op) == TARGET_MEM_REF)
3204 return op;
3205 return NULL_TREE;
3208 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3209 VISIT_ADDR if non-NULL on loads, store and address-taken operands
3210 passing the STMT, the base of the operand and DATA to it. The base
3211 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3212 or the argument of an address expression.
3213 Returns the results of these callbacks or'ed. */
3215 bool
3216 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3217 bool (*visit_load)(gimple, tree, void *),
3218 bool (*visit_store)(gimple, tree, void *),
3219 bool (*visit_addr)(gimple, tree, void *))
3221 bool ret = false;
3222 unsigned i;
3223 if (gimple_assign_single_p (stmt))
3225 tree lhs, rhs;
3226 if (visit_store)
3228 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3229 if (lhs)
3230 ret |= visit_store (stmt, lhs, data);
3232 rhs = gimple_assign_rhs1 (stmt);
3233 while (handled_component_p (rhs))
3234 rhs = TREE_OPERAND (rhs, 0);
3235 if (visit_addr)
3237 if (TREE_CODE (rhs) == ADDR_EXPR)
3238 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3239 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3240 && TMR_BASE (rhs) != NULL_TREE
3241 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3242 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3243 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3244 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3245 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3246 0), data);
3247 lhs = gimple_assign_lhs (stmt);
3248 if (TREE_CODE (lhs) == TARGET_MEM_REF
3249 && TMR_BASE (lhs) != NULL_TREE
3250 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3251 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3253 if (visit_load)
3255 rhs = get_base_loadstore (rhs);
3256 if (rhs)
3257 ret |= visit_load (stmt, rhs, data);
3260 else if (visit_addr
3261 && (is_gimple_assign (stmt)
3262 || gimple_code (stmt) == GIMPLE_COND))
3264 for (i = 0; i < gimple_num_ops (stmt); ++i)
3265 if (gimple_op (stmt, i)
3266 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
3267 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
3269 else if (is_gimple_call (stmt))
3271 if (visit_store)
3273 tree lhs = gimple_call_lhs (stmt);
3274 if (lhs)
3276 lhs = get_base_loadstore (lhs);
3277 if (lhs)
3278 ret |= visit_store (stmt, lhs, data);
3281 if (visit_load || visit_addr)
3282 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3284 tree rhs = gimple_call_arg (stmt, i);
3285 if (visit_addr
3286 && TREE_CODE (rhs) == ADDR_EXPR)
3287 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3288 else if (visit_load)
3290 rhs = get_base_loadstore (rhs);
3291 if (rhs)
3292 ret |= visit_load (stmt, rhs, data);
3295 if (visit_addr
3296 && gimple_call_chain (stmt)
3297 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3298 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3299 data);
3300 if (visit_addr
3301 && gimple_call_return_slot_opt_p (stmt)
3302 && gimple_call_lhs (stmt) != NULL_TREE
3303 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3304 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
3306 else if (gimple_code (stmt) == GIMPLE_ASM)
3308 unsigned noutputs;
3309 const char *constraint;
3310 const char **oconstraints;
3311 bool allows_mem, allows_reg, is_inout;
3312 noutputs = gimple_asm_noutputs (stmt);
3313 oconstraints = XALLOCAVEC (const char *, noutputs);
3314 if (visit_store || visit_addr)
3315 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3317 tree link = gimple_asm_output_op (stmt, i);
3318 tree op = get_base_loadstore (TREE_VALUE (link));
3319 if (op && visit_store)
3320 ret |= visit_store (stmt, op, data);
3321 if (visit_addr)
3323 constraint = TREE_STRING_POINTER
3324 (TREE_VALUE (TREE_PURPOSE (link)));
3325 oconstraints[i] = constraint;
3326 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3327 &allows_reg, &is_inout);
3328 if (op && !allows_reg && allows_mem)
3329 ret |= visit_addr (stmt, op, data);
3332 if (visit_load || visit_addr)
3333 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3335 tree link = gimple_asm_input_op (stmt, i);
3336 tree op = TREE_VALUE (link);
3337 if (visit_addr
3338 && TREE_CODE (op) == ADDR_EXPR)
3339 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3340 else if (visit_load || visit_addr)
3342 op = get_base_loadstore (op);
3343 if (op)
3345 if (visit_load)
3346 ret |= visit_load (stmt, op, data);
3347 if (visit_addr)
3349 constraint = TREE_STRING_POINTER
3350 (TREE_VALUE (TREE_PURPOSE (link)));
3351 parse_input_constraint (&constraint, 0, 0, noutputs,
3352 0, oconstraints,
3353 &allows_mem, &allows_reg);
3354 if (!allows_reg && allows_mem)
3355 ret |= visit_addr (stmt, op, data);
3361 else if (gimple_code (stmt) == GIMPLE_RETURN)
3363 tree op = gimple_return_retval (stmt);
3364 if (op)
3366 if (visit_addr
3367 && TREE_CODE (op) == ADDR_EXPR)
3368 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3369 else if (visit_load)
3371 op = get_base_loadstore (op);
3372 if (op)
3373 ret |= visit_load (stmt, op, data);
3377 else if (visit_addr
3378 && gimple_code (stmt) == GIMPLE_PHI)
3380 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
3382 tree op = PHI_ARG_DEF (stmt, i);
3383 if (TREE_CODE (op) == ADDR_EXPR)
3384 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3388 return ret;
3391 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
3392 should make a faster clone for this case. */
3394 bool
3395 walk_stmt_load_store_ops (gimple stmt, void *data,
3396 bool (*visit_load)(gimple, tree, void *),
3397 bool (*visit_store)(gimple, tree, void *))
3399 return walk_stmt_load_store_addr_ops (stmt, data,
3400 visit_load, visit_store, NULL);
3403 /* Helper for gimple_ior_addresses_taken_1. */
3405 static bool
3406 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
3407 tree addr, void *data)
3409 bitmap addresses_taken = (bitmap)data;
3410 while (handled_component_p (addr))
3411 addr = TREE_OPERAND (addr, 0);
3412 if (DECL_P (addr))
3414 bitmap_set_bit (addresses_taken, DECL_UID (addr));
3415 return true;
3417 return false;
3420 /* Set the bit for the uid of all decls that have their address taken
3421 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
3422 were any in this stmt. */
3424 bool
3425 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
3427 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
3428 gimple_ior_addresses_taken_1);
3432 /* Return a printable name for symbol DECL. */
3434 const char *
3435 gimple_decl_printable_name (tree decl, int verbosity)
3437 gcc_assert (decl && DECL_NAME (decl));
3439 if (DECL_ASSEMBLER_NAME_SET_P (decl))
3441 const char *str, *mangled_str;
3442 int dmgl_opts = DMGL_NO_OPTS;
3444 if (verbosity >= 2)
3446 dmgl_opts = DMGL_VERBOSE
3447 | DMGL_TYPES
3448 | DMGL_ANSI
3449 | DMGL_GNU_V3
3450 | DMGL_RET_POSTFIX;
3451 if (TREE_CODE (decl) == FUNCTION_DECL)
3452 dmgl_opts |= DMGL_PARAMS;
3455 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3456 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
3457 return (str) ? str : mangled_str;
3460 return IDENTIFIER_POINTER (DECL_NAME (decl));
3464 /* Fold a OBJ_TYPE_REF expression to the address of a function.
3465 KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF). Adapted
3466 from cp_fold_obj_type_ref, but it tolerates types with no binfo
3467 data. */
3469 tree
3470 gimple_fold_obj_type_ref (tree ref, tree known_type)
3472 HOST_WIDE_INT index;
3473 HOST_WIDE_INT i;
3474 tree v;
3475 tree fndecl;
3477 if (TYPE_BINFO (known_type) == NULL_TREE)
3478 return NULL_TREE;
3480 v = BINFO_VIRTUALS (TYPE_BINFO (known_type));
3481 index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
3482 i = 0;
3483 while (i != index)
3485 i += (TARGET_VTABLE_USES_DESCRIPTORS
3486 ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
3487 v = TREE_CHAIN (v);
3490 fndecl = TREE_VALUE (v);
3492 #ifdef ENABLE_CHECKING
3493 gcc_assert (tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref),
3494 DECL_VINDEX (fndecl)));
3495 #endif
3497 cgraph_node (fndecl)->local.vtable_method = true;
3499 return build_fold_addr_expr (fndecl);
3502 #include "gt-gimple.h"