1 /* Gimple IR support functions.
3 Copyright (C) 2007-2013 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "ggc.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "gimple.h"
32 #include "gimple-iterator.h"
33 #include "gimple-walk.h"
35 #include "gimplify.h"
36 #include "diagnostic.h"
37 #include "value-prof.h"
38 #include "flags.h"
39 #include "alias.h"
40 #include "demangle.h"
41 #include "langhooks.h"
42 #include "bitmap.h"
45 /* All the tuples have their operand vector (if present) at the very bottom
46 of the structure. Therefore, the offset required to find the
47 operands vector is the size of the structure minus the size of the 1
48 element tree array at the end (see gimple_ops). */
49 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
50 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
51 EXPORTED_CONST size_t gimple_ops_offset_[] = {
52 #include "gsstruct.def"
54 #undef DEFGSSTRUCT
56 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
57 static const size_t gsstruct_code_size[] = {
58 #include "gsstruct.def"
60 #undef DEFGSSTRUCT
62 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
63 const char *const gimple_code_name[] = {
64 #include "gimple.def"
66 #undef DEFGSCODE
68 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
69 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
70 #include "gimple.def"
72 #undef DEFGSCODE
74 /* Gimple stats. */
76 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
77 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
79 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
80 static const char * const gimple_alloc_kind_names[] = {
81 "assignments",
82 "phi nodes",
83 "conditionals",
84 "everything else"
87 /* Gimple tuple constructors.
88 Note: Any constructor taking a ``gimple_seq'' as a parameter can
89 be passed NULL to start with an empty sequence. */
91 /* Set the code for statement G to CODE. */
93 static inline void
94 gimple_set_code (gimple g, enum gimple_code code)
96 g->gsbase.code = code;
99 /* Return the number of bytes needed to hold a GIMPLE statement with
100 code CODE. */
102 static inline size_t
103 gimple_size (enum gimple_code code)
105 return gsstruct_code_size[gss_for_code (code)];
108 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
109 operands. */
111 gimple
112 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
114 size_t size;
115 gimple stmt;
117 size = gimple_size (code);
118 if (num_ops > 0)
119 size += sizeof (tree) * (num_ops - 1);
121 if (GATHER_STATISTICS)
123 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
124 gimple_alloc_counts[(int) kind]++;
125 gimple_alloc_sizes[(int) kind] += size;
128 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
129 gimple_set_code (stmt, code);
130 gimple_set_num_ops (stmt, num_ops);
132 /* Do not call gimple_set_modified here as it has other side
133 effects and this tuple is still not completely built. */
134 stmt->gsbase.modified = 1;
135 gimple_init_singleton (stmt);
137 return stmt;
140 /* Set SUBCODE to be the code of the expression computed by statement G. */
142 static inline void
143 gimple_set_subcode (gimple g, unsigned subcode)
145 /* We only have 16 bits for the RHS code. Assert that we are not
146 overflowing it. */
147 gcc_assert (subcode < (1 << 16));
148 g->gsbase.subcode = subcode;
153 /* Build a tuple with operands. CODE is the statement to build (which
154 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the subcode
155 for the new tuple. NUM_OPS is the number of operands to allocate. */
157 #define gimple_build_with_ops(c, s, n) \
158 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
160 static gimple
161 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
162 unsigned num_ops MEM_STAT_DECL)
164 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
165 gimple_set_subcode (s, subcode);
167 return s;
171 /* Build a GIMPLE_RETURN statement returning RETVAL. */
173 gimple
174 gimple_build_return (tree retval)
176 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 2);
177 if (retval)
178 gimple_return_set_retval (s, retval);
179 return s;
182 /* Reset alias information on call S. */
184 void
185 gimple_call_reset_alias_info (gimple s)
187 if (gimple_call_flags (s) & ECF_CONST)
188 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
189 else
190 pt_solution_reset (gimple_call_use_set (s));
191 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
192 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
193 else
194 pt_solution_reset (gimple_call_clobber_set (s));
197 /* Helper for gimple_build_call, gimple_build_call_valist,
198 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
199 components of a GIMPLE_CALL statement to function FN with NARGS
200 arguments. */
202 static inline gimple
203 gimple_build_call_1 (tree fn, unsigned nargs)
205 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
206 if (TREE_CODE (fn) == FUNCTION_DECL)
207 fn = build_fold_addr_expr (fn);
208 gimple_set_op (s, 1, fn);
209 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
210 gimple_call_reset_alias_info (s);
211 return s;
215 /* Build a GIMPLE_CALL statement to function FN with the arguments
216 specified in vector ARGS. */
218 gimple
219 gimple_build_call_vec (tree fn, vec<tree> args)
221 unsigned i;
222 unsigned nargs = args.length ();
223 gimple call = gimple_build_call_1 (fn, nargs);
225 for (i = 0; i < nargs; i++)
226 gimple_call_set_arg (call, i, args[i]);
228 return call;
232 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
233 arguments. The ... are the arguments. */
235 gimple
236 gimple_build_call (tree fn, unsigned nargs, ...)
238 va_list ap;
239 gimple call;
240 unsigned i;
242 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
244 call = gimple_build_call_1 (fn, nargs);
246 va_start (ap, nargs);
247 for (i = 0; i < nargs; i++)
248 gimple_call_set_arg (call, i, va_arg (ap, tree));
249 va_end (ap);
251 return call;
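/* Illustrative usage sketch, not part of the original file: emitting
   "LHS = FN (ARG0, ARG1)" with the constructor above.  FN, ARG0, ARG1 and
   LHS are assumed to be valid trees supplied by the caller; SEQ is the
   sequence being built.  */

static void ATTRIBUTE_UNUSED
example_emit_call (gimple_seq *seq, tree fn, tree arg0, tree arg1, tree lhs)
{
  gimple call = gimple_build_call (fn, 2, arg0, arg1);
  if (lhs)
    gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (seq, call);
}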
255 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
256 arguments. AP contains the arguments. */
258 gimple
259 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
261 gimple call;
262 unsigned i;
264 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
266 call = gimple_build_call_1 (fn, nargs);
268 for (i = 0; i < nargs; i++)
269 gimple_call_set_arg (call, i, va_arg (ap, tree));
271 return call;
275 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
276 Build the basic components of a GIMPLE_CALL statement to internal
277 function FN with NARGS arguments. */
279 static inline gimple
280 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
282 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
283 s->gsbase.subcode |= GF_CALL_INTERNAL;
284 gimple_call_set_internal_fn (s, fn);
285 gimple_call_reset_alias_info (s);
286 return s;
290 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
291 the number of arguments. The ... are the arguments. */
293 gimple
294 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
296 va_list ap;
297 gimple call;
298 unsigned i;
300 call = gimple_build_call_internal_1 (fn, nargs);
301 va_start (ap, nargs);
302 for (i = 0; i < nargs; i++)
303 gimple_call_set_arg (call, i, va_arg (ap, tree));
304 va_end (ap);
306 return call;
310 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
311 specified in vector ARGS. */
313 gimple
314 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
316 unsigned i, nargs;
317 gimple call;
319 nargs = args.length ();
320 call = gimple_build_call_internal_1 (fn, nargs);
321 for (i = 0; i < nargs; i++)
322 gimple_call_set_arg (call, i, args[i]);
324 return call;
328 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
329 assumed to be in GIMPLE form already. Minimal checking is done of
330 this fact. */
332 gimple
333 gimple_build_call_from_tree (tree t)
335 unsigned i, nargs;
336 gimple call;
337 tree fndecl = get_callee_fndecl (t);
339 gcc_assert (TREE_CODE (t) == CALL_EXPR);
341 nargs = call_expr_nargs (t);
342 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
344 for (i = 0; i < nargs; i++)
345 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
347 gimple_set_block (call, TREE_BLOCK (t));
349 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
350 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
351 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
352 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
353 if (fndecl
354 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
355 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
356 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
357 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
358 else
359 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
360 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
361 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
362 gimple_set_no_warning (call, TREE_NO_WARNING (t));
364 return call;
368 /* Build a GIMPLE_ASSIGN statement.
370 LHS of the assignment.
371 RHS of the assignment which can be unary or binary. */
373 gimple
374 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
376 enum tree_code subcode;
377 tree op1, op2, op3;
379 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
380 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
381 PASS_MEM_STAT);
385 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
386 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
387 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
389 gimple
390 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
391 tree op2, tree op3 MEM_STAT_DECL)
393 unsigned num_ops;
394 gimple p;
396 /* Need 1 operand for the LHS and 1 to 3 for the RHS (depending on the
397 code). */
398 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
400 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
401 PASS_MEM_STAT);
402 gimple_assign_set_lhs (p, lhs);
403 gimple_assign_set_rhs1 (p, op1);
404 if (op2)
406 gcc_assert (num_ops > 2);
407 gimple_assign_set_rhs2 (p, op2);
410 if (op3)
412 gcc_assert (num_ops > 3);
413 gimple_assign_set_rhs3 (p, op3);
416 return p;
419 gimple
420 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
421 tree op2 MEM_STAT_DECL)
423 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
424 PASS_MEM_STAT);
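/* Illustrative usage sketch, not part of the original file: building
   "LHS = A + B" with the binary overload above, relying on the default
   memory-statistics arguments supplied by gimple.h.  LHS, A and B are
   assumed to be valid GIMPLE operands provided by the caller.  */

static gimple ATTRIBUTE_UNUSED
example_build_plus (tree lhs, tree a, tree b)
{
  return gimple_build_assign_with_ops (PLUS_EXPR, lhs, a, b);
}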
428 /* Build a GIMPLE_COND statement.
430 PRED_CODE is the comparison code used to compare LHS and RHS.
431 T_LABEL is the label to jump to if the condition is true.
432 F_LABEL is the label to jump to otherwise. */
434 gimple
435 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
436 tree t_label, tree f_label)
438 gimple p;
440 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
441 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
442 gimple_cond_set_lhs (p, lhs);
443 gimple_cond_set_rhs (p, rhs);
444 gimple_cond_set_true_label (p, t_label);
445 gimple_cond_set_false_label (p, f_label);
446 return p;
449 /* Build a GIMPLE_COND statement from the conditional expression tree
450 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
452 gimple
453 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
455 enum tree_code code;
456 tree lhs, rhs;
458 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
459 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
462 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
463 boolean expression tree COND. */
465 void
466 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
468 enum tree_code code;
469 tree lhs, rhs;
471 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
472 gimple_cond_set_condition (stmt, code, lhs, rhs);
475 /* Build a GIMPLE_LABEL statement for LABEL. */
477 gimple
478 gimple_build_label (tree label)
480 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
481 gimple_label_set_label (p, label);
482 return p;
485 /* Build a GIMPLE_GOTO statement to label DEST. */
487 gimple
488 gimple_build_goto (tree dest)
490 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
491 gimple_goto_set_dest (p, dest);
492 return p;
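/* Illustrative usage sketch, not part of the original file: lowering
   "if (A < B) THEN_SEQ" into explicit GIMPLE_COND and GIMPLE_LABEL
   statements.  A and B are GIMPLE values, T_LABEL and F_LABEL are
   LABEL_DECLs, and THEN_SEQ is an already gimplified sequence; all are
   assumed to come from the caller.  */

static void ATTRIBUTE_UNUSED
example_lower_if_then (gimple_seq *seq, tree a, tree b,
                       tree t_label, tree f_label, gimple_seq then_seq)
{
  gimple_seq_add_stmt (seq, gimple_build_cond (LT_EXPR, a, b,
                                               t_label, f_label));
  gimple_seq_add_stmt (seq, gimple_build_label (t_label));
  gimple_seq_add_seq (seq, then_seq);
  gimple_seq_add_stmt (seq, gimple_build_label (f_label));
}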
496 /* Build a GIMPLE_NOP statement. */
498 gimple
499 gimple_build_nop (void)
501 return gimple_alloc (GIMPLE_NOP, 0);
505 /* Build a GIMPLE_BIND statement.
506 VARS are the variables in BODY.
507 BLOCK is the containing block. */
509 gimple
510 gimple_build_bind (tree vars, gimple_seq body, tree block)
512 gimple p = gimple_alloc (GIMPLE_BIND, 0);
513 gimple_bind_set_vars (p, vars);
514 if (body)
515 gimple_bind_set_body (p, body);
516 if (block)
517 gimple_bind_set_block (p, block);
518 return p;
521 /* Helper function to set the simple fields of an asm stmt.
523 STRING is a pointer to a string that is the asm block's assembly code.
524 NINPUTS is the number of register inputs.
525 NOUTPUTS is the number of register outputs.
526 NCLOBBERS is the number of clobbered registers.
527 NLABELS is the number of labels. */
529 static inline gimple
530 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
531 unsigned nclobbers, unsigned nlabels)
533 gimple p;
534 int size = strlen (string);
536 /* ASMs with labels cannot have outputs. This should have been
537 enforced by the front end. */
538 gcc_assert (nlabels == 0 || noutputs == 0);
540 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
541 ninputs + noutputs + nclobbers + nlabels);
543 p->gimple_asm.ni = ninputs;
544 p->gimple_asm.no = noutputs;
545 p->gimple_asm.nc = nclobbers;
546 p->gimple_asm.nl = nlabels;
547 p->gimple_asm.string = ggc_alloc_string (string, size);
549 if (GATHER_STATISTICS)
550 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
552 return p;
555 /* Build a GIMPLE_ASM statement.
557 STRING is the assembly code.
558 The operand counts are taken from the lengths of the vectors below.
561 INPUTS is a vector of the input register parameters.
562 OUTPUTS is a vector of the output register parameters.
563 CLOBBERS is a vector of the clobbered register parameters.
564 LABELS is a vector of destination labels. */
566 gimple
567 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
568 vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
569 vec<tree, va_gc> *labels)
571 gimple p;
572 unsigned i;
574 p = gimple_build_asm_1 (string,
575 vec_safe_length (inputs),
576 vec_safe_length (outputs),
577 vec_safe_length (clobbers),
578 vec_safe_length (labels));
580 for (i = 0; i < vec_safe_length (inputs); i++)
581 gimple_asm_set_input_op (p, i, (*inputs)[i]);
583 for (i = 0; i < vec_safe_length (outputs); i++)
584 gimple_asm_set_output_op (p, i, (*outputs)[i]);
586 for (i = 0; i < vec_safe_length (clobbers); i++)
587 gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
589 for (i = 0; i < vec_safe_length (labels); i++)
590 gimple_asm_set_label_op (p, i, (*labels)[i]);
592 return p;
595 /* Build a GIMPLE_CATCH statement.
597 TYPES are the catch types.
598 HANDLER is the exception handler. */
600 gimple
601 gimple_build_catch (tree types, gimple_seq handler)
603 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
604 gimple_catch_set_types (p, types);
605 if (handler)
606 gimple_catch_set_handler (p, handler);
608 return p;
611 /* Build a GIMPLE_EH_FILTER statement.
613 TYPES are the filter's types.
614 FAILURE is the filter's failure action. */
616 gimple
617 gimple_build_eh_filter (tree types, gimple_seq failure)
619 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
620 gimple_eh_filter_set_types (p, types);
621 if (failure)
622 gimple_eh_filter_set_failure (p, failure);
624 return p;
627 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
629 gimple
630 gimple_build_eh_must_not_throw (tree decl)
632 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
634 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
635 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
636 gimple_eh_must_not_throw_set_fndecl (p, decl);
638 return p;
641 /* Build a GIMPLE_EH_ELSE statement. */
643 gimple
644 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
646 gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
647 gimple_eh_else_set_n_body (p, n_body);
648 gimple_eh_else_set_e_body (p, e_body);
649 return p;
652 /* Build a GIMPLE_TRY statement.
654 EVAL is the expression to evaluate.
655 CLEANUP is the cleanup expression.
656 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
657 whether this is a try/catch or a try/finally respectively. */
659 gimple
660 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
661 enum gimple_try_flags kind)
663 gimple p;
665 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
666 p = gimple_alloc (GIMPLE_TRY, 0);
667 gimple_set_subcode (p, kind);
668 if (eval)
669 gimple_try_set_eval (p, eval);
670 if (cleanup)
671 gimple_try_set_cleanup (p, cleanup);
673 return p;
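/* Illustrative usage sketch, not part of the original file: protecting a
   gimplified BODY so that CLEANUP always runs, much as the gimplifier does
   for a TRY_FINALLY_EXPR.  Both sequences are assumed to be supplied by
   the caller.  */

static gimple ATTRIBUTE_UNUSED
example_protect_with_cleanup (gimple_seq body, gimple_seq cleanup)
{
  return gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
}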
676 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
678 CLEANUP is the cleanup expression. */
680 gimple
681 gimple_build_wce (gimple_seq cleanup)
683 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
684 if (cleanup)
685 gimple_wce_set_cleanup (p, cleanup);
687 return p;
691 /* Build a GIMPLE_RESX statement. */
693 gimple
694 gimple_build_resx (int region)
696 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
697 p->gimple_eh_ctrl.region = region;
698 return p;
702 /* The helper for constructing a gimple switch statement.
703 INDEX is the switch's index.
704 NLABELS is the number of labels in the switch excluding the default.
705 DEFAULT_LABEL is the default label for the switch statement. */
707 gimple
708 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
710 /* nlabels + 1 default label + 1 index. */
711 gcc_checking_assert (default_label);
712 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
713 1 + 1 + nlabels);
714 gimple_switch_set_index (p, index);
715 gimple_switch_set_default_label (p, default_label);
716 return p;
719 /* Build a GIMPLE_SWITCH statement.
721 INDEX is the switch's index.
722 DEFAULT_LABEL is the default label
723 ARGS is a vector of labels excluding the default. */
725 gimple
726 gimple_build_switch (tree index, tree default_label, vec<tree> args)
728 unsigned i, nlabels = args.length ();
730 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
732 /* Copy the labels from the vector to the switch statement. */
733 for (i = 0; i < nlabels; i++)
734 gimple_switch_set_label (p, i + 1, args[i]);
736 return p;
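/* Illustrative usage sketch, not part of the original file: building a
   two-case switch on INDEX.  DEFAULT_LAB, CASE0 and CASE1 are assumed to
   be CASE_LABEL_EXPRs already built by the caller (e.g. with
   build_case_label).  */

static gimple ATTRIBUTE_UNUSED
example_build_two_case_switch (tree index, tree default_lab,
                               tree case0, tree case1)
{
  vec<tree> labels = vNULL;
  gimple s;

  labels.safe_push (case0);
  labels.safe_push (case1);
  s = gimple_build_switch (index, default_lab, labels);
  labels.release ();
  return s;
}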
739 /* Build a GIMPLE_EH_DISPATCH statement. */
741 gimple
742 gimple_build_eh_dispatch (int region)
744 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
745 p->gimple_eh_ctrl.region = region;
746 return p;
749 /* Build a new GIMPLE_DEBUG_BIND statement.
751 VAR is bound to VALUE; block and location are taken from STMT. */
753 gimple
754 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
756 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
757 (unsigned)GIMPLE_DEBUG_BIND, 2
758 PASS_MEM_STAT);
760 gimple_debug_bind_set_var (p, var);
761 gimple_debug_bind_set_value (p, value);
762 if (stmt)
763 gimple_set_location (p, gimple_location (stmt));
765 return p;
769 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
771 VAR is bound to VALUE; block and location are taken from STMT. */
773 gimple
774 gimple_build_debug_source_bind_stat (tree var, tree value,
775 gimple stmt MEM_STAT_DECL)
777 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
778 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
779 PASS_MEM_STAT);
781 gimple_debug_source_bind_set_var (p, var);
782 gimple_debug_source_bind_set_value (p, value);
783 if (stmt)
784 gimple_set_location (p, gimple_location (stmt));
786 return p;
790 /* Build a GIMPLE_OMP_CRITICAL statement.
792 BODY is the sequence of statements for which only one thread can execute.
793 NAME is an optional identifier for this critical block. */
795 gimple
796 gimple_build_omp_critical (gimple_seq body, tree name)
798 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
799 gimple_omp_critical_set_name (p, name);
800 if (body)
801 gimple_omp_set_body (p, body);
803 return p;
806 /* Build a GIMPLE_OMP_FOR statement.
808 BODY is sequence of statements inside the for loop.
809 KIND is the `for' variant.
810 CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
811 lastprivate, reductions, ordered, schedule, and nowait.
812 COLLAPSE is the collapse count.
813 PRE_BODY is the sequence of statements that are loop invariant. */
815 gimple
816 gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
817 gimple_seq pre_body)
819 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
820 if (body)
821 gimple_omp_set_body (p, body);
822 gimple_omp_for_set_clauses (p, clauses);
823 gimple_omp_for_set_kind (p, kind);
824 p->gimple_omp_for.collapse = collapse;
825 p->gimple_omp_for.iter
826 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
827 if (pre_body)
828 gimple_omp_for_set_pre_body (p, pre_body);
830 return p;
834 /* Build a GIMPLE_OMP_PARALLEL statement.
836 BODY is sequence of statements which are executed in parallel.
837 CLAUSES are the OMP parallel construct's clauses.
838 CHILD_FN is the function created for the parallel threads to execute.
839 DATA_ARG are the shared data argument(s). */
841 gimple
842 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
843 tree data_arg)
845 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
846 if (body)
847 gimple_omp_set_body (p, body);
848 gimple_omp_parallel_set_clauses (p, clauses);
849 gimple_omp_parallel_set_child_fn (p, child_fn);
850 gimple_omp_parallel_set_data_arg (p, data_arg);
852 return p;
856 /* Build a GIMPLE_OMP_TASK statement.
858 BODY is sequence of statements which are executed by the explicit task.
859 CLAUSES are the OMP task construct's clauses.
860 CHILD_FN is the function created for the explicit task to execute.
861 DATA_ARG are the shared data argument(s).
862 COPY_FN is the optional function for firstprivate initialization.
863 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
865 gimple
866 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
867 tree data_arg, tree copy_fn, tree arg_size,
868 tree arg_align)
870 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
871 if (body)
872 gimple_omp_set_body (p, body);
873 gimple_omp_task_set_clauses (p, clauses);
874 gimple_omp_task_set_child_fn (p, child_fn);
875 gimple_omp_task_set_data_arg (p, data_arg);
876 gimple_omp_task_set_copy_fn (p, copy_fn);
877 gimple_omp_task_set_arg_size (p, arg_size);
878 gimple_omp_task_set_arg_align (p, arg_align);
880 return p;
884 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
886 BODY is the sequence of statements in the section. */
888 gimple
889 gimple_build_omp_section (gimple_seq body)
891 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
892 if (body)
893 gimple_omp_set_body (p, body);
895 return p;
899 /* Build a GIMPLE_OMP_MASTER statement.
901 BODY is the sequence of statements to be executed by just the master. */
903 gimple
904 gimple_build_omp_master (gimple_seq body)
906 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
907 if (body)
908 gimple_omp_set_body (p, body);
910 return p;
914 /* Build a GIMPLE_OMP_TASKGROUP statement.
916 BODY is the sequence of statements to be executed by the taskgroup
917 construct. */
919 gimple
920 gimple_build_omp_taskgroup (gimple_seq body)
922 gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
923 if (body)
924 gimple_omp_set_body (p, body);
926 return p;
930 /* Build a GIMPLE_OMP_CONTINUE statement.
932 CONTROL_DEF is the definition of the control variable.
933 CONTROL_USE is the use of the control variable. */
935 gimple
936 gimple_build_omp_continue (tree control_def, tree control_use)
938 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
939 gimple_omp_continue_set_control_def (p, control_def);
940 gimple_omp_continue_set_control_use (p, control_use);
941 return p;
944 /* Build a GIMPLE_OMP_ORDERED statement.
946 BODY is the sequence of statements inside a loop that will be executed in
947 sequence. */
949 gimple
950 gimple_build_omp_ordered (gimple_seq body)
952 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
953 if (body)
954 gimple_omp_set_body (p, body);
956 return p;
960 /* Build a GIMPLE_OMP_RETURN statement.
961 WAIT_P is true if this is a non-waiting return. */
963 gimple
964 gimple_build_omp_return (bool wait_p)
966 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
967 if (wait_p)
968 gimple_omp_return_set_nowait (p);
970 return p;
974 /* Build a GIMPLE_OMP_SECTIONS statement.
976 BODY is a sequence of section statements.
977 CLAUSES are any of the OMP sections construct's clauses: private,
978 firstprivate, lastprivate, reduction, and nowait. */
980 gimple
981 gimple_build_omp_sections (gimple_seq body, tree clauses)
983 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
984 if (body)
985 gimple_omp_set_body (p, body);
986 gimple_omp_sections_set_clauses (p, clauses);
988 return p;
992 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
994 gimple
995 gimple_build_omp_sections_switch (void)
997 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1001 /* Build a GIMPLE_OMP_SINGLE statement.
1003 BODY is the sequence of statements that will be executed once.
1004 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1005 copyprivate, nowait. */
1007 gimple
1008 gimple_build_omp_single (gimple_seq body, tree clauses)
1010 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1011 if (body)
1012 gimple_omp_set_body (p, body);
1013 gimple_omp_single_set_clauses (p, clauses);
1015 return p;
1019 /* Build a GIMPLE_OMP_TARGET statement.
1021 BODY is the sequence of statements that will be executed.
1022 CLAUSES are any of the OMP target construct's clauses. */
1024 gimple
1025 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1027 gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
1028 if (body)
1029 gimple_omp_set_body (p, body);
1030 gimple_omp_target_set_clauses (p, clauses);
1031 gimple_omp_target_set_kind (p, kind);
1033 return p;
1037 /* Build a GIMPLE_OMP_TEAMS statement.
1039 BODY is the sequence of statements that will be executed.
1040 CLAUSES are any of the OMP teams construct's clauses. */
1042 gimple
1043 gimple_build_omp_teams (gimple_seq body, tree clauses)
1045 gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
1046 if (body)
1047 gimple_omp_set_body (p, body);
1048 gimple_omp_teams_set_clauses (p, clauses);
1050 return p;
1054 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1056 gimple
1057 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1059 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1060 gimple_omp_atomic_load_set_lhs (p, lhs);
1061 gimple_omp_atomic_load_set_rhs (p, rhs);
1062 return p;
1065 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1067 VAL is the value we are storing. */
1069 gimple
1070 gimple_build_omp_atomic_store (tree val)
1072 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1073 gimple_omp_atomic_store_set_val (p, val);
1074 return p;
1077 /* Build a GIMPLE_TRANSACTION statement. */
1079 gimple
1080 gimple_build_transaction (gimple_seq body, tree label)
1082 gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
1083 gimple_transaction_set_body (p, body);
1084 gimple_transaction_set_label (p, label);
1085 return p;
1088 /* Build a GIMPLE_PREDICT statement. PREDICTOR is one of the predictors from
1089 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1091 gimple
1092 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1094 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1095 /* Ensure all the predictors fit into the lower bits of the subcode. */
1096 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1097 gimple_predict_set_predictor (p, predictor);
1098 gimple_predict_set_outcome (p, outcome);
1099 return p;
1102 #if defined ENABLE_GIMPLE_CHECKING
1103 /* Complain of a gimple type mismatch and die. */
1105 void
1106 gimple_check_failed (const_gimple gs, const char *file, int line,
1107 const char *function, enum gimple_code code,
1108 enum tree_code subcode)
1110 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1111 gimple_code_name[code],
1112 get_tree_code_name (subcode),
1113 gimple_code_name[gimple_code (gs)],
1114 gs->gsbase.subcode > 0
1115 ? get_tree_code_name ((enum tree_code) gs->gsbase.subcode)
1116 : "",
1117 function, trim_filename (file), line);
1119 #endif /* ENABLE_GIMPLE_CHECKING */
1122 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1123 *SEQ_P is NULL, a new sequence is allocated. */
1125 void
1126 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1128 gimple_stmt_iterator si;
1129 if (gs == NULL)
1130 return;
1132 si = gsi_last (*seq_p);
1133 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1136 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1137 *SEQ_P is NULL, a new sequence is allocated. This function is
1138 similar to gimple_seq_add_stmt, but does not scan the operands.
1139 During gimplification, we need to manipulate statement sequences
1140 before the def/use vectors have been constructed. */
1142 void
1143 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
1145 gimple_stmt_iterator si;
1147 if (gs == NULL)
1148 return;
1150 si = gsi_last (*seq_p);
1151 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1154 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1155 NULL, a new sequence is allocated. */
1157 void
1158 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1160 gimple_stmt_iterator si;
1161 if (src == NULL)
1162 return;
1164 si = gsi_last (*dst_p);
1165 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
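/* Illustrative usage sketch, not part of the original file: the usual way a
   sequence is assembled from scratch.  Starting from a NULL gimple_seq, the
   first gimple_seq_add_stmt call allocates the sequence.  STMT1 and STMT2
   are assumed to be statements built by the caller.  */

static gimple_seq ATTRIBUTE_UNUSED
example_make_seq (gimple stmt1, gimple stmt2)
{
  gimple_seq seq = NULL;

  gimple_seq_add_stmt (&seq, stmt1);
  gimple_seq_add_stmt (&seq, stmt2);
  gimple_seq_add_stmt (&seq, gimple_build_nop ());
  return seq;
}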
1168 /* Determine whether to assign a location to the statement GS. */
1170 static bool
1171 should_carry_location_p (gimple gs)
1173 /* Don't emit a line note for a label. We particularly don't want to
1174 emit one for the break label, since it doesn't actually correspond
1175 to the beginning of the loop/switch. */
1176 if (gimple_code (gs) == GIMPLE_LABEL)
1177 return false;
1179 return true;
1182 /* Set the location for gimple statement GS to LOCATION. */
1184 static void
1185 annotate_one_with_location (gimple gs, location_t location)
1187 if (!gimple_has_location (gs)
1188 && !gimple_do_not_emit_location_p (gs)
1189 && should_carry_location_p (gs))
1190 gimple_set_location (gs, location);
1193 /* Set LOCATION for all the statements after iterator GSI in sequence
1194 SEQ. If GSI is pointing to the end of the sequence, start with the
1195 first statement in SEQ. */
1197 void
1198 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1199 location_t location)
1201 if (gsi_end_p (gsi))
1202 gsi = gsi_start (seq);
1203 else
1204 gsi_next (&gsi);
1206 for (; !gsi_end_p (gsi); gsi_next (&gsi))
1207 annotate_one_with_location (gsi_stmt (gsi), location);
1210 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
1212 void
1213 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1215 gimple_stmt_iterator i;
1217 if (gimple_seq_empty_p (stmt_p))
1218 return;
1220 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1222 gimple gs = gsi_stmt (i);
1223 annotate_one_with_location (gs, location);
1227 /* Helper function of empty_body_p. Return true if STMT is an empty
1228 statement. */
1230 static bool
1231 empty_stmt_p (gimple stmt)
1233 if (gimple_code (stmt) == GIMPLE_NOP)
1234 return true;
1235 if (gimple_code (stmt) == GIMPLE_BIND)
1236 return empty_body_p (gimple_bind_body (stmt));
1237 return false;
1241 /* Return true if BODY contains nothing but empty statements. */
1243 bool
1244 empty_body_p (gimple_seq body)
1246 gimple_stmt_iterator i;
1248 if (gimple_seq_empty_p (body))
1249 return true;
1250 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1251 if (!empty_stmt_p (gsi_stmt (i))
1252 && !is_gimple_debug (gsi_stmt (i)))
1253 return false;
1255 return true;
1259 /* Perform a deep copy of sequence SRC and return the result. */
1261 gimple_seq
1262 gimple_seq_copy (gimple_seq src)
1264 gimple_stmt_iterator gsi;
1265 gimple_seq new_seq = NULL;
1266 gimple stmt;
1268 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1270 stmt = gimple_copy (gsi_stmt (gsi));
1271 gimple_seq_add_stmt (&new_seq, stmt);
1274 return new_seq;
1279 /* Return true if calls C1 and C2 are known to go to the same function. */
1281 bool
1282 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1284 if (gimple_call_internal_p (c1))
1285 return (gimple_call_internal_p (c2)
1286 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1287 else
1288 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1289 || (gimple_call_fndecl (c1)
1290 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1293 /* Detect flags from a GIMPLE_CALL. This is just like
1294 call_expr_flags, but for gimple tuples. */
1296 int
1297 gimple_call_flags (const_gimple stmt)
1299 int flags;
1300 tree decl = gimple_call_fndecl (stmt);
1302 if (decl)
1303 flags = flags_from_decl_or_type (decl);
1304 else if (gimple_call_internal_p (stmt))
1305 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1306 else
1307 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1309 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1310 flags |= ECF_NOTHROW;
1312 return flags;
1315 /* Return the "fn spec" string for call STMT. */
1317 static tree
1318 gimple_call_fnspec (const_gimple stmt)
1320 tree type, attr;
1322 type = gimple_call_fntype (stmt);
1323 if (!type)
1324 return NULL_TREE;
1326 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1327 if (!attr)
1328 return NULL_TREE;
1330 return TREE_VALUE (TREE_VALUE (attr));
1333 /* Detects argument flags for argument number ARG on call STMT. */
1335 int
1336 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1338 tree attr = gimple_call_fnspec (stmt);
1340 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1341 return 0;
1343 switch (TREE_STRING_POINTER (attr)[1 + arg])
1345 case 'x':
1346 case 'X':
1347 return EAF_UNUSED;
1349 case 'R':
1350 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1352 case 'r':
1353 return EAF_NOCLOBBER | EAF_NOESCAPE;
1355 case 'W':
1356 return EAF_DIRECT | EAF_NOESCAPE;
1358 case 'w':
1359 return EAF_NOESCAPE;
1361 case '.':
1362 default:
1363 return 0;
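/* Illustrative usage sketch, not part of the original file: a query built
   on the "fn spec" machinery above.  An argument is considered to possibly
   escape through CALL unless its flags include EAF_NOESCAPE or the
   argument is unused altogether.  */

static bool ATTRIBUTE_UNUSED
example_call_arg_may_escape_p (gimple call, unsigned argno)
{
  int flags = gimple_call_arg_flags (call, argno);
  return (flags & (EAF_NOESCAPE | EAF_UNUSED)) == 0;
}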
1367 /* Detects return flags for the call STMT. */
1369 int
1370 gimple_call_return_flags (const_gimple stmt)
1372 tree attr;
1374 if (gimple_call_flags (stmt) & ECF_MALLOC)
1375 return ERF_NOALIAS;
1377 attr = gimple_call_fnspec (stmt);
1378 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1379 return 0;
1381 switch (TREE_STRING_POINTER (attr)[0])
1383 case '1':
1384 case '2':
1385 case '3':
1386 case '4':
1387 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1389 case 'm':
1390 return ERF_NOALIAS;
1392 case '.':
1393 default:
1394 return 0;
1399 /* Return true if GS is a copy assignment. */
1401 bool
1402 gimple_assign_copy_p (gimple gs)
1404 return (gimple_assign_single_p (gs)
1405 && is_gimple_val (gimple_op (gs, 1)));
1409 /* Return true if GS is a SSA_NAME copy assignment. */
1411 bool
1412 gimple_assign_ssa_name_copy_p (gimple gs)
1414 return (gimple_assign_single_p (gs)
1415 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1416 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1420 /* Return true if GS is an assignment with a unary RHS, but the
1421 operator has no effect on the assigned value. The logic is adapted
1422 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1423 instances in which STRIP_NOPS was previously applied to the RHS of
1424 an assignment.
1426 NOTE: In the use cases that led to the creation of this function
1427 and of gimple_assign_single_p, it is typical to test for either
1428 condition and to proceed in the same manner. In each case, the
1429 assigned value is represented by the single RHS operand of the
1430 assignment. I suspect there may be cases where gimple_assign_copy_p,
1431 gimple_assign_single_p, or equivalent logic is used where a similar
1432 treatment of unary NOPs is appropriate. */
1434 bool
1435 gimple_assign_unary_nop_p (gimple gs)
1437 return (is_gimple_assign (gs)
1438 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1439 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1440 && gimple_assign_rhs1 (gs) != error_mark_node
1441 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1442 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
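/* Illustrative usage sketch, not part of the original file: using the
   predicates above to look through a chain of SSA name copies.  NAME is
   assumed to be an SSA_NAME in a function that is in SSA form.  */

static tree ATTRIBUTE_UNUSED
example_skip_ssa_copies (tree name)
{
  while (TREE_CODE (name) == SSA_NAME)
    {
      gimple def = SSA_NAME_DEF_STMT (name);
      if (!gimple_assign_ssa_name_copy_p (def))
        break;
      name = gimple_assign_rhs1 (def);
    }
  return name;
}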
1445 /* Set BB to be the basic block holding G. */
1447 void
1448 gimple_set_bb (gimple stmt, basic_block bb)
1450 stmt->gsbase.bb = bb;
1452 /* If the statement is a label, add the label to block-to-labels map
1453 so that we can speed up edge creation for GIMPLE_GOTOs. */
1454 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
1456 tree t;
1457 int uid;
1459 t = gimple_label_label (stmt);
1460 uid = LABEL_DECL_UID (t);
1461 if (uid == -1)
1463 unsigned old_len = vec_safe_length (label_to_block_map);
1464 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
1465 if (old_len <= (unsigned) uid)
1467 unsigned new_len = 3 * uid / 2 + 1;
1469 vec_safe_grow_cleared (label_to_block_map, new_len);
1473 (*label_to_block_map)[uid] = bb;
1478 /* Modify the RHS of the assignment pointed-to by GSI using the
1479 operands in the expression tree EXPR.
1481 NOTE: The statement pointed-to by GSI may be reallocated if it
1482 did not have enough operand slots.
1484 This function is useful to convert an existing tree expression into
1485 the flat representation used for the RHS of a GIMPLE assignment.
1486 It will reallocate memory as needed to expand or shrink the number
1487 of operand slots needed to represent EXPR.
1489 NOTE: If you find yourself building a tree and then calling this
1490 function, you are most certainly doing it the slow way. It is much
1491 better to build a new assignment or to use the function
1492 gimple_assign_set_rhs_with_ops, which does not require an
1493 expression tree to be built. */
1495 void
1496 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1498 enum tree_code subcode;
1499 tree op1, op2, op3;
1501 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
1502 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
1506 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
1507 operands OP1, OP2 and OP3.
1509 NOTE: The statement pointed-to by GSI may be reallocated if it
1510 did not have enough operand slots. */
1512 void
1513 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
1514 tree op1, tree op2, tree op3)
1516 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1517 gimple stmt = gsi_stmt (*gsi);
1519 /* If the new CODE needs more operands, allocate a new statement. */
1520 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
1522 tree lhs = gimple_assign_lhs (stmt);
1523 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
1524 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
1525 gimple_init_singleton (new_stmt);
1526 gsi_replace (gsi, new_stmt, true);
1527 stmt = new_stmt;
1529 /* The LHS needs to be reset as this also changes the SSA name
1530 on the LHS. */
1531 gimple_assign_set_lhs (stmt, lhs);
1534 gimple_set_num_ops (stmt, new_rhs_ops + 1);
1535 gimple_set_subcode (stmt, code);
1536 gimple_assign_set_rhs1 (stmt, op1);
1537 if (new_rhs_ops > 1)
1538 gimple_assign_set_rhs2 (stmt, op2);
1539 if (new_rhs_ops > 2)
1540 gimple_assign_set_rhs3 (stmt, op3);
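/* Illustrative usage sketch, not part of the original file: rewriting the
   RHS of the GIMPLE_ASSIGN at *GSI into "OP1 + OP2" using the function
   above.  The caller is assumed to update the SSA operands afterwards.  */

static void ATTRIBUTE_UNUSED
example_rewrite_rhs_to_plus (gimple_stmt_iterator *gsi, tree op1, tree op2)
{
  gimple_assign_set_rhs_with_ops_1 (gsi, PLUS_EXPR, op1, op2, NULL_TREE);
}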
1544 /* Return the LHS of a statement that performs an assignment,
1545 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1546 for a call to a function that returns no value, or for a
1547 statement other than an assignment or a call. */
1549 tree
1550 gimple_get_lhs (const_gimple stmt)
1552 enum gimple_code code = gimple_code (stmt);
1554 if (code == GIMPLE_ASSIGN)
1555 return gimple_assign_lhs (stmt);
1556 else if (code == GIMPLE_CALL)
1557 return gimple_call_lhs (stmt);
1558 else
1559 return NULL_TREE;
1563 /* Set the LHS of a statement that performs an assignment,
1564 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1566 void
1567 gimple_set_lhs (gimple stmt, tree lhs)
1569 enum gimple_code code = gimple_code (stmt);
1571 if (code == GIMPLE_ASSIGN)
1572 gimple_assign_set_lhs (stmt, lhs);
1573 else if (code == GIMPLE_CALL)
1574 gimple_call_set_lhs (stmt, lhs);
1575 else
1576 gcc_unreachable ();
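/* Illustrative usage sketch, not part of the original file: a predicate
   built on gimple_get_lhs that checks whether STMT assigns to VAR, for
   either a GIMPLE_ASSIGN or a GIMPLE_CALL with an LHS.  */

static bool ATTRIBUTE_UNUSED
example_stmt_assigns_to_p (gimple stmt, tree var)
{
  tree lhs = gimple_get_lhs (stmt);
  return lhs != NULL_TREE && operand_equal_p (lhs, var, 0);
}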
1580 /* Return a deep copy of statement STMT. All the operands from STMT
1581 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
1582 and VUSE operand arrays are set to empty in the new copy. The new
1583 copy isn't part of any sequence. */
1585 gimple
1586 gimple_copy (gimple stmt)
1588 enum gimple_code code = gimple_code (stmt);
1589 unsigned num_ops = gimple_num_ops (stmt);
1590 gimple copy = gimple_alloc (code, num_ops);
1591 unsigned i;
1593 /* Shallow copy all the fields from STMT. */
1594 memcpy (copy, stmt, gimple_size (code));
1595 gimple_init_singleton (copy);
1597 /* If STMT has sub-statements, deep-copy them as well. */
1598 if (gimple_has_substatements (stmt))
1600 gimple_seq new_seq;
1601 tree t;
1603 switch (gimple_code (stmt))
1605 case GIMPLE_BIND:
1606 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
1607 gimple_bind_set_body (copy, new_seq);
1608 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
1609 gimple_bind_set_block (copy, gimple_bind_block (stmt));
1610 break;
1612 case GIMPLE_CATCH:
1613 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
1614 gimple_catch_set_handler (copy, new_seq);
1615 t = unshare_expr (gimple_catch_types (stmt));
1616 gimple_catch_set_types (copy, t);
1617 break;
1619 case GIMPLE_EH_FILTER:
1620 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
1621 gimple_eh_filter_set_failure (copy, new_seq);
1622 t = unshare_expr (gimple_eh_filter_types (stmt));
1623 gimple_eh_filter_set_types (copy, t);
1624 break;
1626 case GIMPLE_EH_ELSE:
1627 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
1628 gimple_eh_else_set_n_body (copy, new_seq);
1629 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
1630 gimple_eh_else_set_e_body (copy, new_seq);
1631 break;
1633 case GIMPLE_TRY:
1634 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
1635 gimple_try_set_eval (copy, new_seq);
1636 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
1637 gimple_try_set_cleanup (copy, new_seq);
1638 break;
1640 case GIMPLE_OMP_FOR:
1641 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
1642 gimple_omp_for_set_pre_body (copy, new_seq);
1643 t = unshare_expr (gimple_omp_for_clauses (stmt));
1644 gimple_omp_for_set_clauses (copy, t);
1645 copy->gimple_omp_for.iter
1646 = ggc_alloc_vec_gimple_omp_for_iter
1647 (gimple_omp_for_collapse (stmt));
1648 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1650 gimple_omp_for_set_cond (copy, i,
1651 gimple_omp_for_cond (stmt, i));
1652 gimple_omp_for_set_index (copy, i,
1653 gimple_omp_for_index (stmt, i));
1654 t = unshare_expr (gimple_omp_for_initial (stmt, i));
1655 gimple_omp_for_set_initial (copy, i, t);
1656 t = unshare_expr (gimple_omp_for_final (stmt, i));
1657 gimple_omp_for_set_final (copy, i, t);
1658 t = unshare_expr (gimple_omp_for_incr (stmt, i));
1659 gimple_omp_for_set_incr (copy, i, t);
1661 goto copy_omp_body;
1663 case GIMPLE_OMP_PARALLEL:
1664 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
1665 gimple_omp_parallel_set_clauses (copy, t);
1666 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
1667 gimple_omp_parallel_set_child_fn (copy, t);
1668 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
1669 gimple_omp_parallel_set_data_arg (copy, t);
1670 goto copy_omp_body;
1672 case GIMPLE_OMP_TASK:
1673 t = unshare_expr (gimple_omp_task_clauses (stmt));
1674 gimple_omp_task_set_clauses (copy, t);
1675 t = unshare_expr (gimple_omp_task_child_fn (stmt));
1676 gimple_omp_task_set_child_fn (copy, t);
1677 t = unshare_expr (gimple_omp_task_data_arg (stmt));
1678 gimple_omp_task_set_data_arg (copy, t);
1679 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
1680 gimple_omp_task_set_copy_fn (copy, t);
1681 t = unshare_expr (gimple_omp_task_arg_size (stmt));
1682 gimple_omp_task_set_arg_size (copy, t);
1683 t = unshare_expr (gimple_omp_task_arg_align (stmt));
1684 gimple_omp_task_set_arg_align (copy, t);
1685 goto copy_omp_body;
1687 case GIMPLE_OMP_CRITICAL:
1688 t = unshare_expr (gimple_omp_critical_name (stmt));
1689 gimple_omp_critical_set_name (copy, t);
1690 goto copy_omp_body;
1692 case GIMPLE_OMP_SECTIONS:
1693 t = unshare_expr (gimple_omp_sections_clauses (stmt));
1694 gimple_omp_sections_set_clauses (copy, t);
1695 t = unshare_expr (gimple_omp_sections_control (stmt));
1696 gimple_omp_sections_set_control (copy, t);
1697 /* FALLTHRU */
1699 case GIMPLE_OMP_SINGLE:
1700 case GIMPLE_OMP_TARGET:
1701 case GIMPLE_OMP_TEAMS:
1702 case GIMPLE_OMP_SECTION:
1703 case GIMPLE_OMP_MASTER:
1704 case GIMPLE_OMP_TASKGROUP:
1705 case GIMPLE_OMP_ORDERED:
1706 copy_omp_body:
1707 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
1708 gimple_omp_set_body (copy, new_seq);
1709 break;
1711 case GIMPLE_TRANSACTION:
1712 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
1713 gimple_transaction_set_body (copy, new_seq);
1714 break;
1716 case GIMPLE_WITH_CLEANUP_EXPR:
1717 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
1718 gimple_wce_set_cleanup (copy, new_seq);
1719 break;
1721 default:
1722 gcc_unreachable ();
1726 /* Make copy of operands. */
1727 for (i = 0; i < num_ops; i++)
1728 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
1730 if (gimple_has_mem_ops (stmt))
1732 gimple_set_vdef (copy, gimple_vdef (stmt));
1733 gimple_set_vuse (copy, gimple_vuse (stmt));
1736 /* Clear out SSA operand vectors on COPY. */
1737 if (gimple_has_ops (stmt))
1739 gimple_set_use_ops (copy, NULL);
1741 /* SSA operands need to be updated. */
1742 gimple_set_modified (copy, true);
1745 return copy;
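/* Illustrative usage sketch, not part of the original file: duplicating the
   statement at GSI and inserting the copy just before it.  Because the
   copy's SSA operand vectors are cleared, the caller must update them, and
   if the original defines an SSA name the copy's LHS has to be replaced
   before the IL is valid again.  */

static gimple ATTRIBUTE_UNUSED
example_duplicate_stmt (gimple_stmt_iterator *gsi)
{
  gimple copy = gimple_copy (gsi_stmt (*gsi));
  gsi_insert_before (gsi, copy, GSI_SAME_STMT);
  return copy;
}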
1749 /* Return true if statement S has side-effects. We consider a
1750 statement to have side effects if:
1752 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1753 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
1755 bool
1756 gimple_has_side_effects (const_gimple s)
1758 if (is_gimple_debug (s))
1759 return false;
1761 /* We don't have to scan the arguments to check for
1762 volatile arguments, though, at present, we still
1763 do a scan to check for TREE_SIDE_EFFECTS. */
1764 if (gimple_has_volatile_ops (s))
1765 return true;
1767 if (gimple_code (s) == GIMPLE_ASM
1768 && gimple_asm_volatile_p (s))
1769 return true;
1771 if (is_gimple_call (s))
1773 int flags = gimple_call_flags (s);
1775 /* An infinite loop is considered a side effect. */
1776 if (!(flags & (ECF_CONST | ECF_PURE))
1777 || (flags & ECF_LOOPING_CONST_OR_PURE))
1778 return true;
1780 return false;
1783 return false;
1786 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
1787 Return true if S can trap. When INCLUDE_MEM is true, check whether
1788 the memory operations could trap. When INCLUDE_STORES is true and
1789 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
1791 bool
1792 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
1794 tree t, div = NULL_TREE;
1795 enum tree_code op;
1797 if (include_mem)
1799 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
1801 for (i = start; i < gimple_num_ops (s); i++)
1802 if (tree_could_trap_p (gimple_op (s, i)))
1803 return true;
1806 switch (gimple_code (s))
1808 case GIMPLE_ASM:
1809 return gimple_asm_volatile_p (s);
1811 case GIMPLE_CALL:
1812 t = gimple_call_fndecl (s);
1813 /* Assume that calls to weak functions may trap. */
1814 if (!t || !DECL_P (t) || DECL_WEAK (t))
1815 return true;
1816 return false;
1818 case GIMPLE_ASSIGN:
1819 t = gimple_expr_type (s);
1820 op = gimple_assign_rhs_code (s);
1821 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
1822 div = gimple_assign_rhs2 (s);
1823 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
1824 (INTEGRAL_TYPE_P (t)
1825 && TYPE_OVERFLOW_TRAPS (t)),
1826 div));
1828 default:
1829 break;
1832 return false;
1835 /* Return true if statement S can trap. */
1837 bool
1838 gimple_could_trap_p (gimple s)
1840 return gimple_could_trap_p_1 (s, true, true);
1843 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
1845 bool
1846 gimple_assign_rhs_could_trap_p (gimple s)
1848 gcc_assert (is_gimple_assign (s));
1849 return gimple_could_trap_p_1 (s, true, false);
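/* Illustrative usage sketch, not part of the original file: a conservative
   check combining gimple_has_side_effects and gimple_could_trap_p to decide
   whether STMT may be deleted or moved freely.  Real passes add further
   conditions, e.g. that no SSA definition of STMT is still used.  */

static bool ATTRIBUTE_UNUSED
example_stmt_is_removable_p (gimple stmt)
{
  return !gimple_has_side_effects (stmt) && !gimple_could_trap_p (stmt);
}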
1853 /* Print statistics about the GIMPLE statements generated. */
1855 void
1856 dump_gimple_statistics (void)
1858 int i, total_tuples = 0, total_bytes = 0;
1860 if (! GATHER_STATISTICS)
1862 fprintf (stderr, "No gimple statistics\n");
1863 return;
1866 fprintf (stderr, "\nGIMPLE statements\n");
1867 fprintf (stderr, "Kind Stmts Bytes\n");
1868 fprintf (stderr, "---------------------------------------\n");
1869 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
1871 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
1872 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
1873 total_tuples += gimple_alloc_counts[i];
1874 total_bytes += gimple_alloc_sizes[i];
1876 fprintf (stderr, "---------------------------------------\n");
1877 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
1878 fprintf (stderr, "---------------------------------------\n");
1882 /* Return the number of operands needed on the RHS of a GIMPLE
1883 assignment for an expression with tree code CODE. */
1885 unsigned
1886 get_gimple_rhs_num_ops (enum tree_code code)
1888 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
1890 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
1891 return 1;
1892 else if (rhs_class == GIMPLE_BINARY_RHS)
1893 return 2;
1894 else if (rhs_class == GIMPLE_TERNARY_RHS)
1895 return 3;
1896 else
1897 gcc_unreachable ();
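/* Illustrative sketch, not part of the original file: what the mapping
   above yields for a few representative codes, following the
   classification table defined below.  */

static void ATTRIBUTE_UNUSED
example_rhs_num_ops (void)
{
  gcc_checking_assert (get_gimple_rhs_num_ops (SSA_NAME) == 1);    /* single */
  gcc_checking_assert (get_gimple_rhs_num_ops (NEGATE_EXPR) == 1); /* unary */
  gcc_checking_assert (get_gimple_rhs_num_ops (PLUS_EXPR) == 2);   /* binary */
  gcc_checking_assert (get_gimple_rhs_num_ops (FMA_EXPR) == 3);    /* ternary */
}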
1900 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
1901 (unsigned char) \
1902 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
1903 : ((TYPE) == tcc_binary \
1904 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
1905 : ((TYPE) == tcc_constant \
1906 || (TYPE) == tcc_declaration \
1907 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
1908 : ((SYM) == TRUTH_AND_EXPR \
1909 || (SYM) == TRUTH_OR_EXPR \
1910 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
1911 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
1912 : ((SYM) == COND_EXPR \
1913 || (SYM) == WIDEN_MULT_PLUS_EXPR \
1914 || (SYM) == WIDEN_MULT_MINUS_EXPR \
1915 || (SYM) == DOT_PROD_EXPR \
1916 || (SYM) == REALIGN_LOAD_EXPR \
1917 || (SYM) == VEC_COND_EXPR \
1918 || (SYM) == VEC_PERM_EXPR \
1919 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
1920 : ((SYM) == CONSTRUCTOR \
1921 || (SYM) == OBJ_TYPE_REF \
1922 || (SYM) == ASSERT_EXPR \
1923 || (SYM) == ADDR_EXPR \
1924 || (SYM) == WITH_SIZE_EXPR \
1925 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
1926 : GIMPLE_INVALID_RHS),
1927 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
1929 const unsigned char gimple_rhs_class_table[] = {
1930 #include "all-tree.def"
1933 #undef DEFTREECODE
1934 #undef END_OF_BASE_TREE_CODES
1936 void
1937 recalculate_side_effects (tree t)
1939 enum tree_code code = TREE_CODE (t);
1940 int len = TREE_OPERAND_LENGTH (t);
1941 int i;
1943 switch (TREE_CODE_CLASS (code))
1945 case tcc_expression:
1946 switch (code)
1948 case INIT_EXPR:
1949 case MODIFY_EXPR:
1950 case VA_ARG_EXPR:
1951 case PREDECREMENT_EXPR:
1952 case PREINCREMENT_EXPR:
1953 case POSTDECREMENT_EXPR:
1954 case POSTINCREMENT_EXPR:
1955 /* All of these have side-effects, no matter what their
1956 operands are. */
1957 return;
1959 default:
1960 break;
1962 /* Fall through. */
1964 case tcc_comparison: /* a comparison expression */
1965 case tcc_unary: /* a unary arithmetic expression */
1966 case tcc_binary: /* a binary arithmetic expression */
1967 case tcc_reference: /* a reference */
1968 case tcc_vl_exp: /* a function call */
1969 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1970 for (i = 0; i < len; ++i)
1972 tree op = TREE_OPERAND (t, i);
1973 if (op && TREE_SIDE_EFFECTS (op))
1974 TREE_SIDE_EFFECTS (t) = 1;
1976 break;
1978 case tcc_constant:
1979 /* No side-effects. */
1980 return;
1982 default:
1983 gcc_unreachable ();
1987 /* Canonicalize a tree T for use as a COND_EXPR condition. Returns
1988 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
1989 we failed to create one. */
1991 tree
1992 canonicalize_cond_expr_cond (tree t)
1994 /* Strip conversions around boolean operations. */
1995 if (CONVERT_EXPR_P (t)
1996 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
1997 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
1998 == BOOLEAN_TYPE))
1999 t = TREE_OPERAND (t, 0);
2001 /* For !x use x == 0. */
2002 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2004 tree top0 = TREE_OPERAND (t, 0);
2005 t = build2 (EQ_EXPR, TREE_TYPE (t),
2006 top0, build_int_cst (TREE_TYPE (top0), 0));
2008 /* For cmp ? 1 : 0 use cmp. */
2009 else if (TREE_CODE (t) == COND_EXPR
2010 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2011 && integer_onep (TREE_OPERAND (t, 1))
2012 && integer_zerop (TREE_OPERAND (t, 2)))
2014 tree top0 = TREE_OPERAND (t, 0);
2015 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2016 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2018 /* For x ^ y use x != y. */
2019 else if (TREE_CODE (t) == BIT_XOR_EXPR)
2020 t = build2 (NE_EXPR, TREE_TYPE (t),
2021 TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2023 if (is_gimple_condexpr (t))
2024 return t;
2026 return NULL_TREE;
2029 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2030 the positions marked by the set ARGS_TO_SKIP. */
2032 gimple
2033 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
2035 int i;
2036 int nargs = gimple_call_num_args (stmt);
2037 vec<tree> vargs;
2038 vargs.create (nargs);
2039 gimple new_stmt;
2041 for (i = 0; i < nargs; i++)
2042 if (!bitmap_bit_p (args_to_skip, i))
2043 vargs.quick_push (gimple_call_arg (stmt, i));
2045 if (gimple_call_internal_p (stmt))
2046 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2047 vargs);
2048 else
2049 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2050 vargs.release ();
2051 if (gimple_call_lhs (stmt))
2052 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2054 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2055 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2057 if (gimple_has_location (stmt))
2058 gimple_set_location (new_stmt, gimple_location (stmt));
2059 gimple_call_copy_flags (new_stmt, stmt);
2060 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2062 gimple_set_modified (new_stmt, true);
2064 return new_stmt;
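/* Illustrative usage sketch, not part of the original file: dropping
   argument number ARGNO from CALL with the function above.  The returned
   statement is not yet linked into any sequence or basic block.  */

static gimple ATTRIBUTE_UNUSED
example_drop_call_arg (gimple call, unsigned argno)
{
  bitmap skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (skip, argno);
  new_call = gimple_call_copy_skip_args (call, skip);
  BITMAP_FREE (skip);
  return new_call;
}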
2069 /* Return true if the field decls F1 and F2 are at the same offset.
2071 This is intended to be used on GIMPLE types only. */
2073 bool
2074 gimple_compare_field_offset (tree f1, tree f2)
2076 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
2078 tree offset1 = DECL_FIELD_OFFSET (f1);
2079 tree offset2 = DECL_FIELD_OFFSET (f2);
2080 return ((offset1 == offset2
2081 /* Once gimplification is done, self-referential offsets are
2082 instantiated as operand #2 of the COMPONENT_REF built for
2083 each access and reset. Therefore, they are not relevant
2084 anymore and fields are interchangeable provided that they
2085 represent the same access. */
2086 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
2087 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
2088 && (DECL_SIZE (f1) == DECL_SIZE (f2)
2089 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
2090 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
2091 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
2092 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
2093 || operand_equal_p (offset1, offset2, 0))
2094 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
2095 DECL_FIELD_BIT_OFFSET (f2)));
2098 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
2099 should be, so handle differing ones specially by decomposing
2100 the offset into a byte and bit offset manually. */
2101 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
2102 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
2104 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
2105 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
2106 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
2107 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
2108 + bit_offset1 / BITS_PER_UNIT);
2109 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
2110 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
2111 + bit_offset2 / BITS_PER_UNIT);
2112 if (byte_offset1 != byte_offset2)
2113 return false;
2114 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
2117 return false;
2121 /* Return a type the same as TYPE except unsigned or
2122 signed according to UNSIGNEDP. */
2124 static tree
2125 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
2127 tree type1;
2129 type1 = TYPE_MAIN_VARIANT (type);
2130 if (type1 == signed_char_type_node
2131 || type1 == char_type_node
2132 || type1 == unsigned_char_type_node)
2133 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2134 if (type1 == integer_type_node || type1 == unsigned_type_node)
2135 return unsignedp ? unsigned_type_node : integer_type_node;
2136 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
2137 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2138 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
2139 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2140 if (type1 == long_long_integer_type_node
2141 || type1 == long_long_unsigned_type_node)
2142 return unsignedp
2143 ? long_long_unsigned_type_node
2144 : long_long_integer_type_node;
2145 if (int128_integer_type_node
&& (type1 == int128_integer_type_node
|| type1 == int128_unsigned_type_node))
2146 return unsignedp
2147 ? int128_unsigned_type_node
2148 : int128_integer_type_node;
2149 #if HOST_BITS_PER_WIDE_INT >= 64
2150 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
2151 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2152 #endif
2153 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
2154 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2155 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
2156 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2157 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
2158 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2159 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
2160 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2162 #define GIMPLE_FIXED_TYPES(NAME) \
2163 if (type1 == short_ ## NAME ## _type_node \
2164 || type1 == unsigned_short_ ## NAME ## _type_node) \
2165 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
2166 : short_ ## NAME ## _type_node; \
2167 if (type1 == NAME ## _type_node \
2168 || type1 == unsigned_ ## NAME ## _type_node) \
2169 return unsignedp ? unsigned_ ## NAME ## _type_node \
2170 : NAME ## _type_node; \
2171 if (type1 == long_ ## NAME ## _type_node \
2172 || type1 == unsigned_long_ ## NAME ## _type_node) \
2173 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
2174 : long_ ## NAME ## _type_node; \
2175 if (type1 == long_long_ ## NAME ## _type_node \
2176 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
2177 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
2178 : long_long_ ## NAME ## _type_node;
2180 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
2181 if (type1 == NAME ## _type_node \
2182 || type1 == u ## NAME ## _type_node) \
2183 return unsignedp ? u ## NAME ## _type_node \
2184 : NAME ## _type_node;
2186 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
2187 if (type1 == sat_ ## short_ ## NAME ## _type_node \
2188 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
2189 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
2190 : sat_ ## short_ ## NAME ## _type_node; \
2191 if (type1 == sat_ ## NAME ## _type_node \
2192 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
2193 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
2194 : sat_ ## NAME ## _type_node; \
2195 if (type1 == sat_ ## long_ ## NAME ## _type_node \
2196 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
2197 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
2198 : sat_ ## long_ ## NAME ## _type_node; \
2199 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
2200 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
2201 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
2202 : sat_ ## long_long_ ## NAME ## _type_node;
2204 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
2205 if (type1 == sat_ ## NAME ## _type_node \
2206 || type1 == sat_ ## u ## NAME ## _type_node) \
2207 return unsignedp ? sat_ ## u ## NAME ## _type_node \
2208 : sat_ ## NAME ## _type_node;
2210 GIMPLE_FIXED_TYPES (fract);
2211 GIMPLE_FIXED_TYPES_SAT (fract);
2212 GIMPLE_FIXED_TYPES (accum);
2213 GIMPLE_FIXED_TYPES_SAT (accum);
2215 GIMPLE_FIXED_MODE_TYPES (qq);
2216 GIMPLE_FIXED_MODE_TYPES (hq);
2217 GIMPLE_FIXED_MODE_TYPES (sq);
2218 GIMPLE_FIXED_MODE_TYPES (dq);
2219 GIMPLE_FIXED_MODE_TYPES (tq);
2220 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
2221 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
2222 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
2223 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
2224 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
2225 GIMPLE_FIXED_MODE_TYPES (ha);
2226 GIMPLE_FIXED_MODE_TYPES (sa);
2227 GIMPLE_FIXED_MODE_TYPES (da);
2228 GIMPLE_FIXED_MODE_TYPES (ta);
2229 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
2230 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
2231 GIMPLE_FIXED_MODE_TYPES_SAT (da);
2232 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
2234 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
2235 the precision; they have precision set to match their range, but
2236 may use a wider mode to match an ABI. If we change modes, we may
2237 wind up with bad conversions. For INTEGER_TYPEs in C, must check
2238 the precision as well, so as to yield correct results for
2239 bit-field types. C++ does not have these separate bit-field
2240 types, and producing a signed or unsigned variant of an
2241 ENUMERAL_TYPE may cause other problems as well. */
2242 if (!INTEGRAL_TYPE_P (type)
2243 || TYPE_UNSIGNED (type) == unsignedp)
2244 return type;
2246 #define TYPE_OK(node) \
2247 (TYPE_MODE (type) == TYPE_MODE (node) \
2248 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
2249 if (TYPE_OK (signed_char_type_node))
2250 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2251 if (TYPE_OK (integer_type_node))
2252 return unsignedp ? unsigned_type_node : integer_type_node;
2253 if (TYPE_OK (short_integer_type_node))
2254 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2255 if (TYPE_OK (long_integer_type_node))
2256 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2257 if (TYPE_OK (long_long_integer_type_node))
2258 return (unsignedp
2259 ? long_long_unsigned_type_node
2260 : long_long_integer_type_node);
2261 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
2262 return (unsignedp
2263 ? int128_unsigned_type_node
2264 : int128_integer_type_node);
2266 #if HOST_BITS_PER_WIDE_INT >= 64
2267 if (TYPE_OK (intTI_type_node))
2268 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2269 #endif
2270 if (TYPE_OK (intDI_type_node))
2271 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2272 if (TYPE_OK (intSI_type_node))
2273 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2274 if (TYPE_OK (intHI_type_node))
2275 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2276 if (TYPE_OK (intQI_type_node))
2277 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2279 #undef GIMPLE_FIXED_TYPES
2280 #undef GIMPLE_FIXED_MODE_TYPES
2281 #undef GIMPLE_FIXED_TYPES_SAT
2282 #undef GIMPLE_FIXED_MODE_TYPES_SAT
2283 #undef TYPE_OK
2285 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
2289 /* Return an unsigned type the same as TYPE in other respects. */
2291 tree
2292 gimple_unsigned_type (tree type)
2294 return gimple_signed_or_unsigned_type (true, type);
2298 /* Return a signed type the same as TYPE in other respects. */
2300 tree
2301 gimple_signed_type (tree type)
2303 return gimple_signed_or_unsigned_type (false, type);
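/* Illustrative examples of the two wrappers above:

     gimple_unsigned_type (integer_type_node)     == unsigned_type_node
     gimple_signed_type (unsigned_char_type_node) == signed_char_type_node

   Types that match none of the standard or machine-mode nodes (for
   example bit-field types) fall back to build_nonstandard_integer_type
   with the original precision.  */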
2307 /* Return the type-based alias set for T, which may be an expression
2308 or a type. Return -1 if we don't do anything special. */
2310 alias_set_type
2311 gimple_get_alias_set (tree t)
2313 tree u;
2315 /* Permit type-punning when accessing a union, provided the access
2316 is directly through the union. For example, this code does not
2317 permit taking the address of a union member and then storing
2318 through it. Even the type-punning allowed here is a GCC
2319 extension, albeit a common and useful one; the C standard says
2320 that such accesses have implementation-defined behavior. */
2321 for (u = t;
2322 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2323 u = TREE_OPERAND (u, 0))
2324 if (TREE_CODE (u) == COMPONENT_REF
2325 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2326 return 0;
2328 /* That's all the expressions we handle specially. */
2329 if (!TYPE_P (t))
2330 return -1;
2332 /* For convenience, follow the C standard when dealing with
2333 character types. Any object may be accessed via an lvalue that
2334 has character type. */
2335 if (t == char_type_node
2336 || t == signed_char_type_node
2337 || t == unsigned_char_type_node)
2338 return 0;
2340 /* Allow aliasing between signed and unsigned variants of the same
2341 type. We treat the signed variant as canonical. */
2342 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2344 tree t1 = gimple_signed_type (t);
2346 /* t1 == t can happen for boolean nodes which are always unsigned. */
2347 if (t1 != t)
2348 return get_alias_set (t1);
2351 return -1;
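/* Illustrative example: for the C fragment

     union u { int i; float f; } x;
     ... x.f ...

   the access 'x.f' reaches this function as a COMPONENT_REF whose base
   has UNION_TYPE, so alias set 0 (which conflicts with everything) is
   returned.  Passing the type 'unsigned int' instead falls through to
   the signed/unsigned clause and yields get_alias_set of 'int'.  */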
2355 /* Helper for gimple_ior_addresses_taken_1. */
2357 static bool
2358 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
2359 tree addr, void *data)
2361 bitmap addresses_taken = (bitmap)data;
2362 addr = get_base_address (addr);
2363 if (addr
2364 && DECL_P (addr))
2366 bitmap_set_bit (addresses_taken, DECL_UID (addr));
2367 return true;
2369 return false;
2372 /* Set the bit for the uid of all decls that have their address taken
2373 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2374 were any in this stmt. */
2376 bool
2377 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
2379 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2380 gimple_ior_addresses_taken_1);
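/* Usage sketch (illustrative): collecting every decl whose address is
   taken anywhere in function FN:

     bitmap taken = BITMAP_ALLOC (NULL);
     basic_block bb;
     gimple_stmt_iterator gsi;
     FOR_EACH_BB_FN (bb, fn)
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         gimple_ior_addresses_taken (taken, gsi_stmt (gsi));

   The bits set in TAKEN are DECL_UIDs, as recorded by
   gimple_ior_addresses_taken_1.  */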
2384 /* Return TRUE iff STMT is a call to a built-in function. */
2386 bool
2387 is_gimple_builtin_call (gimple stmt)
2389 tree callee;
2391 if (is_gimple_call (stmt)
2392 && (callee = gimple_call_fndecl (stmt))
2393 && is_builtin_fn (callee)
2394 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2395 return true;
2397 return false;
2400 /* Return true when STMT's arguments match those of FNDECL. */
2402 static bool
2403 validate_call (gimple stmt, tree fndecl)
2405 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2406 unsigned nargs = gimple_call_num_args (stmt);
2407 for (unsigned i = 0; i < nargs; ++i)
2409 /* Variadic args follow. */
2410 if (!targs)
2411 return true;
2412 tree arg = gimple_call_arg (stmt, i);
2413 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2414 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
2416 else if (POINTER_TYPE_P (TREE_TYPE (arg))
2417 && POINTER_TYPE_P (TREE_VALUE (targs)))
2419 else if (TREE_CODE (TREE_TYPE (arg))
2420 != TREE_CODE (TREE_VALUE (targs)))
2421 return false;
2422 targs = TREE_CHAIN (targs);
2424 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2425 return false;
2426 return true;
2429 /* Return true when STMT is a builtin call of class KLASS. */
2431 bool
2432 gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
2434 tree fndecl;
2435 if (is_gimple_call (stmt)
2436 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2437 && DECL_BUILT_IN_CLASS (fndecl) == klass)
2438 return validate_call (stmt, fndecl);
2439 return false;
2442 /* Return true when STMT is a builtin call to CODE of class BUILT_IN_NORMAL. */
2444 bool
2445 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
2447 tree fndecl;
2448 if (is_gimple_call (stmt)
2449 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2450 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2451 && DECL_FUNCTION_CODE (fndecl) == code)
2452 return validate_call (stmt, fndecl);
2453 return false;
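/* Usage sketch (illustrative): recognizing a memcpy call whose arguments
   also match the builtin's prototype:

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       {
         tree dest = gimple_call_arg (stmt, 0);
         tree src = gimple_call_arg (stmt, 1);
         tree len = gimple_call_arg (stmt, 2);
       }

   Unlike testing DECL_FUNCTION_CODE directly, this also goes through
   validate_call, so a declaration with a mismatching signature is
   rejected.  */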
2456 /* Return true if STMT clobbers memory. STMT is required to be a
2457 GIMPLE_ASM. */
2459 bool
2460 gimple_asm_clobbers_memory_p (const_gimple stmt)
2462 unsigned i;
2464 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2466 tree op = gimple_asm_clobber_op (stmt, i);
2467 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2468 return true;
2471 return false;
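/* Illustrative example: for the statement produced from

     __asm__ __volatile__ ("" : : : "memory");

   the clobber list contains the string "memory", so
   gimple_asm_clobbers_memory_p returns true.  */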
2474 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2476 void
2477 dump_decl_set (FILE *file, bitmap set)
2479 if (set)
2481 bitmap_iterator bi;
2482 unsigned i;
2484 fprintf (file, "{ ");
2486 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2488 fprintf (file, "D.%u", i);
2489 fprintf (file, " ");
2492 fprintf (file, "}");
2494 else
2495 fprintf (file, "NIL");
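/* Illustrative example: a set containing the decls with UIDs 2300 and
   2302 is printed as "{ D.2300 D.2302 }"; a NULL bitmap prints as
   "NIL".  */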
2498 /* Return true when CALL is a call stmt that definitely doesn't
2499 free any memory or make it otherwise unavailable. */
2500 bool
2501 nonfreeing_call_p (gimple call)
2503 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2504 && gimple_call_flags (call) & ECF_LEAF)
2505 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2507 /* Just in case these become ECF_LEAF in the future. */
2508 case BUILT_IN_FREE:
2509 case BUILT_IN_TM_FREE:
2510 case BUILT_IN_REALLOC:
2511 case BUILT_IN_STACK_RESTORE:
2512 return false;
2513 default:
2514 return true;
2517 return false;
2520 /* Callback for walk_stmt_load_store_ops.
2522 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2523 otherwise.
2525 This routine only makes a superficial check for a dereference. Thus
2526 it must only be used if it is safe to return a false negative. */
2527 static bool
2528 check_loadstore (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
2530 if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2531 && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
2532 return true;
2533 return false;
2536 /* If OP can be inferred to be non-zero after STMT executes, return true. */
2538 bool
2539 infer_nonnull_range (gimple stmt, tree op)
2541 /* We can only assume that a pointer dereference will yield
2542 non-NULL if -fdelete-null-pointer-checks is enabled. */
2543 if (!flag_delete_null_pointer_checks
2544 || !POINTER_TYPE_P (TREE_TYPE (op))
2545 || gimple_code (stmt) == GIMPLE_ASM)
2546 return false;
2548 if (walk_stmt_load_store_ops (stmt, (void *)op,
2549 check_loadstore, check_loadstore))
2550 return true;
2552 if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
2554 tree fntype = gimple_call_fntype (stmt);
2555 tree attrs = TYPE_ATTRIBUTES (fntype);
2556 for (; attrs; attrs = TREE_CHAIN (attrs))
2558 attrs = lookup_attribute ("nonnull", attrs);
2560 /* If "nonnull" wasn't specified, we know nothing about
2561 the argument. */
2562 if (attrs == NULL_TREE)
2563 return false;
2565 /* If "nonnull" applies to all the arguments, then ARG
2566 is non-null if it's in the argument list. */
2567 if (TREE_VALUE (attrs) == NULL_TREE)
2569 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2571 if (operand_equal_p (op, gimple_call_arg (stmt, i), 0)
2572 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i))))
2573 return true;
2575 return false;
2578 /* Now see if OP appears in the nonnull list. */
2579 for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
2581 int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
2582 tree arg = gimple_call_arg (stmt, idx);
2583 if (operand_equal_p (op, arg, 0))
2584 return true;
2589 /* If this function is marked as returning non-null, then we can
2590 infer OP is non-null if it is used in the return statement. */
2591 if (gimple_code (stmt) == GIMPLE_RETURN
2592 && gimple_return_retval (stmt)
2593 && operand_equal_p (gimple_return_retval (stmt), op, 0)
2594 && lookup_attribute ("returns_nonnull",
2595 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
2596 return true;
2598 return false;
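/* Illustrative examples, assuming -fdelete-null-pointer-checks is in
   effect:

     tmp_4 = *p_3;   // infer_nonnull_range (stmt, p_3) is true: the
                     // walk above finds the MEM_REF dereferencing p_3.
     foo (p_5);      // with foo declared __attribute__((nonnull (1))),
                     // infer_nonnull_range (call, p_5) is true as well.

   Without the flag, false is returned in both cases.  */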
2601 /* Compare two case labels. Because the front end should already have
2602 made sure that case ranges do not overlap, it is enough to only compare
2603 the CASE_LOW values of each case label. */
2605 static int
2606 compare_case_labels (const void *p1, const void *p2)
2608 const_tree const case1 = *(const_tree const*)p1;
2609 const_tree const case2 = *(const_tree const*)p2;
2611 /* The 'default' case label always goes first. */
2612 if (!CASE_LOW (case1))
2613 return -1;
2614 else if (!CASE_LOW (case2))
2615 return 1;
2616 else
2617 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2620 /* Sort the case labels in LABEL_VEC in place in ascending order. */
2622 void
2623 sort_case_labels (vec<tree> label_vec)
2625 label_vec.qsort (compare_case_labels);
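/* Usage sketch (illustrative, with L0, L1 and L_DEF standing for
   LABEL_DECLs created by the caller):

     vec<tree> labels;
     labels.create (3);
     labels.safe_push (build_case_label (integer_one_node, NULL_TREE, l1));
     labels.safe_push (build_case_label (NULL_TREE, NULL_TREE, l_def));
     labels.safe_push (build_case_label (integer_zero_node, NULL_TREE, l0));
     sort_case_labels (labels);

   Afterwards the default label (the one with a NULL CASE_LOW) comes
   first, followed by the labels for the values 0 and 1.  */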
2628 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
2630 LABELS is a vector that contains all case labels to look at.
2632 INDEX_TYPE is the type of the switch index expression. Case labels
2633 in LABELS are discarded if their values are not in the value range
2634 covered by INDEX_TYPE. The remaining case label values are folded
2635 to INDEX_TYPE.
2637 If a default case exists in LABELS, it is removed from LABELS and
2638 returned in DEFAULT_CASEP. If no default case exists, but the
2639 case labels already cover the whole range of INDEX_TYPE, a default
2640 case is returned pointing to one of the existing case labels.
2641 Otherwise DEFAULT_CASEP is set to NULL_TREE.
2643 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
2644 apply and no action is taken regardless of whether a default case is
2645 found or not. */
2647 void
2648 preprocess_case_label_vec_for_gimple (vec<tree> labels,
2649 tree index_type,
2650 tree *default_casep)
2652 tree min_value, max_value;
2653 tree default_case = NULL_TREE;
2654 size_t i, len;
2656 i = 0;
2657 min_value = TYPE_MIN_VALUE (index_type);
2658 max_value = TYPE_MAX_VALUE (index_type);
2659 while (i < labels.length ())
2661 tree elt = labels[i];
2662 tree low = CASE_LOW (elt);
2663 tree high = CASE_HIGH (elt);
2664 bool remove_element = FALSE;
2666 if (low)
2668 gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
2669 gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
2671 /* This is a non-default case label, i.e. it has a value.
2673 See if the case label is reachable within the range of
2674 the index type. Remove out-of-range case values. Turn
2675 case ranges into a canonical form (high > low strictly)
2676 and convert the case label values to the index type.
2678 NB: The type of gimple_switch_index() may be the promoted
2679 type, but the case labels retain the original type. */
2681 if (high)
2683 /* This is a case range. Discard empty ranges.
2684 If the bounds or the range are equal, turn this
2685 into a simple (one-value) case. */
2686 int cmp = tree_int_cst_compare (high, low);
2687 if (cmp < 0)
2688 remove_element = TRUE;
2689 else if (cmp == 0)
2690 high = NULL_TREE;
2693 if (! high)
2695 /* If the simple case value is unreachable, ignore it. */
2696 if ((TREE_CODE (min_value) == INTEGER_CST
2697 && tree_int_cst_compare (low, min_value) < 0)
2698 || (TREE_CODE (max_value) == INTEGER_CST
2699 && tree_int_cst_compare (low, max_value) > 0))
2700 remove_element = TRUE;
2701 else
2702 low = fold_convert (index_type, low);
2704 else
2706 /* If the entire case range is unreachable, ignore it. */
2707 if ((TREE_CODE (min_value) == INTEGER_CST
2708 && tree_int_cst_compare (high, min_value) < 0)
2709 || (TREE_CODE (max_value) == INTEGER_CST
2710 && tree_int_cst_compare (low, max_value) > 0))
2711 remove_element = TRUE;
2712 else
2714 /* If the lower bound is less than the index type's
2715 minimum value, truncate the range bounds. */
2716 if (TREE_CODE (min_value) == INTEGER_CST
2717 && tree_int_cst_compare (low, min_value) < 0)
2718 low = min_value;
2719 low = fold_convert (index_type, low);
2721 /* If the upper bound is greater than the index type's
2722 maximum value, truncate the range bounds. */
2723 if (TREE_CODE (max_value) == INTEGER_CST
2724 && tree_int_cst_compare (high, max_value) > 0)
2725 high = max_value;
2726 high = fold_convert (index_type, high);
2728 /* We may have folded a case range to a one-value case. */
2729 if (tree_int_cst_equal (low, high))
2730 high = NULL_TREE;
2734 CASE_LOW (elt) = low;
2735 CASE_HIGH (elt) = high;
2737 else
2739 gcc_assert (!default_case);
2740 default_case = elt;
2741 /* The default case must be passed separately to the
2742 gimple_build_switch routine. But if DEFAULT_CASEP
2743 is NULL, we do not remove the default case (it would
2744 be completely lost). */
2745 if (default_casep)
2746 remove_element = TRUE;
2749 if (remove_element)
2750 labels.ordered_remove (i);
2751 else
2752 i++;
2754 len = i;
2756 if (!labels.is_empty ())
2757 sort_case_labels (labels);
2759 if (default_casep && !default_case)
2761 /* If the switch has no default label, add one, so that we jump
2762 around the switch body. If the labels already cover the whole
2763 range of the switch index_type, add the default label pointing
2764 to one of the existing labels. */
2765 if (len
2766 && TYPE_MIN_VALUE (index_type)
2767 && TYPE_MAX_VALUE (index_type)
2768 && tree_int_cst_equal (CASE_LOW (labels[0]),
2769 TYPE_MIN_VALUE (index_type)))
2771 tree low, high = CASE_HIGH (labels[len - 1]);
2772 if (!high)
2773 high = CASE_LOW (labels[len - 1]);
2774 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
2776 for (i = 1; i < len; i++)
2778 high = CASE_LOW (labels[i]);
2779 low = CASE_HIGH (labels[i - 1]);
2780 if (!low)
2781 low = CASE_LOW (labels[i - 1]);
2782 if ((TREE_INT_CST_LOW (low) + 1
2783 != TREE_INT_CST_LOW (high))
2784 || (TREE_INT_CST_HIGH (low)
2785 + (TREE_INT_CST_LOW (high) == 0)
2786 != TREE_INT_CST_HIGH (high)))
2787 break;
2789 if (i == len)
2791 tree label = CASE_LABEL (labels[0]);
2792 default_case = build_case_label (NULL_TREE, NULL_TREE,
2793 label);
2799 if (default_casep)
2800 *default_casep = default_case;
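/* Usage sketch (illustrative): the typical pattern in a front end that
   lowers a switch, with INDEX the switch operand, LABELS the raw case
   label vector and NEW_LABEL a LABEL_DECL the caller creates when
   needed:

     tree default_case;
     preprocess_case_label_vec_for_gimple (labels, TREE_TYPE (index),
                                           &default_case);
     if (!default_case)
       default_case = build_case_label (NULL_TREE, NULL_TREE, new_label);
     gimple s = gimple_build_switch (index, default_case, labels);

   The extra label is only required when the remaining cases do not
   already cover the whole range of the index type.  */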
2803 /* Set the location of all statements in SEQ to LOC. */
2805 void
2806 gimple_seq_set_location (gimple_seq seq, location_t loc)
2808 for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2809 gimple_set_location (gsi_stmt (i), loc);
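/* Usage sketch (illustrative, with EXPR a tree to be gimplified and
   OLD_STMT the statement it replaces):

     gimple_seq seq = NULL;
     gimplify_and_add (expr, &seq);
     gimple_seq_set_location (seq, gimple_location (old_stmt));

   so the new statements keep the old statement's location.  */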