/* Source: gcc/gimple.c from official-gcc.git,
   blob d48ca6bcd10bb007a2b8fd413eea2777853fc554.  */
/* Gimple IR support functions.

   Copyright (C) 2007-2013 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "calls.h"
29 #include "stmt.h"
30 #include "stor-layout.h"
31 #include "ggc.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "gimple.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "diagnostic.h"
40 #include "value-prof.h"
41 #include "flags.h"
42 #include "alias.h"
43 #include "demangle.h"
44 #include "langhooks.h"
45 #include "bitmap.h"
48 /* All the tuples have their operand vector (if present) at the very bottom
49 of the structure. Therefore, the offset required to find the
50 operands vector the size of the structure minus the size of the 1
51 element tree array at the end (see gimple_ops). */
52 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
53 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
54 EXPORTED_CONST size_t gimple_ops_offset_[] = {
55 #include "gsstruct.def"
57 #undef DEFGSSTRUCT
59 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
60 static const size_t gsstruct_code_size[] = {
61 #include "gsstruct.def"
63 #undef DEFGSSTRUCT
65 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
66 const char *const gimple_code_name[] = {
67 #include "gimple.def"
69 #undef DEFGSCODE
71 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
72 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
73 #include "gimple.def"
75 #undef DEFGSCODE
77 /* Gimple stats. */
79 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
80 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
82 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
83 static const char * const gimple_alloc_kind_names[] = {
84 "assignments",
85 "phi nodes",
86 "conditionals",
87 "everything else"
90 /* Gimple tuple constructors.
91 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
92 be passed a NULL to start with an empty sequence. */
94 /* Set the code for statement G to CODE. */
96 static inline void
97 gimple_set_code (gimple g, enum gimple_code code)
99 g->code = code;
102 /* Return the number of bytes needed to hold a GIMPLE statement with
103 code CODE. */
105 static inline size_t
106 gimple_size (enum gimple_code code)
108 return gsstruct_code_size[gss_for_code (code)];
111 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
112 operands. */
114 gimple
115 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
117 size_t size;
118 gimple stmt;
120 size = gimple_size (code);
121 if (num_ops > 0)
122 size += sizeof (tree) * (num_ops - 1);
124 if (GATHER_STATISTICS)
126 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
127 gimple_alloc_counts[(int) kind]++;
128 gimple_alloc_sizes[(int) kind] += size;
131 stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
132 gimple_set_code (stmt, code);
133 gimple_set_num_ops (stmt, num_ops);
135 /* Do not call gimple_set_modified here as it has other side
136 effects and this tuple is still not completely built. */
137 stmt->modified = 1;
138 gimple_init_singleton (stmt);
140 return stmt;
143 /* Set SUBCODE to be the code of the expression computed by statement G. */
145 static inline void
146 gimple_set_subcode (gimple g, unsigned subcode)
148 /* We only have 16 bits for the RHS code. Assert that we are not
149 overflowing it. */
150 gcc_assert (subcode < (1 << 16));
151 g->subcode = subcode;
156 /* Build a tuple with operands. CODE is the statement to build (which
157 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the subcode
158 for the new tuple. NUM_OPS is the number of operands to allocate. */
160 #define gimple_build_with_ops(c, s, n) \
161 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
163 static gimple
164 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
165 unsigned num_ops MEM_STAT_DECL)
167 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
168 gimple_set_subcode (s, subcode);
170 return s;
174 /* Build a GIMPLE_RETURN statement returning RETVAL. */
176 gimple
177 gimple_build_return (tree retval)
179 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 2);
180 if (retval)
181 gimple_return_set_retval (s, retval);
182 return s;
185 /* Reset alias information on call S. */
187 void
188 gimple_call_reset_alias_info (gimple s)
190 if (gimple_call_flags (s) & ECF_CONST)
191 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
192 else
193 pt_solution_reset (gimple_call_use_set (s));
194 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
195 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
196 else
197 pt_solution_reset (gimple_call_clobber_set (s));
200 /* Helper for gimple_build_call, gimple_build_call_valist,
201 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
202 components of a GIMPLE_CALL statement to function FN with NARGS
203 arguments. */
205 static inline gimple
206 gimple_build_call_1 (tree fn, unsigned nargs)
208 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
209 if (TREE_CODE (fn) == FUNCTION_DECL)
210 fn = build_fold_addr_expr (fn);
211 gimple_set_op (s, 1, fn);
212 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
213 gimple_call_reset_alias_info (s);
214 return s;
218 /* Build a GIMPLE_CALL statement to function FN with the arguments
219 specified in vector ARGS. */
221 gimple
222 gimple_build_call_vec (tree fn, vec<tree> args)
224 unsigned i;
225 unsigned nargs = args.length ();
226 gimple call = gimple_build_call_1 (fn, nargs);
228 for (i = 0; i < nargs; i++)
229 gimple_call_set_arg (call, i, args[i]);
231 return call;
235 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
236 arguments. The ... are the arguments. */
238 gimple
239 gimple_build_call (tree fn, unsigned nargs, ...)
241 va_list ap;
242 gimple call;
243 unsigned i;
245 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
247 call = gimple_build_call_1 (fn, nargs);
249 va_start (ap, nargs);
250 for (i = 0; i < nargs; i++)
251 gimple_call_set_arg (call, i, va_arg (ap, tree));
252 va_end (ap);
254 return call;
258 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
259 arguments. AP contains the arguments. */
261 gimple
262 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
264 gimple call;
265 unsigned i;
267 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
269 call = gimple_build_call_1 (fn, nargs);
271 for (i = 0; i < nargs; i++)
272 gimple_call_set_arg (call, i, va_arg (ap, tree));
274 return call;
278 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
279 Build the basic components of a GIMPLE_CALL statement to internal
280 function FN with NARGS arguments. */
282 static inline gimple
283 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
285 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
286 s->subcode |= GF_CALL_INTERNAL;
287 gimple_call_set_internal_fn (s, fn);
288 gimple_call_reset_alias_info (s);
289 return s;
293 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
294 the number of arguments. The ... are the arguments. */
296 gimple
297 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
299 va_list ap;
300 gimple call;
301 unsigned i;
303 call = gimple_build_call_internal_1 (fn, nargs);
304 va_start (ap, nargs);
305 for (i = 0; i < nargs; i++)
306 gimple_call_set_arg (call, i, va_arg (ap, tree));
307 va_end (ap);
309 return call;
313 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
314 specified in vector ARGS. */
316 gimple
317 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
319 unsigned i, nargs;
320 gimple call;
322 nargs = args.length ();
323 call = gimple_build_call_internal_1 (fn, nargs);
324 for (i = 0; i < nargs; i++)
325 gimple_call_set_arg (call, i, args[i]);
327 return call;
331 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
332 assumed to be in GIMPLE form already. Minimal checking is done of
333 this fact. */
335 gimple
336 gimple_build_call_from_tree (tree t)
338 unsigned i, nargs;
339 gimple call;
340 tree fndecl = get_callee_fndecl (t);
342 gcc_assert (TREE_CODE (t) == CALL_EXPR);
344 nargs = call_expr_nargs (t);
345 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
347 for (i = 0; i < nargs; i++)
348 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
350 gimple_set_block (call, TREE_BLOCK (t));
352 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
353 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
354 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
355 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
356 if (fndecl
357 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
358 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
359 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
360 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
361 else
362 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
363 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
364 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
365 gimple_set_no_warning (call, TREE_NO_WARNING (t));
367 return call;
371 /* Build a GIMPLE_ASSIGN statement.
373 LHS of the assignment.
374 RHS of the assignment which can be unary or binary. */
376 gimple
377 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
379 enum tree_code subcode;
380 tree op1, op2, op3;
382 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
383 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
384 PASS_MEM_STAT);
388 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
389 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
390 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
392 gimple
393 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
394 tree op2, tree op3 MEM_STAT_DECL)
396 unsigned num_ops;
397 gimple p;
399 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
400 code). */
401 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
403 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
404 PASS_MEM_STAT);
405 gimple_assign_set_lhs (p, lhs);
406 gimple_assign_set_rhs1 (p, op1);
407 if (op2)
409 gcc_assert (num_ops > 2);
410 gimple_assign_set_rhs2 (p, op2);
413 if (op3)
415 gcc_assert (num_ops > 3);
416 gimple_assign_set_rhs3 (p, op3);
419 return p;
422 gimple
423 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
424 tree op2 MEM_STAT_DECL)
426 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
427 PASS_MEM_STAT);
431 /* Build a GIMPLE_COND statement.
433 PRED is the condition used to compare LHS and the RHS.
434 T_LABEL is the label to jump to if the condition is true.
435 F_LABEL is the label to jump to otherwise. */
437 gimple
438 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
439 tree t_label, tree f_label)
441 gimple p;
443 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
444 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
445 gimple_cond_set_lhs (p, lhs);
446 gimple_cond_set_rhs (p, rhs);
447 gimple_cond_set_true_label (p, t_label);
448 gimple_cond_set_false_label (p, f_label);
449 return p;
452 /* Build a GIMPLE_COND statement from the conditional expression tree
453 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
455 gimple
456 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
458 enum tree_code code;
459 tree lhs, rhs;
461 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
462 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
465 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
466 boolean expression tree COND. */
468 void
469 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
471 enum tree_code code;
472 tree lhs, rhs;
474 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
475 gimple_cond_set_condition (stmt, code, lhs, rhs);
478 /* Build a GIMPLE_LABEL statement for LABEL. */
480 gimple
481 gimple_build_label (tree label)
483 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
484 gimple_label_set_label (p, label);
485 return p;
488 /* Build a GIMPLE_GOTO statement to label DEST. */
490 gimple
491 gimple_build_goto (tree dest)
493 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
494 gimple_goto_set_dest (p, dest);
495 return p;
499 /* Build a GIMPLE_NOP statement. */
501 gimple
502 gimple_build_nop (void)
504 return gimple_alloc (GIMPLE_NOP, 0);
508 /* Build a GIMPLE_BIND statement.
509 VARS are the variables in BODY.
510 BLOCK is the containing block. */
512 gimple
513 gimple_build_bind (tree vars, gimple_seq body, tree block)
515 gimple p = gimple_alloc (GIMPLE_BIND, 0);
516 gimple_bind_set_vars (p, vars);
517 if (body)
518 gimple_bind_set_body (p, body);
519 if (block)
520 gimple_bind_set_block (p, block);
521 return p;
524 /* Helper function to set the simple fields of a asm stmt.
526 STRING is a pointer to a string that is the asm blocks assembly code.
527 NINPUT is the number of register inputs.
528 NOUTPUT is the number of register outputs.
529 NCLOBBERS is the number of clobbered registers.
532 static inline gimple
533 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
534 unsigned nclobbers, unsigned nlabels)
536 gimple_statement_asm *p;
537 int size = strlen (string);
539 /* ASMs with labels cannot have outputs. This should have been
540 enforced by the front end. */
541 gcc_assert (nlabels == 0 || noutputs == 0);
543 p = as_a <gimple_statement_asm> (
544 gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
545 ninputs + noutputs + nclobbers + nlabels));
547 p->ni = ninputs;
548 p->no = noutputs;
549 p->nc = nclobbers;
550 p->nl = nlabels;
551 p->string = ggc_alloc_string (string, size);
553 if (GATHER_STATISTICS)
554 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
556 return p;
559 /* Build a GIMPLE_ASM statement.
561 STRING is the assembly code.
562 NINPUT is the number of register inputs.
563 NOUTPUT is the number of register outputs.
564 NCLOBBERS is the number of clobbered registers.
565 INPUTS is a vector of the input register parameters.
566 OUTPUTS is a vector of the output register parameters.
567 CLOBBERS is a vector of the clobbered register parameters.
568 LABELS is a vector of destination labels. */
570 gimple
571 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
572 vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
573 vec<tree, va_gc> *labels)
575 gimple p;
576 unsigned i;
578 p = gimple_build_asm_1 (string,
579 vec_safe_length (inputs),
580 vec_safe_length (outputs),
581 vec_safe_length (clobbers),
582 vec_safe_length (labels));
584 for (i = 0; i < vec_safe_length (inputs); i++)
585 gimple_asm_set_input_op (p, i, (*inputs)[i]);
587 for (i = 0; i < vec_safe_length (outputs); i++)
588 gimple_asm_set_output_op (p, i, (*outputs)[i]);
590 for (i = 0; i < vec_safe_length (clobbers); i++)
591 gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
593 for (i = 0; i < vec_safe_length (labels); i++)
594 gimple_asm_set_label_op (p, i, (*labels)[i]);
596 return p;
599 /* Build a GIMPLE_CATCH statement.
601 TYPES are the catch types.
602 HANDLER is the exception handler. */
604 gimple
605 gimple_build_catch (tree types, gimple_seq handler)
607 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
608 gimple_catch_set_types (p, types);
609 if (handler)
610 gimple_catch_set_handler (p, handler);
612 return p;
615 /* Build a GIMPLE_EH_FILTER statement.
617 TYPES are the filter's types.
618 FAILURE is the filter's failure action. */
620 gimple
621 gimple_build_eh_filter (tree types, gimple_seq failure)
623 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
624 gimple_eh_filter_set_types (p, types);
625 if (failure)
626 gimple_eh_filter_set_failure (p, failure);
628 return p;
631 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
633 gimple
634 gimple_build_eh_must_not_throw (tree decl)
636 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
638 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
639 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
640 gimple_eh_must_not_throw_set_fndecl (p, decl);
642 return p;
645 /* Build a GIMPLE_EH_ELSE statement. */
647 gimple
648 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
650 gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
651 gimple_eh_else_set_n_body (p, n_body);
652 gimple_eh_else_set_e_body (p, e_body);
653 return p;
656 /* Build a GIMPLE_TRY statement.
658 EVAL is the expression to evaluate.
659 CLEANUP is the cleanup expression.
660 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
661 whether this is a try/catch or a try/finally respectively. */
663 gimple_statement_try *
664 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
665 enum gimple_try_flags kind)
667 gimple_statement_try *p;
669 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
670 p = as_a <gimple_statement_try> (gimple_alloc (GIMPLE_TRY, 0));
671 gimple_set_subcode (p, kind);
672 if (eval)
673 gimple_try_set_eval (p, eval);
674 if (cleanup)
675 gimple_try_set_cleanup (p, cleanup);
677 return p;
680 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
682 CLEANUP is the cleanup expression. */
684 gimple
685 gimple_build_wce (gimple_seq cleanup)
687 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
688 if (cleanup)
689 gimple_wce_set_cleanup (p, cleanup);
691 return p;
695 /* Build a GIMPLE_RESX statement. */
697 gimple
698 gimple_build_resx (int region)
700 gimple_statement_eh_ctrl *p =
701 as_a <gimple_statement_eh_ctrl> (
702 gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
703 p->region = region;
704 return p;
708 /* The helper for constructing a gimple switch statement.
709 INDEX is the switch's index.
710 NLABELS is the number of labels in the switch excluding the default.
711 DEFAULT_LABEL is the default label for the switch statement. */
713 gimple
714 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
716 /* nlabels + 1 default label + 1 index. */
717 gcc_checking_assert (default_label);
718 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
719 1 + 1 + nlabels);
720 gimple_switch_set_index (p, index);
721 gimple_switch_set_default_label (p, default_label);
722 return p;
725 /* Build a GIMPLE_SWITCH statement.
727 INDEX is the switch's index.
728 DEFAULT_LABEL is the default label
729 ARGS is a vector of labels excluding the default. */
731 gimple
732 gimple_build_switch (tree index, tree default_label, vec<tree> args)
734 unsigned i, nlabels = args.length ();
736 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
738 /* Copy the labels from the vector to the switch statement. */
739 for (i = 0; i < nlabels; i++)
740 gimple_switch_set_label (p, i + 1, args[i]);
742 return p;
745 /* Build a GIMPLE_EH_DISPATCH statement. */
747 gimple
748 gimple_build_eh_dispatch (int region)
750 gimple_statement_eh_ctrl *p =
751 as_a <gimple_statement_eh_ctrl> (
752 gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
753 p->region = region;
754 return p;
757 /* Build a new GIMPLE_DEBUG_BIND statement.
759 VAR is bound to VALUE; block and location are taken from STMT. */
761 gimple
762 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
764 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
765 (unsigned)GIMPLE_DEBUG_BIND, 2
766 PASS_MEM_STAT);
768 gimple_debug_bind_set_var (p, var);
769 gimple_debug_bind_set_value (p, value);
770 if (stmt)
771 gimple_set_location (p, gimple_location (stmt));
773 return p;
777 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
779 VAR is bound to VALUE; block and location are taken from STMT. */
781 gimple
782 gimple_build_debug_source_bind_stat (tree var, tree value,
783 gimple stmt MEM_STAT_DECL)
785 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
786 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
787 PASS_MEM_STAT);
789 gimple_debug_source_bind_set_var (p, var);
790 gimple_debug_source_bind_set_value (p, value);
791 if (stmt)
792 gimple_set_location (p, gimple_location (stmt));
794 return p;
798 /* Build a GIMPLE_OMP_CRITICAL statement.
800 BODY is the sequence of statements for which only one thread can execute.
801 NAME is optional identifier for this critical block. */
803 gimple
804 gimple_build_omp_critical (gimple_seq body, tree name)
806 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
807 gimple_omp_critical_set_name (p, name);
808 if (body)
809 gimple_omp_set_body (p, body);
811 return p;
814 /* Build a GIMPLE_OMP_FOR statement.
816 BODY is sequence of statements inside the for loop.
817 KIND is the `for' variant.
818 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
819 lastprivate, reductions, ordered, schedule, and nowait.
820 COLLAPSE is the collapse count.
821 PRE_BODY is the sequence of statements that are loop invariant. */
823 gimple
824 gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
825 gimple_seq pre_body)
827 gimple_statement_omp_for *p =
828 as_a <gimple_statement_omp_for> (gimple_alloc (GIMPLE_OMP_FOR, 0));
829 if (body)
830 gimple_omp_set_body (p, body);
831 gimple_omp_for_set_clauses (p, clauses);
832 gimple_omp_for_set_kind (p, kind);
833 p->collapse = collapse;
834 p->iter = static_cast <struct gimple_omp_for_iter *> (
835 ggc_internal_cleared_vec_alloc_stat (sizeof (*p->iter),
836 collapse MEM_STAT_INFO));
838 if (pre_body)
839 gimple_omp_for_set_pre_body (p, pre_body);
841 return p;
845 /* Build a GIMPLE_OMP_PARALLEL statement.
847 BODY is sequence of statements which are executed in parallel.
848 CLAUSES, are the OMP parallel construct's clauses.
849 CHILD_FN is the function created for the parallel threads to execute.
850 DATA_ARG are the shared data argument(s). */
852 gimple
853 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
854 tree data_arg)
856 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
857 if (body)
858 gimple_omp_set_body (p, body);
859 gimple_omp_parallel_set_clauses (p, clauses);
860 gimple_omp_parallel_set_child_fn (p, child_fn);
861 gimple_omp_parallel_set_data_arg (p, data_arg);
863 return p;
867 /* Build a GIMPLE_OMP_TASK statement.
869 BODY is sequence of statements which are executed by the explicit task.
870 CLAUSES, are the OMP parallel construct's clauses.
871 CHILD_FN is the function created for the parallel threads to execute.
872 DATA_ARG are the shared data argument(s).
873 COPY_FN is the optional function for firstprivate initialization.
874 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
876 gimple
877 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
878 tree data_arg, tree copy_fn, tree arg_size,
879 tree arg_align)
881 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
882 if (body)
883 gimple_omp_set_body (p, body);
884 gimple_omp_task_set_clauses (p, clauses);
885 gimple_omp_task_set_child_fn (p, child_fn);
886 gimple_omp_task_set_data_arg (p, data_arg);
887 gimple_omp_task_set_copy_fn (p, copy_fn);
888 gimple_omp_task_set_arg_size (p, arg_size);
889 gimple_omp_task_set_arg_align (p, arg_align);
891 return p;
895 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
897 BODY is the sequence of statements in the section. */
899 gimple
900 gimple_build_omp_section (gimple_seq body)
902 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
903 if (body)
904 gimple_omp_set_body (p, body);
906 return p;
910 /* Build a GIMPLE_OMP_MASTER statement.
912 BODY is the sequence of statements to be executed by just the master. */
914 gimple
915 gimple_build_omp_master (gimple_seq body)
917 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
918 if (body)
919 gimple_omp_set_body (p, body);
921 return p;
925 /* Build a GIMPLE_OMP_TASKGROUP statement.
927 BODY is the sequence of statements to be executed by the taskgroup
928 construct. */
930 gimple
931 gimple_build_omp_taskgroup (gimple_seq body)
933 gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
934 if (body)
935 gimple_omp_set_body (p, body);
937 return p;
941 /* Build a GIMPLE_OMP_CONTINUE statement.
943 CONTROL_DEF is the definition of the control variable.
944 CONTROL_USE is the use of the control variable. */
946 gimple
947 gimple_build_omp_continue (tree control_def, tree control_use)
949 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
950 gimple_omp_continue_set_control_def (p, control_def);
951 gimple_omp_continue_set_control_use (p, control_use);
952 return p;
955 /* Build a GIMPLE_OMP_ORDERED statement.
957 BODY is the sequence of statements inside a loop that will executed in
958 sequence. */
960 gimple
961 gimple_build_omp_ordered (gimple_seq body)
963 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
964 if (body)
965 gimple_omp_set_body (p, body);
967 return p;
971 /* Build a GIMPLE_OMP_RETURN statement.
972 WAIT_P is true if this is a non-waiting return. */
974 gimple
975 gimple_build_omp_return (bool wait_p)
977 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
978 if (wait_p)
979 gimple_omp_return_set_nowait (p);
981 return p;
985 /* Build a GIMPLE_OMP_SECTIONS statement.
987 BODY is a sequence of section statements.
988 CLAUSES are any of the OMP sections contsruct's clauses: private,
989 firstprivate, lastprivate, reduction, and nowait. */
991 gimple
992 gimple_build_omp_sections (gimple_seq body, tree clauses)
994 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
995 if (body)
996 gimple_omp_set_body (p, body);
997 gimple_omp_sections_set_clauses (p, clauses);
999 return p;
1003 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1005 gimple
1006 gimple_build_omp_sections_switch (void)
1008 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1012 /* Build a GIMPLE_OMP_SINGLE statement.
1014 BODY is the sequence of statements that will be executed once.
1015 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1016 copyprivate, nowait. */
1018 gimple
1019 gimple_build_omp_single (gimple_seq body, tree clauses)
1021 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1022 if (body)
1023 gimple_omp_set_body (p, body);
1024 gimple_omp_single_set_clauses (p, clauses);
1026 return p;
1030 /* Build a GIMPLE_OMP_TARGET statement.
1032 BODY is the sequence of statements that will be executed.
1033 CLAUSES are any of the OMP target construct's clauses. */
1035 gimple
1036 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1038 gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
1039 if (body)
1040 gimple_omp_set_body (p, body);
1041 gimple_omp_target_set_clauses (p, clauses);
1042 gimple_omp_target_set_kind (p, kind);
1044 return p;
1048 /* Build a GIMPLE_OMP_TEAMS statement.
1050 BODY is the sequence of statements that will be executed.
1051 CLAUSES are any of the OMP teams construct's clauses. */
1053 gimple
1054 gimple_build_omp_teams (gimple_seq body, tree clauses)
1056 gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
1057 if (body)
1058 gimple_omp_set_body (p, body);
1059 gimple_omp_teams_set_clauses (p, clauses);
1061 return p;
1065 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1067 gimple
1068 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1070 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1071 gimple_omp_atomic_load_set_lhs (p, lhs);
1072 gimple_omp_atomic_load_set_rhs (p, rhs);
1073 return p;
1076 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1078 VAL is the value we are storing. */
1080 gimple
1081 gimple_build_omp_atomic_store (tree val)
1083 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1084 gimple_omp_atomic_store_set_val (p, val);
1085 return p;
1088 /* Build a GIMPLE_TRANSACTION statement. */
1090 gimple
1091 gimple_build_transaction (gimple_seq body, tree label)
1093 gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
1094 gimple_transaction_set_body (p, body);
1095 gimple_transaction_set_label (p, label);
1096 return p;
1099 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1100 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1102 gimple
1103 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1105 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1106 /* Ensure all the predictors fit into the lower bits of the subcode. */
1107 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1108 gimple_predict_set_predictor (p, predictor);
1109 gimple_predict_set_outcome (p, outcome);
1110 return p;
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  Reports the expected
   and actual code/subcode pair together with the call site.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  gs->subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->subcode)
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1133 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1134 *SEQ_P is NULL, a new sequence is allocated. */
1136 void
1137 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1139 gimple_stmt_iterator si;
1140 if (gs == NULL)
1141 return;
1143 si = gsi_last (*seq_p);
1144 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1147 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1148 *SEQ_P is NULL, a new sequence is allocated. This function is
1149 similar to gimple_seq_add_stmt, but does not scan the operands.
1150 During gimplification, we need to manipulate statement sequences
1151 before the def/use vectors have been constructed. */
1153 void
1154 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
1156 gimple_stmt_iterator si;
1158 if (gs == NULL)
1159 return;
1161 si = gsi_last (*seq_p);
1162 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1165 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1166 NULL, a new sequence is allocated. */
1168 void
1169 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1171 gimple_stmt_iterator si;
1172 if (src == NULL)
1173 return;
1175 si = gsi_last (*dst_p);
1176 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1179 /* Determine whether to assign a location to the statement GS. */
1181 static bool
1182 should_carry_location_p (gimple gs)
1184 /* Don't emit a line note for a label. We particularly don't want to
1185 emit one for the break label, since it doesn't actually correspond
1186 to the beginning of the loop/switch. */
1187 if (gimple_code (gs) == GIMPLE_LABEL)
1188 return false;
1190 return true;
1193 /* Set the location for gimple statement GS to LOCATION. */
1195 static void
1196 annotate_one_with_location (gimple gs, location_t location)
1198 if (!gimple_has_location (gs)
1199 && !gimple_do_not_emit_location_p (gs)
1200 && should_carry_location_p (gs))
1201 gimple_set_location (gs, location);
1204 /* Set LOCATION for all the statements after iterator GSI in sequence
1205 SEQ. If GSI is pointing to the end of the sequence, start with the
1206 first statement in SEQ. */
1208 void
1209 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1210 location_t location)
1212 if (gsi_end_p (gsi))
1213 gsi = gsi_start (seq);
1214 else
1215 gsi_next (&gsi);
1217 for (; !gsi_end_p (gsi); gsi_next (&gsi))
1218 annotate_one_with_location (gsi_stmt (gsi), location);
1221 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
1223 void
1224 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1226 gimple_stmt_iterator i;
1228 if (gimple_seq_empty_p (stmt_p))
1229 return;
1231 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1233 gimple gs = gsi_stmt (i);
1234 annotate_one_with_location (gs, location);
1238 /* Helper function of empty_body_p. Return true if STMT is an empty
1239 statement. */
1241 static bool
1242 empty_stmt_p (gimple stmt)
1244 if (gimple_code (stmt) == GIMPLE_NOP)
1245 return true;
1246 if (gimple_code (stmt) == GIMPLE_BIND)
1247 return empty_body_p (gimple_bind_body (stmt));
1248 return false;
1252 /* Return true if BODY contains nothing but empty statements. */
1254 bool
1255 empty_body_p (gimple_seq body)
1257 gimple_stmt_iterator i;
1259 if (gimple_seq_empty_p (body))
1260 return true;
1261 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1262 if (!empty_stmt_p (gsi_stmt (i))
1263 && !is_gimple_debug (gsi_stmt (i)))
1264 return false;
1266 return true;
1270 /* Perform a deep copy of sequence SRC and return the result. */
1272 gimple_seq
1273 gimple_seq_copy (gimple_seq src)
1275 gimple_stmt_iterator gsi;
1276 gimple_seq new_seq = NULL;
1277 gimple stmt;
1279 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1281 stmt = gimple_copy (gsi_stmt (gsi));
1282 gimple_seq_add_stmt (&new_seq, stmt);
1285 return new_seq;
1290 /* Return true if calls C1 and C2 are known to go to the same function. */
1292 bool
1293 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1295 if (gimple_call_internal_p (c1))
1296 return (gimple_call_internal_p (c2)
1297 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1298 else
1299 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1300 || (gimple_call_fndecl (c1)
1301 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  Returns a mask of
   ECF_* bits describing the callee.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  /* Prefer the decl when the callee is known; fall back to the
     internal-function table or the call's function type.  */
  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  /* The statement itself may additionally be marked nothrow.  */
  if (stmt->subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}
1326 /* Return the "fn spec" string for call STMT. */
1328 static tree
1329 gimple_call_fnspec (const_gimple stmt)
1331 tree type, attr;
1333 type = gimple_call_fntype (stmt);
1334 if (!type)
1335 return NULL_TREE;
1337 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1338 if (!attr)
1339 return NULL_TREE;
1341 return TREE_VALUE (TREE_VALUE (attr));
/* Detects argument flags for argument number ARG on call STMT.
   Returns a mask of EAF_* bits decoded from character 1+ARG of the
   call's "fn spec" attribute string (character 0 describes the
   return value), or 0 when no fnspec information is available.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  /* No fnspec, or the string is too short to describe this argument.  */
  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}
/* Detects return flags for the call STMT.  Returns a mask of ERF_*
   bits: ERF_NOALIAS for malloc-like calls, or ERF_RETURNS_ARG with
   the argument index encoded, based on character 0 of the call's
   "fn spec" attribute string.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  /* Malloc-like functions return fresh, non-aliased memory.  */
  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      /* '1'..'4': the call returns its (digit - 1)'th argument.  */
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
1410 /* Return true if GS is a copy assignment. */
1412 bool
1413 gimple_assign_copy_p (gimple gs)
1415 return (gimple_assign_single_p (gs)
1416 && is_gimple_val (gimple_op (gs, 1)));
1420 /* Return true if GS is a SSA_NAME copy assignment. */
1422 bool
1423 gimple_assign_ssa_name_copy_p (gimple gs)
1425 return (gimple_assign_single_p (gs)
1426 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1427 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  /* A conversion (or NON_LVALUE_EXPR) that does not change the mode
     of the value is a no-op.  */
  return (is_gimple_assign (gs)
	  && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
	      || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
	  && gimple_assign_rhs1 (gs) != error_mark_node
	  && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
	      == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}
/* Set BB to be the basic block holding G.  */

void
gimple_set_bb (gimple stmt, basic_block bb)
{
  stmt->bb = bb;

  /* If the statement is a label, add the label to block-to-labels map
     so that we can speed up edge creation for GIMPLE_GOTOs.  */
  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
    {
      tree t;
      int uid;

      t = gimple_label_label (stmt);
      uid = LABEL_DECL_UID (t);
      if (uid == -1)
	{
	  /* First time this label is seen: assign it a fresh uid and
	     make sure the map is large enough to hold it.  */
	  unsigned old_len = vec_safe_length (label_to_block_map);
	  LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	  if (old_len <= (unsigned) uid)
	    {
	      /* Grow by ~1.5x to amortize repeated insertions.  */
	      unsigned new_len = 3 * uid / 2 + 1;

	      vec_safe_grow_cleared (label_to_block_map, new_len);
	    }
	}

      (*label_to_block_map)[uid] = bb;
    }
}
/* Modify the RHS of the assignment pointed-to by GSI using the
   operands in the expression tree EXPR.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.

   This function is useful to convert an existing tree expression into
   the flat representation used for the RHS of a GIMPLE assignment.
   It will reallocate memory as needed to expand or shrink the number
   of operand slots needed to represent EXPR.

   NOTE: If you find yourself building a tree and then calling this
   function, you are most certainly doing it the slow way.  It is much
   better to build a new assignment or to use the function
   gimple_assign_set_rhs_with_ops, which does not require an
   expression tree to be built.  */

void
gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  /* Flatten EXPR into a code plus up to three operands, then hand
     off to the operand-based setter.  */
  extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
  gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
}
/* Set the RHS of assignment statement pointed-to by GSI to CODE with
   operands OP1, OP2 and OP3.

   NOTE: The statement pointed-to by GSI may be reallocated if it
   did not have enough operand slots.  */

void
gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
				  tree op1, tree op2, tree op3)
{
  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
  gimple stmt = gsi_stmt (*gsi);

  /* If the new CODE needs more operands, allocate a new statement.  */
  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
    {
      tree lhs = gimple_assign_lhs (stmt);
      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
      /* Copy the fixed fields of the old tuple, then splice the new
	 tuple into the sequence in place of the old one.  */
      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
      gimple_init_singleton (new_stmt);
      gsi_replace (gsi, new_stmt, true);
      stmt = new_stmt;

      /* The LHS needs to be reset as this also changes the SSA name
	 on the LHS.  */
      gimple_assign_set_lhs (stmt, lhs);
    }

  gimple_set_num_ops (stmt, new_rhs_ops + 1);
  gimple_set_subcode (stmt, code);
  gimple_assign_set_rhs1 (stmt, op1);
  if (new_rhs_ops > 1)
    gimple_assign_set_rhs2 (stmt, op2);
  if (new_rhs_ops > 2)
    gimple_assign_set_rhs3 (stmt, op3);
}
1555 /* Return the LHS of a statement that performs an assignment,
1556 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1557 for a call to a function that returns no value, or for a
1558 statement other than an assignment or a call. */
1560 tree
1561 gimple_get_lhs (const_gimple stmt)
1563 enum gimple_code code = gimple_code (stmt);
1565 if (code == GIMPLE_ASSIGN)
1566 return gimple_assign_lhs (stmt);
1567 else if (code == GIMPLE_CALL)
1568 return gimple_call_lhs (stmt);
1569 else
1570 return NULL_TREE;
1574 /* Set the LHS of a statement that performs an assignment,
1575 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1577 void
1578 gimple_set_lhs (gimple stmt, tree lhs)
1580 enum gimple_code code = gimple_code (stmt);
1582 if (code == GIMPLE_ASSIGN)
1583 gimple_assign_set_lhs (stmt, lhs);
1584 else if (code == GIMPLE_CALL)
1585 gimple_call_set_lhs (stmt, lhs);
1586 else
1587 gcc_unreachable ();
/* Return a deep copy of statement STMT.  All the operands from STMT
   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
   and VUSE operand arrays are set to empty in the new copy.  The new
   copy isn't part of any sequence.  */

gimple
gimple_copy (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  unsigned num_ops = gimple_num_ops (stmt);
  gimple copy = gimple_alloc (code, num_ops);
  unsigned i;

  /* Shallow copy all the fields from STMT.  */
  memcpy (copy, stmt, gimple_size (code));
  gimple_init_singleton (copy);

  /* If STMT has sub-statements, deep-copy them as well.  Each case
     copies the nested sequence(s) and unshares any tree operands
     specific to that statement kind.  */
  if (gimple_has_substatements (stmt))
    {
      gimple_seq new_seq;
      tree t;

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  new_seq = gimple_seq_copy (gimple_bind_body (stmt));
	  gimple_bind_set_body (copy, new_seq);
	  gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
	  gimple_bind_set_block (copy, gimple_bind_block (stmt));
	  break;

	case GIMPLE_CATCH:
	  new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
	  gimple_catch_set_handler (copy, new_seq);
	  t = unshare_expr (gimple_catch_types (stmt));
	  gimple_catch_set_types (copy, t);
	  break;

	case GIMPLE_EH_FILTER:
	  new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
	  gimple_eh_filter_set_failure (copy, new_seq);
	  t = unshare_expr (gimple_eh_filter_types (stmt));
	  gimple_eh_filter_set_types (copy, t);
	  break;

	case GIMPLE_EH_ELSE:
	  new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
	  gimple_eh_else_set_n_body (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
	  gimple_eh_else_set_e_body (copy, new_seq);
	  break;

	case GIMPLE_TRY:
	  new_seq = gimple_seq_copy (gimple_try_eval (stmt));
	  gimple_try_set_eval (copy, new_seq);
	  new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
	  gimple_try_set_cleanup (copy, new_seq);
	  break;

	case GIMPLE_OMP_FOR:
	  new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
	  gimple_omp_for_set_pre_body (copy, new_seq);
	  t = unshare_expr (gimple_omp_for_clauses (stmt));
	  gimple_omp_for_set_clauses (copy, t);
	  {
	    /* The iteration descriptors live in a separately allocated
	       GC vector; give the copy its own.  */
	    gimple_statement_omp_for *omp_for_copy =
	      as_a <gimple_statement_omp_for> (copy);
	    omp_for_copy->iter =
	      static_cast <struct gimple_omp_for_iter *> (
		ggc_internal_vec_alloc_stat (sizeof (struct gimple_omp_for_iter),
					     gimple_omp_for_collapse (stmt)
					     MEM_STAT_INFO));
	  }
	  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
	    {
	      gimple_omp_for_set_cond (copy, i,
				       gimple_omp_for_cond (stmt, i));
	      gimple_omp_for_set_index (copy, i,
					gimple_omp_for_index (stmt, i));
	      t = unshare_expr (gimple_omp_for_initial (stmt, i));
	      gimple_omp_for_set_initial (copy, i, t);
	      t = unshare_expr (gimple_omp_for_final (stmt, i));
	      gimple_omp_for_set_final (copy, i, t);
	      t = unshare_expr (gimple_omp_for_incr (stmt, i));
	      gimple_omp_for_set_incr (copy, i, t);
	    }
	  goto copy_omp_body;

	case GIMPLE_OMP_PARALLEL:
	  t = unshare_expr (gimple_omp_parallel_clauses (stmt));
	  gimple_omp_parallel_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
	  gimple_omp_parallel_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
	  gimple_omp_parallel_set_data_arg (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_TASK:
	  t = unshare_expr (gimple_omp_task_clauses (stmt));
	  gimple_omp_task_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_task_child_fn (stmt));
	  gimple_omp_task_set_child_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_data_arg (stmt));
	  gimple_omp_task_set_data_arg (copy, t);
	  t = unshare_expr (gimple_omp_task_copy_fn (stmt));
	  gimple_omp_task_set_copy_fn (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_size (stmt));
	  gimple_omp_task_set_arg_size (copy, t);
	  t = unshare_expr (gimple_omp_task_arg_align (stmt));
	  gimple_omp_task_set_arg_align (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_CRITICAL:
	  t = unshare_expr (gimple_omp_critical_name (stmt));
	  gimple_omp_critical_set_name (copy, t);
	  goto copy_omp_body;

	case GIMPLE_OMP_SECTIONS:
	  t = unshare_expr (gimple_omp_sections_clauses (stmt));
	  gimple_omp_sections_set_clauses (copy, t);
	  t = unshare_expr (gimple_omp_sections_control (stmt));
	  gimple_omp_sections_set_control (copy, t);
	  /* FALLTHRU */

	case GIMPLE_OMP_SINGLE:
	case GIMPLE_OMP_TARGET:
	case GIMPLE_OMP_TEAMS:
	case GIMPLE_OMP_SECTION:
	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_TASKGROUP:
	case GIMPLE_OMP_ORDERED:
	copy_omp_body:
	  new_seq = gimple_seq_copy (gimple_omp_body (stmt));
	  gimple_omp_set_body (copy, new_seq);
	  break;

	case GIMPLE_TRANSACTION:
	  new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
	  gimple_transaction_set_body (copy, new_seq);
	  break;

	case GIMPLE_WITH_CLEANUP_EXPR:
	  new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
	  gimple_wce_set_cleanup (copy, new_seq);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Make copy of operands.  */
  for (i = 0; i < num_ops; i++)
    gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vdef (copy, gimple_vdef (stmt));
      gimple_set_vuse (copy, gimple_vuse (stmt));
    }

  /* Clear out SSA operand vectors on COPY.  */
  if (gimple_has_ops (stmt))
    {
      gimple_set_use_ops (copy, NULL);

      /* SSA operands need to be updated.  */
      gimple_set_modified (copy, true);
    }

  return copy;
}
/* Return true if statement S has side-effects.  We consider a
   statement to have side effects if:

   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */

bool
gimple_has_side_effects (const_gimple s)
{
  /* Debug statements never affect program semantics.  */
  if (is_gimple_debug (s))
    return false;

  /* We don't have to scan the arguments to check for
     volatile arguments, though, at present, we still
     do a scan to check for TREE_SIDE_EFFECTS.  */
  if (gimple_has_volatile_ops (s))
    return true;

  if (gimple_code (s) == GIMPLE_ASM
      && gimple_asm_volatile_p (s))
    return true;

  if (is_gimple_call (s))
    {
      int flags = gimple_call_flags (s);

      /* An infinite loop is considered a side effect.  */
      if (!(flags & (ECF_CONST | ECF_PURE))
	  || (flags & ECF_LOOPING_CONST_OR_PURE))
	return true;

      return false;
    }

  return false;
}
/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
   Return true if S can trap.  When INCLUDE_MEM is true, check whether
   the memory operations could trap.  When INCLUDE_STORES is true and
   S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked.  */

bool
gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
{
  tree t, div = NULL_TREE;
  enum tree_code op;

  if (include_mem)
    {
      /* For an assignment without stores, skip operand 0 (the LHS).  */
      unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;

      for (i = start; i < gimple_num_ops (s); i++)
	if (tree_could_trap_p (gimple_op (s, i)))
	  return true;
    }

  switch (gimple_code (s))
    {
    case GIMPLE_ASM:
      return gimple_asm_volatile_p (s);

    case GIMPLE_CALL:
      t = gimple_call_fndecl (s);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    case GIMPLE_ASSIGN:
      t = gimple_expr_type (s);
      op = gimple_assign_rhs_code (s);
      /* Pass the divisor so division by a nonzero constant can be
	 recognized as non-trapping.  */
      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
	div = gimple_assign_rhs2 (s);
      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
				      (INTEGRAL_TYPE_P (t)
				       && TYPE_OVERFLOW_TRAPS (t)),
				      div));

    default:
      break;
    }

  return false;
}
1852 /* Return true if statement S can trap. */
1854 bool
1855 gimple_could_trap_p (gimple s)
1857 return gimple_could_trap_p_1 (s, true, true);
1860 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
1862 bool
1863 gimple_assign_rhs_could_trap_p (gimple s)
1865 gcc_assert (is_gimple_assign (s));
1866 return gimple_could_trap_p_1 (s, true, false);
/* Print debugging information for gimple stmts generated.  Emits a
   per-kind count/size table to stderr when the compiler was built
   with GATHER_STATISTICS.  */

void
dump_gimple_statistics (void)
{
  int i, total_tuples = 0, total_bytes = 0;

  if (! GATHER_STATISTICS)
    {
      fprintf (stderr, "No gimple statistics\n");
      return;
    }

  /* NOTE(review): column spacing in the header string was mangled by
     extraction; this reconstruction assumes the usual aligned header —
     confirm against the original source.  */
  fprintf (stderr, "\nGIMPLE statements\n");
  fprintf (stderr, "Kind                   Stmts      Bytes\n");
  fprintf (stderr, "---------------------------------------\n");
  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
    {
      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
	       gimple_alloc_counts[i], gimple_alloc_sizes[i]);
      total_tuples += gimple_alloc_counts[i];
      total_bytes += gimple_alloc_sizes[i];
    }
  fprintf (stderr, "---------------------------------------\n");
  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
  fprintf (stderr, "---------------------------------------\n");
}
1899 /* Return the number of operands needed on the RHS of a GIMPLE
1900 assignment for an expression with tree code CODE. */
1902 unsigned
1903 get_gimple_rhs_num_ops (enum tree_code code)
1905 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
1907 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
1908 return 1;
1909 else if (rhs_class == GIMPLE_BINARY_RHS)
1910 return 2;
1911 else if (rhs_class == GIMPLE_TERNARY_RHS)
1912 return 3;
1913 else
1914 gcc_unreachable ();
/* Build gimple_rhs_class_table[], mapping every tree code to the
   GIMPLE RHS class (unary/binary/ternary/single/invalid) it uses on
   the RHS of an assignment.  DEFTREECODE is expanded once per code by
   including all-tree.def.  */
#define DEFTREECODE(SYM, STRING, TYPE, NARGS)				    \
  (unsigned char)							    \
  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS				    \
   : ((TYPE) == tcc_binary						    \
      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS			    \
   : ((TYPE) == tcc_constant						    \
      || (TYPE) == tcc_declaration					    \
      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS			    \
   : ((SYM) == TRUTH_AND_EXPR						    \
      || (SYM) == TRUTH_OR_EXPR						    \
      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS			    \
   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS				    \
   : ((SYM) == COND_EXPR						    \
      || (SYM) == WIDEN_MULT_PLUS_EXPR					    \
      || (SYM) == WIDEN_MULT_MINUS_EXPR					    \
      || (SYM) == DOT_PROD_EXPR						    \
      || (SYM) == REALIGN_LOAD_EXPR					    \
      || (SYM) == VEC_COND_EXPR						    \
      || (SYM) == VEC_PERM_EXPR						    \
      || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS			    \
   : ((SYM) == CONSTRUCTOR						    \
      || (SYM) == OBJ_TYPE_REF						    \
      || (SYM) == ASSERT_EXPR						    \
      || (SYM) == ADDR_EXPR						    \
      || (SYM) == WITH_SIZE_EXPR					    \
      || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS				    \
   : GIMPLE_INVALID_RHS),
#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,

const unsigned char gimple_rhs_class_table[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES
/* Recompute TREE_SIDE_EFFECTS for expression T from its own
   volatility and the side effects of its operands.  Expressions that
   inherently have side effects (stores, increments, va_arg) are left
   untouched.  */
void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:      /* a function call */
      /* Side effects propagate up from any operand that has them.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
    }
}
/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
   we failed to create one.  */

tree
canonicalize_cond_expr_cond (tree t)
{
  /* Strip conversions around boolean operations.  */
  if (CONVERT_EXPR_P (t)
      && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
	  || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
	     == BOOLEAN_TYPE))
    t = TREE_OPERAND (t, 0);

  /* For !x use x == 0.  */
  if (TREE_CODE (t) == TRUTH_NOT_EXPR)
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (EQ_EXPR, TREE_TYPE (t),
		  top0, build_int_cst (TREE_TYPE (top0), 0));
    }
  /* For cmp ? 1 : 0 use cmp.  */
  else if (TREE_CODE (t) == COND_EXPR
	   && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
	   && integer_onep (TREE_OPERAND (t, 1))
	   && integer_zerop (TREE_OPERAND (t, 2)))
    {
      tree top0 = TREE_OPERAND (t, 0);
      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
		  TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
    }
  /* For x ^ y use x != y.  */
  else if (TREE_CODE (t) == BIT_XOR_EXPR)
    t = build2 (NE_EXPR, TREE_TYPE (t),
		TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));

  /* Only return the result if it really is valid as a condition.  */
  if (is_gimple_condexpr (t))
    return t;

  return NULL_TREE;
}
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
   the positions marked by the set ARGS_TO_SKIP.  */

gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
  int i;
  int nargs = gimple_call_num_args (stmt);
  vec<tree> vargs;
  vargs.create (nargs);
  gimple new_stmt;

  /* Collect the arguments that survive the skip set.  */
  for (i = 0; i < nargs; i++)
    if (!bitmap_bit_p (args_to_skip, i))
      vargs.quick_push (gimple_call_arg (stmt, i));

  if (gimple_call_internal_p (stmt))
    new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
					       vargs);
  else
    new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
  vargs.release ();

  /* Transfer the LHS, virtual operands, location and flags so the
     new call is a drop-in replacement for STMT.  */
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
  gimple_set_vdef (new_stmt, gimple_vdef (stmt));

  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

  gimple_set_modified (new_stmt, true);

  return new_stmt;
}
/* Return true if the field decls F1 and F2 are at the same offset.

   This is intended to be used on GIMPLE types only.  */

bool
gimple_compare_field_offset (tree f1, tree f2)
{
  if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
    {
      tree offset1 = DECL_FIELD_OFFSET (f1);
      tree offset2 = DECL_FIELD_OFFSET (f2);
      return ((offset1 == offset2
	       /* Once gimplification is done, self-referential offsets are
		  instantiated as operand #2 of the COMPONENT_REF built for
		  each access and reset.  Therefore, they are not relevant
		  anymore and fields are interchangeable provided that they
		  represent the same access.  */
	       || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
		   && TREE_CODE (offset2) == PLACEHOLDER_EXPR
		   && (DECL_SIZE (f1) == DECL_SIZE (f2)
		       || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
			   && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
		       || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
		   && DECL_ALIGN (f1) == DECL_ALIGN (f2))
	       || operand_equal_p (offset1, offset2, 0))
	      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
				     DECL_FIELD_BIT_OFFSET (f2)));
    }

  /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
     should be, so handle differing ones specially by decomposing
     the offset into a byte and bit offset manually.  */
  if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
      && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
    {
      unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
      unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
      bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
      byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
		      + bit_offset1 / BITS_PER_UNIT);
      bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
      byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
		      + bit_offset2 / BITS_PER_UNIT);
      if (byte_offset1 != byte_offset2)
	return false;
      /* Same byte offset: the fields match if their sub-byte bit
	 positions agree.  */
      return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
    }

  return false;
}
2138 /* Return a type the same as TYPE except unsigned or
2139 signed according to UNSIGNEDP. */
2141 static tree
2142 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
2144 tree type1;
2146 type1 = TYPE_MAIN_VARIANT (type);
2147 if (type1 == signed_char_type_node
2148 || type1 == char_type_node
2149 || type1 == unsigned_char_type_node)
2150 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2151 if (type1 == integer_type_node || type1 == unsigned_type_node)
2152 return unsignedp ? unsigned_type_node : integer_type_node;
2153 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
2154 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2155 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
2156 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2157 if (type1 == long_long_integer_type_node
2158 || type1 == long_long_unsigned_type_node)
2159 return unsignedp
2160 ? long_long_unsigned_type_node
2161 : long_long_integer_type_node;
2162 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
2163 return unsignedp
2164 ? int128_unsigned_type_node
2165 : int128_integer_type_node;
2166 #if HOST_BITS_PER_WIDE_INT >= 64
2167 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
2168 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2169 #endif
2170 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
2171 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2172 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
2173 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2174 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
2175 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2176 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
2177 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2179 #define GIMPLE_FIXED_TYPES(NAME) \
2180 if (type1 == short_ ## NAME ## _type_node \
2181 || type1 == unsigned_short_ ## NAME ## _type_node) \
2182 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
2183 : short_ ## NAME ## _type_node; \
2184 if (type1 == NAME ## _type_node \
2185 || type1 == unsigned_ ## NAME ## _type_node) \
2186 return unsignedp ? unsigned_ ## NAME ## _type_node \
2187 : NAME ## _type_node; \
2188 if (type1 == long_ ## NAME ## _type_node \
2189 || type1 == unsigned_long_ ## NAME ## _type_node) \
2190 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
2191 : long_ ## NAME ## _type_node; \
2192 if (type1 == long_long_ ## NAME ## _type_node \
2193 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
2194 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
2195 : long_long_ ## NAME ## _type_node;
2197 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
2198 if (type1 == NAME ## _type_node \
2199 || type1 == u ## NAME ## _type_node) \
2200 return unsignedp ? u ## NAME ## _type_node \
2201 : NAME ## _type_node;
2203 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
2204 if (type1 == sat_ ## short_ ## NAME ## _type_node \
2205 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
2206 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
2207 : sat_ ## short_ ## NAME ## _type_node; \
2208 if (type1 == sat_ ## NAME ## _type_node \
2209 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
2210 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
2211 : sat_ ## NAME ## _type_node; \
2212 if (type1 == sat_ ## long_ ## NAME ## _type_node \
2213 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
2214 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
2215 : sat_ ## long_ ## NAME ## _type_node; \
2216 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
2217 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
2218 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
2219 : sat_ ## long_long_ ## NAME ## _type_node;
2221 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
2222 if (type1 == sat_ ## NAME ## _type_node \
2223 || type1 == sat_ ## u ## NAME ## _type_node) \
2224 return unsignedp ? sat_ ## u ## NAME ## _type_node \
2225 : sat_ ## NAME ## _type_node;
2227 GIMPLE_FIXED_TYPES (fract);
2228 GIMPLE_FIXED_TYPES_SAT (fract);
2229 GIMPLE_FIXED_TYPES (accum);
2230 GIMPLE_FIXED_TYPES_SAT (accum);
2232 GIMPLE_FIXED_MODE_TYPES (qq);
2233 GIMPLE_FIXED_MODE_TYPES (hq);
2234 GIMPLE_FIXED_MODE_TYPES (sq);
2235 GIMPLE_FIXED_MODE_TYPES (dq);
2236 GIMPLE_FIXED_MODE_TYPES (tq);
2237 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
2238 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
2239 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
2240 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
2241 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
2242 GIMPLE_FIXED_MODE_TYPES (ha);
2243 GIMPLE_FIXED_MODE_TYPES (sa);
2244 GIMPLE_FIXED_MODE_TYPES (da);
2245 GIMPLE_FIXED_MODE_TYPES (ta);
2246 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
2247 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
2248 GIMPLE_FIXED_MODE_TYPES_SAT (da);
2249 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
2251 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
2252 the precision; they have precision set to match their range, but
2253 may use a wider mode to match an ABI. If we change modes, we may
2254 wind up with bad conversions. For INTEGER_TYPEs in C, must check
2255 the precision as well, so as to yield correct results for
2256 bit-field types. C++ does not have these separate bit-field
2257 types, and producing a signed or unsigned variant of an
2258 ENUMERAL_TYPE may cause other problems as well. */
2259 if (!INTEGRAL_TYPE_P (type)
2260 || TYPE_UNSIGNED (type) == unsignedp)
2261 return type;
2263 #define TYPE_OK(node) \
2264 (TYPE_MODE (type) == TYPE_MODE (node) \
2265 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
2266 if (TYPE_OK (signed_char_type_node))
2267 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2268 if (TYPE_OK (integer_type_node))
2269 return unsignedp ? unsigned_type_node : integer_type_node;
2270 if (TYPE_OK (short_integer_type_node))
2271 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2272 if (TYPE_OK (long_integer_type_node))
2273 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2274 if (TYPE_OK (long_long_integer_type_node))
2275 return (unsignedp
2276 ? long_long_unsigned_type_node
2277 : long_long_integer_type_node);
2278 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
2279 return (unsignedp
2280 ? int128_unsigned_type_node
2281 : int128_integer_type_node);
2283 #if HOST_BITS_PER_WIDE_INT >= 64
2284 if (TYPE_OK (intTI_type_node))
2285 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2286 #endif
2287 if (TYPE_OK (intDI_type_node))
2288 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2289 if (TYPE_OK (intSI_type_node))
2290 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2291 if (TYPE_OK (intHI_type_node))
2292 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2293 if (TYPE_OK (intQI_type_node))
2294 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2296 #undef GIMPLE_FIXED_TYPES
2297 #undef GIMPLE_FIXED_MODE_TYPES
2298 #undef GIMPLE_FIXED_TYPES_SAT
2299 #undef GIMPLE_FIXED_MODE_TYPES_SAT
2300 #undef TYPE_OK
2302 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
2306 /* Return an unsigned type the same as TYPE in other respects. */
2308 tree
2309 gimple_unsigned_type (tree type)
2311 return gimple_signed_or_unsigned_type (true, type);
2315 /* Return a signed type the same as TYPE in other respects. */
2317 tree
2318 gimple_signed_type (tree type)
2320 return gimple_signed_or_unsigned_type (false, type);
2324 /* Return the typed-based alias set for T, which may be an expression
2325 or a type. Return -1 if we don't do anything special. */
2327 alias_set_type
2328 gimple_get_alias_set (tree t)
2330 tree u;
2332 /* Permit type-punning when accessing a union, provided the access
2333 is directly through the union. For example, this code does not
2334 permit taking the address of a union member and then storing
2335 through it. Even the type-punning allowed here is a GCC
2336 extension, albeit a common and useful one; the C standard says
2337 that such accesses have implementation-defined behavior. */
2338 for (u = t;
2339 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2340 u = TREE_OPERAND (u, 0))
2341 if (TREE_CODE (u) == COMPONENT_REF
2342 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2343 return 0;
2345 /* That's all the expressions we handle specially. */
2346 if (!TYPE_P (t))
2347 return -1;
2349 /* For convenience, follow the C standard when dealing with
2350 character types. Any object may be accessed via an lvalue that
2351 has character type. */
2352 if (t == char_type_node
2353 || t == signed_char_type_node
2354 || t == unsigned_char_type_node)
2355 return 0;
2357 /* Allow aliasing between signed and unsigned variants of the same
2358 type. We treat the signed variant as canonical. */
2359 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2361 tree t1 = gimple_signed_type (t);
2363 /* t1 == t can happen for boolean nodes which are always unsigned. */
2364 if (t1 != t)
2365 return get_alias_set (t1);
2368 return -1;
2372 /* Helper for gimple_ior_addresses_taken_1. */
2374 static bool
2375 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
2376 tree addr, void *data)
2378 bitmap addresses_taken = (bitmap)data;
2379 addr = get_base_address (addr);
2380 if (addr
2381 && DECL_P (addr))
2383 bitmap_set_bit (addresses_taken, DECL_UID (addr));
2384 return true;
2386 return false;
2389 /* Set the bit for the uid of all decls that have their address taken
2390 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2391 were any in this stmt. */
2393 bool
2394 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
2396 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2397 gimple_ior_addresses_taken_1);
2401 /* Return TRUE iff stmt is a call to a built-in function. */
2403 bool
2404 is_gimple_builtin_call (gimple stmt)
2406 tree callee;
2408 if (is_gimple_call (stmt)
2409 && (callee = gimple_call_fndecl (stmt))
2410 && is_builtin_fn (callee)
2411 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2412 return true;
2414 return false;
2417 /* Return true when STMTs arguments match those of FNDECL. */
2419 static bool
2420 validate_call (gimple stmt, tree fndecl)
2422 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2423 unsigned nargs = gimple_call_num_args (stmt);
2424 for (unsigned i = 0; i < nargs; ++i)
2426 /* Variadic args follow. */
2427 if (!targs)
2428 return true;
2429 tree arg = gimple_call_arg (stmt, i);
2430 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2431 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
2433 else if (POINTER_TYPE_P (TREE_TYPE (arg))
2434 && POINTER_TYPE_P (TREE_VALUE (targs)))
2436 else if (TREE_CODE (TREE_TYPE (arg))
2437 != TREE_CODE (TREE_VALUE (targs)))
2438 return false;
2439 targs = TREE_CHAIN (targs);
2441 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2442 return false;
2443 return true;
2446 /* Return true when STMT is builtins call to CLASS. */
2448 bool
2449 gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
2451 tree fndecl;
2452 if (is_gimple_call (stmt)
2453 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2454 && DECL_BUILT_IN_CLASS (fndecl) == klass)
2455 return validate_call (stmt, fndecl);
2456 return false;
2459 /* Return true when STMT is builtins call to CODE of CLASS. */
2461 bool
2462 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
2464 tree fndecl;
2465 if (is_gimple_call (stmt)
2466 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2467 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2468 && DECL_FUNCTION_CODE (fndecl) == code)
2469 return validate_call (stmt, fndecl);
2470 return false;
2473 /* Return true if STMT clobbers memory. STMT is required to be a
2474 GIMPLE_ASM. */
2476 bool
2477 gimple_asm_clobbers_memory_p (const_gimple stmt)
2479 unsigned i;
2481 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2483 tree op = gimple_asm_clobber_op (stmt, i);
2484 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2485 return true;
2488 return false;
2491 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2493 void
2494 dump_decl_set (FILE *file, bitmap set)
2496 if (set)
2498 bitmap_iterator bi;
2499 unsigned i;
2501 fprintf (file, "{ ");
2503 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2505 fprintf (file, "D.%u", i);
2506 fprintf (file, " ");
2509 fprintf (file, "}");
2511 else
2512 fprintf (file, "NIL");
2515 /* Return true when CALL is a call stmt that definitely doesn't
2516 free any memory or makes it unavailable otherwise. */
2517 bool
2518 nonfreeing_call_p (gimple call)
2520 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2521 && gimple_call_flags (call) & ECF_LEAF)
2522 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2524 /* Just in case these become ECF_LEAF in the future. */
2525 case BUILT_IN_FREE:
2526 case BUILT_IN_TM_FREE:
2527 case BUILT_IN_REALLOC:
2528 case BUILT_IN_STACK_RESTORE:
2529 return false;
2530 default:
2531 return true;
2534 return false;
2537 /* Callback for walk_stmt_load_store_ops.
2539 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2540 otherwise.
2542 This routine only makes a superficial check for a dereference. Thus
2543 it must only be used if it is safe to return a false negative. */
2544 static bool
2545 check_loadstore (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
2547 if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2548 && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
2549 return true;
2550 return false;
2553 /* If OP can be inferred to be non-zero after STMT executes, return true. */
2555 bool
2556 infer_nonnull_range (gimple stmt, tree op)
2558 /* We can only assume that a pointer dereference will yield
2559 non-NULL if -fdelete-null-pointer-checks is enabled. */
2560 if (!flag_delete_null_pointer_checks
2561 || !POINTER_TYPE_P (TREE_TYPE (op))
2562 || gimple_code (stmt) == GIMPLE_ASM)
2563 return false;
2565 if (walk_stmt_load_store_ops (stmt, (void *)op,
2566 check_loadstore, check_loadstore))
2567 return true;
2569 if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
2571 tree fntype = gimple_call_fntype (stmt);
2572 tree attrs = TYPE_ATTRIBUTES (fntype);
2573 for (; attrs; attrs = TREE_CHAIN (attrs))
2575 attrs = lookup_attribute ("nonnull", attrs);
2577 /* If "nonnull" wasn't specified, we know nothing about
2578 the argument. */
2579 if (attrs == NULL_TREE)
2580 return false;
2582 /* If "nonnull" applies to all the arguments, then ARG
2583 is non-null if it's in the argument list. */
2584 if (TREE_VALUE (attrs) == NULL_TREE)
2586 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2588 if (operand_equal_p (op, gimple_call_arg (stmt, i), 0)
2589 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i))))
2590 return true;
2592 return false;
2595 /* Now see if op appears in the nonnull list. */
2596 for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
2598 int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
2599 tree arg = gimple_call_arg (stmt, idx);
2600 if (operand_equal_p (op, arg, 0))
2601 return true;
2606 /* If this function is marked as returning non-null, then we can
2607 infer OP is non-null if it is used in the return statement. */
2608 if (gimple_code (stmt) == GIMPLE_RETURN
2609 && gimple_return_retval (stmt)
2610 && operand_equal_p (gimple_return_retval (stmt), op, 0)
2611 && lookup_attribute ("returns_nonnull",
2612 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
2613 return true;
2615 return false;
2618 /* Compare two case labels. Because the front end should already have
2619 made sure that case ranges do not overlap, it is enough to only compare
2620 the CASE_LOW values of each case label. */
2622 static int
2623 compare_case_labels (const void *p1, const void *p2)
2625 const_tree const case1 = *(const_tree const*)p1;
2626 const_tree const case2 = *(const_tree const*)p2;
2628 /* The 'default' case label always goes first. */
2629 if (!CASE_LOW (case1))
2630 return -1;
2631 else if (!CASE_LOW (case2))
2632 return 1;
2633 else
2634 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2637 /* Sort the case labels in LABEL_VEC in place in ascending order. */
2639 void
2640 sort_case_labels (vec<tree> label_vec)
2642 label_vec.qsort (compare_case_labels);
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.

   LABELS is a vector that contains all case labels to look at.

   INDEX_TYPE is the type of the switch index expression.  Case labels
   in LABELS are discarded if their values are not in the value range
   covered by INDEX_TYPE.  The remaining case label values are folded
   to INDEX_TYPE.

   If a default case exists in LABELS, it is removed from LABELS and
   returned in DEFAULT_CASEP.  If no default case exists, but the
   case labels already cover the whole range of INDEX_TYPE, a default
   case is returned pointing to one of the existing case labels.
   Otherwise DEFAULT_CASEP is set to NULL_TREE.

   DEFAULT_CASEP may be NULL, in which case the above comment doesn't
   apply and no action is taken regardless of whether a default case is
   found or not.  */

void
preprocess_case_label_vec_for_gimple (vec<tree> labels,
				      tree index_type,
				      tree *default_casep)
{
  tree min_value, max_value;
  tree default_case = NULL_TREE;
  size_t i, len;

  i = 0;
  min_value = TYPE_MIN_VALUE (index_type);
  max_value = TYPE_MAX_VALUE (index_type);
  /* First pass: canonicalize each label in place, remembering the
     default label and marking unreachable labels for removal.  */
  while (i < labels.length ())
    {
      tree elt = labels[i];
      tree low = CASE_LOW (elt);
      tree high = CASE_HIGH (elt);
      bool remove_element = FALSE;

      if (low)
	{
	  gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
	  gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);

	  /* This is a non-default case label, i.e. it has a value.

	     See if the case label is reachable within the range of
	     the index type.  Remove out-of-range case values.  Turn
	     case ranges into a canonical form (high > low strictly)
	     and convert the case label values to the index type.

	     NB: The type of gimple_switch_index() may be the promoted
	     type, but the case labels retain the original type.  */

	  if (high)
	    {
	      /* This is a case range.  Discard empty ranges.
		 If the bounds or the range are equal, turn this
		 into a simple (one-value) case.  */
	      int cmp = tree_int_cst_compare (high, low);
	      if (cmp < 0)
		remove_element = TRUE;
	      else if (cmp == 0)
		high = NULL_TREE;
	    }

	  if (! high)
	    {
	      /* If the simple case value is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (low, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		low = fold_convert (index_type, low);
	    }
	  else
	    {
	      /* If the entire case range is unreachable, ignore it.  */
	      if ((TREE_CODE (min_value) == INTEGER_CST
		   && tree_int_cst_compare (high, min_value) < 0)
		  || (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (low, max_value) > 0))
		remove_element = TRUE;
	      else
		{
		  /* If the lower bound is less than the index type's
		     minimum value, truncate the range bounds.  */
		  if (TREE_CODE (min_value) == INTEGER_CST
		      && tree_int_cst_compare (low, min_value) < 0)
		    low = min_value;
		  low = fold_convert (index_type, low);

		  /* If the upper bound is greater than the index type's
		     maximum value, truncate the range bounds.  */
		  if (TREE_CODE (max_value) == INTEGER_CST
		      && tree_int_cst_compare (high, max_value) > 0)
		    high = max_value;
		  high = fold_convert (index_type, high);

		  /* We may have folded a case range to a one-value case.  */
		  if (tree_int_cst_equal (low, high))
		    high = NULL_TREE;
		}
	    }

	  CASE_LOW (elt) = low;
	  CASE_HIGH (elt) = high;
	}
      else
	{
	  gcc_assert (!default_case);
	  default_case = elt;
	  /* The default case must be passed separately to the
	     gimple_build_switch routine.  But if DEFAULT_CASEP
	     is NULL, we do not remove the default case (it would
	     be completely lost).  */
	  if (default_casep)
	    remove_element = TRUE;
	}

      if (remove_element)
	labels.ordered_remove (i);
      else
	i++;
    }
  /* I is now the number of labels kept.  */
  len = i;

  if (!labels.is_empty ())
    sort_case_labels (labels);

  if (default_casep && !default_case)
    {
      /* If the switch has no default label, add one, so that we jump
	 around the switch body.  If the labels already cover the whole
	 range of the switch index_type, add the default label pointing
	 to one of the existing labels.  */
      if (len
	  && TYPE_MIN_VALUE (index_type)
	  && TYPE_MAX_VALUE (index_type)
	  && tree_int_cst_equal (CASE_LOW (labels[0]),
				 TYPE_MIN_VALUE (index_type)))
	{
	  tree low, high = CASE_HIGH (labels[len - 1]);
	  if (!high)
	    high = CASE_LOW (labels[len - 1]);
	  if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
	    {
	      /* Check that consecutive labels are adjacent, i.e. each
		 label's low bound is exactly one more than the previous
		 label's high bound, using double-word arithmetic on the
		 (low, high) INTEGER_CST halves with carry propagation.  */
	      for (i = 1; i < len; i++)
		{
		  high = CASE_LOW (labels[i]);
		  low = CASE_HIGH (labels[i - 1]);
		  if (!low)
		    low = CASE_LOW (labels[i - 1]);
		  if ((TREE_INT_CST_LOW (low) + 1
		       != TREE_INT_CST_LOW (high))
		      || (TREE_INT_CST_HIGH (low)
			  + (TREE_INT_CST_LOW (high) == 0)
			  != TREE_INT_CST_HIGH (high)))
		    break;
		}
	      /* All gaps closed: the labels are exhaustive, so reuse the
		 first label's target as the default destination.  */
	      if (i == len)
		{
		  tree label = CASE_LABEL (labels[0]);
		  default_case = build_case_label (NULL_TREE, NULL_TREE,
						   label);
		}
	    }
	}
    }

  if (default_casep)
    *default_casep = default_case;
}
2820 /* Set the location of all statements in SEQ to LOC. */
2822 void
2823 gimple_seq_set_location (gimple_seq seq, location_t loc)
2825 for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2826 gimple_set_location (gsi_stmt (i), loc);