Concretize gimple_cond_set_{lhs|rhs}
[official-gcc.git] / gcc / gimple.c
blob a244617a4c4f05a22e085b0772b3f3b82f6444dc
1 /* Gimple IR support functions.
3 Copyright (C) 2007-2014 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "calls.h"
29 #include "stmt.h"
30 #include "stor-layout.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "tree-ssa-alias.h"
34 #include "internal-fn.h"
35 #include "tree-eh.h"
36 #include "gimple-expr.h"
37 #include "is-a.h"
38 #include "gimple.h"
39 #include "gimple-iterator.h"
40 #include "gimple-walk.h"
42 #include "gimplify.h"
43 #include "diagnostic.h"
44 #include "value-prof.h"
45 #include "flags.h"
46 #include "alias.h"
47 #include "demangle.h"
48 #include "langhooks.h"
49 #include "bitmap.h"
52 /* All the tuples have their operand vector (if present) at the very bottom
53    of the structure.  Therefore, the offset required to find the
54    operands vector is the size of the structure minus the size of the 1
55 element tree array at the end (see gimple_ops). */
56 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
57 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
58 EXPORTED_CONST size_t gimple_ops_offset_[] = {
59 #include "gsstruct.def"
61 #undef DEFGSSTRUCT
63 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
64 static const size_t gsstruct_code_size[] = {
65 #include "gsstruct.def"
67 #undef DEFGSSTRUCT
69 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
70 const char *const gimple_code_name[] = {
71 #include "gimple.def"
73 #undef DEFGSCODE
75 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
76 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
77 #include "gimple.def"
79 #undef DEFGSCODE
81 /* Gimple stats. */
83 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
84 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
86 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
87 static const char * const gimple_alloc_kind_names[] = {
88 "assignments",
89 "phi nodes",
90 "conditionals",
91 "everything else"
94 /* Gimple tuple constructors.
95    Note: Any constructor taking a ``gimple_seq'' as a parameter can
96    be passed NULL to start with an empty sequence. */
98 /* Set the code for statement G to CODE. */
100 static inline void
101 gimple_set_code (gimple g, enum gimple_code code)
103 g->code = code;
106 /* Return the number of bytes needed to hold a GIMPLE statement with
107 code CODE. */
109 static inline size_t
110 gimple_size (enum gimple_code code)
112 return gsstruct_code_size[gss_for_code (code)];
115 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
116 operands. */
118 gimple
119 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
121 size_t size;
122 gimple stmt;
124 size = gimple_size (code);
125 if (num_ops > 0)
126 size += sizeof (tree) * (num_ops - 1);
128 if (GATHER_STATISTICS)
130 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
131 gimple_alloc_counts[(int) kind]++;
132 gimple_alloc_sizes[(int) kind] += size;
135 stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT);
136 gimple_set_code (stmt, code);
137 gimple_set_num_ops (stmt, num_ops);
139 /* Do not call gimple_set_modified here as it has other side
140 effects and this tuple is still not completely built. */
141 stmt->modified = 1;
142 gimple_init_singleton (stmt);
144 return stmt;
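/* Illustrative note (not part of the original file): a statement with
   NUM_OPS == 3 therefore occupies

     gimple_size (code) + 2 * sizeof (tree)

   bytes.  The first operand slot is already included in the structure
   size, which is why only NUM_OPS - 1 extra tree slots are added.  */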
147 /* Set SUBCODE to be the code of the expression computed by statement G. */
149 static inline void
150 gimple_set_subcode (gimple g, unsigned subcode)
152 /* We only have 16 bits for the RHS code. Assert that we are not
153 overflowing it. */
154 gcc_assert (subcode < (1 << 16));
155 g->subcode = subcode;
160 /* Build a tuple with operands. CODE is the statement to build (which
161 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the subcode
162 for the new tuple. NUM_OPS is the number of operands to allocate. */
164 #define gimple_build_with_ops(c, s, n) \
165 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
167 static gimple
168 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
169 unsigned num_ops MEM_STAT_DECL)
171 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
172 gimple_set_subcode (s, subcode);
174 return s;
178 /* Build a GIMPLE_RETURN statement returning RETVAL. */
180 gimple_return
181 gimple_build_return (tree retval)
183 gimple_return s =
184 as_a <gimple_return> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
185 1));
186 if (retval)
187 gimple_return_set_retval (s, retval);
188 return s;
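/* Illustrative sketch (not part of the original file): a caller that
   wants to return a hypothetical tree RETVAL and append the statement
   to a sequence SEQ might write

     gimple_return r = gimple_build_return (retval);
     gimple_seq_add_stmt (&seq, r);

   Passing NULL_TREE instead of RETVAL yields a plain "return;".  */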
191 /* Reset alias information on call S. */
193 void
194 gimple_call_reset_alias_info (gimple_call s)
196 if (gimple_call_flags (s) & ECF_CONST)
197 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
198 else
199 pt_solution_reset (gimple_call_use_set (s));
200 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
201 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
202 else
203 pt_solution_reset (gimple_call_clobber_set (s));
206 /* Helper for gimple_build_call, gimple_build_call_valist,
207 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
208 components of a GIMPLE_CALL statement to function FN with NARGS
209 arguments. */
211 static inline gimple_call
212 gimple_build_call_1 (tree fn, unsigned nargs)
214 gimple_call s =
215 as_a <gimple_call> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
216 nargs + 3));
217 if (TREE_CODE (fn) == FUNCTION_DECL)
218 fn = build_fold_addr_expr (fn);
219 gimple_set_op (s, 1, fn);
220 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
221 gimple_call_reset_alias_info (s);
222 return s;
226 /* Build a GIMPLE_CALL statement to function FN with the arguments
227 specified in vector ARGS. */
229 gimple_call
230 gimple_build_call_vec (tree fn, vec<tree> args)
232 unsigned i;
233 unsigned nargs = args.length ();
234 gimple_call call = gimple_build_call_1 (fn, nargs);
236 for (i = 0; i < nargs; i++)
237 gimple_call_set_arg (call, i, args[i]);
239 return call;
243 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
244 arguments. The ... are the arguments. */
246 gimple_call
247 gimple_build_call (tree fn, unsigned nargs, ...)
249 va_list ap;
250 gimple_call call;
251 unsigned i;
253 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
255 call = gimple_build_call_1 (fn, nargs);
257 va_start (ap, nargs);
258 for (i = 0; i < nargs; i++)
259 gimple_call_set_arg (call, i, va_arg (ap, tree));
260 va_end (ap);
262 return call;
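/* Illustrative sketch (not part of the original file): given a
   hypothetical FUNCTION_DECL FNDECL taking two arguments and trees
   ARG0/ARG1 that are already valid GIMPLE operands, a caller might
   build

     gimple_call c = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (c, lhs);

   where the LHS assignment is optional and NARGS must match the number
   of trailing tree arguments.  */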
266 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
267 arguments. AP contains the arguments. */
269 gimple_call
270 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
272 gimple_call call;
273 unsigned i;
275 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
277 call = gimple_build_call_1 (fn, nargs);
279 for (i = 0; i < nargs; i++)
280 gimple_call_set_arg (call, i, va_arg (ap, tree));
282 return call;
286 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
287 Build the basic components of a GIMPLE_CALL statement to internal
288 function FN with NARGS arguments. */
290 static inline gimple_call
291 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
293 gimple_call s =
294 as_a <gimple_call> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
295 nargs + 3));
296 s->subcode |= GF_CALL_INTERNAL;
297 gimple_call_set_internal_fn (s, fn);
298 gimple_call_reset_alias_info (s);
299 return s;
303 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
304 the number of arguments. The ... are the arguments. */
306 gimple_call
307 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
309 va_list ap;
310 gimple_call call;
311 unsigned i;
313 call = gimple_build_call_internal_1 (fn, nargs);
314 va_start (ap, nargs);
315 for (i = 0; i < nargs; i++)
316 gimple_call_set_arg (call, i, va_arg (ap, tree));
317 va_end (ap);
319 return call;
323 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
324 specified in vector ARGS. */
326 gimple_call
327 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
329 unsigned i, nargs;
330 gimple_call call;
332 nargs = args.length ();
333 call = gimple_build_call_internal_1 (fn, nargs);
334 for (i = 0; i < nargs; i++)
335 gimple_call_set_arg (call, i, args[i]);
337 return call;
341 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
342 assumed to be in GIMPLE form already. Minimal checking is done of
343 this fact. */
345 gimple_call
346 gimple_build_call_from_tree (tree t)
348 unsigned i, nargs;
349 gimple_call call;
350 tree fndecl = get_callee_fndecl (t);
352 gcc_assert (TREE_CODE (t) == CALL_EXPR);
354 nargs = call_expr_nargs (t);
355 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
357 for (i = 0; i < nargs; i++)
358 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
360 gimple_set_block (call, TREE_BLOCK (t));
362 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
363 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
364 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
365 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
366 if (fndecl
367 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
368 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
369 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
370 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
371 else
372 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
373 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
374 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
375 gimple_set_no_warning (call, TREE_NO_WARNING (t));
377 return call;
381 /* Build a GIMPLE_ASSIGN statement.
383    LHS is the left-hand side of the assignment.
384    RHS is the right-hand side, which can be a single operand or a unary or binary expression. */
386 gimple_assign
387 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
389 enum tree_code subcode;
390 tree op1, op2, op3;
392 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
393 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
394 PASS_MEM_STAT);
398 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
399    OP1, OP2 and OP3.  If OP2 and OP3 are NULL then SUBCODE must be of class
400 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
402 gimple_assign
403 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
404 tree op2, tree op3 MEM_STAT_DECL)
406 unsigned num_ops;
407 gimple_assign p;
409   /* Need 1 operand for the LHS and 1 to 3 for the RHS (depending on the
410 code). */
411 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
413 p = as_a <gimple_assign> (
414 gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
415 PASS_MEM_STAT));
416 gimple_assign_set_lhs (p, lhs);
417 gimple_assign_set_rhs1 (p, op1);
418 if (op2)
420 gcc_assert (num_ops > 2);
421 gimple_assign_set_rhs2 (p, op2);
424 if (op3)
426 gcc_assert (num_ops > 3);
427 gimple_assign_set_rhs3 (p, op3);
430 return p;
433 gimple_assign
434 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
435 tree op2 MEM_STAT_DECL)
437 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
438 PASS_MEM_STAT);
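/* Illustrative sketch (not part of the original file): to emit the
   assignment "tmp = a + b" for hypothetical trees TMP, A and B one
   could write

     gimple_assign g = gimple_build_assign_with_ops (PLUS_EXPR, tmp, a, b);

   The number of operand slots is derived from the tree code via
   get_gimple_rhs_num_ops.  */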
442 /* Build a GIMPLE_COND statement.
444    PRED_CODE is the comparison code used to compare LHS and RHS.
445 T_LABEL is the label to jump to if the condition is true.
446 F_LABEL is the label to jump to otherwise. */
448 gimple_cond
449 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
450 tree t_label, tree f_label)
452 gimple_cond p;
454 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
455 p = as_a <gimple_cond> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
456 gimple_cond_set_lhs (p, lhs);
457 gimple_cond_set_rhs (p, rhs);
458 gimple_cond_set_true_label (p, t_label);
459 gimple_cond_set_false_label (p, f_label);
460 return p;
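/* Illustrative sketch (not part of the original file): a conditional
   testing "i < n" with hypothetical label decls TRUE_LAB and FALSE_LAB
   could be built as

     gimple_cond c = gimple_build_cond (LT_EXPR, i, n, true_lab, false_lab);

   Once a CFG exists the labels are typically NULL_TREE and the branch
   targets live on the outgoing edges instead.  */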
463 /* Build a GIMPLE_COND statement from the conditional expression tree
464 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
466 gimple_cond
467 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
469 enum tree_code code;
470 tree lhs, rhs;
472 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
473 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
476 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
477 boolean expression tree COND. */
479 void
480 gimple_cond_set_condition_from_tree (gimple_cond stmt, tree cond)
482 enum tree_code code;
483 tree lhs, rhs;
485 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
486 gimple_cond_set_condition (stmt, code, lhs, rhs);
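/* Illustrative sketch (not part of the original file): replacing the
   condition of an existing GIMPLE_COND STMT with "x != 0" for a
   hypothetical tree X might look like

     gimple_cond_set_condition (stmt, NE_EXPR, x,
                                build_int_cst (TREE_TYPE (x), 0));
     update_stmt (stmt);

   where the update_stmt call is only needed once SSA operands are
   being maintained.  */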
489 /* Build a GIMPLE_LABEL statement for LABEL. */
491 gimple_label
492 gimple_build_label (tree label)
494 gimple_label p =
495 as_a <gimple_label> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
496 gimple_label_set_label (p, label);
497 return p;
500 /* Build a GIMPLE_GOTO statement to label DEST. */
502 gimple_goto
503 gimple_build_goto (tree dest)
505 gimple_goto p =
506 as_a <gimple_goto> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
507 gimple_goto_set_dest (p, dest);
508 return p;
512 /* Build a GIMPLE_NOP statement. */
514 gimple
515 gimple_build_nop (void)
517 return gimple_alloc (GIMPLE_NOP, 0);
521 /* Build a GIMPLE_BIND statement.
522 VARS are the variables in BODY.
523 BLOCK is the containing block. */
525 gimple_bind
526 gimple_build_bind (tree vars, gimple_seq body, tree block)
528 gimple_bind p = as_a <gimple_bind> (gimple_alloc (GIMPLE_BIND, 0));
529 gimple_bind_set_vars (p, vars);
530 if (body)
531 gimple_bind_set_body (p, body);
532 if (block)
533 gimple_bind_set_block (p, block);
534 return p;
537 /* Helper function to set the simple fields of an asm stmt.
539    STRING is a pointer to a string that is the asm block's assembly code.
540    NINPUTS is the number of register inputs.
541    NOUTPUTS is the number of register outputs.
542    NCLOBBERS is the number of clobbered registers.
543    NLABELS is the number of labels.  */
545 static inline gimple_asm
546 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
547 unsigned nclobbers, unsigned nlabels)
549 gimple_asm p;
550 int size = strlen (string);
552 /* ASMs with labels cannot have outputs. This should have been
553 enforced by the front end. */
554 gcc_assert (nlabels == 0 || noutputs == 0);
556 p = as_a <gimple_statement_asm *> (
557 gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
558 ninputs + noutputs + nclobbers + nlabels));
560 p->ni = ninputs;
561 p->no = noutputs;
562 p->nc = nclobbers;
563 p->nl = nlabels;
564 p->string = ggc_alloc_string (string, size);
566 if (GATHER_STATISTICS)
567 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
569 return p;
572 /* Build a GIMPLE_ASM statement.
574 STRING is the assembly code.
575    NINPUTS is the number of register inputs.
576    NOUTPUTS is the number of register outputs.
577 NCLOBBERS is the number of clobbered registers.
578 INPUTS is a vector of the input register parameters.
579 OUTPUTS is a vector of the output register parameters.
580 CLOBBERS is a vector of the clobbered register parameters.
581 LABELS is a vector of destination labels. */
583 gimple_asm
584 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
585 vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
586 vec<tree, va_gc> *labels)
588 gimple_asm p;
589 unsigned i;
591 p = gimple_build_asm_1 (string,
592 vec_safe_length (inputs),
593 vec_safe_length (outputs),
594 vec_safe_length (clobbers),
595 vec_safe_length (labels));
597 for (i = 0; i < vec_safe_length (inputs); i++)
598 gimple_asm_set_input_op (p, i, (*inputs)[i]);
600 for (i = 0; i < vec_safe_length (outputs); i++)
601 gimple_asm_set_output_op (p, i, (*outputs)[i]);
603 for (i = 0; i < vec_safe_length (clobbers); i++)
604 gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
606 for (i = 0; i < vec_safe_length (labels); i++)
607 gimple_asm_set_label_op (p, i, (*labels)[i]);
609 return p;
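/* Illustrative sketch (not part of the original file): an asm with one
   input and one output, using hypothetical TREE_LIST operands IN and
   OUT, might be assembled as

     vec<tree, va_gc> *inputs = NULL, *outputs = NULL;
     vec_safe_push (inputs, in);
     vec_safe_push (outputs, out);
     gimple_asm a = gimple_build_asm_vec ("mov %1, %0", inputs, outputs,
                                          NULL, NULL);

   Clobbers and labels may be passed as NULL vectors.  */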
612 /* Build a GIMPLE_CATCH statement.
614 TYPES are the catch types.
615 HANDLER is the exception handler. */
617 gimple_catch
618 gimple_build_catch (tree types, gimple_seq handler)
620 gimple_catch p = as_a <gimple_catch> (gimple_alloc (GIMPLE_CATCH, 0));
621 gimple_catch_set_types (p, types);
622 if (handler)
623 gimple_catch_set_handler (p, handler);
625 return p;
628 /* Build a GIMPLE_EH_FILTER statement.
630 TYPES are the filter's types.
631 FAILURE is the filter's failure action. */
633 gimple_eh_filter
634 gimple_build_eh_filter (tree types, gimple_seq failure)
636 gimple_eh_filter p =
637 as_a <gimple_eh_filter> (gimple_alloc (GIMPLE_EH_FILTER, 0));
638 gimple_eh_filter_set_types (p, types);
639 if (failure)
640 gimple_eh_filter_set_failure (p, failure);
642 return p;
645 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
647 gimple_eh_must_not_throw
648 gimple_build_eh_must_not_throw (tree decl)
650 gimple_eh_must_not_throw p =
651 as_a <gimple_eh_must_not_throw> (
652 gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
654 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
655 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
656 gimple_eh_must_not_throw_set_fndecl (p, decl);
658 return p;
661 /* Build a GIMPLE_EH_ELSE statement. */
663 gimple_eh_else
664 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
666 gimple_eh_else p = as_a <gimple_eh_else> (gimple_alloc (GIMPLE_EH_ELSE, 0));
667 gimple_eh_else_set_n_body (p, n_body);
668 gimple_eh_else_set_e_body (p, e_body);
669 return p;
672 /* Build a GIMPLE_TRY statement.
674 EVAL is the expression to evaluate.
675 CLEANUP is the cleanup expression.
676 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
677 whether this is a try/catch or a try/finally respectively. */
679 gimple_statement_try *
680 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
681 enum gimple_try_flags kind)
683 gimple_statement_try *p;
685 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
686 p = as_a <gimple_statement_try *> (gimple_alloc (GIMPLE_TRY, 0));
687 gimple_set_subcode (p, kind);
688 if (eval)
689 gimple_try_set_eval (p, eval);
690 if (cleanup)
691 gimple_try_set_cleanup (p, cleanup);
693 return p;
696 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
698 CLEANUP is the cleanup expression. */
700 gimple
701 gimple_build_wce (gimple_seq cleanup)
703 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
704 if (cleanup)
705 gimple_wce_set_cleanup (p, cleanup);
707 return p;
711 /* Build a GIMPLE_RESX statement. */
713 gimple_resx
714 gimple_build_resx (int region)
716 gimple_resx p =
717 as_a <gimple_resx> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
718 p->region = region;
719 return p;
723 /* A helper for constructing a GIMPLE_SWITCH statement.
724 INDEX is the switch's index.
725 NLABELS is the number of labels in the switch excluding the default.
726 DEFAULT_LABEL is the default label for the switch statement. */
728 gimple_switch
729 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
731 /* nlabels + 1 default label + 1 index. */
732 gcc_checking_assert (default_label);
733 gimple_switch p =
734 as_a <gimple_switch> (gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
735 1 + 1 + nlabels));
736 gimple_switch_set_index (p, index);
737 gimple_switch_set_default_label (p, default_label);
738 return p;
741 /* Build a GIMPLE_SWITCH statement.
743 INDEX is the switch's index.
744 DEFAULT_LABEL is the default label
745 ARGS is a vector of labels excluding the default. */
747 gimple_switch
748 gimple_build_switch (tree index, tree default_label, vec<tree> args)
750 unsigned i, nlabels = args.length ();
752 gimple_switch p = gimple_build_switch_nlabels (nlabels, index, default_label);
754 /* Copy the labels from the vector to the switch statement. */
755 for (i = 0; i < nlabels; i++)
756 gimple_switch_set_label (p, i + 1, args[i]);
758 return p;
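/* Illustrative sketch (not part of the original file): a two-case
   switch over a hypothetical index IDX, with CASE_LABEL_EXPRs C1 and
   C2 and a default label DFLT, might be built as

     auto_vec<tree> cases;
     cases.safe_push (c1);
     cases.safe_push (c2);
     gimple_switch s = gimple_build_switch (idx, dflt, cases);

   Label slot 0 always holds the default; the remaining labels follow
   in vector order.  */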
761 /* Build a GIMPLE_EH_DISPATCH statement. */
763 gimple_eh_dispatch
764 gimple_build_eh_dispatch (int region)
766 gimple_eh_dispatch p =
767 as_a <gimple_eh_dispatch> (
768 gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
769 p->region = region;
770 return p;
773 /* Build a new GIMPLE_DEBUG_BIND statement.
775 VAR is bound to VALUE; block and location are taken from STMT. */
777 gimple_debug
778 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
780 gimple_debug p =
781 as_a <gimple_debug> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
782 (unsigned)GIMPLE_DEBUG_BIND, 2
783 PASS_MEM_STAT));
784 gimple_debug_bind_set_var (p, var);
785 gimple_debug_bind_set_value (p, value);
786 if (stmt)
787 gimple_set_location (p, gimple_location (stmt));
789 return p;
793 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
795 VAR is bound to VALUE; block and location are taken from STMT. */
797 gimple_debug
798 gimple_build_debug_source_bind_stat (tree var, tree value,
799 gimple stmt MEM_STAT_DECL)
801 gimple_debug p =
802 as_a <gimple_debug> (
803 gimple_build_with_ops_stat (GIMPLE_DEBUG,
804 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
805 PASS_MEM_STAT));
807 gimple_debug_source_bind_set_var (p, var);
808 gimple_debug_source_bind_set_value (p, value);
809 if (stmt)
810 gimple_set_location (p, gimple_location (stmt));
812 return p;
816 /* Build a GIMPLE_OMP_CRITICAL statement.
818 BODY is the sequence of statements for which only one thread can execute.
819    NAME is an optional identifier for this critical block.  */
821 gimple_omp_critical
822 gimple_build_omp_critical (gimple_seq body, tree name)
824 gimple_omp_critical p =
825 as_a <gimple_omp_critical> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
826 gimple_omp_critical_set_name (p, name);
827 if (body)
828 gimple_omp_set_body (p, body);
830 return p;
833 /* Build a GIMPLE_OMP_FOR statement.
835 BODY is sequence of statements inside the for loop.
836 KIND is the `for' variant.
837    CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
838    lastprivate, reduction, ordered, schedule, and nowait.
839 COLLAPSE is the collapse count.
840 PRE_BODY is the sequence of statements that are loop invariant. */
842 gimple_omp_for
843 gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
844 gimple_seq pre_body)
846 gimple_omp_for p = as_a <gimple_omp_for> (gimple_alloc (GIMPLE_OMP_FOR, 0));
847 if (body)
848 gimple_omp_set_body (p, body);
849 gimple_omp_for_set_clauses (p, clauses);
850 gimple_omp_for_set_kind (p, kind);
851 p->collapse = collapse;
852 p->iter = ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse);
854 if (pre_body)
855 gimple_omp_for_set_pre_body (p, pre_body);
857 return p;
861 /* Build a GIMPLE_OMP_PARALLEL statement.
863 BODY is sequence of statements which are executed in parallel.
864    CLAUSES are the OMP parallel construct's clauses.
865 CHILD_FN is the function created for the parallel threads to execute.
866    DATA_ARG is the shared data argument(s).  */
868 gimple_omp_parallel
869 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
870 tree data_arg)
872 gimple_omp_parallel p =
873 as_a <gimple_omp_parallel> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
874 if (body)
875 gimple_omp_set_body (p, body);
876 gimple_omp_parallel_set_clauses (p, clauses);
877 gimple_omp_parallel_set_child_fn (p, child_fn);
878 gimple_omp_parallel_set_data_arg (p, data_arg);
880 return p;
884 /* Build a GIMPLE_OMP_TASK statement.
886 BODY is sequence of statements which are executed by the explicit task.
887    CLAUSES are the OMP task construct's clauses.
888    CHILD_FN is the function created for the explicit task to execute.
889    DATA_ARG is the shared data argument(s).
890 COPY_FN is the optional function for firstprivate initialization.
891 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
893 gimple_omp_task
894 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
895 tree data_arg, tree copy_fn, tree arg_size,
896 tree arg_align)
898 gimple_omp_task p =
899 as_a <gimple_omp_task> (gimple_alloc (GIMPLE_OMP_TASK, 0));
900 if (body)
901 gimple_omp_set_body (p, body);
902 gimple_omp_task_set_clauses (p, clauses);
903 gimple_omp_task_set_child_fn (p, child_fn);
904 gimple_omp_task_set_data_arg (p, data_arg);
905 gimple_omp_task_set_copy_fn (p, copy_fn);
906 gimple_omp_task_set_arg_size (p, arg_size);
907 gimple_omp_task_set_arg_align (p, arg_align);
909 return p;
913 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
915 BODY is the sequence of statements in the section. */
917 gimple
918 gimple_build_omp_section (gimple_seq body)
920 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
921 if (body)
922 gimple_omp_set_body (p, body);
924 return p;
928 /* Build a GIMPLE_OMP_MASTER statement.
930 BODY is the sequence of statements to be executed by just the master. */
932 gimple
933 gimple_build_omp_master (gimple_seq body)
935 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
936 if (body)
937 gimple_omp_set_body (p, body);
939 return p;
943 /* Build a GIMPLE_OMP_TASKGROUP statement.
945 BODY is the sequence of statements to be executed by the taskgroup
946 construct. */
948 gimple
949 gimple_build_omp_taskgroup (gimple_seq body)
951 gimple p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0);
952 if (body)
953 gimple_omp_set_body (p, body);
955 return p;
959 /* Build a GIMPLE_OMP_CONTINUE statement.
961 CONTROL_DEF is the definition of the control variable.
962 CONTROL_USE is the use of the control variable. */
964 gimple_omp_continue
965 gimple_build_omp_continue (tree control_def, tree control_use)
967 gimple_omp_continue p =
968 as_a <gimple_omp_continue> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
969 gimple_omp_continue_set_control_def (p, control_def);
970 gimple_omp_continue_set_control_use (p, control_use);
971 return p;
974 /* Build a GIMPLE_OMP_ORDERED statement.
976    BODY is the sequence of statements inside a loop that will be executed in
977 sequence. */
979 gimple
980 gimple_build_omp_ordered (gimple_seq body)
982 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
983 if (body)
984 gimple_omp_set_body (p, body);
986 return p;
990 /* Build a GIMPLE_OMP_RETURN statement.
991 WAIT_P is true if this is a non-waiting return. */
993 gimple
994 gimple_build_omp_return (bool wait_p)
996 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
997 if (wait_p)
998 gimple_omp_return_set_nowait (p);
1000 return p;
1004 /* Build a GIMPLE_OMP_SECTIONS statement.
1006 BODY is a sequence of section statements.
1007    CLAUSES are any of the OMP sections construct's clauses: private,
1008 firstprivate, lastprivate, reduction, and nowait. */
1010 gimple_omp_sections
1011 gimple_build_omp_sections (gimple_seq body, tree clauses)
1013 gimple_omp_sections p =
1014 as_a <gimple_omp_sections> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
1015 if (body)
1016 gimple_omp_set_body (p, body);
1017 gimple_omp_sections_set_clauses (p, clauses);
1019 return p;
1023 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1025 gimple
1026 gimple_build_omp_sections_switch (void)
1028 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1032 /* Build a GIMPLE_OMP_SINGLE statement.
1034 BODY is the sequence of statements that will be executed once.
1035 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1036 copyprivate, nowait. */
1038 gimple_omp_single
1039 gimple_build_omp_single (gimple_seq body, tree clauses)
1041 gimple_omp_single p =
1042 as_a <gimple_omp_single> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
1043 if (body)
1044 gimple_omp_set_body (p, body);
1045 gimple_omp_single_set_clauses (p, clauses);
1047 return p;
1051 /* Build a GIMPLE_OMP_TARGET statement.
1053 BODY is the sequence of statements that will be executed.
1054 CLAUSES are any of the OMP target construct's clauses. */
1056 gimple_omp_target
1057 gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
1059 gimple_omp_target p =
1060 as_a <gimple_omp_target> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
1061 if (body)
1062 gimple_omp_set_body (p, body);
1063 gimple_omp_target_set_clauses (p, clauses);
1064 gimple_omp_target_set_kind (p, kind);
1066 return p;
1070 /* Build a GIMPLE_OMP_TEAMS statement.
1072 BODY is the sequence of statements that will be executed.
1073 CLAUSES are any of the OMP teams construct's clauses. */
1075 gimple_omp_teams
1076 gimple_build_omp_teams (gimple_seq body, tree clauses)
1078 gimple_omp_teams p =
1079 as_a <gimple_omp_teams> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
1080 if (body)
1081 gimple_omp_set_body (p, body);
1082 gimple_omp_teams_set_clauses (p, clauses);
1084 return p;
1088 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1090 gimple_omp_atomic_load
1091 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1093 gimple_omp_atomic_load p =
1094 as_a <gimple_omp_atomic_load> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
1095 gimple_omp_atomic_load_set_lhs (p, lhs);
1096 gimple_omp_atomic_load_set_rhs (p, rhs);
1097 return p;
1100 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1102 VAL is the value we are storing. */
1104 gimple_omp_atomic_store
1105 gimple_build_omp_atomic_store (tree val)
1107 gimple_omp_atomic_store p =
1108 as_a <gimple_omp_atomic_store> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
1109 gimple_omp_atomic_store_set_val (p, val);
1110 return p;
1113 /* Build a GIMPLE_TRANSACTION statement. */
1115 gimple_transaction
1116 gimple_build_transaction (gimple_seq body, tree label)
1118 gimple_transaction p =
1119 as_a <gimple_transaction> (gimple_alloc (GIMPLE_TRANSACTION, 0));
1120 gimple_transaction_set_body (p, body);
1121 gimple_transaction_set_label (p, label);
1122 return p;
1125 /* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
1126 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1128 gimple
1129 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1131 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1132 /* Ensure all the predictors fit into the lower bits of the subcode. */
1133 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1134 gimple_predict_set_predictor (p, predictor);
1135 gimple_predict_set_outcome (p, outcome);
1136 return p;
1139 #if defined ENABLE_GIMPLE_CHECKING
1140 /* Complain of a gimple type mismatch and die. */
1142 void
1143 gimple_check_failed (const_gimple gs, const char *file, int line,
1144 const char *function, enum gimple_code code,
1145 enum tree_code subcode)
1147 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1148 gimple_code_name[code],
1149 get_tree_code_name (subcode),
1150 gimple_code_name[gimple_code (gs)],
1151 gs->subcode > 0
1152 ? get_tree_code_name ((enum tree_code) gs->subcode)
1153 : "",
1154 function, trim_filename (file), line);
1156 #endif /* ENABLE_GIMPLE_CHECKING */
1159 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1160 *SEQ_P is NULL, a new sequence is allocated. */
1162 void
1163 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1165 gimple_stmt_iterator si;
1166 if (gs == NULL)
1167 return;
1169 si = gsi_last (*seq_p);
1170 gsi_insert_after (&si, gs, GSI_NEW_STMT);
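/* Illustrative sketch (not part of the original file): accumulating
   freshly built statements into a new sequence typically looks like

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, stmt1);
     gimple_seq_add_stmt (&seq, stmt2);

   where STMT1 and STMT2 are hypothetical statements; the sequence is
   allocated lazily on the first insertion.  */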
1173 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1174 *SEQ_P is NULL, a new sequence is allocated. This function is
1175 similar to gimple_seq_add_stmt, but does not scan the operands.
1176 During gimplification, we need to manipulate statement sequences
1177 before the def/use vectors have been constructed. */
1179 void
1180 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
1182 gimple_stmt_iterator si;
1184 if (gs == NULL)
1185 return;
1187 si = gsi_last (*seq_p);
1188 gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
1191 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1192 NULL, a new sequence is allocated. */
1194 void
1195 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1197 gimple_stmt_iterator si;
1198 if (src == NULL)
1199 return;
1201 si = gsi_last (*dst_p);
1202 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1205 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1206 NULL, a new sequence is allocated. This function is
1207 similar to gimple_seq_add_seq, but does not scan the operands. */
1209 void
1210 gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src)
1212 gimple_stmt_iterator si;
1213 if (src == NULL)
1214 return;
1216 si = gsi_last (*dst_p);
1217 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
1220 /* Determine whether to assign a location to the statement GS. */
1222 static bool
1223 should_carry_location_p (gimple gs)
1225 /* Don't emit a line note for a label. We particularly don't want to
1226 emit one for the break label, since it doesn't actually correspond
1227 to the beginning of the loop/switch. */
1228 if (gimple_code (gs) == GIMPLE_LABEL)
1229 return false;
1231 return true;
1234 /* Set the location for gimple statement GS to LOCATION. */
1236 static void
1237 annotate_one_with_location (gimple gs, location_t location)
1239 if (!gimple_has_location (gs)
1240 && !gimple_do_not_emit_location_p (gs)
1241 && should_carry_location_p (gs))
1242 gimple_set_location (gs, location);
1245 /* Set LOCATION for all the statements after iterator GSI in sequence
1246 SEQ. If GSI is pointing to the end of the sequence, start with the
1247 first statement in SEQ. */
1249 void
1250 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
1251 location_t location)
1253 if (gsi_end_p (gsi))
1254 gsi = gsi_start (seq);
1255 else
1256 gsi_next (&gsi);
1258 for (; !gsi_end_p (gsi); gsi_next (&gsi))
1259 annotate_one_with_location (gsi_stmt (gsi), location);
1262 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
1264 void
1265 annotate_all_with_location (gimple_seq stmt_p, location_t location)
1267 gimple_stmt_iterator i;
1269 if (gimple_seq_empty_p (stmt_p))
1270 return;
1272 for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
1274 gimple gs = gsi_stmt (i);
1275 annotate_one_with_location (gs, location);
1279 /* Helper function of empty_body_p. Return true if STMT is an empty
1280 statement. */
1282 static bool
1283 empty_stmt_p (gimple stmt)
1285 if (gimple_code (stmt) == GIMPLE_NOP)
1286 return true;
1287 if (gimple_bind bind_stmt = dyn_cast <gimple_bind> (stmt))
1288 return empty_body_p (gimple_bind_body (bind_stmt));
1289 return false;
1293 /* Return true if BODY contains nothing but empty statements. */
1295 bool
1296 empty_body_p (gimple_seq body)
1298 gimple_stmt_iterator i;
1300 if (gimple_seq_empty_p (body))
1301 return true;
1302 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1303 if (!empty_stmt_p (gsi_stmt (i))
1304 && !is_gimple_debug (gsi_stmt (i)))
1305 return false;
1307 return true;
1311 /* Perform a deep copy of sequence SRC and return the result. */
1313 gimple_seq
1314 gimple_seq_copy (gimple_seq src)
1316 gimple_stmt_iterator gsi;
1317 gimple_seq new_seq = NULL;
1318 gimple stmt;
1320 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1322 stmt = gimple_copy (gsi_stmt (gsi));
1323 gimple_seq_add_stmt (&new_seq, stmt);
1326 return new_seq;
1331 /* Return true if calls C1 and C2 are known to go to the same function. */
1333 bool
1334 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1336 if (gimple_call_internal_p (c1))
1337 return (gimple_call_internal_p (c2)
1338 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1339 else
1340 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1341 || (gimple_call_fndecl (c1)
1342 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1345 /* Detect flags from a GIMPLE_CALL. This is just like
1346 call_expr_flags, but for gimple tuples. */
1349 gimple_call_flags (const_gimple stmt)
1351 int flags;
1352 tree decl = gimple_call_fndecl (stmt);
1354 if (decl)
1355 flags = flags_from_decl_or_type (decl);
1356 else if (gimple_call_internal_p (stmt))
1357 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1358 else
1359 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1361 if (stmt->subcode & GF_CALL_NOTHROW)
1362 flags |= ECF_NOTHROW;
1364 return flags;
1367 /* Return the "fn spec" string for call STMT. */
1369 static const_tree
1370 gimple_call_fnspec (const_gimple stmt)
1372 tree type, attr;
1374 if (gimple_call_internal_p (stmt))
1375 return internal_fn_fnspec (gimple_call_internal_fn (stmt));
1377 type = gimple_call_fntype (stmt);
1378 if (!type)
1379 return NULL_TREE;
1381 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1382 if (!attr)
1383 return NULL_TREE;
1385 return TREE_VALUE (TREE_VALUE (attr));
1388 /* Detects argument flags for argument number ARG on call STMT. */
1391 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1393 const_tree attr = gimple_call_fnspec (stmt);
1395 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1396 return 0;
1398 switch (TREE_STRING_POINTER (attr)[1 + arg])
1400 case 'x':
1401 case 'X':
1402 return EAF_UNUSED;
1404 case 'R':
1405 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1407 case 'r':
1408 return EAF_NOCLOBBER | EAF_NOESCAPE;
1410 case 'W':
1411 return EAF_DIRECT | EAF_NOESCAPE;
1413 case 'w':
1414 return EAF_NOESCAPE;
1416 case '.':
1417 default:
1418 return 0;
1422 /* Detects return flags for the call STMT. */
1425 gimple_call_return_flags (const_gimple_call stmt)
1427 const_tree attr;
1429 if (gimple_call_flags (stmt) & ECF_MALLOC)
1430 return ERF_NOALIAS;
1432 attr = gimple_call_fnspec (stmt);
1433 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1434 return 0;
1436 switch (TREE_STRING_POINTER (attr)[0])
1438 case '1':
1439 case '2':
1440 case '3':
1441 case '4':
1442 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1444 case 'm':
1445 return ERF_NOALIAS;
1447 case '.':
1448 default:
1449 return 0;
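/* Illustrative note (not part of the original file): in a fnspec
   string, character 0 encodes the return value ('1'-'4' for "returns
   that argument", 'm' for a noalias return) and character 1 + I
   encodes argument I, as decoded by gimple_call_arg_flags above.  A
   hypothetical spec "1r" would therefore describe a call that returns
   its first argument and only reads through it.  */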
1454 /* Return true if GS is a copy assignment. */
1456 bool
1457 gimple_assign_copy_p (gimple gs)
1459 return (gimple_assign_single_p (gs)
1460 && is_gimple_val (gimple_op (gs, 1)));
1464 /* Return true if GS is a SSA_NAME copy assignment. */
1466 bool
1467 gimple_assign_ssa_name_copy_p (gimple gs)
1469 return (gimple_assign_single_p (gs)
1470 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1471 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1475 /* Return true if GS is an assignment with a unary RHS, but the
1476 operator has no effect on the assigned value. The logic is adapted
1477 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1478 instances in which STRIP_NOPS was previously applied to the RHS of
1479 an assignment.
1481 NOTE: In the use cases that led to the creation of this function
1482 and of gimple_assign_single_p, it is typical to test for either
1483 condition and to proceed in the same manner. In each case, the
1484 assigned value is represented by the single RHS operand of the
1485 assignment. I suspect there may be cases where gimple_assign_copy_p,
1486 gimple_assign_single_p, or equivalent logic is used where a similar
1487 treatment of unary NOPs is appropriate. */
1489 bool
1490 gimple_assign_unary_nop_p (gimple gs)
1492 return (is_gimple_assign (gs)
1493 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1494 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1495 && gimple_assign_rhs1 (gs) != error_mark_node
1496 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1497 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1500 /* Set BB to be the basic block holding G. */
1502 void
1503 gimple_set_bb (gimple stmt, basic_block bb)
1505 stmt->bb = bb;
1507 if (gimple_code (stmt) != GIMPLE_LABEL)
1508 return;
1510 /* If the statement is a label, add the label to block-to-labels map
1511 so that we can speed up edge creation for GIMPLE_GOTOs. */
1512 if (cfun->cfg)
1514 tree t;
1515 int uid;
1517 t = gimple_label_label (as_a <gimple_label> (stmt));
1518 uid = LABEL_DECL_UID (t);
1519 if (uid == -1)
1521 unsigned old_len =
1522 vec_safe_length (label_to_block_map_for_fn (cfun));
1523 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
1524 if (old_len <= (unsigned) uid)
1526 unsigned new_len = 3 * uid / 2 + 1;
1528 vec_safe_grow_cleared (label_to_block_map_for_fn (cfun),
1529 new_len);
1533 (*label_to_block_map_for_fn (cfun))[uid] = bb;
1538 /* Modify the RHS of the assignment pointed-to by GSI using the
1539 operands in the expression tree EXPR.
1541 NOTE: The statement pointed-to by GSI may be reallocated if it
1542 did not have enough operand slots.
1544 This function is useful to convert an existing tree expression into
1545 the flat representation used for the RHS of a GIMPLE assignment.
1546 It will reallocate memory as needed to expand or shrink the number
1547 of operand slots needed to represent EXPR.
1549 NOTE: If you find yourself building a tree and then calling this
1550 function, you are most certainly doing it the slow way. It is much
1551 better to build a new assignment or to use the function
1552 gimple_assign_set_rhs_with_ops, which does not require an
1553 expression tree to be built. */
1555 void
1556 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
1558 enum tree_code subcode;
1559 tree op1, op2, op3;
1561 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
1562 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
1566 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
1567 operands OP1, OP2 and OP3.
1569 NOTE: The statement pointed-to by GSI may be reallocated if it
1570 did not have enough operand slots. */
1572 void
1573 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
1574 tree op1, tree op2, tree op3)
1576 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
1577 gimple stmt = gsi_stmt (*gsi);
1579 /* If the new CODE needs more operands, allocate a new statement. */
1580 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
1582 tree lhs = gimple_assign_lhs (stmt);
1583 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
1584 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
1585 gimple_init_singleton (new_stmt);
1586 gsi_replace (gsi, new_stmt, true);
1587 stmt = new_stmt;
1589 /* The LHS needs to be reset as this also changes the SSA name
1590 on the LHS. */
1591 gimple_assign_set_lhs (stmt, lhs);
1594 gimple_set_num_ops (stmt, new_rhs_ops + 1);
1595 gimple_set_subcode (stmt, code);
1596 gimple_assign_set_rhs1 (stmt, op1);
1597 if (new_rhs_ops > 1)
1598 gimple_assign_set_rhs2 (stmt, op2);
1599 if (new_rhs_ops > 2)
1600 gimple_assign_set_rhs3 (stmt, op3);
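/* Illustrative sketch (not part of the original file): rewriting the
   RHS of the assignment at iterator GSI to the sum of two hypothetical
   trees A and B can be done without building a tree expression:

     gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, a, b);
     update_stmt (gsi_stmt (gsi));

   assuming the usual wrapper around gimple_assign_set_rhs_with_ops_1
   that passes NULL_TREE for OP3.  The statement is reallocated
   transparently if it needs more operand slots.  */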
1604 /* Return the LHS of a statement that performs an assignment,
1605 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1606 for a call to a function that returns no value, or for a
1607 statement other than an assignment or a call. */
1609 tree
1610 gimple_get_lhs (const_gimple stmt)
1612 enum gimple_code code = gimple_code (stmt);
1614 if (code == GIMPLE_ASSIGN)
1615 return gimple_assign_lhs (stmt);
1616 else if (code == GIMPLE_CALL)
1617 return gimple_call_lhs (stmt);
1618 else
1619 return NULL_TREE;
1623 /* Set the LHS of a statement that performs an assignment,
1624 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1626 void
1627 gimple_set_lhs (gimple stmt, tree lhs)
1629 enum gimple_code code = gimple_code (stmt);
1631 if (code == GIMPLE_ASSIGN)
1632 gimple_assign_set_lhs (stmt, lhs);
1633 else if (code == GIMPLE_CALL)
1634 gimple_call_set_lhs (stmt, lhs);
1635 else
1636 gcc_unreachable ();
1640 /* Return a deep copy of statement STMT. All the operands from STMT
1641 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
1642 and VUSE operand arrays are set to empty in the new copy. The new
1643 copy isn't part of any sequence. */
1645 gimple
1646 gimple_copy (gimple stmt)
1648 enum gimple_code code = gimple_code (stmt);
1649 unsigned num_ops = gimple_num_ops (stmt);
1650 gimple copy = gimple_alloc (code, num_ops);
1651 unsigned i;
1653 /* Shallow copy all the fields from STMT. */
1654 memcpy (copy, stmt, gimple_size (code));
1655 gimple_init_singleton (copy);
1657 /* If STMT has sub-statements, deep-copy them as well. */
1658 if (gimple_has_substatements (stmt))
1660 gimple_seq new_seq;
1661 tree t;
1663 switch (gimple_code (stmt))
1665 case GIMPLE_BIND:
1667 gimple_bind bind_stmt = as_a <gimple_bind> (stmt);
1668 gimple_bind bind_copy = as_a <gimple_bind> (copy);
1669 new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
1670 gimple_bind_set_body (bind_copy, new_seq);
1671 gimple_bind_set_vars (bind_copy,
1672 unshare_expr (gimple_bind_vars (bind_stmt)));
1673 gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
1675 break;
1677 case GIMPLE_CATCH:
1679 gimple_catch catch_stmt = as_a <gimple_catch> (stmt);
1680 gimple_catch catch_copy = as_a <gimple_catch> (copy);
1681 new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
1682 gimple_catch_set_handler (catch_copy, new_seq);
1683 t = unshare_expr (gimple_catch_types (catch_stmt));
1684 gimple_catch_set_types (catch_copy, t);
1686 break;
1688 case GIMPLE_EH_FILTER:
1690 gimple_eh_filter eh_filter_stmt = as_a <gimple_eh_filter> (stmt);
1691 gimple_eh_filter eh_filter_copy = as_a <gimple_eh_filter> (copy);
1692 new_seq =
1693 gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
1694 gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
1695 t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
1696 gimple_eh_filter_set_types (eh_filter_copy, t);
1698 break;
1700 case GIMPLE_EH_ELSE:
1702 gimple_eh_else eh_else_stmt = as_a <gimple_eh_else> (stmt);
1703 gimple_eh_else eh_else_copy = as_a <gimple_eh_else> (copy);
1704 new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
1705 gimple_eh_else_set_n_body (eh_else_copy, new_seq);
1706 new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
1707 gimple_eh_else_set_e_body (eh_else_copy, new_seq);
1709 break;
1711 case GIMPLE_TRY:
1713 gimple_try try_stmt = as_a <gimple_try> (stmt);
1714 gimple_try try_copy = as_a <gimple_try> (copy);
1715 new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
1716 gimple_try_set_eval (try_copy, new_seq);
1717 new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
1718 gimple_try_set_cleanup (try_copy, new_seq);
1720 break;
1722 case GIMPLE_OMP_FOR:
1723 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
1724 gimple_omp_for_set_pre_body (copy, new_seq);
1725 t = unshare_expr (gimple_omp_for_clauses (stmt));
1726 gimple_omp_for_set_clauses (copy, t);
1728 gimple_omp_for omp_for_copy = as_a <gimple_omp_for> (copy);
1729 omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
1730 ( gimple_omp_for_collapse (stmt));
1732 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1734 gimple_omp_for_set_cond (copy, i,
1735 gimple_omp_for_cond (stmt, i));
1736 gimple_omp_for_set_index (copy, i,
1737 gimple_omp_for_index (stmt, i));
1738 t = unshare_expr (gimple_omp_for_initial (stmt, i));
1739 gimple_omp_for_set_initial (copy, i, t);
1740 t = unshare_expr (gimple_omp_for_final (stmt, i));
1741 gimple_omp_for_set_final (copy, i, t);
1742 t = unshare_expr (gimple_omp_for_incr (stmt, i));
1743 gimple_omp_for_set_incr (copy, i, t);
1745 goto copy_omp_body;
1747 case GIMPLE_OMP_PARALLEL:
1749 gimple_omp_parallel omp_par_stmt =
1750 as_a <gimple_omp_parallel> (stmt);
1751 gimple_omp_parallel omp_par_copy =
1752 as_a <gimple_omp_parallel> (copy);
1753 t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
1754 gimple_omp_parallel_set_clauses (omp_par_copy, t);
1755 t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
1756 gimple_omp_parallel_set_child_fn (omp_par_copy, t);
1757 t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
1758 gimple_omp_parallel_set_data_arg (omp_par_copy, t);
1760 goto copy_omp_body;
1762 case GIMPLE_OMP_TASK:
1763 t = unshare_expr (gimple_omp_task_clauses (stmt));
1764 gimple_omp_task_set_clauses (copy, t);
1765 t = unshare_expr (gimple_omp_task_child_fn (stmt));
1766 gimple_omp_task_set_child_fn (copy, t);
1767 t = unshare_expr (gimple_omp_task_data_arg (stmt));
1768 gimple_omp_task_set_data_arg (copy, t);
1769 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
1770 gimple_omp_task_set_copy_fn (copy, t);
1771 t = unshare_expr (gimple_omp_task_arg_size (stmt));
1772 gimple_omp_task_set_arg_size (copy, t);
1773 t = unshare_expr (gimple_omp_task_arg_align (stmt));
1774 gimple_omp_task_set_arg_align (copy, t);
1775 goto copy_omp_body;
1777 case GIMPLE_OMP_CRITICAL:
1778 t = unshare_expr (gimple_omp_critical_name (
1779 as_a <gimple_omp_critical> (stmt)));
1780 gimple_omp_critical_set_name (as_a <gimple_omp_critical> (copy), t);
1781 goto copy_omp_body;
1783 case GIMPLE_OMP_SECTIONS:
1784 t = unshare_expr (gimple_omp_sections_clauses (stmt));
1785 gimple_omp_sections_set_clauses (copy, t);
1786 t = unshare_expr (gimple_omp_sections_control (stmt));
1787 gimple_omp_sections_set_control (copy, t);
1788 /* FALLTHRU */
1790 case GIMPLE_OMP_SINGLE:
1791 case GIMPLE_OMP_TARGET:
1792 case GIMPLE_OMP_TEAMS:
1793 case GIMPLE_OMP_SECTION:
1794 case GIMPLE_OMP_MASTER:
1795 case GIMPLE_OMP_TASKGROUP:
1796 case GIMPLE_OMP_ORDERED:
1797 copy_omp_body:
1798 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
1799 gimple_omp_set_body (copy, new_seq);
1800 break;
1802 case GIMPLE_TRANSACTION:
1803 new_seq = gimple_seq_copy (gimple_transaction_body (
1804 as_a <gimple_transaction> (stmt)));
1805 gimple_transaction_set_body (as_a <gimple_transaction> (copy),
1806 new_seq);
1807 break;
1809 case GIMPLE_WITH_CLEANUP_EXPR:
1810 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
1811 gimple_wce_set_cleanup (copy, new_seq);
1812 break;
1814 default:
1815 gcc_unreachable ();
1819 /* Make copy of operands. */
1820 for (i = 0; i < num_ops; i++)
1821 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
1823 if (gimple_has_mem_ops (stmt))
1825 gimple_set_vdef (copy, gimple_vdef (stmt));
1826 gimple_set_vuse (copy, gimple_vuse (stmt));
1829 /* Clear out SSA operand vectors on COPY. */
1830 if (gimple_has_ops (stmt))
1832 gimple_set_use_ops (copy, NULL);
1834 /* SSA operands need to be updated. */
1835 gimple_set_modified (copy, true);
1838 return copy;
1842 /* Return true if statement S has side-effects. We consider a
1843 statement to have side effects if:
1845 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1846 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
1848 bool
1849 gimple_has_side_effects (const_gimple s)
1851 if (is_gimple_debug (s))
1852 return false;
1854 /* We don't have to scan the arguments to check for
1855 volatile arguments, though, at present, we still
1856 do a scan to check for TREE_SIDE_EFFECTS. */
1857 if (gimple_has_volatile_ops (s))
1858 return true;
1860 if (gimple_code (s) == GIMPLE_ASM
1861 && gimple_asm_volatile_p (as_a <const_gimple_asm> (s)))
1862 return true;
1864 if (is_gimple_call (s))
1866 int flags = gimple_call_flags (s);
1868 /* An infinite loop is considered a side effect. */
1869 if (!(flags & (ECF_CONST | ECF_PURE))
1870 || (flags & ECF_LOOPING_CONST_OR_PURE))
1871 return true;
1873 return false;
1876 return false;
1879 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
1880 Return true if S can trap. When INCLUDE_MEM is true, check whether
1881 the memory operations could trap. When INCLUDE_STORES is true and
1882 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
1884 bool
1885 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
1887 tree t, div = NULL_TREE;
1888 enum tree_code op;
1890 if (include_mem)
1892 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
1894 for (i = start; i < gimple_num_ops (s); i++)
1895 if (tree_could_trap_p (gimple_op (s, i)))
1896 return true;
1899 switch (gimple_code (s))
1901 case GIMPLE_ASM:
1902 return gimple_asm_volatile_p (as_a <gimple_asm> (s));
1904 case GIMPLE_CALL:
1905 t = gimple_call_fndecl (s);
1906 /* Assume that calls to weak functions may trap. */
1907 if (!t || !DECL_P (t) || DECL_WEAK (t))
1908 return true;
1909 return false;
1911 case GIMPLE_ASSIGN:
1912 t = gimple_expr_type (s);
1913 op = gimple_assign_rhs_code (s);
1914 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
1915 div = gimple_assign_rhs2 (s);
1916 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
1917 (INTEGRAL_TYPE_P (t)
1918 && TYPE_OVERFLOW_TRAPS (t)),
1919 div));
1921 default:
1922 break;
1925 return false;
1928 /* Return true if statement S can trap. */
1930 bool
1931 gimple_could_trap_p (gimple s)
1933 return gimple_could_trap_p_1 (s, true, true);
1936 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
1938 bool
1939 gimple_assign_rhs_could_trap_p (gimple s)
1941 gcc_assert (is_gimple_assign (s));
1942 return gimple_could_trap_p_1 (s, true, false);
1946 /* Print debugging information for gimple stmts generated. */
1948 void
1949 dump_gimple_statistics (void)
1951 int i, total_tuples = 0, total_bytes = 0;
1953 if (! GATHER_STATISTICS)
1955 fprintf (stderr, "No gimple statistics\n");
1956 return;
1959 fprintf (stderr, "\nGIMPLE statements\n");
1960 fprintf (stderr, "Kind Stmts Bytes\n");
1961 fprintf (stderr, "---------------------------------------\n");
1962 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
1964 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
1965 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
1966 total_tuples += gimple_alloc_counts[i];
1967 total_bytes += gimple_alloc_sizes[i];
1969 fprintf (stderr, "---------------------------------------\n");
1970 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
1971 fprintf (stderr, "---------------------------------------\n");
1975 /* Return the number of operands needed on the RHS of a GIMPLE
1976 assignment for an expression with tree code CODE. */
1978 unsigned
1979 get_gimple_rhs_num_ops (enum tree_code code)
1981 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
1983 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
1984 return 1;
1985 else if (rhs_class == GIMPLE_BINARY_RHS)
1986 return 2;
1987 else if (rhs_class == GIMPLE_TERNARY_RHS)
1988 return 3;
1989 else
1990 gcc_unreachable ();
1993 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
1994 (unsigned char) \
1995 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
1996 : ((TYPE) == tcc_binary \
1997 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
1998 : ((TYPE) == tcc_constant \
1999 || (TYPE) == tcc_declaration \
2000 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2001 : ((SYM) == TRUTH_AND_EXPR \
2002 || (SYM) == TRUTH_OR_EXPR \
2003 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2004 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2005 : ((SYM) == COND_EXPR \
2006 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2007 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2008 || (SYM) == DOT_PROD_EXPR \
2009 || (SYM) == SAD_EXPR \
2010 || (SYM) == REALIGN_LOAD_EXPR \
2011 || (SYM) == VEC_COND_EXPR \
2012 || (SYM) == VEC_PERM_EXPR \
2013 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2014 : ((SYM) == CONSTRUCTOR \
2015 || (SYM) == OBJ_TYPE_REF \
2016 || (SYM) == ASSERT_EXPR \
2017 || (SYM) == ADDR_EXPR \
2018 || (SYM) == WITH_SIZE_EXPR \
2019 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2020 : GIMPLE_INVALID_RHS),
2021 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2023 const unsigned char gimple_rhs_class_table[] = {
2024 #include "all-tree.def"
2027 #undef DEFTREECODE
2028 #undef END_OF_BASE_TREE_CODES
2030 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2031 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
2032 we failed to create one. */
2034 tree
2035 canonicalize_cond_expr_cond (tree t)
2037 /* Strip conversions around boolean operations. */
2038 if (CONVERT_EXPR_P (t)
2039 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2040 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2041 == BOOLEAN_TYPE))
2042 t = TREE_OPERAND (t, 0);
2044 /* For !x use x == 0. */
2045 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2047 tree top0 = TREE_OPERAND (t, 0);
2048 t = build2 (EQ_EXPR, TREE_TYPE (t),
2049 top0, build_int_cst (TREE_TYPE (top0), 0));
2051 /* For cmp ? 1 : 0 use cmp. */
2052 else if (TREE_CODE (t) == COND_EXPR
2053 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2054 && integer_onep (TREE_OPERAND (t, 1))
2055 && integer_zerop (TREE_OPERAND (t, 2)))
2057 tree top0 = TREE_OPERAND (t, 0);
2058 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2059 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2061 /* For x ^ y use x != y. */
2062 else if (TREE_CODE (t) == BIT_XOR_EXPR)
2063 t = build2 (NE_EXPR, TREE_TYPE (t),
2064 TREE_OPERAND (t, 0), TREE_OPERAND (t, 1));
2066 if (is_gimple_condexpr (t))
2067 return t;
2069 return NULL_TREE;
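/* A minimal usage sketch, assuming A and B are boolean-typed SSA names
   already available at the point of use:

     tree t = build2 (BIT_XOR_EXPR, boolean_type_node, a, b);
     tree cond = canonicalize_cond_expr_cond (t);

   Here COND becomes the comparison NE_EXPR <a, b>; a TRUTH_NOT_EXPR
   input would instead yield EQ_EXPR <op, 0>, and NULL_TREE is returned
   whenever the result would not satisfy is_gimple_condexpr.  */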
2072 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2073 the positions marked by the set ARGS_TO_SKIP. */
2075 gimple_call
2076 gimple_call_copy_skip_args (gimple_call stmt, bitmap args_to_skip)
2078 int i;
2079 int nargs = gimple_call_num_args (stmt);
2080 auto_vec<tree> vargs (nargs);
2081 gimple_call new_stmt;
2083 for (i = 0; i < nargs; i++)
2084 if (!bitmap_bit_p (args_to_skip, i))
2085 vargs.quick_push (gimple_call_arg (stmt, i));
2087 if (gimple_call_internal_p (stmt))
2088 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2089 vargs);
2090 else
2091 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2093 if (gimple_call_lhs (stmt))
2094 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2096 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2097 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2099 if (gimple_has_location (stmt))
2100 gimple_set_location (new_stmt, gimple_location (stmt));
2101 gimple_call_copy_flags (new_stmt, stmt);
2102 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2104 gimple_set_modified (new_stmt, true);
2106 return new_stmt;
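/* A minimal usage sketch, assuming GSI is a gimple_stmt_iterator
   positioned at CALL and that the second argument should be dropped:

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     gimple_call new_call = gimple_call_copy_skip_args (call, skip);
     gsi_replace (&gsi, new_call, false);
     BITMAP_FREE (skip);

   The copy shares the lhs, virtual operands, location and call flags of
   the original statement, so the caller only needs to splice it in.  */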
2111 /* Return true if the field decls F1 and F2 are at the same offset.
2113 This is intended to be used on GIMPLE types only. */
2115 bool
2116 gimple_compare_field_offset (tree f1, tree f2)
2118 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
2120 tree offset1 = DECL_FIELD_OFFSET (f1);
2121 tree offset2 = DECL_FIELD_OFFSET (f2);
2122 return ((offset1 == offset2
2123 /* Once gimplification is done, self-referential offsets are
2124 instantiated as operand #2 of the COMPONENT_REF built for
2125 each access and reset. Therefore, they are not relevant
2126 anymore and fields are interchangeable provided that they
2127 represent the same access. */
2128 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
2129 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
2130 && (DECL_SIZE (f1) == DECL_SIZE (f2)
2131 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
2132 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
2133 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
2134 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
2135 || operand_equal_p (offset1, offset2, 0))
2136 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
2137 DECL_FIELD_BIT_OFFSET (f2)));
2140 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
2141 should be, so handle differing ones specially by decomposing
2142 the offset into a byte and bit offset manually. */
2143 if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1))
2144 && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)))
2146 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
2147 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
2148 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
2149 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
2150 + bit_offset1 / BITS_PER_UNIT);
2151 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
2152 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
2153 + bit_offset2 / BITS_PER_UNIT);
2154 if (byte_offset1 != byte_offset2)
2155 return false;
2156 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
2159 return false;
2163 /* Return a type the same as TYPE except unsigned or
2164 signed according to UNSIGNEDP. */
2166 static tree
2167 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
2169 tree type1;
2171 type1 = TYPE_MAIN_VARIANT (type);
2172 if (type1 == signed_char_type_node
2173 || type1 == char_type_node
2174 || type1 == unsigned_char_type_node)
2175 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2176 if (type1 == integer_type_node || type1 == unsigned_type_node)
2177 return unsignedp ? unsigned_type_node : integer_type_node;
2178 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
2179 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2180 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
2181 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2182 if (type1 == long_long_integer_type_node
2183 || type1 == long_long_unsigned_type_node)
2184 return unsignedp
2185 ? long_long_unsigned_type_node
2186 : long_long_integer_type_node;
2187 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
2188 return unsignedp
2189 ? int128_unsigned_type_node
2190 : int128_integer_type_node;
2191 #if HOST_BITS_PER_WIDE_INT >= 64
2192 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
2193 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2194 #endif
2195 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
2196 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2197 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
2198 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2199 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
2200 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2201 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
2202 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2204 #define GIMPLE_FIXED_TYPES(NAME) \
2205 if (type1 == short_ ## NAME ## _type_node \
2206 || type1 == unsigned_short_ ## NAME ## _type_node) \
2207 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
2208 : short_ ## NAME ## _type_node; \
2209 if (type1 == NAME ## _type_node \
2210 || type1 == unsigned_ ## NAME ## _type_node) \
2211 return unsignedp ? unsigned_ ## NAME ## _type_node \
2212 : NAME ## _type_node; \
2213 if (type1 == long_ ## NAME ## _type_node \
2214 || type1 == unsigned_long_ ## NAME ## _type_node) \
2215 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
2216 : long_ ## NAME ## _type_node; \
2217 if (type1 == long_long_ ## NAME ## _type_node \
2218 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
2219 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
2220 : long_long_ ## NAME ## _type_node;
2222 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
2223 if (type1 == NAME ## _type_node \
2224 || type1 == u ## NAME ## _type_node) \
2225 return unsignedp ? u ## NAME ## _type_node \
2226 : NAME ## _type_node;
2228 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
2229 if (type1 == sat_ ## short_ ## NAME ## _type_node \
2230 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
2231 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
2232 : sat_ ## short_ ## NAME ## _type_node; \
2233 if (type1 == sat_ ## NAME ## _type_node \
2234 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
2235 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
2236 : sat_ ## NAME ## _type_node; \
2237 if (type1 == sat_ ## long_ ## NAME ## _type_node \
2238 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
2239 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
2240 : sat_ ## long_ ## NAME ## _type_node; \
2241 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
2242 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
2243 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
2244 : sat_ ## long_long_ ## NAME ## _type_node;
2246 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
2247 if (type1 == sat_ ## NAME ## _type_node \
2248 || type1 == sat_ ## u ## NAME ## _type_node) \
2249 return unsignedp ? sat_ ## u ## NAME ## _type_node \
2250 : sat_ ## NAME ## _type_node;
2252 GIMPLE_FIXED_TYPES (fract);
2253 GIMPLE_FIXED_TYPES_SAT (fract);
2254 GIMPLE_FIXED_TYPES (accum);
2255 GIMPLE_FIXED_TYPES_SAT (accum);
2257 GIMPLE_FIXED_MODE_TYPES (qq);
2258 GIMPLE_FIXED_MODE_TYPES (hq);
2259 GIMPLE_FIXED_MODE_TYPES (sq);
2260 GIMPLE_FIXED_MODE_TYPES (dq);
2261 GIMPLE_FIXED_MODE_TYPES (tq);
2262 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
2263 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
2264 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
2265 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
2266 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
2267 GIMPLE_FIXED_MODE_TYPES (ha);
2268 GIMPLE_FIXED_MODE_TYPES (sa);
2269 GIMPLE_FIXED_MODE_TYPES (da);
2270 GIMPLE_FIXED_MODE_TYPES (ta);
2271 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
2272 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
2273 GIMPLE_FIXED_MODE_TYPES_SAT (da);
2274 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
2276 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
2277 the precision; they have precision set to match their range, but
2278 may use a wider mode to match an ABI. If we change modes, we may
2279 wind up with bad conversions. For INTEGER_TYPEs in C, must check
2280 the precision as well, so as to yield correct results for
2281 bit-field types. C++ does not have these separate bit-field
2282 types, and producing a signed or unsigned variant of an
2283 ENUMERAL_TYPE may cause other problems as well. */
2284 if (!INTEGRAL_TYPE_P (type)
2285 || TYPE_UNSIGNED (type) == unsignedp)
2286 return type;
2288 #define TYPE_OK(node) \
2289 (TYPE_MODE (type) == TYPE_MODE (node) \
2290 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
2291 if (TYPE_OK (signed_char_type_node))
2292 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
2293 if (TYPE_OK (integer_type_node))
2294 return unsignedp ? unsigned_type_node : integer_type_node;
2295 if (TYPE_OK (short_integer_type_node))
2296 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
2297 if (TYPE_OK (long_integer_type_node))
2298 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
2299 if (TYPE_OK (long_long_integer_type_node))
2300 return (unsignedp
2301 ? long_long_unsigned_type_node
2302 : long_long_integer_type_node);
2303 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
2304 return (unsignedp
2305 ? int128_unsigned_type_node
2306 : int128_integer_type_node);
2308 #if HOST_BITS_PER_WIDE_INT >= 64
2309 if (TYPE_OK (intTI_type_node))
2310 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
2311 #endif
2312 if (TYPE_OK (intDI_type_node))
2313 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
2314 if (TYPE_OK (intSI_type_node))
2315 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
2316 if (TYPE_OK (intHI_type_node))
2317 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
2318 if (TYPE_OK (intQI_type_node))
2319 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
2321 #undef GIMPLE_FIXED_TYPES
2322 #undef GIMPLE_FIXED_MODE_TYPES
2323 #undef GIMPLE_FIXED_TYPES_SAT
2324 #undef GIMPLE_FIXED_MODE_TYPES_SAT
2325 #undef TYPE_OK
2327 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
2331 /* Return an unsigned type the same as TYPE in other respects. */
2333 tree
2334 gimple_unsigned_type (tree type)
2336 return gimple_signed_or_unsigned_type (true, type);
2340 /* Return a signed type the same as TYPE in other respects. */
2342 tree
2343 gimple_signed_type (tree type)
2345 return gimple_signed_or_unsigned_type (false, type);
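/* Illustrative values for the two wrappers above:

     gimple_unsigned_type (integer_type_node)      == unsigned_type_node
     gimple_signed_type (unsigned_char_type_node)  == signed_char_type_node

   For an integral type with no matching standard node the result falls
   back to build_nonstandard_integer_type with the same precision.  */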
2349 /* Return the type-based alias set for T, which may be an expression
2350 or a type. Return -1 if we don't do anything special. */
2352 alias_set_type
2353 gimple_get_alias_set (tree t)
2355 tree u;
2357 /* Permit type-punning when accessing a union, provided the access
2358 is directly through the union. For example, this code does not
2359 permit taking the address of a union member and then storing
2360 through it. Even the type-punning allowed here is a GCC
2361 extension, albeit a common and useful one; the C standard says
2362 that such accesses have implementation-defined behavior. */
2363 for (u = t;
2364 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
2365 u = TREE_OPERAND (u, 0))
2366 if (TREE_CODE (u) == COMPONENT_REF
2367 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
2368 return 0;
2370 /* That's all the expressions we handle specially. */
2371 if (!TYPE_P (t))
2372 return -1;
2374 /* For convenience, follow the C standard when dealing with
2375 character types. Any object may be accessed via an lvalue that
2376 has character type. */
2377 if (t == char_type_node
2378 || t == signed_char_type_node
2379 || t == unsigned_char_type_node)
2380 return 0;
2382 /* Allow aliasing between signed and unsigned variants of the same
2383 type. We treat the signed variant as canonical. */
2384 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
2386 tree t1 = gimple_signed_type (t);
2388 /* t1 == t can happen for boolean nodes which are always unsigned. */
2389 if (t1 != t)
2390 return get_alias_set (t1);
2393 return -1;
2397 /* Helper callback for gimple_ior_addresses_taken.  */
2399 static bool
2400 gimple_ior_addresses_taken_1 (gimple, tree addr, tree, void *data)
2402 bitmap addresses_taken = (bitmap)data;
2403 addr = get_base_address (addr);
2404 if (addr
2405 && DECL_P (addr))
2407 bitmap_set_bit (addresses_taken, DECL_UID (addr));
2408 return true;
2410 return false;
2413 /* Set the bit for the uid of all decls that have their address taken
2414 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2415 were any in this stmt. */
2417 bool
2418 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
2420 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
2421 gimple_ior_addresses_taken_1);
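/* A minimal usage sketch, assuming STMT is any statement of interest:

     bitmap taken = BITMAP_ALLOC (NULL);
     if (gimple_ior_addresses_taken (taken, stmt))
       dump_decl_set (stderr, taken);
     BITMAP_FREE (taken);

   After the call, TAKEN has one bit set per DECL_UID of a decl whose
   address is taken somewhere in STMT's operands.  */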
2425 /* Return true if TYPE1 and TYPE2 are compatible enough for builtin
2426 processing. */
2428 static bool
2429 validate_type (tree type1, tree type2)
2431 if (INTEGRAL_TYPE_P (type1)
2432 && INTEGRAL_TYPE_P (type2))
2434 else if (POINTER_TYPE_P (type1)
2435 && POINTER_TYPE_P (type2))
2437 else if (TREE_CODE (type1)
2438 != TREE_CODE (type2))
2439 return false;
2440 return true;
2443 /* Return true when STMT's arguments and return value match those of FNDECL,
2444 a decl of a builtin function. */
2446 bool
2447 gimple_builtin_call_types_compatible_p (const_gimple stmt, tree fndecl)
2449 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN);
2451 tree ret = gimple_call_lhs (stmt);
2452 if (ret
2453 && !validate_type (TREE_TYPE (ret), TREE_TYPE (TREE_TYPE (fndecl))))
2454 return false;
2456 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2457 unsigned nargs = gimple_call_num_args (stmt);
2458 for (unsigned i = 0; i < nargs; ++i)
2460 /* Variadic args follow. */
2461 if (!targs)
2462 return true;
2463 tree arg = gimple_call_arg (stmt, i);
2464 if (!validate_type (TREE_TYPE (arg), TREE_VALUE (targs)))
2465 return false;
2466 targs = TREE_CHAIN (targs);
2468 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
2469 return false;
2470 return true;
2473 /* Return true when STMT is a call to a builtin function.  */
2475 bool
2476 gimple_call_builtin_p (const_gimple stmt)
2478 tree fndecl;
2479 if (is_gimple_call (stmt)
2480 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2481 && DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)
2482 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2483 return false;
2486 /* Return true when STMT is a call to a builtin function of class KLASS.  */
2488 bool
2489 gimple_call_builtin_p (const_gimple stmt, enum built_in_class klass)
2491 tree fndecl;
2492 if (is_gimple_call (stmt)
2493 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2494 && DECL_BUILT_IN_CLASS (fndecl) == klass)
2495 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2496 return false;
2499 /* Return true when STMT is a call to the BUILT_IN_NORMAL builtin CODE.  */
2501 bool
2502 gimple_call_builtin_p (const_gimple stmt, enum built_in_function code)
2504 tree fndecl;
2505 if (is_gimple_call (stmt)
2506 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
2507 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2508 && DECL_FUNCTION_CODE (fndecl) == code)
2509 return gimple_builtin_call_types_compatible_p (stmt, fndecl);
2510 return false;
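/* A typical guard using the most specific overload above, as a pass
   might write it (the transformation itself is left out):

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       {
         tree dest = gimple_call_arg (stmt, 0);
         tree src = gimple_call_arg (stmt, 1);
         tree len = gimple_call_arg (stmt, 2);
         ...
       }

   The type compatibility check performed by these predicates protects
   such code from bogus user declarations like "void memcpy (int)".  */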
2513 /* Return true if STMT clobbers memory. STMT is required to be a
2514 GIMPLE_ASM. */
2516 bool
2517 gimple_asm_clobbers_memory_p (const_gimple_asm stmt)
2519 unsigned i;
2521 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
2523 tree op = gimple_asm_clobber_op (stmt, i);
2524 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
2525 return true;
2528 return false;
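/* A usage sketch following the dyn_cast idiom used elsewhere in this
   file; MARK_ALL_MEMORY_CLOBBERED stands in for a hypothetical caller
   action:

     if (gimple_asm asm_stmt = dyn_cast <gimple_asm> (stmt))
       if (gimple_asm_clobbers_memory_p (asm_stmt))
         mark_all_memory_clobbered ();  */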
2531 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2533 void
2534 dump_decl_set (FILE *file, bitmap set)
2536 if (set)
2538 bitmap_iterator bi;
2539 unsigned i;
2541 fprintf (file, "{ ");
2543 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
2545 fprintf (file, "D.%u", i);
2546 fprintf (file, " ");
2549 fprintf (file, "}");
2551 else
2552 fprintf (file, "NIL");
2555 /* Return true when CALL is a call stmt that definitely doesn't
2556 free any memory or make it unavailable in some other way.  */
2557 bool
2558 nonfreeing_call_p (gimple call)
2560 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2561 && gimple_call_flags (call) & ECF_LEAF)
2562 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2564 /* Just in case these become ECF_LEAF in the future. */
2565 case BUILT_IN_FREE:
2566 case BUILT_IN_TM_FREE:
2567 case BUILT_IN_REALLOC:
2568 case BUILT_IN_STACK_RESTORE:
2569 return false;
2570 default:
2571 return true;
2574 return false;
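/* A usage sketch: an alias or escape analysis can consult the predicate
   to decide whether pointed-to memory stays valid across a call, where
   INVALIDATE_POINTED_TO_MEMORY is a hypothetical caller action:

     if (is_gimple_call (stmt) && !nonfreeing_call_p (stmt))
       invalidate_pointed_to_memory ();

   Only leaf BUILT_IN_NORMAL calls, minus free, realloc and friends, are
   reported as definitely non-freeing here.  */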
2577 /* Callback for walk_stmt_load_store_ops.
2579 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2580 otherwise.
2582 This routine only makes a superficial check for a dereference. Thus
2583 it must only be used if it is safe to return a false negative. */
2584 static bool
2585 check_loadstore (gimple, tree op, tree, void *data)
2587 if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
2588 && operand_equal_p (TREE_OPERAND (op, 0), (tree)data, 0))
2589 return true;
2590 return false;
2593 /* If OP can be inferred to be non-NULL after STMT executes, return true.
2595 DEREFERENCE is TRUE if we can use a pointer dereference to infer a
2596 non-NULL range, FALSE otherwise.
2598 ATTRIBUTE is TRUE if we can use attributes to infer a non-NULL range
2599 for function arguments and return values, FALSE otherwise.  */
2601 bool
2602 infer_nonnull_range (gimple stmt, tree op, bool dereference, bool attribute)
2604 /* We can only assume that a pointer dereference will yield
2605 non-NULL if -fdelete-null-pointer-checks is enabled. */
2606 if (!flag_delete_null_pointer_checks
2607 || !POINTER_TYPE_P (TREE_TYPE (op))
2608 || gimple_code (stmt) == GIMPLE_ASM)
2609 return false;
2611 if (dereference
2612 && walk_stmt_load_store_ops (stmt, (void *)op,
2613 check_loadstore, check_loadstore))
2614 return true;
2616 if (attribute
2617 && is_gimple_call (stmt) && !gimple_call_internal_p (stmt))
2619 tree fntype = gimple_call_fntype (stmt);
2620 tree attrs = TYPE_ATTRIBUTES (fntype);
2621 for (; attrs; attrs = TREE_CHAIN (attrs))
2623 attrs = lookup_attribute ("nonnull", attrs);
2625 /* If "nonnull" wasn't specified, we know nothing about
2626 the argument. */
2627 if (attrs == NULL_TREE)
2628 return false;
2630 /* If "nonnull" applies to all the arguments, then ARG
2631 is non-null if it's in the argument list. */
2632 if (TREE_VALUE (attrs) == NULL_TREE)
2634 for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++)
2636 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))
2637 && operand_equal_p (op, gimple_call_arg (stmt, i), 0))
2638 return true;
2640 return false;
2643 /* Now see if op appears in the nonnull list. */
2644 for (tree t = TREE_VALUE (attrs); t; t = TREE_CHAIN (t))
2646 int idx = TREE_INT_CST_LOW (TREE_VALUE (t)) - 1;
2647 tree arg = gimple_call_arg (stmt, idx);
2648 if (operand_equal_p (op, arg, 0))
2649 return true;
2654 /* If this function is marked as returning non-null, then we can
2655 infer OP is non-null if it is used in the return statement. */
2656 if (attribute)
2657 if (gimple_return return_stmt = dyn_cast <gimple_return> (stmt))
2658 if (gimple_return_retval (return_stmt)
2659 && operand_equal_p (gimple_return_retval (return_stmt), op, 0)
2660 && lookup_attribute ("returns_nonnull",
2661 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
2662 return true;
2664 return false;
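/* A usage sketch, assuming NAME is an SSA pointer used by STMT and
   SET_NONNULL_RANGE_INFO is a hypothetical caller hook:

     if (infer_nonnull_range (stmt, name, true, true))
       set_nonnull_range_info (name);

   Passing TRUE for the last two arguments enables both the
   dereference-based and the attribute-based inference.  */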
2667 /* Compare two case labels. Because the front end should already have
2668 made sure that case ranges do not overlap, it is enough to only compare
2669 the CASE_LOW values of each case label. */
2671 static int
2672 compare_case_labels (const void *p1, const void *p2)
2674 const_tree const case1 = *(const_tree const*)p1;
2675 const_tree const case2 = *(const_tree const*)p2;
2677 /* The 'default' case label always goes first. */
2678 if (!CASE_LOW (case1))
2679 return -1;
2680 else if (!CASE_LOW (case2))
2681 return 1;
2682 else
2683 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
2686 /* Sort the case labels in LABEL_VEC in place in ascending order. */
2688 void
2689 sort_case_labels (vec<tree> label_vec)
2691 label_vec.qsort (compare_case_labels);
2694 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
2696 LABELS is a vector that contains all case labels to look at.
2698 INDEX_TYPE is the type of the switch index expression. Case labels
2699 in LABELS are discarded if their values are not in the value range
2700 covered by INDEX_TYPE. The remaining case label values are folded
2701 to INDEX_TYPE.
2703 If a default case exists in LABELS, it is removed from LABELS and
2704 returned in DEFAULT_CASEP. If no default case exists, but the
2705 case labels already cover the whole range of INDEX_TYPE, a default
2706 case is returned pointing to one of the existing case labels.
2707 Otherwise DEFAULT_CASEP is set to NULL_TREE.
2709 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
2710 apply and no action is taken regardless of whether a default case is
2711 found or not. */
2713 void
2714 preprocess_case_label_vec_for_gimple (vec<tree> labels,
2715 tree index_type,
2716 tree *default_casep)
2718 tree min_value, max_value;
2719 tree default_case = NULL_TREE;
2720 size_t i, len;
2722 i = 0;
2723 min_value = TYPE_MIN_VALUE (index_type);
2724 max_value = TYPE_MAX_VALUE (index_type);
2725 while (i < labels.length ())
2727 tree elt = labels[i];
2728 tree low = CASE_LOW (elt);
2729 tree high = CASE_HIGH (elt);
2730 bool remove_element = FALSE;
2732 if (low)
2734 gcc_checking_assert (TREE_CODE (low) == INTEGER_CST);
2735 gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST);
2737 /* This is a non-default case label, i.e. it has a value.
2739 See if the case label is reachable within the range of
2740 the index type. Remove out-of-range case values. Turn
2741 case ranges into a canonical form (high > low strictly)
2742 and convert the case label values to the index type.
2744 NB: The type of gimple_switch_index() may be the promoted
2745 type, but the case labels retain the original type. */
2747 if (high)
2749 /* This is a case range. Discard empty ranges.
2750 If the bounds or the range are equal, turn this
2751 into a simple (one-value) case. */
2752 int cmp = tree_int_cst_compare (high, low);
2753 if (cmp < 0)
2754 remove_element = TRUE;
2755 else if (cmp == 0)
2756 high = NULL_TREE;
2759 if (! high)
2761 /* If the simple case value is unreachable, ignore it. */
2762 if ((TREE_CODE (min_value) == INTEGER_CST
2763 && tree_int_cst_compare (low, min_value) < 0)
2764 || (TREE_CODE (max_value) == INTEGER_CST
2765 && tree_int_cst_compare (low, max_value) > 0))
2766 remove_element = TRUE;
2767 else
2768 low = fold_convert (index_type, low);
2770 else
2772 /* If the entire case range is unreachable, ignore it. */
2773 if ((TREE_CODE (min_value) == INTEGER_CST
2774 && tree_int_cst_compare (high, min_value) < 0)
2775 || (TREE_CODE (max_value) == INTEGER_CST
2776 && tree_int_cst_compare (low, max_value) > 0))
2777 remove_element = TRUE;
2778 else
2780 /* If the lower bound is less than the index type's
2781 minimum value, truncate the range bounds. */
2782 if (TREE_CODE (min_value) == INTEGER_CST
2783 && tree_int_cst_compare (low, min_value) < 0)
2784 low = min_value;
2785 low = fold_convert (index_type, low);
2787 /* If the upper bound is greater than the index type's
2788 maximum value, truncate the range bounds. */
2789 if (TREE_CODE (max_value) == INTEGER_CST
2790 && tree_int_cst_compare (high, max_value) > 0)
2791 high = max_value;
2792 high = fold_convert (index_type, high);
2794 /* We may have folded a case range to a one-value case. */
2795 if (tree_int_cst_equal (low, high))
2796 high = NULL_TREE;
2800 CASE_LOW (elt) = low;
2801 CASE_HIGH (elt) = high;
2803 else
2805 gcc_assert (!default_case);
2806 default_case = elt;
2807 /* The default case must be passed separately to the
2808 gimple_build_switch routine. But if DEFAULT_CASEP
2809 is NULL, we do not remove the default case (it would
2810 be completely lost). */
2811 if (default_casep)
2812 remove_element = TRUE;
2815 if (remove_element)
2816 labels.ordered_remove (i);
2817 else
2818 i++;
2820 len = i;
2822 if (!labels.is_empty ())
2823 sort_case_labels (labels);
2825 if (default_casep && !default_case)
2827 /* If the switch has no default label, add one, so that we jump
2828 around the switch body. If the labels already cover the whole
2829 range of the switch index_type, add the default label pointing
2830 to one of the existing labels. */
2831 if (len
2832 && TYPE_MIN_VALUE (index_type)
2833 && TYPE_MAX_VALUE (index_type)
2834 && tree_int_cst_equal (CASE_LOW (labels[0]),
2835 TYPE_MIN_VALUE (index_type)))
2837 tree low, high = CASE_HIGH (labels[len - 1]);
2838 if (!high)
2839 high = CASE_LOW (labels[len - 1]);
2840 if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
2842 for (i = 1; i < len; i++)
2844 high = CASE_LOW (labels[i]);
2845 low = CASE_HIGH (labels[i - 1]);
2846 if (!low)
2847 low = CASE_LOW (labels[i - 1]);
2848 if (wi::add (low, 1) != high)
2849 break;
2851 if (i == len)
2853 tree label = CASE_LABEL (labels[0]);
2854 default_case = build_case_label (NULL_TREE, NULL_TREE,
2855 label);
2861 if (default_casep)
2862 *default_casep = default_case;
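/* A usage sketch for building a GIMPLE_SWITCH from raw case labels,
   where AFTER_SWITCH_LABEL is a hypothetical LABEL_DECL placed right
   after the switch body:

     tree default_case;
     preprocess_case_label_vec_for_gimple (labels, TREE_TYPE (index),
                                           &default_case);
     if (!default_case)
       default_case = build_case_label (NULL_TREE, NULL_TREE,
                                        after_switch_label);
     gimple s = gimple_build_switch (index, default_case, labels);  */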
2865 /* Set the location of all statements in SEQ to LOC. */
2867 void
2868 gimple_seq_set_location (gimple_seq seq, location_t loc)
2870 for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
2871 gimple_set_location (gsi_stmt (i), loc);