1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
68 #include "varasm.h"
70 enum strlen_range_kind {
71 /* Compute the exact constant string length. */
72 SRK_STRLEN,
73 /* Compute the maximum constant string length. */
74 SRK_STRLENMAX,
75 /* Compute a range of string lengths bounded by object sizes. When
76 the length of a string cannot be determined, consider as the upper
77 bound the size of the enclosing object the string may be a member
78 or element of. Also determine the size of the largest character
79 array the string may refer to. */
80 SRK_LENRANGE,
81 /* Determine the integer value of the argument (not string length). */
82 SRK_INT_VALUE
85 static bool
86 get_range_strlen (tree, bitmap *, strlen_range_kind, c_strlen_data *, unsigned);
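/* An illustrative sketch of the kinds above: for a hypothetical
     char a[8];
   with unknown contents, SRK_LENRANGE yields the range [0, 7]
   (sizeof a - 1, leaving room for the terminating nul), while for an
   argument known to point to the constant string "abc" SRK_STRLEN
   yields the exact length 3. SRK_INT_VALUE instead treats the
   argument as an integer, such as a length bound argument. */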
88 /* Return true when DECL can be referenced from the current unit.
89    FROM_DECL (if non-null) specifies the constructor of the variable DECL was taken from.
90 We can get declarations that are not possible to reference for various
91 reasons:
93 1) When analyzing C++ virtual tables.
94 C++ virtual tables do have known constructors even
95        when they are keyed to another compilation unit.
96 Those tables can contain pointers to methods and vars
97 in other units. Those methods have both STATIC and EXTERNAL
98 set.
99     2) In WHOPR mode devirtualization might lead to a reference
100       to a method that was partitioned elsewhere.
101 In this case we have static VAR_DECL or FUNCTION_DECL
102 that has no corresponding callgraph/varpool node
103 declaring the body.
104 3) COMDAT functions referred by external vtables that
105 we devirtualize only during final compilation stage.
106 At this time we already decided that we will not output
107 the function body and thus we can't reference the symbol
108 directly. */
110 static bool
111 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
113 varpool_node *vnode;
114 struct cgraph_node *node;
115 symtab_node *snode;
117 if (DECL_ABSTRACT_P (decl))
118 return false;
120 /* We are concerned only about static/external vars and functions. */
121 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
122 || !VAR_OR_FUNCTION_DECL_P (decl))
123 return true;
125   /* Static objects can be referred to only if they are defined and not optimized
126 out yet. */
127 if (!TREE_PUBLIC (decl))
129 if (DECL_EXTERNAL (decl))
130 return false;
131 /* Before we start optimizing unreachable code we can be sure all
132 static objects are defined. */
133 if (symtab->function_flags_ready)
134 return true;
135 snode = symtab_node::get (decl);
136 if (!snode || !snode->definition)
137 return false;
138 node = dyn_cast <cgraph_node *> (snode);
139 return !node || !node->inlined_to;
142 /* We will later output the initializer, so we can refer to it.
143      So we are concerned only when DECL comes from the initializer of
144      an external var or a var that has been optimized out. */
145 if (!from_decl
146 || !VAR_P (from_decl)
147 || (!DECL_EXTERNAL (from_decl)
148 && (vnode = varpool_node::get (from_decl)) != NULL
149 && vnode->definition)
150 || (flag_ltrans
151 && (vnode = varpool_node::get (from_decl)) != NULL
152 && vnode->in_other_partition))
153 return true;
154   /* We are folding a reference from an external vtable. The vtable may refer
155      to a symbol keyed to another compilation unit. The other compilation
156      unit may be in a separate DSO and the symbol may be hidden. */
157 if (DECL_VISIBILITY_SPECIFIED (decl)
158 && DECL_EXTERNAL (decl)
159 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
160 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
161 return false;
162   /* When a function is public, we can always introduce a new reference.
163      The exception is COMDAT functions, where introducing a direct
164      reference implies the need to include the function body in the current unit. */
165 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
166 return true;
167   /* We have COMDAT. We are going to check if we still have a definition
168      or if the definition is going to be output in another partition.
169 Bypass this when gimplifying; all needed functions will be produced.
171 As observed in PR20991 for already optimized out comdat virtual functions
172 it may be tempting to not necessarily give up because the copy will be
173      output elsewhere when the corresponding vtable is output.
174      This is however not possible - the ABI specifies that COMDATs are output in
175 units where they are used and when the other unit was compiled with LTO
176 it is possible that vtable was kept public while the function itself
177 was privatized. */
178 if (!symtab->function_flags_ready)
179 return true;
181 snode = symtab_node::get (decl);
182 if (!snode
183 || ((!snode->definition || DECL_EXTERNAL (decl))
184 && (!snode->in_other_partition
185 || (!snode->forced_by_abi && !snode->force_output))))
186 return false;
187 node = dyn_cast <cgraph_node *> (snode);
188 return !node || !node->inlined_to;
191 /* Create a temporary for TYPE for a statement STMT. If the current function
192    is in SSA form, an SSA name is created. Otherwise a temporary register
193 is made. */
195 tree
196 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
198 if (gimple_in_ssa_p (cfun))
199 return make_ssa_name (type, stmt);
200 else
201 return create_tmp_reg (type);
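/* A minimal usage sketch with hypothetical names, mirroring the callers
   below that materialize a loaded value before GSI:

     gimple *load = gimple_build_assign (NULL_TREE, rhs);
     tree tmp = create_tmp_reg_or_ssa_name (TREE_TYPE (rhs), load);
     gimple_assign_set_lhs (load, tmp);
     gsi_insert_before (gsi, load, GSI_SAME_STMT);

   In SSA form TMP is an SSA name whose defining statement is LOAD;
   otherwise it is a fresh temporary register. */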
204 /* CVAL is a value taken from DECL_INITIAL of a variable. Try to transform it into
205    an acceptable form for is_gimple_min_invariant.
206    FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
208 tree
209 canonicalize_constructor_val (tree cval, tree from_decl)
211 if (CONSTANT_CLASS_P (cval))
212 return cval;
214 tree orig_cval = cval;
215 STRIP_NOPS (cval);
216 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
217 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
219 tree ptr = TREE_OPERAND (cval, 0);
220 if (is_gimple_min_invariant (ptr))
221 cval = build1_loc (EXPR_LOCATION (cval),
222 ADDR_EXPR, TREE_TYPE (ptr),
223 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
224 ptr,
225 fold_convert (ptr_type_node,
226 TREE_OPERAND (cval, 1))));
228 if (TREE_CODE (cval) == ADDR_EXPR)
230 tree base = NULL_TREE;
231 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
233 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
234 if (base)
235 TREE_OPERAND (cval, 0) = base;
237 else
238 base = get_base_address (TREE_OPERAND (cval, 0));
239 if (!base)
240 return NULL_TREE;
242 if (VAR_OR_FUNCTION_DECL_P (base)
243 && !can_refer_decl_in_current_unit_p (base, from_decl))
244 return NULL_TREE;
245 if (TREE_TYPE (base) == error_mark_node)
246 return NULL_TREE;
247 if (VAR_P (base))
248 TREE_ADDRESSABLE (base) = 1;
249 else if (TREE_CODE (base) == FUNCTION_DECL)
251 /* Make sure we create a cgraph node for functions we'll reference.
252 They can be non-existent if the reference comes from an entry
253 of an external vtable for example. */
254 cgraph_node::get_create (base);
256 /* Fixup types in global initializers. */
257 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
258 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
260 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
261 cval = fold_convert (TREE_TYPE (orig_cval), cval);
262 return cval;
264 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
265 if (TREE_CODE (cval) == INTEGER_CST)
267 if (TREE_OVERFLOW_P (cval))
268 cval = drop_tree_overflow (cval);
269 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
270 cval = fold_convert (TREE_TYPE (orig_cval), cval);
271 return cval;
273 return orig_cval;
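/* For example, an initializer value of the form
     (char *) &x + 4
   (a POINTER_PLUS_EXPR over an invariant address) is rewritten above
   into the equivalent
     &MEM_REF [&x, 4]
   form acceptable to is_gimple_min_invariant, whereas a value whose
   base declaration cannot be referenced from the current unit makes
   the function return NULL_TREE. */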
276 /* If SYM is a constant variable with known value, return the value.
277 NULL_TREE is returned otherwise. */
279 tree
280 get_symbol_constant_value (tree sym)
282 tree val = ctor_for_folding (sym);
283 if (val != error_mark_node)
285 if (val)
287 val = canonicalize_constructor_val (unshare_expr (val), sym);
288 if (val && is_gimple_min_invariant (val))
289 return val;
290 else
291 return NULL_TREE;
293 /* Variables declared 'const' without an initializer
294 have zero as the initializer if they may not be
295 overridden at link or run time. */
296 if (!val
297 && is_gimple_reg_type (TREE_TYPE (sym)))
298 return build_zero_cst (TREE_TYPE (sym));
301 return NULL_TREE;
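/* For instance, given
     static const int x = 42;
   folding a load of X returns the constant 42, and a 'const' qualified
   register-type variable without an initializer that cannot be
   overridden at link or run time folds to a zero constant of its
   type. */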
306 /* Subroutine of fold_stmt. We perform several simplifications of the
307 memory reference tree EXPR and make sure to re-gimplify them properly
308 after propagation of constant addresses. IS_LHS is true if the
309 reference is supposed to be an lvalue. */
311 static tree
312 maybe_fold_reference (tree expr, bool is_lhs)
314 tree result;
316 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr) == REALPART_EXPR
318 || TREE_CODE (expr) == IMAGPART_EXPR)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
320 return fold_unary_loc (EXPR_LOCATION (expr),
321 TREE_CODE (expr),
322 TREE_TYPE (expr),
323 TREE_OPERAND (expr, 0));
324 else if (TREE_CODE (expr) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
326 return fold_ternary_loc (EXPR_LOCATION (expr),
327 TREE_CODE (expr),
328 TREE_TYPE (expr),
329 TREE_OPERAND (expr, 0),
330 TREE_OPERAND (expr, 1),
331 TREE_OPERAND (expr, 2));
333 if (!is_lhs
334 && (result = fold_const_aggregate_ref (expr))
335 && is_gimple_min_invariant (result))
336 return result;
338 return NULL_TREE;
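/* For example, __real__ of a COMPLEX_CST operand folds to its constant
   real part, a BIT_FIELD_REF of a VECTOR_CST can fold to the selected
   element, and on the RHS a load from a constant aggregate folds via
   fold_const_aggregate_ref. */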
342 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
343 replacement rhs for the statement or NULL_TREE if no simplification
344 could be made. It is assumed that the operands have been previously
345 folded. */
347 static tree
348 fold_gimple_assign (gimple_stmt_iterator *si)
350 gimple *stmt = gsi_stmt (*si);
351 enum tree_code subcode = gimple_assign_rhs_code (stmt);
352 location_t loc = gimple_location (stmt);
354 tree result = NULL_TREE;
356 switch (get_gimple_rhs_class (subcode))
358 case GIMPLE_SINGLE_RHS:
360 tree rhs = gimple_assign_rhs1 (stmt);
362 if (TREE_CLOBBER_P (rhs))
363 return NULL_TREE;
365 if (REFERENCE_CLASS_P (rhs))
366 return maybe_fold_reference (rhs, false);
368 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
370 tree val = OBJ_TYPE_REF_EXPR (rhs);
371 if (is_gimple_min_invariant (val))
372 return val;
373 else if (flag_devirtualize && virtual_method_call_p (rhs))
375 bool final;
376 vec <cgraph_node *>targets
377 = possible_polymorphic_call_targets (rhs, stmt, &final);
378 if (final && targets.length () <= 1 && dbg_cnt (devirt))
380 if (dump_enabled_p ())
382 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
383 "resolving virtual function address "
384 "reference to function %s\n",
385 targets.length () == 1
386 ? targets[0]->name ()
387 : "NULL");
389 if (targets.length () == 1)
391 val = fold_convert (TREE_TYPE (val),
392 build_fold_addr_expr_loc
393 (loc, targets[0]->decl));
394 STRIP_USELESS_TYPE_CONVERSION (val);
396 else
397 /* We cannot use __builtin_unreachable here because it
398                    cannot have its address taken. */
399 val = build_int_cst (TREE_TYPE (val), 0);
400 return val;
405 else if (TREE_CODE (rhs) == ADDR_EXPR)
407 tree ref = TREE_OPERAND (rhs, 0);
408 tree tem = maybe_fold_reference (ref, true);
409 if (tem
410 && TREE_CODE (tem) == MEM_REF
411 && integer_zerop (TREE_OPERAND (tem, 1)))
412 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
413 else if (tem)
414 result = fold_convert (TREE_TYPE (rhs),
415 build_fold_addr_expr_loc (loc, tem));
416 else if (TREE_CODE (ref) == MEM_REF
417 && integer_zerop (TREE_OPERAND (ref, 1)))
418 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
420 if (result)
422 /* Strip away useless type conversions. Both the
423 NON_LVALUE_EXPR that may have been added by fold, and
424 "useless" type conversions that might now be apparent
425 due to propagation. */
426 STRIP_USELESS_TYPE_CONVERSION (result);
428 if (result != rhs && valid_gimple_rhs_p (result))
429 return result;
433 else if (TREE_CODE (rhs) == CONSTRUCTOR
434 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
436 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
437 unsigned i;
438 tree val;
440 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
441 if (! CONSTANT_CLASS_P (val))
442 return NULL_TREE;
444 return build_vector_from_ctor (TREE_TYPE (rhs),
445 CONSTRUCTOR_ELTS (rhs));
448 else if (DECL_P (rhs))
449 return get_symbol_constant_value (rhs);
451 break;
453 case GIMPLE_UNARY_RHS:
454 break;
456 case GIMPLE_BINARY_RHS:
457 break;
459 case GIMPLE_TERNARY_RHS:
460 result = fold_ternary_loc (loc, subcode,
461 TREE_TYPE (gimple_assign_lhs (stmt)),
462 gimple_assign_rhs1 (stmt),
463 gimple_assign_rhs2 (stmt),
464 gimple_assign_rhs3 (stmt));
466 if (result)
468 STRIP_USELESS_TYPE_CONVERSION (result);
469 if (valid_gimple_rhs_p (result))
470 return result;
472 break;
474 case GIMPLE_INVALID_RHS:
475 gcc_unreachable ();
478 return NULL_TREE;
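/* A sketch of the OBJ_TYPE_REF case above: when the type inheritance
   graph proves that a virtual call expression has exactly one possible
   target F, the address is folded to &F (converted to the original
   type); when the target set is provably empty, the unreachable address
   is folded to a null pointer constant instead, since
   __builtin_unreachable cannot have its address taken. */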
482 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
483    adjusting the replacement stmts' location and virtual operands.
484 If the statement has a lhs the last stmt in the sequence is expected
485 to assign to that lhs. */
487 static void
488 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
490 gimple *stmt = gsi_stmt (*si_p);
492 if (gimple_has_location (stmt))
493 annotate_all_with_location (stmts, gimple_location (stmt));
495   /* First, iterate over the replacement statements backward, assigning
496 virtual operands to their defining statements. */
497 gimple *laststore = NULL;
498 for (gimple_stmt_iterator i = gsi_last (stmts);
499 !gsi_end_p (i); gsi_prev (&i))
501 gimple *new_stmt = gsi_stmt (i);
502 if ((gimple_assign_single_p (new_stmt)
503 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
504 || (is_gimple_call (new_stmt)
505 && (gimple_call_flags (new_stmt)
506 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
508 tree vdef;
509 if (!laststore)
510 vdef = gimple_vdef (stmt);
511 else
512 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
513 gimple_set_vdef (new_stmt, vdef);
514 if (vdef && TREE_CODE (vdef) == SSA_NAME)
515 SSA_NAME_DEF_STMT (vdef) = new_stmt;
516 laststore = new_stmt;
520   /* Second, iterate over the statements forward, assigning virtual
521 operands to their uses. */
522 tree reaching_vuse = gimple_vuse (stmt);
523 for (gimple_stmt_iterator i = gsi_start (stmts);
524 !gsi_end_p (i); gsi_next (&i))
526 gimple *new_stmt = gsi_stmt (i);
527       /* If the new statement possibly has a VUSE, update it with the exact SSA
528 name we know will reach this one. */
529 if (gimple_has_mem_ops (new_stmt))
530 gimple_set_vuse (new_stmt, reaching_vuse);
531 gimple_set_modified (new_stmt, true);
532 if (gimple_vdef (new_stmt))
533 reaching_vuse = gimple_vdef (new_stmt);
536   /* If the new sequence does not do a store, release the virtual
537 definition of the original statement. */
538 if (reaching_vuse
539 && reaching_vuse == gimple_vuse (stmt))
541 tree vdef = gimple_vdef (stmt);
542 if (vdef
543 && TREE_CODE (vdef) == SSA_NAME)
545 unlink_stmt_vdef (stmt);
546 release_ssa_name (vdef);
550 /* Finally replace the original statement with the sequence. */
551 gsi_replace_with_seq (si_p, stmts, false);
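/* A sketch of the virtual operand threading above, with hypothetical
   SSA names: replacing a store with VUSE .MEM_3 and VDEF .MEM_5 by the
   two stores S1; S2 results in
     S1: VUSE .MEM_3, VDEF .MEM_7 (a new name)
     S2: VUSE .MEM_7, VDEF .MEM_5 (reused from the replaced statement)
   so the last store keeps the original VDEF and every earlier store
   receives a fresh virtual definition. */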
554 /* Convert EXPR into a GIMPLE value suitable for substitution on the
555 RHS of an assignment. Insert the necessary statements before
556 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
557    is replaced. If the call is expected to produce a result, then it
558 is replaced by an assignment of the new RHS to the result variable.
559 If the result is to be ignored, then the call is replaced by a
560 GIMPLE_NOP. A proper VDEF chain is retained by making the first
561 VUSE and the last VDEF of the whole sequence be the same as the replaced
562 statement and using new SSA names for stores in between. */
564 void
565 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
567 tree lhs;
568 gimple *stmt, *new_stmt;
569 gimple_stmt_iterator i;
570 gimple_seq stmts = NULL;
572 stmt = gsi_stmt (*si_p);
574 gcc_assert (is_gimple_call (stmt));
576 push_gimplify_context (gimple_in_ssa_p (cfun));
578 lhs = gimple_call_lhs (stmt);
579 if (lhs == NULL_TREE)
581 gimplify_and_add (expr, &stmts);
582 /* We can end up with folding a memcpy of an empty class assignment
583 which gets optimized away by C++ gimplification. */
584 if (gimple_seq_empty_p (stmts))
586 pop_gimplify_context (NULL);
587 if (gimple_in_ssa_p (cfun))
589 unlink_stmt_vdef (stmt);
590 release_defs (stmt);
592 gsi_replace (si_p, gimple_build_nop (), false);
593 return;
596 else
598 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
599 new_stmt = gimple_build_assign (lhs, tmp);
600 i = gsi_last (stmts);
601 gsi_insert_after_without_update (&i, new_stmt,
602 GSI_CONTINUE_LINKING);
605 pop_gimplify_context (NULL);
607 gsi_replace_with_seq_vops (si_p, stmts);
611 /* Replace the call at *GSI with the gimple value VAL. */
613 void
614 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
616 gimple *stmt = gsi_stmt (*gsi);
617 tree lhs = gimple_call_lhs (stmt);
618 gimple *repl;
619 if (lhs)
621 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
622 val = fold_convert (TREE_TYPE (lhs), val);
623 repl = gimple_build_assign (lhs, val);
625 else
626 repl = gimple_build_nop ();
627 tree vdef = gimple_vdef (stmt);
628 if (vdef && TREE_CODE (vdef) == SSA_NAME)
630 unlink_stmt_vdef (stmt);
631 release_ssa_name (vdef);
633 gsi_replace (gsi, repl, false);
636 /* Replace the call at *GSI with the new call REPL and fold that
637 again. */
639 static void
640 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
642 gimple *stmt = gsi_stmt (*gsi);
643 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
644 gimple_set_location (repl, gimple_location (stmt));
645 gimple_move_vops (repl, stmt);
646 gsi_replace (gsi, repl, false);
647 fold_stmt (gsi);
650 /* Return true if VAR is a VAR_DECL or a component thereof. */
652 static bool
653 var_decl_component_p (tree var)
655 tree inner = var;
656 while (handled_component_p (inner))
657 inner = TREE_OPERAND (inner, 0);
658 return (DECL_P (inner)
659 || (TREE_CODE (inner) == MEM_REF
660 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
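/* E.g., true for X, X.F, and X.F[3], and also for components based on
   a MEM_REF whose address operand is an ADDR_EXPR. */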
663 /* Return TRUE if the SIZE argument, representing the size of an
664 object, is in a range of values of which exactly zero is valid. */
666 static bool
667 size_must_be_zero_p (tree size)
669 if (integer_zerop (size))
670 return true;
672 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
673 return false;
675 tree type = TREE_TYPE (size);
676 int prec = TYPE_PRECISION (type);
678 /* Compute the value of SSIZE_MAX, the largest positive value that
679 can be stored in ssize_t, the signed counterpart of size_t. */
680 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
681 value_range valid_range (build_int_cst (type, 0),
682 wide_int_to_tree (type, ssize_max));
683 value_range vr;
684 get_range_info (size, vr);
685 vr.intersect (&valid_range);
686 return vr.zero_p ();
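/* A sketch, assuming range information for N has been computed: given
     size_t n = (size_t) s;
   where the signed value S is known to be <= 0, the recorded range of
   N is ~[1, SSIZE_MAX]. Intersecting it with the valid range
   [0, SSIZE_MAX] leaves exactly [0, 0], so the function returns true
   even though N is not a literal zero. */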
689 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
692    the same semantics as memmove. A call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
694 be made. */
696 static bool
697 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
698 tree dest, tree src, enum built_in_function code)
700 gimple *stmt = gsi_stmt (*gsi);
701 tree lhs = gimple_call_lhs (stmt);
702 tree len = gimple_call_arg (stmt, 2);
703 location_t loc = gimple_location (stmt);
705   /* If the LEN parameter is a constant zero or in a range where
706      the only valid value is zero, return DEST. */
707 if (size_must_be_zero_p (len))
709 gimple *repl;
710 if (gimple_call_lhs (stmt))
711 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
712 else
713 repl = gimple_build_nop ();
714 tree vdef = gimple_vdef (stmt);
715 if (vdef && TREE_CODE (vdef) == SSA_NAME)
717 unlink_stmt_vdef (stmt);
718 release_ssa_name (vdef);
720 gsi_replace (gsi, repl, false);
721 return true;
724 /* If SRC and DEST are the same (and not volatile), return
725 DEST{,+LEN,+LEN-1}. */
726 if (operand_equal_p (src, dest, 0))
728 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
729 It's safe and may even be emitted by GCC itself (see bug
730 32667). */
731 unlink_stmt_vdef (stmt);
732 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
733 release_ssa_name (gimple_vdef (stmt));
734 if (!lhs)
736 gsi_replace (gsi, gimple_build_nop (), false);
737 return true;
739 goto done;
741 else
743 /* We cannot (easily) change the type of the copy if it is a storage
744 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
745 modify the storage order of objects (see storage_order_barrier_p). */
746 tree srctype
747 = POINTER_TYPE_P (TREE_TYPE (src))
748 ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
749 tree desttype
750 = POINTER_TYPE_P (TREE_TYPE (dest))
751 ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
752 tree destvar, srcvar, srcoff;
753 unsigned int src_align, dest_align;
754 unsigned HOST_WIDE_INT tmp_len;
755 const char *tmp_str;
757 /* Build accesses at offset zero with a ref-all character type. */
758 tree off0
759 = build_int_cst (build_pointer_type_for_mode (char_type_node,
760 ptr_mode, true), 0);
762       /* If we can perform the copy efficiently by first doing all loads
763          and then all stores, inline it that way. Currently, efficiently
764 means that we can load all the memory into a single integer
765 register which is what MOVE_MAX gives us. */
766 src_align = get_pointer_alignment (src);
767 dest_align = get_pointer_alignment (dest);
768 if (tree_fits_uhwi_p (len)
769 && compare_tree_int (len, MOVE_MAX) <= 0
770 /* FIXME: Don't transform copies from strings with known length.
771 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
772 from being handled, and the case was XFAILed for that reason.
773 Now that it is handled and the XFAIL removed, as soon as other
774 strlenopt tests that rely on it for passing are adjusted, this
775 hack can be removed. */
776 && !c_strlen (src, 1)
777 && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
778 && memchr (tmp_str, 0, tmp_len) == NULL)
779 && !(srctype
780 && AGGREGATE_TYPE_P (srctype)
781 && TYPE_REVERSE_STORAGE_ORDER (srctype))
782 && !(desttype
783 && AGGREGATE_TYPE_P (desttype)
784 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
786 unsigned ilen = tree_to_uhwi (len);
787 if (pow2p_hwi (ilen))
789 /* Detect out-of-bounds accesses without issuing warnings.
790              Avoid folding out-of-bounds copies, but to avoid false
791              positives for unreachable code, defer the warning until after
792 DCE has worked its magic.
793 -Wrestrict is still diagnosed. */
794 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
795 dest, src, len, len,
796 false, false))
797 if (warning != OPT_Wrestrict)
798 return false;
800 scalar_int_mode mode;
801 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
802 if (type
803 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
804 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
805 /* If the destination pointer is not aligned we must be able
806 to emit an unaligned store. */
807 && (dest_align >= GET_MODE_ALIGNMENT (mode)
808 || !targetm.slow_unaligned_access (mode, dest_align)
809 || (optab_handler (movmisalign_optab, mode)
810 != CODE_FOR_nothing)))
812 tree srctype = type;
813 tree desttype = type;
814 if (src_align < GET_MODE_ALIGNMENT (mode))
815 srctype = build_aligned_type (type, src_align);
816 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
817 tree tem = fold_const_aggregate_ref (srcmem);
818 if (tem)
819 srcmem = tem;
820 else if (src_align < GET_MODE_ALIGNMENT (mode)
821 && targetm.slow_unaligned_access (mode, src_align)
822 && (optab_handler (movmisalign_optab, mode)
823 == CODE_FOR_nothing))
824 srcmem = NULL_TREE;
825 if (srcmem)
827 gimple *new_stmt;
828 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
830 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
831 srcmem
832 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
833 new_stmt);
834 gimple_assign_set_lhs (new_stmt, srcmem);
835 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
836 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
838 if (dest_align < GET_MODE_ALIGNMENT (mode))
839 desttype = build_aligned_type (type, dest_align);
840 new_stmt
841 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
842 dest, off0),
843 srcmem);
844 gimple_move_vops (new_stmt, stmt);
845 if (!lhs)
847 gsi_replace (gsi, new_stmt, false);
848 return true;
850 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
851 goto done;
857 if (code == BUILT_IN_MEMMOVE)
859 /* Both DEST and SRC must be pointer types.
860 ??? This is what old code did. Is the testing for pointer types
861 really mandatory?
863 If either SRC is readonly or length is 1, we can use memcpy. */
864 if (!dest_align || !src_align)
865 return false;
866 if (readonly_data_expr (src)
867 || (tree_fits_uhwi_p (len)
868 && (MIN (src_align, dest_align) / BITS_PER_UNIT
869 >= tree_to_uhwi (len))))
871 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
872 if (!fn)
873 return false;
874 gimple_call_set_fndecl (stmt, fn);
875 gimple_call_set_arg (stmt, 0, dest);
876 gimple_call_set_arg (stmt, 1, src);
877 fold_stmt (gsi);
878 return true;
881 /* If *src and *dest can't overlap, optimize into memcpy as well. */
882 if (TREE_CODE (src) == ADDR_EXPR
883 && TREE_CODE (dest) == ADDR_EXPR)
885 tree src_base, dest_base, fn;
886 poly_int64 src_offset = 0, dest_offset = 0;
887 poly_uint64 maxsize;
889 srcvar = TREE_OPERAND (src, 0);
890 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
891 if (src_base == NULL)
892 src_base = srcvar;
893 destvar = TREE_OPERAND (dest, 0);
894 dest_base = get_addr_base_and_unit_offset (destvar,
895 &dest_offset);
896 if (dest_base == NULL)
897 dest_base = destvar;
898 if (!poly_int_tree_p (len, &maxsize))
899 maxsize = -1;
900 if (SSA_VAR_P (src_base)
901 && SSA_VAR_P (dest_base))
903 if (operand_equal_p (src_base, dest_base, 0)
904 && ranges_maybe_overlap_p (src_offset, maxsize,
905 dest_offset, maxsize))
906 return false;
908 else if (TREE_CODE (src_base) == MEM_REF
909 && TREE_CODE (dest_base) == MEM_REF)
911 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
912 TREE_OPERAND (dest_base, 0), 0))
913 return false;
914 poly_offset_int full_src_offset
915 = mem_ref_offset (src_base) + src_offset;
916 poly_offset_int full_dest_offset
917 = mem_ref_offset (dest_base) + dest_offset;
918 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
919 full_dest_offset, maxsize))
920 return false;
922 else
923 return false;
925 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
926 if (!fn)
927 return false;
928 gimple_call_set_fndecl (stmt, fn);
929 gimple_call_set_arg (stmt, 0, dest);
930 gimple_call_set_arg (stmt, 1, src);
931 fold_stmt (gsi);
932 return true;
935       /* If the destination and source do not alias, optimize into
936 memcpy as well. */
937 if ((is_gimple_min_invariant (dest)
938 || TREE_CODE (dest) == SSA_NAME)
939 && (is_gimple_min_invariant (src)
940 || TREE_CODE (src) == SSA_NAME))
942 ao_ref destr, srcr;
943 ao_ref_init_from_ptr_and_size (&destr, dest, len);
944 ao_ref_init_from_ptr_and_size (&srcr, src, len);
945 if (!refs_may_alias_p_1 (&destr, &srcr, false))
947 tree fn;
948 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
949 if (!fn)
950 return false;
951 gimple_call_set_fndecl (stmt, fn);
952 gimple_call_set_arg (stmt, 0, dest);
953 gimple_call_set_arg (stmt, 1, src);
954 fold_stmt (gsi);
955 return true;
959 return false;
962 if (!tree_fits_shwi_p (len))
963 return false;
964 if (!srctype
965 || (AGGREGATE_TYPE_P (srctype)
966 && TYPE_REVERSE_STORAGE_ORDER (srctype)))
967 return false;
968 if (!desttype
969 || (AGGREGATE_TYPE_P (desttype)
970 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
971 return false;
972 /* In the following try to find a type that is most natural to be
973 used for the memcpy source and destination and that allows
974 the most optimization when memcpy is turned into a plain assignment
975 using that type. In theory we could always use a char[len] type
976 but that only gains us that the destination and source possibly
977 no longer will have their address taken. */
978 if (TREE_CODE (srctype) == ARRAY_TYPE
979 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 srctype = TREE_TYPE (srctype);
981 if (TREE_CODE (desttype) == ARRAY_TYPE
982 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
983 desttype = TREE_TYPE (desttype);
984 if (TREE_ADDRESSABLE (srctype)
985 || TREE_ADDRESSABLE (desttype))
986 return false;
988 /* Make sure we are not copying using a floating-point mode or
989 a type whose size possibly does not match its precision. */
990 if (FLOAT_MODE_P (TYPE_MODE (desttype))
991 || TREE_CODE (desttype) == BOOLEAN_TYPE
992 || TREE_CODE (desttype) == ENUMERAL_TYPE)
993 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
994 if (FLOAT_MODE_P (TYPE_MODE (srctype))
995 || TREE_CODE (srctype) == BOOLEAN_TYPE
996 || TREE_CODE (srctype) == ENUMERAL_TYPE)
997 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
998 if (!srctype)
999 srctype = desttype;
1000 if (!desttype)
1001 desttype = srctype;
1002 if (!srctype)
1003 return false;
1005 src_align = get_pointer_alignment (src);
1006 dest_align = get_pointer_alignment (dest);
1008 /* Choose between src and destination type for the access based
1009 on alignment, whether the access constitutes a register access
1010 and whether it may actually expose a declaration for SSA rewrite
1011 or SRA decomposition. Also try to expose a string constant, we
1012 might be able to concatenate several of them later into a single
1013 string store. */
1014 destvar = NULL_TREE;
1015 srcvar = NULL_TREE;
1016 if (TREE_CODE (dest) == ADDR_EXPR
1017 && var_decl_component_p (TREE_OPERAND (dest, 0))
1018 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1019 && dest_align >= TYPE_ALIGN (desttype)
1020 && (is_gimple_reg_type (desttype)
1021 || src_align >= TYPE_ALIGN (desttype)))
1022 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1023 else if (TREE_CODE (src) == ADDR_EXPR
1024 && var_decl_component_p (TREE_OPERAND (src, 0))
1025 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1026 && src_align >= TYPE_ALIGN (srctype)
1027 && (is_gimple_reg_type (srctype)
1028 || dest_align >= TYPE_ALIGN (srctype)))
1029 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1030 /* FIXME: Don't transform copies from strings with known original length.
1031 As soon as strlenopt tests that rely on it for passing are adjusted,
1032 this hack can be removed. */
1033 else if (gimple_call_alloca_for_var_p (stmt)
1034 && (srcvar = string_constant (src, &srcoff, NULL, NULL))
1035 && integer_zerop (srcoff)
1036 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
1037 && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
1038 srctype = TREE_TYPE (srcvar);
1039 else
1040 return false;
1042 /* Now that we chose an access type express the other side in
1043 terms of it if the target allows that with respect to alignment
1044 constraints. */
1045 if (srcvar == NULL_TREE)
1047 if (src_align >= TYPE_ALIGN (desttype))
1048 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1049 else
1051 if (STRICT_ALIGNMENT)
1052 return false;
1053 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1054 src_align);
1055 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1058 else if (destvar == NULL_TREE)
1060 if (dest_align >= TYPE_ALIGN (srctype))
1061 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1062 else
1064 if (STRICT_ALIGNMENT)
1065 return false;
1066 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1067 dest_align);
1068 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1072 /* Same as above, detect out-of-bounds accesses without issuing
1073      warnings. Avoid folding out-of-bounds copies, but to avoid
1074      false positives for unreachable code, defer the warning until
1075      after DCE has worked its magic.
1076 -Wrestrict is still diagnosed. */
1077 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1078 dest, src, len, len,
1079 false, false))
1080 if (warning != OPT_Wrestrict)
1081 return false;
1083 gimple *new_stmt;
1084 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1086 tree tem = fold_const_aggregate_ref (srcvar);
1087 if (tem)
1088 srcvar = tem;
1089 if (! is_gimple_min_invariant (srcvar))
1091 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1092 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1093 new_stmt);
1094 gimple_assign_set_lhs (new_stmt, srcvar);
1095 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1096 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1098 new_stmt = gimple_build_assign (destvar, srcvar);
1099 goto set_vop_and_replace;
1102 /* We get an aggregate copy. If the source is a STRING_CST, then
1103 directly use its type to perform the copy. */
1104 if (TREE_CODE (srcvar) == STRING_CST)
1105 desttype = srctype;
1107 /* Or else, use an unsigned char[] type to perform the copy in order
1108 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1109 types or float modes behavior on copying. */
1110 else
1112 desttype = build_array_type_nelts (unsigned_char_type_node,
1113 tree_to_uhwi (len));
1114 srctype = desttype;
1115 if (src_align > TYPE_ALIGN (srctype))
1116 srctype = build_aligned_type (srctype, src_align);
1117 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1120 if (dest_align > TYPE_ALIGN (desttype))
1121 desttype = build_aligned_type (desttype, dest_align);
1122 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1123 new_stmt = gimple_build_assign (destvar, srcvar);
1125 set_vop_and_replace:
1126 gimple_move_vops (new_stmt, stmt);
1127 if (!lhs)
1129 gsi_replace (gsi, new_stmt, false);
1130 return true;
1132 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1135 done:
1136 gimple_seq stmts = NULL;
1137 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1138 len = NULL_TREE;
1139 else if (code == BUILT_IN_MEMPCPY)
1141 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1142 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1143 TREE_TYPE (dest), dest, len);
1145 else
1146 gcc_unreachable ();
1148 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1149 gimple *repl = gimple_build_assign (lhs, dest);
1150 gsi_replace (gsi, repl, false);
1151 return true;
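/* For example, subject to the alignment and mode checks above,
     memcpy (&d, &s, 4)
   with suitably aligned 4-byte objects D and S is inlined as a single
   4-byte load and store through a ref-all character pointer at offset
   zero, while an aggregate copy with no natural access type falls back
   to an unsigned char[len] access type so that padding is preserved. */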
1154 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1155 to built-in memcmp (a, b, len). */
1157 static bool
1158 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1160 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1162 if (!fn)
1163 return false;
1165 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1167 gimple *stmt = gsi_stmt (*gsi);
1168 tree a = gimple_call_arg (stmt, 0);
1169 tree b = gimple_call_arg (stmt, 1);
1170 tree len = gimple_call_arg (stmt, 2);
1172 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1173 replace_call_with_call_and_fold (gsi, repl);
1175 return true;
1178 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1179 to built-in memmove (dest, src, len). */
1181 static bool
1182 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1184 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1186 if (!fn)
1187 return false;
1189   /* bcopy has been removed from POSIX in Issue 7, but Issue 6 specifies
1190      it's equivalent to memmove (not memcpy). Transform bcopy (src, dest,
1191      len) into memmove (dest, src, len). */
1193 gimple *stmt = gsi_stmt (*gsi);
1194 tree src = gimple_call_arg (stmt, 0);
1195 tree dest = gimple_call_arg (stmt, 1);
1196 tree len = gimple_call_arg (stmt, 2);
1198 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1199 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1200 replace_call_with_call_and_fold (gsi, repl);
1202 return true;
1205 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1206 to built-in memset (dest, 0, len). */
1208 static bool
1209 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1211 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1213 if (!fn)
1214 return false;
1216 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1218 gimple *stmt = gsi_stmt (*gsi);
1219 tree dest = gimple_call_arg (stmt, 0);
1220 tree len = gimple_call_arg (stmt, 1);
1222 gimple_seq seq = NULL;
1223 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1224 gimple_seq_add_stmt_without_update (&seq, repl);
1225 gsi_replace_with_seq_vops (gsi, seq);
1226 fold_stmt (gsi);
1228 return true;
1231 /* Fold function call to builtin memset or bzero at *GSI setting the
1232    memory of size LEN to the value C. Return whether a simplification was made. */
1234 static bool
1235 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1237 gimple *stmt = gsi_stmt (*gsi);
1238 tree etype;
1239 unsigned HOST_WIDE_INT length, cval;
1241 /* If the LEN parameter is zero, return DEST. */
1242 if (integer_zerop (len))
1244 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1245 return true;
1248 if (! tree_fits_uhwi_p (len))
1249 return false;
1251 if (TREE_CODE (c) != INTEGER_CST)
1252 return false;
1254 tree dest = gimple_call_arg (stmt, 0);
1255 tree var = dest;
1256 if (TREE_CODE (var) != ADDR_EXPR)
1257 return false;
1259 var = TREE_OPERAND (var, 0);
1260 if (TREE_THIS_VOLATILE (var))
1261 return false;
1263 etype = TREE_TYPE (var);
1264 if (TREE_CODE (etype) == ARRAY_TYPE)
1265 etype = TREE_TYPE (etype);
1267 if (!INTEGRAL_TYPE_P (etype)
1268 && !POINTER_TYPE_P (etype))
1269     return false;
1271   if (! var_decl_component_p (var))
1272     return false;
1274 length = tree_to_uhwi (len);
1275 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1276 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1277 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1278 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1279     return false;
1281   if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1282     return false;
1284 if (!type_has_mode_precision_p (etype))
1285 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1286 TYPE_UNSIGNED (etype));
1288 if (integer_zerop (c))
1289 cval = 0;
1290 else
1292 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1293         return false;
1295 cval = TREE_INT_CST_LOW (c);
1296 cval &= 0xff;
1297 cval |= cval << 8;
1298 cval |= cval << 16;
1299 cval |= (cval << 31) << 1;
1302 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1303 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1304 gimple_move_vops (store, stmt);
1305 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1306 if (gimple_call_lhs (stmt))
1308 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1309 gsi_replace (gsi, asgn, false);
1311 else
1313 gimple_stmt_iterator gsi2 = *gsi;
1314 gsi_prev (gsi);
1315 gsi_remove (&gsi2, true);
1318 return true;
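/* For example, with a suitably aligned 4-byte unsigned int I,
     memset (&i, 0xab, 4)
   passes the checks above and is folded to the single store
     i = 0xabababab;
   the constant being produced by the byte-replicating shifts above. */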
1321 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1323 static bool
1324 get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
1325 c_strlen_data *pdata, unsigned eltsize)
1327 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1329 /* The length computed by this invocation of the function. */
1330 tree val = NULL_TREE;
1332 /* True if VAL is an optimistic (tight) bound determined from
1333 the size of the character array in which the string may be
1334 stored. In that case, the computed VAL is used to set
1335 PDATA->MAXBOUND. */
1336 bool tight_bound = false;
1338 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1339 if (TREE_CODE (arg) == ADDR_EXPR
1340 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1342 tree op = TREE_OPERAND (arg, 0);
1343 if (integer_zerop (TREE_OPERAND (op, 1)))
1345 tree aop0 = TREE_OPERAND (op, 0);
1346 if (TREE_CODE (aop0) == INDIRECT_REF
1347 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1348 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1349 pdata, eltsize);
1351 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1352 && rkind == SRK_LENRANGE)
1354 /* Fail if an array is the last member of a struct object
1355 since it could be treated as a (fake) flexible array
1356 member. */
1357 tree idx = TREE_OPERAND (op, 1);
1359 arg = TREE_OPERAND (op, 0);
1360 tree optype = TREE_TYPE (arg);
1361 if (tree dom = TYPE_DOMAIN (optype))
1362 if (tree bound = TYPE_MAX_VALUE (dom))
1363 if (TREE_CODE (bound) == INTEGER_CST
1364 && TREE_CODE (idx) == INTEGER_CST
1365 && tree_int_cst_lt (bound, idx))
1366 return false;
1370 if (rkind == SRK_INT_VALUE)
1372 /* We are computing the maximum value (not string length). */
1373 val = arg;
1374 if (TREE_CODE (val) != INTEGER_CST
1375 || tree_int_cst_sgn (val) < 0)
1376 return false;
1378 else
1380 c_strlen_data lendata = { };
1381 val = c_strlen (arg, 1, &lendata, eltsize);
1383 if (!val && lendata.decl)
1385           /* ARG refers to an unterminated const character array
1386              DATA.DECL with size DATA.LEN. */
1387 val = lendata.minlen;
1388 pdata->decl = lendata.decl;
1392 /* Set if VAL represents the maximum length based on array size (set
1393 when exact length cannot be determined). */
1394 bool maxbound = false;
1396 if (!val && rkind == SRK_LENRANGE)
1398 if (TREE_CODE (arg) == ADDR_EXPR)
1399 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1400 pdata, eltsize);
1402 if (TREE_CODE (arg) == ARRAY_REF)
1404 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1406 /* Determine the "innermost" array type. */
1407 while (TREE_CODE (optype) == ARRAY_TYPE
1408 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1409 optype = TREE_TYPE (optype);
1411 /* Avoid arrays of pointers. */
1412 tree eltype = TREE_TYPE (optype);
1413 if (TREE_CODE (optype) != ARRAY_TYPE
1414 || !INTEGRAL_TYPE_P (eltype))
1415 return false;
1417 /* Fail when the array bound is unknown or zero. */
1418 val = TYPE_SIZE_UNIT (optype);
1419 if (!val
1420 || TREE_CODE (val) != INTEGER_CST
1421 || integer_zerop (val))
1422 return false;
1424 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1425 integer_one_node);
1427 /* Set the minimum size to zero since the string in
1428 the array could have zero length. */
1429 pdata->minlen = ssize_int (0);
1431 tight_bound = true;
1433 else if (TREE_CODE (arg) == COMPONENT_REF
1434 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1435 == ARRAY_TYPE))
1437 /* Use the type of the member array to determine the upper
1438 bound on the length of the array. This may be overly
1439 optimistic if the array itself isn't NUL-terminated and
1440 the caller relies on the subsequent member to contain
1441 the NUL but that would only be considered valid if
1442 the array were the last member of a struct. */
1444 tree fld = TREE_OPERAND (arg, 1);
1446 tree optype = TREE_TYPE (fld);
1448 /* Determine the "innermost" array type. */
1449 while (TREE_CODE (optype) == ARRAY_TYPE
1450 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1451 optype = TREE_TYPE (optype);
1453 /* Fail when the array bound is unknown or zero. */
1454 val = TYPE_SIZE_UNIT (optype);
1455 if (!val
1456 || TREE_CODE (val) != INTEGER_CST
1457 || integer_zerop (val))
1458 return false;
1459 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1460 integer_one_node);
1462 /* Set the minimum size to zero since the string in
1463 the array could have zero length. */
1464 pdata->minlen = ssize_int (0);
1466 /* The array size determined above is an optimistic bound
1467 on the length. If the array isn't nul-terminated the
1468 length computed by the library function would be greater.
1469 Even though using strlen to cross the subobject boundary
1470 is undefined, avoid drawing conclusions from the member
1471 type about the length here. */
1472 tight_bound = true;
1474 else if (TREE_CODE (arg) == MEM_REF
1475 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1476 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1477 && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1479 /* Handle a MEM_REF into a DECL accessing an array of integers,
1480 being conservative about references to extern structures with
1481 flexible array members that can be initialized to arbitrary
1482 numbers of elements as an extension (static structs are okay).
1483 FIXME: Make this less conservative -- see
1484 component_ref_size in tree.c. */
1485 tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1486 if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1487 && (decl_binds_to_current_def_p (ref)
1488 || !array_at_struct_end_p (arg)))
1490 /* Fail if the offset is out of bounds. Such accesses
1491 should be diagnosed at some point. */
1492 val = DECL_SIZE_UNIT (ref);
1493 if (!val
1494 || TREE_CODE (val) != INTEGER_CST
1495 || integer_zerop (val))
1496 return false;
1498 poly_offset_int psiz = wi::to_offset (val);
1499 poly_offset_int poff = mem_ref_offset (arg);
1500 if (known_le (psiz, poff))
1501 return false;
1503 pdata->minlen = ssize_int (0);
1505 /* Subtract the offset and one for the terminating nul. */
1506 psiz -= poff;
1507 psiz -= 1;
1508 val = wide_int_to_tree (TREE_TYPE (val), psiz);
1509           /* Since VAL reflects the size of a declared object
1510              rather than the type of the access, it is not a tight bound. */
1513 else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
1515 /* Avoid handling pointers to arrays. GCC might misuse
1516 a pointer to an array of one bound to point to an array
1517 object of a greater bound. */
1518 tree argtype = TREE_TYPE (arg);
1519 if (TREE_CODE (argtype) == ARRAY_TYPE)
1521 val = TYPE_SIZE_UNIT (argtype);
1522 if (!val
1523 || TREE_CODE (val) != INTEGER_CST
1524 || integer_zerop (val))
1525 return false;
1526 val = wide_int_to_tree (TREE_TYPE (val),
1527 wi::sub (wi::to_wide (val), 1));
1529 /* Set the minimum size to zero since the string in
1530 the array could have zero length. */
1531 pdata->minlen = ssize_int (0);
1534 maxbound = true;
1537 if (!val)
1538 return false;
1540 /* Adjust the lower bound on the string length as necessary. */
1541 if (!pdata->minlen
1542 || (rkind != SRK_STRLEN
1543 && TREE_CODE (pdata->minlen) == INTEGER_CST
1544 && TREE_CODE (val) == INTEGER_CST
1545 && tree_int_cst_lt (val, pdata->minlen)))
1546 pdata->minlen = val;
1548 if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
1550 /* Adjust the tighter (more optimistic) string length bound
1551 if necessary and proceed to adjust the more conservative
1552 bound. */
1553 if (TREE_CODE (val) == INTEGER_CST)
1555 if (tree_int_cst_lt (pdata->maxbound, val))
1556 pdata->maxbound = val;
1558 else
1559 pdata->maxbound = val;
1561 else if (pdata->maxbound || maxbound)
1562 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1563 if VAL corresponds to the maximum length determined based
1564 on the type of the object. */
1565 pdata->maxbound = val;
1567 if (tight_bound)
1569 /* VAL computed above represents an optimistically tight bound
1570 on the length of the string based on the referenced object's
1571 or subobject's type. Determine the conservative upper bound
1572 based on the enclosing object's size if possible. */
1573 if (rkind == SRK_LENRANGE)
1575 poly_int64 offset;
1576 tree base = get_addr_base_and_unit_offset (arg, &offset);
1577 if (!base)
1579 /* When the call above fails due to a non-constant offset
1580 assume the offset is zero and use the size of the whole
1581 enclosing object instead. */
1582 base = get_base_address (arg);
1583 offset = 0;
1585 /* If the base object is a pointer no upper bound on the length
1586 can be determined. Otherwise the maximum length is equal to
1587 the size of the enclosing object minus the offset of
1588 the referenced subobject minus 1 (for the terminating nul). */
1589 tree type = TREE_TYPE (base);
1590 if (TREE_CODE (type) == POINTER_TYPE
1591 || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1592 || !(val = DECL_SIZE_UNIT (base)))
1593 val = build_all_ones_cst (size_type_node);
1594 else
1596 val = DECL_SIZE_UNIT (base);
1597 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1598 size_int (offset + 1));
1601 else
1602 return false;
1605 if (pdata->maxlen)
1607 /* Adjust the more conservative bound if possible/necessary
1608 and fail otherwise. */
1609 if (rkind != SRK_STRLEN)
1611 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1612 || TREE_CODE (val) != INTEGER_CST)
1613 return false;
1615 if (tree_int_cst_lt (pdata->maxlen, val))
1616 pdata->maxlen = val;
1617 return true;
1619 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1621 /* Fail if the length of this ARG is different from that
1622 previously determined from another ARG. */
1623 return false;
1627 pdata->maxlen = val;
1628 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
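/* For example, with RKIND == SRK_LENRANGE and ARG referring to
     char a[8];
   of unknown contents, VAL becomes sizeof a - 1 == 7 (the longest
   string the array can hold), PDATA->MINLEN is set to 0, and the
   conservative PDATA->MAXLEN is then derived from the size of the
   enclosing object when a tight bound was used. */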
1631 /* For an ARG referencing one or more strings, try to obtain the range
1632    of their lengths, or the size of the largest array ARG refers to if
1633 the range of lengths cannot be determined, and store all in *PDATA.
1634 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1635 the maximum constant value.
1636 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1637 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1638 length or if we are unable to determine the length, return false.
1639 VISITED is a bitmap of visited variables.
1640 RKIND determines the kind of value or range to obtain (see
1641 strlen_range_kind).
1642 Set PDATA->DECL if ARG refers to an unterminated constant array.
1643 On input, set ELTSIZE to 1 for normal single byte character strings,
1644    and either 2 or 4 for wide character strings (the size of wchar_t).
1645 Return true if *PDATA was successfully populated and false otherwise. */
1647 static bool
1648 get_range_strlen (tree arg, bitmap *visited,
1649 strlen_range_kind rkind,
1650 c_strlen_data *pdata, unsigned eltsize)
1653 if (TREE_CODE (arg) != SSA_NAME)
1654 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1656 /* If ARG is registered for SSA update we cannot look at its defining
1657 statement. */
1658 if (name_registered_for_update_p (arg))
1659 return false;
1661 /* If we were already here, break the infinite cycle. */
1662 if (!*visited)
1663 *visited = BITMAP_ALLOC (NULL);
1664 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1665 return true;
1667 tree var = arg;
1668 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1670 switch (gimple_code (def_stmt))
1672 case GIMPLE_ASSIGN:
1673 /* The RHS of the statement defining VAR must either have a
1674 constant length or come from another SSA_NAME with a constant
1675 length. */
1676 if (gimple_assign_single_p (def_stmt)
1677 || gimple_assign_unary_nop_p (def_stmt))
1679 tree rhs = gimple_assign_rhs1 (def_stmt);
1680 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1682 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1684 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1685 gimple_assign_rhs3 (def_stmt) };
1687 for (unsigned int i = 0; i < 2; i++)
1688 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1690 if (rkind != SRK_LENRANGE)
1691 return false;
1692 /* Set the upper bound to the maximum to prevent
1693 it from being adjusted in the next iteration but
1694 leave MINLEN and the more conservative MAXBOUND
1695 determined so far alone (or leave them null if
1696 they haven't been set yet). That the MINLEN is
1697 in fact zero can be determined from MAXLEN being
1698 unbounded but the discovered minimum is used for
1699 diagnostics. */
1700 pdata->maxlen = build_all_ones_cst (size_type_node);
1702 return true;
1704 return false;
1706 case GIMPLE_PHI:
1707 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1708 must have a constant length. */
1709 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1711 tree arg = gimple_phi_arg (def_stmt, i)->def;
1713 /* If this PHI has itself as an argument, we cannot
1714 determine the string length of this argument. However,
1715 if we can find a constant string length for the other
1716 PHI args then we can still be sure that this is a
1717 constant string length. So be optimistic and just
1718 continue with the next argument. */
1719 if (arg == gimple_phi_result (def_stmt))
1720 continue;
1722 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1724 if (rkind != SRK_LENRANGE)
1725 return false;
1726 /* Set the upper bound to the maximum to prevent
1727 it from being adjusted in the next iteration but
1728 leave MINLEN and the more conservative MAXBOUND
1729 determined so far alone (or leave them null if
1730 they haven't been set yet). That the MINLEN is
1731 in fact zero can be determined from MAXLEN being
1732 unbounded but the discovered minimum is used for
1733 diagnostics. */
1734 pdata->maxlen = build_all_ones_cst (size_type_node);
1737 return true;
1739 default:
1740 return false;
1744 /* Try to obtain the range of the lengths of the string(s) referenced
1745 by ARG, or the size of the largest array ARG refers to if the range
1746 of lengths cannot be determined, and store all in *PDATA which must
1747 be zero-initialized on input except PDATA->MAXBOUND may be set to
1748 a non-null tree node other than INTEGER_CST to request to have it
1749 set to the length of the longest string in a PHI. ELTSIZE is
1750 the expected size of the string element in bytes: 1 for char and
1751 some power of 2 for wide characters.
1752 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1753 for optimization. Returning false means that a nonzero PDATA->MINLEN
1754 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1755 is -1 (in that case, the actual range is indeterminate, i.e.,
1756    [0, PTRDIFF_MAX - 2]). */
1758 bool
1759 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1761 bitmap visited = NULL;
1762 tree maxbound = pdata->maxbound;
1764 if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
1766       /* On failure, extend the length range to an impossible maximum
1767 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1768 members can stay unchanged regardless. */
1769 pdata->minlen = ssize_int (0);
1770 pdata->maxlen = build_all_ones_cst (size_type_node);
1772 else if (!pdata->minlen)
1773 pdata->minlen = ssize_int (0);
1775   /* If it's unchanged from its initial non-null value, set the conservative
1776      MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1777 if (maxbound && pdata->maxbound == maxbound)
1778 pdata->maxbound = build_all_ones_cst (size_type_node);
1780 if (visited)
1781 BITMAP_FREE (visited);
1783 return !integer_all_onesp (pdata->maxlen);
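/* A minimal caller sketch with hypothetical names:

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       ... use lendata.minlen and lendata.maxlen as the length range ...
     else
       ... only the indeterminate range [0, PTRDIFF_MAX - 2] is known ...

   This mirrors how get_maxval_strlen below drives the same worker. */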
1786 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1787 For ARG of pointer types, NONSTR indicates if the caller is prepared
1788 to handle unterminated strings. For integer ARG and when RKIND ==
1789 SRK_INT_VALUE, NONSTR must be null.
1791 If an unterminated array is discovered and our caller handles
1792 unterminated arrays, then bubble up the offending DECL and
1793 return the maximum size. Otherwise return NULL. */
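/* For instance (an illustrative sketch): for ARG == "abc" and
   RKIND == SRK_STRLEN the result is the constant 3; if ARG refers to
   a known unterminated array, NULL_TREE is returned unless the caller
   passed a non-null NONSTR, in which case *NONSTR is set to the
   array's DECL and its maximum length/size is returned.  */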
1795 static tree
1796 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
1798 /* A non-null NONSTR is meaningless when determining the maximum
1799 value of an integer ARG. */
1800 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
1801 /* ARG must have an integral type when RKIND says so. */
1802 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
1804 bitmap visited = NULL;
1806 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1807 is unbounded. */
1808 c_strlen_data lendata = { };
1809 if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
1810 lendata.maxlen = NULL_TREE;
1811 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
1812 lendata.maxlen = NULL_TREE;
1814 if (visited)
1815 BITMAP_FREE (visited);
1817 if (nonstr)
1819 /* For callers prepared to handle unterminated arrays set
1820 *NONSTR to point to the declaration of the array and return
1821 the maximum length/size. */
1822 *nonstr = lendata.decl;
1823 return lendata.maxlen;
1826 /* Fail if the constant array isn't nul-terminated. */
1827 return lendata.decl ? NULL_TREE : lendata.maxlen;
1831 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1832 Replace the call at *GSI with a simpler equivalent when possible
1833 and return true; return false if no simplification can be made. */
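/* A sketch of the transformation performed below (identifiers are
   illustrative):

     strcpy (d, s);     // strlen (s) known to be 3

   becomes

     memcpy (d, s, 4);  // 3 + 1 for the terminating nul

   and strcpy (d, d) folds to D outright (possibly with a -Wrestrict
   warning).  */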
1835 static bool
1836 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1837 tree dest, tree src)
1839 gimple *stmt = gsi_stmt (*gsi);
1840 location_t loc = gimple_location (stmt);
1841 tree fn;
1843 /* If SRC and DEST are the same (and not volatile), return DEST. */
1844 if (operand_equal_p (src, dest, 0))
1846 /* Issue -Wrestrict unless the pointers are null (those do
1847 not point to objects and so do not indicate an overlap;
1848 such calls could be the result of sanitization and jump
1849 threading). */
1850 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1852 tree func = gimple_call_fndecl (stmt);
1854 warning_at (loc, OPT_Wrestrict,
1855 "%qD source argument is the same as destination",
1856 func);
1859 replace_call_with_value (gsi, dest);
1860 return true;
1863 if (optimize_function_for_size_p (cfun))
1864 return false;
1866 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1867 if (!fn)
1868 return false;
1870 /* Set to non-null if SRC refers to an unterminated array. */
1871 tree nonstr = NULL;
1872 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
1874 if (nonstr)
1876 /* Avoid folding calls with unterminated arrays. */
1877 if (!gimple_no_warning_p (stmt))
1878 warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
1879 gimple_set_no_warning (stmt, true);
1880 return false;
1883 if (!len)
1884 return false;
1886 len = fold_convert_loc (loc, size_type_node, len);
1887 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1888 len = force_gimple_operand_gsi (gsi, len, true,
1889 NULL_TREE, true, GSI_SAME_STMT);
1890 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1891 replace_call_with_call_and_fold (gsi, repl);
1892 return true;
1895 /* Fold function call to builtin strncpy with arguments DEST, SRC,
1896 and LEN. The length of the source string is computed here when it
1897 is needed. Return false if no simplification can be made. */
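/* A sketch (illustrative): with SRC known to hold "ab" and a constant
   bound no smaller than the source including its nul,

     strncpy (d, s, 3);

   becomes

     memcpy (d, s, 3);

   Larger bounds, which would also require zero-filling the remainder,
   are not handled here.  */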
1899 static bool
1900 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1901 tree dest, tree src, tree len)
1903 gimple *stmt = gsi_stmt (*gsi);
1904 location_t loc = gimple_location (stmt);
1905 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1907 /* If the LEN parameter is zero, return DEST. */
1908 if (integer_zerop (len))
1910 /* Avoid warning if the destination refers to an array/pointer
1911 decorated with attribute nonstring. */
1912 if (!nonstring)
1914 tree fndecl = gimple_call_fndecl (stmt);
1916 /* Warn about the lack of nul termination: the result is not
1917 a (nul-terminated) string. */
1918 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1919 if (slen && !integer_zerop (slen))
1920 warning_at (loc, OPT_Wstringop_truncation,
1921 "%G%qD destination unchanged after copying no bytes "
1922 "from a string of length %E",
1923 stmt, fndecl, slen);
1924 else
1925 warning_at (loc, OPT_Wstringop_truncation,
1926 "%G%qD destination unchanged after copying no bytes",
1927 stmt, fndecl);
1930 replace_call_with_value (gsi, dest);
1931 return true;
1934 /* We can't compare slen with len as constants below if len is not a
1935 constant. */
1936 if (TREE_CODE (len) != INTEGER_CST)
1937 return false;
1939 /* Now, we must be passed a constant src ptr parameter. */
1940 tree slen = get_maxval_strlen (src, SRK_STRLEN);
1941 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1942 return false;
1944 /* The size of the source string including the terminating nul. */
1945 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1947 /* We do not support simplification of this case, though we do
1948 support it when expanding trees into RTL. */
1949 /* FIXME: generate a call to __builtin_memset. */
1950 if (tree_int_cst_lt (ssize, len))
1951 return false;
1953 /* Diagnose truncation that leaves the copy unterminated. */
1954 maybe_diag_stxncpy_trunc (*gsi, src, len);
1956 /* OK, transform into a call to the builtin memcpy. */
1957 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1958 if (!fn)
1959 return false;
1961 len = fold_convert_loc (loc, size_type_node, len);
1962 len = force_gimple_operand_gsi (gsi, len, true,
1963 NULL_TREE, true, GSI_SAME_STMT);
1964 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1965 replace_call_with_call_and_fold (gsi, repl);
1967 return true;
1970 /* Fold function call to builtin strchr or strrchr.
1971 If both arguments are constant, evaluate and fold the result,
1972 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1973 In general strlen is significantly faster than strchr
1974 due to being a simpler operation. */
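/* A sketch of the folds performed below (illustrative):

     strchr ("hello", 'l')  =>  "hello" + 2
     strchr ("hello", 'z')  =>  NULL
     str(r)chr (s, 0)       =>  s + strlen (s)
     strrchr (s, 0)         =>  strchr (s, 0)  // when optimizing for size
*/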
1975 static bool
1976 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1978 gimple *stmt = gsi_stmt (*gsi);
1979 tree str = gimple_call_arg (stmt, 0);
1980 tree c = gimple_call_arg (stmt, 1);
1981 location_t loc = gimple_location (stmt);
1982 const char *p;
1983 char ch;
1985 if (!gimple_call_lhs (stmt))
1986 return false;
1988 /* Avoid folding if the first argument is not a nul-terminated array.
1989 Defer warning until later. */
1990 if (!check_nul_terminated_array (NULL_TREE, str))
1991 return false;
1993 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1995 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1997 if (p1 == NULL)
1999 replace_call_with_value (gsi, integer_zero_node);
2000 return true;
2003 tree len = build_int_cst (size_type_node, p1 - p);
2004 gimple_seq stmts = NULL;
2005 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2006 POINTER_PLUS_EXPR, str, len);
2007 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2008 gsi_replace_with_seq_vops (gsi, stmts);
2009 return true;
2012 if (!integer_zerop (c))
2013 return false;
2015 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2016 if (is_strrchr && optimize_function_for_size_p (cfun))
2018 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2020 if (strchr_fn)
2022 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2023 replace_call_with_call_and_fold (gsi, repl);
2024 return true;
2027 return false;
2030 tree len;
2031 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2033 if (!strlen_fn)
2034 return false;
2036 /* Create newstr = strlen (str). */
2037 gimple_seq stmts = NULL;
2038 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2039 gimple_set_location (new_stmt, loc);
2040 len = create_tmp_reg_or_ssa_name (size_type_node);
2041 gimple_call_set_lhs (new_stmt, len);
2042 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2044 /* Create (str p+ strlen (str)). */
2045 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2046 POINTER_PLUS_EXPR, str, len);
2047 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2048 gsi_replace_with_seq_vops (gsi, stmts);
2049 /* gsi now points at the assignment to the lhs, get a
2050 stmt iterator to the strlen.
2051 ??? We can't use gsi_for_stmt as that doesn't work when the
2052 CFG isn't built yet. */
2053 gimple_stmt_iterator gsi2 = *gsi;
2054 gsi_prev (&gsi2);
2055 fold_stmt (&gsi2);
2056 return true;
2059 /* Fold function call to builtin strstr.
2060 If both arguments are constant, evaluate and fold the result,
2061 additionally fold strstr (x, "") into x and strstr (x, "c")
2062 into strchr (x, 'c'). */
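/* A sketch of the folds performed below (illustrative):

     strstr ("hello", "ll")  =>  "hello" + 2
     strstr ("hello", "z")   =>  NULL
     strstr (x, "")          =>  x
     strstr (x, "c")         =>  strchr (x, 'c')
*/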
2063 static bool
2064 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2066 gimple *stmt = gsi_stmt (*gsi);
2067 if (!gimple_call_lhs (stmt))
2068 return false;
2070 tree haystack = gimple_call_arg (stmt, 0);
2071 tree needle = gimple_call_arg (stmt, 1);
2073 /* Avoid folding if either argument is not a nul-terminated array.
2074 Defer warning until later. */
2075 if (!check_nul_terminated_array (NULL_TREE, haystack)
2076 || !check_nul_terminated_array (NULL_TREE, needle))
2077 return false;
2079 const char *q = c_getstr (needle);
2080 if (q == NULL)
2081 return false;
2083 if (const char *p = c_getstr (haystack))
2085 const char *r = strstr (p, q);
2087 if (r == NULL)
2089 replace_call_with_value (gsi, integer_zero_node);
2090 return true;
2093 tree len = build_int_cst (size_type_node, r - p);
2094 gimple_seq stmts = NULL;
2095 gimple *new_stmt
2096 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2097 haystack, len);
2098 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2099 gsi_replace_with_seq_vops (gsi, stmts);
2100 return true;
2103 /* For strstr (x, "") return x. */
2104 if (q[0] == '\0')
2106 replace_call_with_value (gsi, haystack);
2107 return true;
2110 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2111 if (q[1] == '\0')
2113 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2114 if (strchr_fn)
2116 tree c = build_int_cst (integer_type_node, q[0]);
2117 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2118 replace_call_with_call_and_fold (gsi, repl);
2119 return true;
2123 return false;
2126 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2127 to the call.
2129 Return false if no simplification was possible, otherwise replace
2130 the call and return true.
2132 If SRC is the empty string, the call simplifies to DST. Otherwise,
2133 when the length of SRC is a known constant and the block is being
2134 optimized for speed, the call is expanded into a call to strlen on
2136 DST followed by a memcpy just past DST's terminating nul. */
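/* A sketch of the expansion (illustrative; assumes strlen (src) is
   known to be 3 and the block is optimized for speed):

     strcat (d, s);

   becomes, in effect,

     tmp = strlen (d);
     memcpy (d + tmp, s, 3 + 1);  // include the terminating nul
*/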
2144 static bool
2145 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2147 gimple *stmt = gsi_stmt (*gsi);
2148 location_t loc = gimple_location (stmt);
2150 const char *p = c_getstr (src);
2152 /* If the string length is zero, return the dst parameter. */
2153 if (p && *p == '\0')
2155 replace_call_with_value (gsi, dst);
2156 return true;
2159 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2160 return false;
2162 /* See if we can store by pieces into (dst + strlen(dst)). */
2163 tree newdst;
2164 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2165 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2167 if (!strlen_fn || !memcpy_fn)
2168 return false;
2170 /* If the length of the source string isn't computable don't
2171 split strcat into strlen and memcpy. */
2172 tree len = get_maxval_strlen (src, SRK_STRLEN);
2173 if (! len)
2174 return false;
2176 /* Create strlen (dst). */
2177 gimple_seq stmts = NULL, stmts2;
2178 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2179 gimple_set_location (repl, loc);
2180 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2181 gimple_call_set_lhs (repl, newdst);
2182 gimple_seq_add_stmt_without_update (&stmts, repl);
2184 /* Create (dst p+ strlen (dst)). */
2185 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2186 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2187 gimple_seq_add_seq_without_update (&stmts, stmts2);
2189 len = fold_convert_loc (loc, size_type_node, len);
2190 len = size_binop_loc (loc, PLUS_EXPR, len,
2191 build_int_cst (size_type_node, 1));
2192 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2193 gimple_seq_add_seq_without_update (&stmts, stmts2);
2195 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2196 gimple_seq_add_stmt_without_update (&stmts, repl);
2197 if (gimple_call_lhs (stmt))
2199 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2200 gimple_seq_add_stmt_without_update (&stmts, repl);
2201 gsi_replace_with_seq_vops (gsi, stmts);
2202 /* gsi now points at the assignment to the lhs, get a
2203 stmt iterator to the memcpy call.
2204 ??? We can't use gsi_for_stmt as that doesn't work when the
2205 CFG isn't built yet. */
2206 gimple_stmt_iterator gsi2 = *gsi;
2207 gsi_prev (&gsi2);
2208 fold_stmt (&gsi2);
2210 else
2212 gsi_replace_with_seq_vops (gsi, stmts);
2213 fold_stmt (gsi);
2215 return true;
2218 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2219 are the arguments to the call. */
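/* A sketch (illustrative): __strcat_chk (d, "", sz) folds to D, and
   when SZ is all-ones (unknown object size) __strcat_chk (d, s, sz)
   becomes a plain strcat (d, s).  */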
2221 static bool
2222 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2224 gimple *stmt = gsi_stmt (*gsi);
2225 tree dest = gimple_call_arg (stmt, 0);
2226 tree src = gimple_call_arg (stmt, 1);
2227 tree size = gimple_call_arg (stmt, 2);
2228 tree fn;
2229 const char *p;
2232 p = c_getstr (src);
2233 /* If the SRC parameter is "", return DEST. */
2234 if (p && *p == '\0')
2236 replace_call_with_value (gsi, dest);
2237 return true;
2240 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2241 return false;
2243 /* If __builtin_strcat_chk is used, assume strcat is available. */
2244 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2245 if (!fn)
2246 return false;
2248 gimple *repl = gimple_build_call (fn, 2, dest, src);
2249 replace_call_with_call_and_fold (gsi, repl);
2250 return true;
2253 /* Simplify a call to the strncat builtin. */
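/* A sketch (illustrative): strncat (d, s, 0) and strncat (d, "", n)
   fold to D; strncat (d, "abc", n) with a constant n >= 3 becomes
   strcat (d, "abc") since the bound cannot be exceeded, after bounds
   equal to the source length or the destination size have been
   diagnosed.  */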
2255 static bool
2256 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2258 gimple *stmt = gsi_stmt (*gsi);
2259 tree dst = gimple_call_arg (stmt, 0);
2260 tree src = gimple_call_arg (stmt, 1);
2261 tree len = gimple_call_arg (stmt, 2);
2263 const char *p = c_getstr (src);
2265 /* If the requested length is zero, or the src parameter string
2266 length is zero, return the dst parameter. */
2267 if (integer_zerop (len) || (p && *p == '\0'))
2269 replace_call_with_value (gsi, dst);
2270 return true;
2273 if (TREE_CODE (len) != INTEGER_CST || !p)
2274 return false;
2276 unsigned srclen = strlen (p);
2278 int cmpsrc = compare_tree_int (len, srclen);
2280 /* Return early if the requested len is less than the string length.
2281 Warnings will be issued elsewhere later. */
2282 if (cmpsrc < 0)
2283 return false;
2285 unsigned HOST_WIDE_INT dstsize;
2287 bool nowarn = gimple_no_warning_p (stmt);
2289 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2291 int cmpdst = compare_tree_int (len, dstsize);
2293 if (cmpdst >= 0)
2295 tree fndecl = gimple_call_fndecl (stmt);
2297 /* Strncat copies (at most) LEN bytes and always appends
2298 the terminating NUL so the specified bound should never
2299 be equal to (or greater than) the size of the destination.
2300 If it is, the copy could overflow. */
2301 location_t loc = gimple_location (stmt);
2302 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2303 cmpdst == 0
2304 ? G_("%G%qD specified bound %E equals "
2305 "destination size")
2306 : G_("%G%qD specified bound %E exceeds "
2307 "destination size %wu"),
2308 stmt, fndecl, len, dstsize);
2309 if (nowarn)
2310 gimple_set_no_warning (stmt, true);
2314 if (!nowarn && cmpsrc == 0)
2316 tree fndecl = gimple_call_fndecl (stmt);
2317 location_t loc = gimple_location (stmt);
2319 /* To avoid possible overflow the specified bound should also
2320 not be equal to the length of the source, even when the size
2321 of the destination is unknown (it's not an uncommon mistake
2322 to specify as the bound to strncat the length of the source). */
2323 if (warning_at (loc, OPT_Wstringop_overflow_,
2324 "%G%qD specified bound %E equals source length",
2325 stmt, fndecl, len))
2326 gimple_set_no_warning (stmt, true);
2329 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2331 /* If the replacement _DECL isn't initialized, don't do the
2332 transformation. */
2333 if (!fn)
2334 return false;
2336 /* Otherwise, emit a call to strcat. */
2337 gcall *repl = gimple_build_call (fn, 2, dst, src);
2338 replace_call_with_call_and_fold (gsi, repl);
2339 return true;
2342 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2343 LEN, and SIZE. */
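/* A sketch (illustrative): with a known object size and LEN no less
   than strlen (SRC) the call becomes __strcat_chk (dest, src, size);
   with an unknown size (SIZE is all-ones) it becomes plain
   strncat (dest, src, len).  */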
2345 static bool
2346 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2348 gimple *stmt = gsi_stmt (*gsi);
2349 tree dest = gimple_call_arg (stmt, 0);
2350 tree src = gimple_call_arg (stmt, 1);
2351 tree len = gimple_call_arg (stmt, 2);
2352 tree size = gimple_call_arg (stmt, 3);
2353 tree fn;
2354 const char *p;
2356 p = c_getstr (src);
2357 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2358 if ((p && *p == '\0')
2359 || integer_zerop (len))
2361 replace_call_with_value (gsi, dest);
2362 return true;
2365 if (! tree_fits_uhwi_p (size))
2366 return false;
2368 if (! integer_all_onesp (size))
2370 tree src_len = c_strlen (src, 1);
2371 if (src_len
2372 && tree_fits_uhwi_p (src_len)
2373 && tree_fits_uhwi_p (len)
2374 && ! tree_int_cst_lt (len, src_len))
2376 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2377 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2378 if (!fn)
2379 return false;
2381 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2382 replace_call_with_call_and_fold (gsi, repl);
2383 return true;
2385 return false;
2388 /* If __builtin_strncat_chk is used, assume strncat is available. */
2389 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2390 if (!fn)
2391 return false;
2393 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2394 replace_call_with_call_and_fold (gsi, repl);
2395 return true;
2398 /* Build and append gimple statements to STMTS that would load a first
2399 character of a memory location identified by STR. LOC is location
2400 of the statement. */
2402 static tree
2403 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2405 tree var;
2407 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2408 tree cst_uchar_ptr_node
2409 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2410 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2412 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2413 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2414 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2416 gimple_assign_set_lhs (stmt, var);
2417 gimple_seq_add_stmt_without_update (stmts, stmt);
2419 return var;
2422 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
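/* A sketch of the folds performed below (illustrative):

     strcmp (s, s)         =>  0
     strncmp (s1, s2, 0)   =>  0
     strcmp ("ab", "ba")   =>  a compile-time constant
     strcmp (s, "")        =>  *(const unsigned char *) s
     strncmp (s1, s2, 1)   =>  *(const unsigned char *) s1
                               - *(const unsigned char *) s2
     strncmp (s, "ab", 9)  =>  strcmp (s, "ab")
*/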
2424 static bool
2425 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2427 gimple *stmt = gsi_stmt (*gsi);
2428 tree callee = gimple_call_fndecl (stmt);
2429 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2431 tree type = integer_type_node;
2432 tree str1 = gimple_call_arg (stmt, 0);
2433 tree str2 = gimple_call_arg (stmt, 1);
2434 tree lhs = gimple_call_lhs (stmt);
2436 tree bound_node = NULL_TREE;
2437 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2439 /* Handle strncmp and strncasecmp functions. */
2440 if (gimple_call_num_args (stmt) == 3)
2442 bound_node = gimple_call_arg (stmt, 2);
2443 if (tree_fits_uhwi_p (bound_node))
2444 bound = tree_to_uhwi (bound_node);
2447 /* If the BOUND parameter is zero, return zero. */
2448 if (bound == 0)
2450 replace_call_with_value (gsi, integer_zero_node);
2451 return true;
2454 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2455 if (operand_equal_p (str1, str2, 0))
2457 replace_call_with_value (gsi, integer_zero_node);
2458 return true;
2461 /* Initially set to the number of characters, including the terminating
2462 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2463 the array Sx is not terminated by a nul.
2464 For nul-terminated strings then adjusted to their length so that
2465 LENx == NULPOSx holds. */
2466 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2467 const char *p1 = getbyterep (str1, &len1);
2468 const char *p2 = getbyterep (str2, &len2);
2470 /* The position of the terminating nul character if one exists, otherwise
2471 a value greater than LENx. */
2472 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2474 if (p1)
2476 size_t n = strnlen (p1, len1);
2477 if (n < len1)
2478 len1 = nulpos1 = n;
2481 if (p2)
2483 size_t n = strnlen (p2, len2);
2484 if (n < len2)
2485 len2 = nulpos2 = n;
2488 /* For known strings, return an immediate value. */
2489 if (p1 && p2)
2491 int r = 0;
2492 bool known_result = false;
2494 switch (fcode)
2496 case BUILT_IN_STRCMP:
2497 case BUILT_IN_STRCMP_EQ:
2498 if (len1 != nulpos1 || len2 != nulpos2)
2499 break;
2501 r = strcmp (p1, p2);
2502 known_result = true;
2503 break;
2505 case BUILT_IN_STRNCMP:
2506 case BUILT_IN_STRNCMP_EQ:
2508 if (bound == HOST_WIDE_INT_M1U)
2509 break;
2511 /* Reduce the bound to be no more than the length
2512 of the shorter of the two strings, or the sizes
2513 of the unterminated arrays. */
2514 unsigned HOST_WIDE_INT n = bound;
2516 if (len1 == nulpos1 && len1 < n)
2517 n = len1 + 1;
2518 if (len2 == nulpos2 && len2 < n)
2519 n = len2 + 1;
2521 if (MIN (nulpos1, nulpos2) + 1 < n)
2522 break;
2524 r = strncmp (p1, p2, n);
2525 known_result = true;
2526 break;
2528 /* The only handleable situation is where the strings are equal
2529 (result 0), which is already handled by the operand_equal_p case. */
2530 case BUILT_IN_STRCASECMP:
2531 break;
2532 case BUILT_IN_STRNCASECMP:
2534 if (bound == HOST_WIDE_INT_M1U)
2535 break;
2536 r = strncmp (p1, p2, bound);
2537 if (r == 0)
2538 known_result = true;
2539 break;
2541 default:
2542 gcc_unreachable ();
2545 if (known_result)
2547 replace_call_with_value (gsi, build_cmp_result (type, r));
2548 return true;
2552 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2553 || fcode == BUILT_IN_STRCMP
2554 || fcode == BUILT_IN_STRCMP_EQ
2555 || fcode == BUILT_IN_STRCASECMP;
2557 location_t loc = gimple_location (stmt);
2559 /* If the second arg is "", return *(const unsigned char*)arg1. */
2560 if (p2 && *p2 == '\0' && nonzero_bound)
2562 gimple_seq stmts = NULL;
2563 tree var = gimple_load_first_char (loc, str1, &stmts);
2564 if (lhs)
2566 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2567 gimple_seq_add_stmt_without_update (&stmts, stmt);
2570 gsi_replace_with_seq_vops (gsi, stmts);
2571 return true;
2574 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2575 if (p1 && *p1 == '\0' && nonzero_bound)
2577 gimple_seq stmts = NULL;
2578 tree var = gimple_load_first_char (loc, str2, &stmts);
2580 if (lhs)
2582 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2583 stmt = gimple_build_assign (c, NOP_EXPR, var);
2584 gimple_seq_add_stmt_without_update (&stmts, stmt);
2586 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2587 gimple_seq_add_stmt_without_update (&stmts, stmt);
2590 gsi_replace_with_seq_vops (gsi, stmts);
2591 return true;
2594 /* If BOUND is one, return an expression corresponding to
2595 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2596 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2598 gimple_seq stmts = NULL;
2599 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2600 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2602 if (lhs)
2604 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2605 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2606 gimple_seq_add_stmt_without_update (&stmts, convert1);
2608 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2609 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2610 gimple_seq_add_stmt_without_update (&stmts, convert2);
2612 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2613 gimple_seq_add_stmt_without_update (&stmts, stmt);
2616 gsi_replace_with_seq_vops (gsi, stmts);
2617 return true;
2620 /* If BOUND is greater than the length of one constant string,
2621 and the other argument is also a nul-terminated string, replace
2622 strncmp with strcmp. */
2623 if (fcode == BUILT_IN_STRNCMP
2624 && bound > 0 && bound < HOST_WIDE_INT_M1U
2625 && ((p2 && len2 < bound && len2 == nulpos2)
2626 || (p1 && len1 < bound && len1 == nulpos1)))
2628 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2629 if (!fn)
2630 return false;
2631 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2632 replace_call_with_call_and_fold (gsi, repl);
2633 return true;
2636 return false;
2639 /* Fold a call to the memchr pointed by GSI iterator. */
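/* A sketch of the folds performed below (illustrative):

     memchr (s, c, 0)          =>  NULL
     memchr ("hello", 'l', 5)  =>  "hello" + 2
     memchr ("hello", 'z', 5)  =>  NULL  // no match within the bound
*/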
2641 static bool
2642 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2644 gimple *stmt = gsi_stmt (*gsi);
2645 tree lhs = gimple_call_lhs (stmt);
2646 tree arg1 = gimple_call_arg (stmt, 0);
2647 tree arg2 = gimple_call_arg (stmt, 1);
2648 tree len = gimple_call_arg (stmt, 2);
2650 /* If the LEN parameter is zero, return zero. */
2651 if (integer_zerop (len))
2653 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2654 return true;
2657 char c;
2658 if (TREE_CODE (arg2) != INTEGER_CST
2659 || !tree_fits_uhwi_p (len)
2660 || !target_char_cst_p (arg2, &c))
2661 return false;
2663 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2664 unsigned HOST_WIDE_INT string_length;
2665 const char *p1 = getbyterep (arg1, &string_length);
2667 if (p1)
2669 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2670 if (r == NULL)
2672 tree mem_size, offset_node;
2673 byte_representation (arg1, &offset_node, &mem_size, NULL);
2674 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2675 ? 0 : tree_to_uhwi (offset_node);
2676 /* MEM_SIZE is the size of the array the string literal
2677 is stored in. */
2678 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2679 gcc_checking_assert (string_length <= string_size);
2680 if (length <= string_size)
2682 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2683 return true;
2686 else
2688 unsigned HOST_WIDE_INT offset = r - p1;
2689 gimple_seq stmts = NULL;
2690 if (lhs != NULL_TREE)
2692 tree offset_cst = build_int_cst (sizetype, offset);
2693 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2694 arg1, offset_cst);
2695 gimple_seq_add_stmt_without_update (&stmts, stmt);
2697 else
2698 gimple_seq_add_stmt_without_update (&stmts,
2699 gimple_build_nop ());
2701 gsi_replace_with_seq_vops (gsi, stmts);
2702 return true;
2706 return false;
2709 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2710 to the call. UNLOCKED is true if this is actually a call to
2711 fputs_unlocked. The transformation is done only when the return
2712 value of the call is unused and the length of the string ARG0
2713 can be determined. Return false if no simplification
2714 was possible. */
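/* A sketch (illustrative; the return value must be unused):

     fputs ("", f)   =>  call removed
     fputs ("x", f)  =>  fputc ('x', f)
     fputs (s, f)    =>  fwrite (s, 1, len, f)  // known len > 1 and
                                                // not optimizing for size
*/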
2716 static bool
2717 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2718 tree arg0, tree arg1,
2719 bool unlocked)
2721 gimple *stmt = gsi_stmt (*gsi);
2723 /* If we're using an unlocked function, assume the other unlocked
2724 functions exist explicitly. */
2725 tree const fn_fputc = (unlocked
2726 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2727 : builtin_decl_implicit (BUILT_IN_FPUTC));
2728 tree const fn_fwrite = (unlocked
2729 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2730 : builtin_decl_implicit (BUILT_IN_FWRITE));
2732 /* If the return value is used, don't do the transformation. */
2733 if (gimple_call_lhs (stmt))
2734 return false;
2736 /* Get the length of the string passed to fputs. If the length
2737 can't be determined, punt. */
2738 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2739 if (!len
2740 || TREE_CODE (len) != INTEGER_CST)
2741 return false;
2743 switch (compare_tree_int (len, 1))
2745 case -1: /* length is 0, delete the call entirely. */
2746 replace_call_with_value (gsi, integer_zero_node);
2747 return true;
2749 case 0: /* length is 1, call fputc. */
2751 const char *p = c_getstr (arg0);
2752 if (p != NULL)
2754 if (!fn_fputc)
2755 return false;
2757 gimple *repl = gimple_build_call (fn_fputc, 2,
2758 build_int_cst
2759 (integer_type_node, p[0]), arg1);
2760 replace_call_with_call_and_fold (gsi, repl);
2761 return true;
2764 /* FALLTHROUGH */
2765 case 1: /* length is greater than 1, call fwrite. */
2767 /* If optimizing for size keep fputs. */
2768 if (optimize_function_for_size_p (cfun))
2769 return false;
2770 /* New argument list transforming fputs(string, stream) to
2771 fwrite(string, 1, len, stream). */
2772 if (!fn_fwrite)
2773 return false;
2775 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2776 size_one_node, len, arg1);
2777 replace_call_with_call_and_fold (gsi, repl);
2778 return true;
2780 default:
2781 gcc_unreachable ();
2783 return false;
2786 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2787 DEST, SRC, LEN, and SIZE are the arguments to the call. FCODE is
2788 the BUILT_IN_* code of the builtin. The return value is treated as
2789 ignored when the call has no LHS; the maximum value of a non-constant
2790 LEN is determined with get_maxval_strlen. */
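/* A sketch (illustrative): when the copy is known not to overflow,
   e.g.

     __memcpy_chk (d, s, 16, 32);

   the check can be dropped, giving

     memcpy (d, s, 16);

   and __mempcpy_chk (d, s, n, sz) with SRC == DEST folds to d + n.  */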
2792 static bool
2793 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2794 tree dest, tree src, tree len, tree size,
2795 enum built_in_function fcode)
2797 gimple *stmt = gsi_stmt (*gsi);
2798 location_t loc = gimple_location (stmt);
2799 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2800 tree fn;
2802 /* If SRC and DEST are the same (and not volatile), return DEST
2803 (resp. DEST+LEN for __mempcpy_chk). */
2804 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2806 if (fcode != BUILT_IN_MEMPCPY_CHK)
2808 replace_call_with_value (gsi, dest);
2809 return true;
2811 else
2813 gimple_seq stmts = NULL;
2814 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2815 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2816 TREE_TYPE (dest), dest, len);
2817 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2818 replace_call_with_value (gsi, temp);
2819 return true;
2823 if (! tree_fits_uhwi_p (size))
2824 return false;
2826 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
2827 if (! integer_all_onesp (size))
2829 if (! tree_fits_uhwi_p (len))
2831 /* If LEN is not constant, try MAXLEN too.
2832 For MAXLEN only allow optimizing into non-_ocs function
2833 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2834 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2836 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2838 /* (void) __mempcpy_chk () can be optimized into
2839 (void) __memcpy_chk (). */
2840 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2841 if (!fn)
2842 return false;
2844 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2845 replace_call_with_call_and_fold (gsi, repl);
2846 return true;
2848 return false;
2851 else
2852 maxlen = len;
2854 if (tree_int_cst_lt (size, maxlen))
2855 return false;
2858 fn = NULL_TREE;
2859 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2860 mem{cpy,pcpy,move,set} is available. */
2861 switch (fcode)
2863 case BUILT_IN_MEMCPY_CHK:
2864 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2865 break;
2866 case BUILT_IN_MEMPCPY_CHK:
2867 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2868 break;
2869 case BUILT_IN_MEMMOVE_CHK:
2870 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2871 break;
2872 case BUILT_IN_MEMSET_CHK:
2873 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2874 break;
2875 default:
2876 break;
2879 if (!fn)
2880 return false;
2882 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2883 replace_call_with_call_and_fold (gsi, repl);
2884 return true;
2887 /* Fold a call to the __st[rp]cpy_chk builtin.
2888 DEST, SRC, and SIZE are the arguments to the call. FCODE is the
2889 BUILT_IN_* code of the builtin. The return value is treated as
2890 ignored when the call has no LHS; the maximum length of SRC is
2891 determined with get_maxval_strlen. */
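/* A sketch (illustrative):

     __strcpy_chk (d, "abc", 8)

   has a known source length smaller than the object size and becomes
   strcpy (d, "abc"); with a non-constant but computable length it
   becomes __memcpy_chk (d, s, len + 1, 8) instead.  */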
2893 static bool
2894 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2895 tree dest,
2896 tree src, tree size,
2897 enum built_in_function fcode)
2899 gimple *stmt = gsi_stmt (*gsi);
2900 location_t loc = gimple_location (stmt);
2901 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2902 tree len, fn;
2904 /* If SRC and DEST are the same (and not volatile), return DEST. */
2905 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2907 /* Issue -Wrestrict unless the pointers are null (those do
2908 not point to objects and so do not indicate an overlap;
2909 such calls could be the result of sanitization and jump
2910 threading). */
2911 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2913 tree func = gimple_call_fndecl (stmt);
2915 warning_at (loc, OPT_Wrestrict,
2916 "%qD source argument is the same as destination",
2917 func);
2920 replace_call_with_value (gsi, dest);
2921 return true;
2924 if (! tree_fits_uhwi_p (size))
2925 return false;
2927 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
2928 if (! integer_all_onesp (size))
2930 len = c_strlen (src, 1);
2931 if (! len || ! tree_fits_uhwi_p (len))
2933 /* If LEN is not constant, try MAXLEN too.
2934 For MAXLEN only allow optimizing into non-_ocs function
2935 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2936 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2938 if (fcode == BUILT_IN_STPCPY_CHK)
2940 if (! ignore)
2941 return false;
2943 /* If return value of __stpcpy_chk is ignored,
2944 optimize into __strcpy_chk. */
2945 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2946 if (!fn)
2947 return false;
2949 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2950 replace_call_with_call_and_fold (gsi, repl);
2951 return true;
2954 if (! len || TREE_SIDE_EFFECTS (len))
2955 return false;
2957 /* If c_strlen returned something, but not a constant,
2958 transform __strcpy_chk into __memcpy_chk. */
2959 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2960 if (!fn)
2961 return false;
2963 gimple_seq stmts = NULL;
2964 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2965 len = gimple_convert (&stmts, loc, size_type_node, len);
2966 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2967 build_int_cst (size_type_node, 1));
2968 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2969 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2970 replace_call_with_call_and_fold (gsi, repl);
2971 return true;
2974 else
2975 maxlen = len;
2977 if (! tree_int_cst_lt (maxlen, size))
2978 return false;
2981 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2982 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2983 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2984 if (!fn)
2985 return false;
2987 gimple *repl = gimple_build_call (fn, 2, dest, src);
2988 replace_call_with_call_and_fold (gsi, repl);
2989 return true;
2992 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and
2993 SIZE are the arguments to the call; FCODE is the BUILT_IN_* code of
2994 the builtin. The maximum of a non-constant LEN is determined with
2995 get_maxval_strlen; the return value is ignored when there is no LHS. */
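/* A sketch (illustrative):

     __strncpy_chk (d, s, 16, 32)  =>  strncpy (d, s, 16)

   since the bound is known to fit in the object; an ignored-result
   __stpncpy_chk is first turned into __strncpy_chk.  */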
2997 static bool
2998 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2999 tree dest, tree src,
3000 tree len, tree size,
3001 enum built_in_function fcode)
3003 gimple *stmt = gsi_stmt (*gsi);
3004 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3005 tree fn;
3007 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3009 /* If return value of __stpncpy_chk is ignored,
3010 optimize into __strncpy_chk. */
3011 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3012 if (fn)
3014 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3015 replace_call_with_call_and_fold (gsi, repl);
3016 return true;
3020 if (! tree_fits_uhwi_p (size))
3021 return false;
3023 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3024 if (! integer_all_onesp (size))
3026 if (! tree_fits_uhwi_p (len))
3028 /* If LEN is not constant, try MAXLEN too.
3029 For MAXLEN only allow optimizing into non-_ocs function
3030 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3031 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3032 return false;
3034 else
3035 maxlen = len;
3037 if (tree_int_cst_lt (size, maxlen))
3038 return false;
3041 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3042 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
3043 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3044 if (!fn)
3045 return false;
3047 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3048 replace_call_with_call_and_fold (gsi, repl);
3049 return true;
3052 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3053 Return false if no simplification can be made. */
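/* A sketch (illustrative; strlen (src) known to be 3, result used):

     p = stpcpy (d, s);

   becomes

     memcpy (d, s, 4);
     p = d + 3;

   An unused-result stpcpy is simply rewritten into strcpy.  */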
3055 static bool
3056 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3058 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3059 location_t loc = gimple_location (stmt);
3060 tree dest = gimple_call_arg (stmt, 0);
3061 tree src = gimple_call_arg (stmt, 1);
3062 tree fn, lenp1;
3064 /* If the result is unused, replace stpcpy with strcpy. */
3065 if (gimple_call_lhs (stmt) == NULL_TREE)
3067 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3068 if (!fn)
3069 return false;
3070 gimple_call_set_fndecl (stmt, fn);
3071 fold_stmt (gsi);
3072 return true;
3075 /* Set to non-null if SRC refers to an unterminated array. */
3076 c_strlen_data data = { };
3077 /* The size of the unterminated array if SRC refers to one. */
3078 tree size;
3079 /* True if the size is exact/constant, false if it's the lower bound
3080 of a range. */
3081 bool exact;
3082 tree len = c_strlen (src, 1, &data, 1);
3083 if (!len
3084 || TREE_CODE (len) != INTEGER_CST)
3086 data.decl = unterminated_array (src, &size, &exact);
3087 if (!data.decl)
3088 return false;
3091 if (data.decl)
3093 /* Avoid folding calls with unterminated arrays. */
3094 if (!gimple_no_warning_p (stmt))
3095 warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
3096 exact);
3097 gimple_set_no_warning (stmt, true);
3098 return false;
3101 if (optimize_function_for_size_p (cfun)
3102 /* If length is zero it's small enough. */
3103 && !integer_zerop (len))
3104 return false;
3106 /* If the source has a known length replace stpcpy with memcpy. */
3107 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3108 if (!fn)
3109 return false;
3111 gimple_seq stmts = NULL;
3112 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3113 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3114 tem, build_int_cst (size_type_node, 1));
3115 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3116 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3117 gimple_move_vops (repl, stmt);
3118 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3119 /* Replace the result with dest + len. */
3120 stmts = NULL;
3121 tem = gimple_convert (&stmts, loc, sizetype, len);
3122 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3123 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3124 POINTER_PLUS_EXPR, dest, tem);
3125 gsi_replace (gsi, ret, false);
3126 /* Finally fold the memcpy call. */
3127 gimple_stmt_iterator gsi2 = *gsi;
3128 gsi_prev (&gsi2);
3129 fold_stmt (&gsi2);
3130 return true;
3133 /* Fold a call to __{,v}snprintf_chk into a call to {,v}snprintf when
3134 that is safe. Return false if a normal call should be emitted rather
3135 than transforming the call. FCODE is either BUILT_IN_SNPRINTF_CHK
3136 or BUILT_IN_VSNPRINTF_CHK. The maximum value of a non-constant
3137 length argument is determined with get_maxval_strlen. */
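/* A sketch (illustrative): with FLAG 0 and known sizes,

     __snprintf_chk (d, 16, 0, 32, "%s", s);

   becomes

     snprintf (d, 16, "%s", s);

   since the bound 16 does not exceed the object size 32.  */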
3139 static bool
3140 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3141 enum built_in_function fcode)
3143 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3144 tree dest, size, len, fn, fmt, flag;
3145 const char *fmt_str;
3147 /* Verify the required arguments in the original call. */
3148 if (gimple_call_num_args (stmt) < 5)
3149 return false;
3151 dest = gimple_call_arg (stmt, 0);
3152 len = gimple_call_arg (stmt, 1);
3153 flag = gimple_call_arg (stmt, 2);
3154 size = gimple_call_arg (stmt, 3);
3155 fmt = gimple_call_arg (stmt, 4);
3157 if (! tree_fits_uhwi_p (size))
3158 return false;
3160 if (! integer_all_onesp (size))
3162 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3163 if (! tree_fits_uhwi_p (len))
3165 /* If LEN is not constant, try MAXLEN too.
3166 For MAXLEN only allow optimizing into non-_ocs function
3167 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3168 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
3169 return false;
3171 else
3172 maxlen = len;
3174 if (tree_int_cst_lt (size, maxlen))
3175 return false;
3178 if (!init_target_chars ())
3179 return false;
3181 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3182 or if format doesn't contain % chars or is "%s". */
3183 if (! integer_zerop (flag))
3185 fmt_str = c_getstr (fmt);
3186 if (fmt_str == NULL)
3187 return false;
3188 if (strchr (fmt_str, target_percent) != NULL
3189 && strcmp (fmt_str, target_percent_s))
3190 return false;
3193 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3194 available. */
3195 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3196 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3197 if (!fn)
3198 return false;
3200 /* Replace the called function and the first 5 arguments by 3,
3201 retaining trailing varargs. */
3202 gimple_call_set_fndecl (stmt, fn);
3203 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3204 gimple_call_set_arg (stmt, 0, dest);
3205 gimple_call_set_arg (stmt, 1, len);
3206 gimple_call_set_arg (stmt, 2, fmt);
3207 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3208 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3209 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3210 fold_stmt (gsi);
3211 return true;
3214 /* Fold a call to __{,v}sprintf_chk into a call to {,v}sprintf when
3215 that is safe. Return false if a normal call should be emitted
3216 rather than transforming the call. FCODE is either
3217 BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
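/* A sketch (illustrative):

     __sprintf_chk (d, 0, 32, "abc");

   writes a known 3 characters plus the terminating nul into an
   object of size 32, so it becomes sprintf (d, "abc").  */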
3219 static bool
3220 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3221 enum built_in_function fcode)
3223 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3224 tree dest, size, len, fn, fmt, flag;
3225 const char *fmt_str;
3226 unsigned nargs = gimple_call_num_args (stmt);
3228 /* Verify the required arguments in the original call. */
3229 if (nargs < 4)
3230 return false;
3231 dest = gimple_call_arg (stmt, 0);
3232 flag = gimple_call_arg (stmt, 1);
3233 size = gimple_call_arg (stmt, 2);
3234 fmt = gimple_call_arg (stmt, 3);
3236 if (! tree_fits_uhwi_p (size))
3237 return false;
3239 len = NULL_TREE;
3241 if (!init_target_chars ())
3242 return false;
3244 /* Check whether the format is a literal string constant. */
3245 fmt_str = c_getstr (fmt);
3246 if (fmt_str != NULL)
3248 /* If the format doesn't contain % args or %%, we know the size. */
3249 if (strchr (fmt_str, target_percent) == 0)
3251 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3252 len = build_int_cstu (size_type_node, strlen (fmt_str));
3254 /* If the format is "%s" and first ... argument is a string literal,
3255 we know the size too. */
3256 else if (fcode == BUILT_IN_SPRINTF_CHK
3257 && strcmp (fmt_str, target_percent_s) == 0)
3259 tree arg;
3261 if (nargs == 5)
3263 arg = gimple_call_arg (stmt, 4);
3264 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3266 len = c_strlen (arg, 1);
3267 if (! len || ! tree_fits_uhwi_p (len))
3268 len = NULL_TREE;
3274 if (! integer_all_onesp (size))
3276 if (! len || ! tree_int_cst_lt (len, size))
3277 return false;
3280 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3281 or if format doesn't contain % chars or is "%s". */
3282 if (! integer_zerop (flag))
3284 if (fmt_str == NULL)
3285 return false;
3286 if (strchr (fmt_str, target_percent) != NULL
3287 && strcmp (fmt_str, target_percent_s))
3288 return false;
3291 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3292 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3293 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3294 if (!fn)
3295 return false;
3297 /* Replace the called function and the first 4 arguments by 2,
3298 retaining trailing varargs. */
3299 gimple_call_set_fndecl (stmt, fn);
3300 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3301 gimple_call_set_arg (stmt, 0, dest);
3302 gimple_call_set_arg (stmt, 1, fmt);
3303 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3304 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3305 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3306 fold_stmt (gsi);
3307 return true;
3310 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3311 ORIG may be null if this is a 2-argument call. We don't attempt to
3312 simplify calls with more than 3 arguments.
3314 Return true if simplification was possible, otherwise false. */
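/* A sketch of the folds performed below (illustrative):

     sprintf (d, "abc")    =>  strcpy (d, "abc")  // lhs, if any, set to 3
     sprintf (d, "%s", s)  =>  strcpy (d, s)      // lhs, if any, set to
                                                  // the known strlen (s)
*/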
3316 bool
3317 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3319 gimple *stmt = gsi_stmt (*gsi);
3320 tree dest = gimple_call_arg (stmt, 0);
3321 tree fmt = gimple_call_arg (stmt, 1);
3322 tree orig = NULL_TREE;
3323 const char *fmt_str = NULL;
3325 /* Verify the required arguments in the original call. We deal with two
3326 types of sprintf() calls: 'sprintf (str, fmt)' and
3327 'sprintf (dest, "%s", orig)'. */
3328 if (gimple_call_num_args (stmt) > 3)
3329 return false;
3331 if (gimple_call_num_args (stmt) == 3)
3332 orig = gimple_call_arg (stmt, 2);
3334 /* Check whether the format is a literal string constant. */
3335 fmt_str = c_getstr (fmt);
3336 if (fmt_str == NULL)
3337 return false;
3339 if (!init_target_chars ())
3340 return false;
3342 /* If the format doesn't contain % args or %%, use strcpy. */
3343 if (strchr (fmt_str, target_percent) == NULL)
3345 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3347 if (!fn)
3348 return false;
3350 /* Don't optimize sprintf (buf, "abc", ptr++). */
3351 if (orig)
3352 return false;
3354 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3355 'format' is known to contain no % formats. */
3356 gimple_seq stmts = NULL;
3357 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3359 /* Propagate the NO_WARNING bit to avoid issuing the same
3360 warning more than once. */
3361 if (gimple_no_warning_p (stmt))
3362 gimple_set_no_warning (repl, true);
3364 gimple_seq_add_stmt_without_update (&stmts, repl);
3365 if (tree lhs = gimple_call_lhs (stmt))
3367 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3368 strlen (fmt_str)));
3369 gimple_seq_add_stmt_without_update (&stmts, repl);
3370 gsi_replace_with_seq_vops (gsi, stmts);
3371 /* gsi now points at the assignment to the lhs, get a
3372 stmt iterator to the strcpy call.
3373 ??? We can't use gsi_for_stmt as that doesn't work when the
3374 CFG isn't built yet. */
3375 gimple_stmt_iterator gsi2 = *gsi;
3376 gsi_prev (&gsi2);
3377 fold_stmt (&gsi2);
3379 else
3381 gsi_replace_with_seq_vops (gsi, stmts);
3382 fold_stmt (gsi);
3384 return true;
3387 /* If the format is "%s", use strcpy if the result isn't used. */
3388 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3390 tree fn;
3391 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3393 if (!fn)
3394 return false;
3396 /* Don't crash on sprintf (str1, "%s"). */
3397 if (!orig)
3398 return false;
3400 tree orig_len = NULL_TREE;
3401 if (gimple_call_lhs (stmt))
3403 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3404 if (!orig_len)
3405 return false;
3408 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3409 gimple_seq stmts = NULL;
3410 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3412 /* Propagate the NO_WARNING bit to avoid issuing the same
3413 warning more than once. */
3414 if (gimple_no_warning_p (stmt))
3415 gimple_set_no_warning (repl, true);
3417 gimple_seq_add_stmt_without_update (&stmts, repl);
3418 if (tree lhs = gimple_call_lhs (stmt))
3420 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3421 TREE_TYPE (orig_len)))
3422 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3423 repl = gimple_build_assign (lhs, orig_len);
3424 gimple_seq_add_stmt_without_update (&stmts, repl);
3425 gsi_replace_with_seq_vops (gsi, stmts);
3426 /* gsi now points at the assignment to the lhs, get a
3427 stmt iterator to the strcpy call.
3428 ??? We can't use gsi_for_stmt as that doesn't work when the
3429 CFG isn't built yet. */
3430 gimple_stmt_iterator gsi2 = *gsi;
3431 gsi_prev (&gsi2);
3432 fold_stmt (&gsi2);
3434 else
3436 gsi_replace_with_seq_vops (gsi, stmts);
3437 fold_stmt (gsi);
3439 return true;
3441 return false;
3444 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3445 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3446 attempt to simplify calls with more than 4 arguments.
3448 Return true if simplification was possible, otherwise false. */
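/* A sketch (illustrative; DESTSIZE must be a known constant):

     snprintf (d, 32, "abc")    =>  strcpy (d, "abc")  // since 3 < 32
     snprintf (d, 32, "%s", s)  =>  strcpy (d, s)      // strlen (s) < 32
*/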
3450 bool
3451 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3453 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3454 tree dest = gimple_call_arg (stmt, 0);
3455 tree destsize = gimple_call_arg (stmt, 1);
3456 tree fmt = gimple_call_arg (stmt, 2);
3457 tree orig = NULL_TREE;
3458 const char *fmt_str = NULL;
3460 if (gimple_call_num_args (stmt) > 4)
3461 return false;
3463 if (gimple_call_num_args (stmt) == 4)
3464 orig = gimple_call_arg (stmt, 3);
3466 if (!tree_fits_uhwi_p (destsize))
3467 return false;
3468 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3470 /* Check whether the format is a literal string constant. */
3471 fmt_str = c_getstr (fmt);
3472 if (fmt_str == NULL)
3473 return false;
3475 if (!init_target_chars ())
3476 return false;
3478 /* If the format doesn't contain % args or %%, use strcpy. */
3479 if (strchr (fmt_str, target_percent) == NULL)
3481 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3482 if (!fn)
3483 return false;
3485 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3486 if (orig)
3487 return false;
3489 /* We could expand this as
3490 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3491 or to
3492 memcpy (str, fmt_with_nul_at_cstm1, cst);
3493 but in the former case that might increase code size
3494 and in the latter case grow .rodata section too much.
3495 So punt for now. */
3496 size_t len = strlen (fmt_str);
3497 if (len >= destlen)
3498 return false;
3500 gimple_seq stmts = NULL;
3501 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3502 gimple_seq_add_stmt_without_update (&stmts, repl);
3503 if (tree lhs = gimple_call_lhs (stmt))
3505 repl = gimple_build_assign (lhs,
3506 build_int_cst (TREE_TYPE (lhs), len));
3507 gimple_seq_add_stmt_without_update (&stmts, repl);
3508 gsi_replace_with_seq_vops (gsi, stmts);
3509 /* gsi now points at the assignment to the lhs, get a
3510 stmt iterator to the strcpy call.
3511 ??? We can't use gsi_for_stmt as that doesn't work when the
3512 CFG isn't built yet. */
3513 gimple_stmt_iterator gsi2 = *gsi;
3514 gsi_prev (&gsi2);
3515 fold_stmt (&gsi2);
3517 else
3519 gsi_replace_with_seq_vops (gsi, stmts);
3520 fold_stmt (gsi);
3522 return true;
3525 /* If the format is "%s", use strcpy if the result isn't used. */
3526 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3528 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3529 if (!fn)
3530 return false;
3532 /* Don't crash on snprintf (str1, cst, "%s"). */
3533 if (!orig)
3534 return false;
3536 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3537 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3538 return false;
3540 /* We could expand this as
3541 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3542 or to
3543 memcpy (str1, str2_with_nul_at_cstm1, cst);
3544 but in the former case that might increase code size
3545 and in the latter case grow .rodata section too much.
3546 So punt for now. */
3547 if (compare_tree_int (orig_len, destlen) >= 0)
3548 return false;
3550 /* Convert snprintf (str1, cst, "%s", str2) into
3551 strcpy (str1, str2) if strlen (str2) < cst. */
3552 gimple_seq stmts = NULL;
3553 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3554 gimple_seq_add_stmt_without_update (&stmts, repl);
3555 if (tree lhs = gimple_call_lhs (stmt))
3557 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3558 TREE_TYPE (orig_len)))
3559 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3560 repl = gimple_build_assign (lhs, orig_len);
3561 gimple_seq_add_stmt_without_update (&stmts, repl);
3562 gsi_replace_with_seq_vops (gsi, stmts);
3563 /* gsi now points at the assignment to the lhs, get a
3564 stmt iterator to the strcpy call.
3565 ??? We can't use gsi_for_stmt as that doesn't work when the
3566 CFG isn't built yet. */
3567 gimple_stmt_iterator gsi2 = *gsi;
3568 gsi_prev (&gsi2);
3569 fold_stmt (&gsi2);
3571 else
3573 gsi_replace_with_seq_vops (gsi, stmts);
3574 fold_stmt (gsi);
3576 return true;
3578 return false;
3581 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
3582 builtins. FP, FMT, and ARG are the arguments to the call. We don't
3583 fold calls with more than 3 arguments, and ARG may be null in the
3585 2-argument case. Return false if no simplification was possible,
3586 otherwise replace the call and return true. FCODE is the BUILT_IN_*
3587 code of the function to be simplified. */
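/* A sketch (illustrative; the return value must be unused):

     fprintf (fp, "")        =>  call removed
     fprintf (fp, "hello")   =>  fputs ("hello", fp)
     fprintf (fp, "%s", s)   =>  fputs (s, fp)
     fprintf (fp, "%c", c)   =>  fputc (c, fp)
*/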
3589 static bool
3590 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3591 tree fp, tree fmt, tree arg,
3592 enum built_in_function fcode)
3594 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3595 tree fn_fputc, fn_fputs;
3596 const char *fmt_str = NULL;
3598 /* If the return value is used, don't do the transformation. */
3599 if (gimple_call_lhs (stmt) != NULL_TREE)
3600 return false;
3602 /* Check whether the format is a literal string constant. */
3603 fmt_str = c_getstr (fmt);
3604 if (fmt_str == NULL)
3605 return false;
3607 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3609 /* If we're using an unlocked function, assume the other
3610 unlocked functions exist explicitly. */
3611 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3612 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3614 else
3616 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3617 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3620 if (!init_target_chars ())
3621 return false;
3623 /* If the format doesn't contain % args or %%, use fputs. */
3624 if (strchr (fmt_str, target_percent) == NULL)
3626 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3627 && arg)
3628 return false;
3630 /* If the format specifier was "", fprintf does nothing. */
3631 if (fmt_str[0] == '\0')
3633 replace_call_with_value (gsi, NULL_TREE);
3634 return true;
3637 /* When "string" doesn't contain %, replace all cases of
3638 fprintf (fp, string) with fputs (string, fp). The fputs
3639 builtin will take care of special cases like length == 1. */
3640 if (fn_fputs)
3642 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3643 replace_call_with_call_and_fold (gsi, repl);
3644 return true;
3648 /* The other optimizations can be done only on the non-va_list variants. */
3649 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3650 return false;
3652 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3653 else if (strcmp (fmt_str, target_percent_s) == 0)
3655 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3656 return false;
3657 if (fn_fputs)
3659 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3660 replace_call_with_call_and_fold (gsi, repl);
3661 return true;
3665 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3666 else if (strcmp (fmt_str, target_percent_c) == 0)
3668 if (!arg
3669 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3670 return false;
3671 if (fn_fputc)
3673 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3674 replace_call_with_call_and_fold (gsi, repl);
3675 return true;
3679 return false;
3682 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3683 FMT and ARG are the arguments to the call; we don't fold cases with
3684 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3686 Return true if a simplification was made, false otherwise.  FCODE is the
3687 BUILT_IN_* code of the function to be simplified. */
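/* For example (a sketch of the folds implemented below, not verbatim
   GIMPLE):
     printf ("x")         becomes   putchar ('x')
     printf ("abc\n")     becomes   puts ("abc")
     printf ("%s\n", s)   becomes   puts (s)
     printf ("%c", c)     becomes   putchar (c)
   Again, none of these fire when the result of the printf call is used.  */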
3690 static bool
3691 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3692 tree arg, enum built_in_function fcode)
3694 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3695 tree fn_putchar, fn_puts, newarg;
3696 const char *fmt_str = NULL;
3698 /* If the return value is used, don't do the transformation. */
3699 if (gimple_call_lhs (stmt) != NULL_TREE)
3700 return false;
3702 /* Check whether the format is a literal string constant. */
3703 fmt_str = c_getstr (fmt);
3704 if (fmt_str == NULL)
3705 return false;
3707 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3709 /* If we're using an unlocked function, assume the other
3710 unlocked functions exist explicitly. */
3711 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3712 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3714 else
3716 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3717 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3720 if (!init_target_chars ())
3721 return false;
3723 if (strcmp (fmt_str, target_percent_s) == 0
3724 || strchr (fmt_str, target_percent) == NULL)
3726 const char *str;
3728 if (strcmp (fmt_str, target_percent_s) == 0)
3730 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3731 return false;
3733 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3734 return false;
3736 str = c_getstr (arg);
3737 if (str == NULL)
3738 return false;
3740 else
3742 /* The format specifier doesn't contain any '%' characters. */
3743 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3744 && arg)
3745 return false;
3746 str = fmt_str;
3749 /* If the string was "", printf does nothing. */
3750 if (str[0] == '\0')
3752 replace_call_with_value (gsi, NULL_TREE);
3753 return true;
3756 /* If the string has length of 1, call putchar. */
3757 if (str[1] == '\0')
3759 /* Given printf ("c") (where c is any one character),
3760 convert "c"[0] to an int and pass that to the replacement
3761 function. */
3762 newarg = build_int_cst (integer_type_node, str[0]);
3763 if (fn_putchar)
3765 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3766 replace_call_with_call_and_fold (gsi, repl);
3767 return true;
3770 else
3772 /* If the string was "string\n", call puts("string"). */
3773 size_t len = strlen (str);
3774 if ((unsigned char)str[len - 1] == target_newline
3775 && (size_t) (int) len == len
3776 && (int) len > 0)
3778 char *newstr;
3780 /* Create a NUL-terminated string that's one char shorter
3781 than the original, stripping off the trailing '\n'. */
3782 newstr = xstrdup (str);
3783 newstr[len - 1] = '\0';
3784 newarg = build_string_literal (len, newstr);
3785 free (newstr);
3786 if (fn_puts)
3788 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3789 replace_call_with_call_and_fold (gsi, repl);
3790 return true;
3793 else
3794 /* We'd like to arrange to call fputs(string,stdout) here,
3795 but we need stdout and don't have a way to get it yet. */
3796 return false;
3800 /* The other optimizations can be done only on the non-va_list variants. */
3801 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3802 return false;
3804 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3805 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3807 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3808 return false;
3809 if (fn_puts)
3811 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3812 replace_call_with_call_and_fold (gsi, repl);
3813 return true;
3817 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3818 else if (strcmp (fmt_str, target_percent_c) == 0)
3820 if (!arg || ! useless_type_conversion_p (integer_type_node,
3821 TREE_TYPE (arg)))
3822 return false;
3823 if (fn_putchar)
3825 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3826 replace_call_with_call_and_fold (gsi, repl);
3827 return true;
3831 return false;
3836 /* Fold a call to __builtin_strlen. */
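/* E.g. strlen of an argument known to point to the string "abc" folds to
   the constant 3; otherwise, when only a range of possible lengths is
   known, the call is kept and the range [MINLEN, MAXLEN] is recorded for
   its lhs.  */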
3838 static bool
3839 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3841 gimple *stmt = gsi_stmt (*gsi);
3842 tree arg = gimple_call_arg (stmt, 0);
3844 wide_int minlen;
3845 wide_int maxlen;
3847 c_strlen_data lendata = { };
3848 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
3849 && !lendata.decl
3850 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
3851 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
3853 /* The range of lengths refers to either a single constant
3854 string or to the longest and shortest constant string
3855 referenced by the argument of the strlen() call, or to
3856 the strings that can possibly be stored in the arrays
3857 the argument refers to. */
3858 minlen = wi::to_wide (lendata.minlen);
3859 maxlen = wi::to_wide (lendata.maxlen);
3861 else
3863 unsigned prec = TYPE_PRECISION (sizetype);
3865 minlen = wi::shwi (0, prec);
3866 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3869 if (minlen == maxlen)
3871 /* Fold the strlen call to a constant. */
3872 tree type = TREE_TYPE (lendata.minlen);
3873 tree len = force_gimple_operand_gsi (gsi,
3874 wide_int_to_tree (type, minlen),
3875 true, NULL, true, GSI_SAME_STMT);
3876 replace_call_with_value (gsi, len);
3877 return true;
3880 /* Set the strlen() range to [0, MAXLEN]. */
3881 if (tree lhs = gimple_call_lhs (stmt))
3882 set_strlen_range (lhs, minlen, maxlen);
3884 return false;
3887 /* Fold a call to __builtin_acc_on_device. */
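/* The folding below collapses the call to
     (ARG0 == VAL_HOST) | (ARG0 == VAL_DEV)
   where the two constants depend on whether this is the host or the
   accelerator compiler.  */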
3889 static bool
3890 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3892 /* Defer folding until we know which compiler we're in. */
3893 if (symtab->state != EXPANSION)
3894 return false;
3896 unsigned val_host = GOMP_DEVICE_HOST;
3897 unsigned val_dev = GOMP_DEVICE_NONE;
3899 #ifdef ACCEL_COMPILER
3900 val_host = GOMP_DEVICE_NOT_HOST;
3901 val_dev = ACCEL_COMPILER_acc_device;
3902 #endif
3904 location_t loc = gimple_location (gsi_stmt (*gsi));
3906 tree host_eq = make_ssa_name (boolean_type_node);
3907 gimple *host_ass = gimple_build_assign
3908 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3909 gimple_set_location (host_ass, loc);
3910 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3912 tree dev_eq = make_ssa_name (boolean_type_node);
3913 gimple *dev_ass = gimple_build_assign
3914 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3915 gimple_set_location (dev_ass, loc);
3916 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3918 tree result = make_ssa_name (boolean_type_node);
3919 gimple *result_ass = gimple_build_assign
3920 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3921 gimple_set_location (result_ass, loc);
3922 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3924 replace_call_with_value (gsi, result);
3926 return true;
3929 /* Fold realloc (0, n) -> malloc (n). */
3931 static bool
3932 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3934 gimple *stmt = gsi_stmt (*gsi);
3935 tree arg = gimple_call_arg (stmt, 0);
3936 tree size = gimple_call_arg (stmt, 1);
3938 if (operand_equal_p (arg, null_pointer_node, 0))
3940 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3941 if (fn_malloc)
3943 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3944 replace_call_with_call_and_fold (gsi, repl);
3945 return true;
3948 return false;
3951 /* Number of bytes into which any type other than aggregate or vector
3952 types should fit. */
3953 static constexpr size_t clear_padding_unit
3954 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
3955 /* Buffer size on which __builtin_clear_padding folding code works. */
3956 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
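/* As an illustration (assuming a typical target where int is 4 bytes and
   4-byte aligned), in
     struct S { char c; int i; };
   bytes 1-3 are padding between the members, and it is such bytes that
   the folding below arranges to clear.  */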
3958 /* Data passed through __builtin_clear_padding folding. */
3959 struct clear_padding_struct {
3960 location_t loc;
3961 /* 0 during __builtin_clear_padding folding, nonzero during
3962 clear_type_padding_in_mask. In that case, instead of clearing the
3963 non-padding bits in union_ptr array clear the padding bits in there. */
3964 bool clear_in_mask;
3965 tree base;
3966 tree alias_type;
3967 gimple_stmt_iterator *gsi;
3968 /* Alignment of buf->base + 0. */
3969 unsigned align;
3970 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
3971 HOST_WIDE_INT off;
3972 /* Number of padding bytes before buf->off that don't have padding clear
3973 code emitted yet. */
3974 HOST_WIDE_INT padding_bytes;
3975 /* The size of the whole object. Never emit code to touch
3976 buf->base + buf->sz or following bytes. */
3977 HOST_WIDE_INT sz;
3978 /* Number of bytes recorded in buf->buf. */
3979 size_t size;
3980 /* When inside a union, instead of emitting code we AND the bits into
3981 the union_ptr array. */
3982 unsigned char *union_ptr;
3983 /* Set bits mean padding bits that need to be cleared by the builtin. */
3984 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
3987 /* Emit code to clear padding requested in BUF->buf: set bits
3988 in there stand for padding that should be cleared. FULL is true
3989 if everything from the buffer should be flushed, otherwise
3990 it can leave up to 2 * clear_padding_unit bytes for further
3991 processing. */
3993 static void
3994 clear_padding_flush (clear_padding_struct *buf, bool full)
3996 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
3997 if (!full && buf->size < 2 * clear_padding_unit)
3998 return;
3999 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4000 size_t end = buf->size;
4001 if (!full)
4002 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4003 * clear_padding_unit);
4004 size_t padding_bytes = buf->padding_bytes;
4005 if (buf->union_ptr)
4007 if (buf->clear_in_mask)
4009 /* During clear_type_padding_in_mask, clear the padding
4010 bits set in buf->buf in the buf->union_ptr mask. */
4011 for (size_t i = 0; i < end; i++)
4013 if (buf->buf[i] == (unsigned char) ~0)
4014 padding_bytes++;
4015 else
4017 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4018 0, padding_bytes);
4019 padding_bytes = 0;
4020 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4023 if (full)
4025 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4026 0, padding_bytes);
4027 buf->off = 0;
4028 buf->size = 0;
4029 buf->padding_bytes = 0;
4031 else
4033 memmove (buf->buf, buf->buf + end, buf->size - end);
4034 buf->off += end;
4035 buf->size -= end;
4036 buf->padding_bytes = padding_bytes;
4038 return;
4040 /* Inside of a union, instead of emitting any code, clear all
4041 bits in the union_ptr buffer that are clear in buf.
4042 Whole padding bytes don't clear anything. */
4043 for (size_t i = 0; i < end; i++)
4045 if (buf->buf[i] == (unsigned char) ~0)
4046 padding_bytes++;
4047 else
4049 padding_bytes = 0;
4050 buf->union_ptr[buf->off + i] &= buf->buf[i];
4053 if (full)
4055 buf->off = 0;
4056 buf->size = 0;
4057 buf->padding_bytes = 0;
4059 else
4061 memmove (buf->buf, buf->buf + end, buf->size - end);
4062 buf->off += end;
4063 buf->size -= end;
4064 buf->padding_bytes = padding_bytes;
4066 return;
4068 size_t wordsize = UNITS_PER_WORD;
4069 for (size_t i = 0; i < end; i += wordsize)
4071 size_t nonzero_first = wordsize;
4072 size_t nonzero_last = 0;
4073 size_t zero_first = wordsize;
4074 size_t zero_last = 0;
4075 bool all_ones = true, bytes_only = true;
4076 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4077 > (unsigned HOST_WIDE_INT) buf->sz)
4079 gcc_assert (wordsize > 1);
4080 wordsize /= 2;
4081 i -= wordsize;
4082 continue;
4084 for (size_t j = i; j < i + wordsize && j < end; j++)
4086 if (buf->buf[j])
4088 if (nonzero_first == wordsize)
4090 nonzero_first = j - i;
4091 nonzero_last = j - i;
4093 if (nonzero_last != j - i)
4094 all_ones = false;
4095 nonzero_last = j + 1 - i;
4097 else
4099 if (zero_first == wordsize)
4100 zero_first = j - i;
4101 zero_last = j + 1 - i;
4103 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4105 all_ones = false;
4106 bytes_only = false;
4109 size_t padding_end = i;
4110 if (padding_bytes)
4112 if (nonzero_first == 0
4113 && nonzero_last == wordsize
4114 && all_ones)
4116 /* All bits are padding and we had some padding
4117 before too. Just extend it. */
4118 padding_bytes += wordsize;
4119 continue;
4121 if (all_ones && nonzero_first == 0)
4123 padding_bytes += nonzero_last;
4124 padding_end += nonzero_last;
4125 nonzero_first = wordsize;
4126 nonzero_last = 0;
4128 else if (bytes_only && nonzero_first == 0)
4130 gcc_assert (zero_first && zero_first != wordsize);
4131 padding_bytes += zero_first;
4132 padding_end += zero_first;
4134 tree atype, src;
4135 if (padding_bytes == 1)
4137 atype = char_type_node;
4138 src = build_zero_cst (char_type_node);
4140 else
4142 atype = build_array_type_nelts (char_type_node, padding_bytes);
4143 src = build_constructor (atype, NULL);
4145 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4146 build_int_cst (buf->alias_type,
4147 buf->off + padding_end
4148 - padding_bytes));
4149 gimple *g = gimple_build_assign (dst, src);
4150 gimple_set_location (g, buf->loc);
4151 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4152 padding_bytes = 0;
4153 buf->padding_bytes = 0;
4155 if (nonzero_first == wordsize)
4156 /* All bits in a word are 0, there are no padding bits. */
4157 continue;
4158 if (all_ones && nonzero_last == wordsize)
4160 /* All bits between nonzero_first and end of word are padding
4161 bits, start counting padding_bytes. */
4162 padding_bytes = nonzero_last - nonzero_first;
4163 continue;
4165 if (bytes_only)
4167 /* If bitfields aren't involved in this word, prefer storing
4168 individual bytes or groups of them over performing a RMW
4169 operation on the whole word. */
4170 gcc_assert (i + zero_last <= end);
4171 for (size_t j = padding_end; j < i + zero_last; j++)
4173 if (buf->buf[j])
4175 size_t k;
4176 for (k = j; k < i + zero_last; k++)
4177 if (buf->buf[k] == 0)
4178 break;
4179 HOST_WIDE_INT off = buf->off + j;
4180 tree atype, src;
4181 if (k - j == 1)
4183 atype = char_type_node;
4184 src = build_zero_cst (char_type_node);
4186 else
4188 atype = build_array_type_nelts (char_type_node, k - j);
4189 src = build_constructor (atype, NULL);
4191 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4192 buf->base,
4193 build_int_cst (buf->alias_type, off));
4194 gimple *g = gimple_build_assign (dst, src);
4195 gimple_set_location (g, buf->loc);
4196 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4197 j = k;
4200 if (nonzero_last == wordsize)
4201 padding_bytes = nonzero_last - zero_last;
4202 continue;
4204 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4206 if (nonzero_last - nonzero_first <= eltsz
4207 && ((nonzero_first & ~(eltsz - 1))
4208 == ((nonzero_last - 1) & ~(eltsz - 1))))
4210 tree type;
4211 if (eltsz == 1)
4212 type = char_type_node;
4213 else
4214 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4216 size_t start = nonzero_first & ~(eltsz - 1);
4217 HOST_WIDE_INT off = buf->off + i + start;
4218 tree atype = type;
4219 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4220 atype = build_aligned_type (type, buf->align);
4221 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4222 build_int_cst (buf->alias_type, off));
4223 tree src;
4224 gimple *g;
4225 if (all_ones
4226 && nonzero_first == start
4227 && nonzero_last == start + eltsz)
4228 src = build_zero_cst (type);
4229 else
4231 src = make_ssa_name (type);
4232 g = gimple_build_assign (src, unshare_expr (dst));
4233 gimple_set_location (g, buf->loc);
4234 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4235 tree mask = native_interpret_expr (type,
4236 buf->buf + i + start,
4237 eltsz);
4238 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4239 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4240 tree src_masked = make_ssa_name (type);
4241 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4242 src, mask);
4243 gimple_set_location (g, buf->loc);
4244 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4245 src = src_masked;
4247 g = gimple_build_assign (dst, src);
4248 gimple_set_location (g, buf->loc);
4249 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4250 break;
4254 if (full)
4256 if (padding_bytes)
4258 tree atype, src;
4259 if (padding_bytes == 1)
4261 atype = char_type_node;
4262 src = build_zero_cst (char_type_node);
4264 else
4266 atype = build_array_type_nelts (char_type_node, padding_bytes);
4267 src = build_constructor (atype, NULL);
4269 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4270 build_int_cst (buf->alias_type,
4271 buf->off + end
4272 - padding_bytes));
4273 gimple *g = gimple_build_assign (dst, src);
4274 gimple_set_location (g, buf->loc);
4275 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4277 size_t end_rem = end % UNITS_PER_WORD;
4278 buf->off += end - end_rem;
4279 buf->size = end_rem;
4280 memset (buf->buf, 0, buf->size);
4281 buf->padding_bytes = 0;
4283 else
4285 memmove (buf->buf, buf->buf + end, buf->size - end);
4286 buf->off += end;
4287 buf->size -= end;
4288 buf->padding_bytes = padding_bytes;
4292 /* Append PADDING_BYTES padding bytes. */
4294 static void
4295 clear_padding_add_padding (clear_padding_struct *buf,
4296 HOST_WIDE_INT padding_bytes)
4298 if (padding_bytes == 0)
4299 return;
4300 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4301 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4302 clear_padding_flush (buf, false);
4303 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4304 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4306 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4307 padding_bytes -= clear_padding_buf_size - buf->size;
4308 buf->size = clear_padding_buf_size;
4309 clear_padding_flush (buf, false);
4310 gcc_assert (buf->padding_bytes);
4311 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4312 are guaranteed to be all ones. */
4313 padding_bytes += buf->size;
4314 buf->size = padding_bytes % UNITS_PER_WORD;
4315 memset (buf->buf, ~0, buf->size);
4316 buf->off += padding_bytes - buf->size;
4317 buf->padding_bytes += padding_bytes - buf->size;
4319 else
4321 memset (buf->buf + buf->size, ~0, padding_bytes);
4322 buf->size += padding_bytes;
4326 static void clear_padding_type (clear_padding_struct *, tree, HOST_WIDE_INT);
4328 /* Clear padding bits of union type TYPE. */
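/* A byte is treated as padding only if it is padding in every member:
   e.g. for union { char c; int i; } nothing is cleared, because the int
   member covers the bytes that are only tail padding for the char
   member.  */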
4330 static void
4331 clear_padding_union (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4333 clear_padding_struct *union_buf;
4334 HOST_WIDE_INT start_off = 0, next_off = 0;
4335 size_t start_size = 0;
4336 if (buf->union_ptr)
4338 start_off = buf->off + buf->size;
4339 next_off = start_off + sz;
4340 start_size = start_off % UNITS_PER_WORD;
4341 start_off -= start_size;
4342 clear_padding_flush (buf, true);
4343 union_buf = buf;
4345 else
4347 if (sz + buf->size > clear_padding_buf_size)
4348 clear_padding_flush (buf, false);
4349 union_buf = XALLOCA (clear_padding_struct);
4350 union_buf->loc = buf->loc;
4351 union_buf->clear_in_mask = buf->clear_in_mask;
4352 union_buf->base = NULL_TREE;
4353 union_buf->alias_type = NULL_TREE;
4354 union_buf->gsi = NULL;
4355 union_buf->align = 0;
4356 union_buf->off = 0;
4357 union_buf->padding_bytes = 0;
4358 union_buf->sz = sz;
4359 union_buf->size = 0;
4360 if (sz + buf->size <= clear_padding_buf_size)
4361 union_buf->union_ptr = buf->buf + buf->size;
4362 else
4363 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4364 memset (union_buf->union_ptr, ~0, sz);
4367 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4368 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4370 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4372 if (TREE_TYPE (field) == error_mark_node)
4373 continue;
4374 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4375 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4376 if (!buf->clear_in_mask)
4377 error_at (buf->loc, "flexible array member %qD does not have "
4378 "well defined padding bits for %qs",
4379 field, "__builtin_clear_padding");
4380 continue;
4382 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4383 gcc_assert (union_buf->size == 0);
4384 union_buf->off = start_off;
4385 union_buf->size = start_size;
4386 memset (union_buf->buf, ~0, start_size);
4387 clear_padding_type (union_buf, TREE_TYPE (field), fldsz);
4388 clear_padding_add_padding (union_buf, sz - fldsz);
4389 clear_padding_flush (union_buf, true);
4392 if (buf == union_buf)
4394 buf->off = next_off;
4395 buf->size = next_off % UNITS_PER_WORD;
4396 buf->off -= buf->size;
4397 memset (buf->buf, ~0, buf->size);
4399 else if (sz + buf->size <= clear_padding_buf_size)
4400 buf->size += sz;
4401 else
4403 unsigned char *union_ptr = union_buf->union_ptr;
4404 while (sz)
4406 clear_padding_flush (buf, false);
4407 HOST_WIDE_INT this_sz
4408 = MIN ((unsigned HOST_WIDE_INT) sz,
4409 clear_padding_buf_size - buf->size);
4410 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4411 buf->size += this_sz;
4412 union_ptr += this_sz;
4413 sz -= this_sz;
4415 XDELETE (union_buf->union_ptr);
4419 /* The only known floating point formats with padding bits are the
4420 IEEE extended ones. */
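/* This matches e.g. the x86 and m68k 80-bit extended formats, whose
   in-memory representation is wider than the bits the format actually
   uses.  */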
4422 static bool
4423 clear_padding_real_needs_padding_p (tree type)
4425 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4426 return (fmt->b == 2
4427 && fmt->signbit_ro == fmt->signbit_rw
4428 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4431 /* Return true if TYPE might contain any padding bits. */
4433 static bool
4434 clear_padding_type_may_have_padding_p (tree type)
4436 switch (TREE_CODE (type))
4438 case RECORD_TYPE:
4439 case UNION_TYPE:
4440 return true;
4441 case ARRAY_TYPE:
4442 case COMPLEX_TYPE:
4443 case VECTOR_TYPE:
4444 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4445 case REAL_TYPE:
4446 return clear_padding_real_needs_padding_p (type);
4447 default:
4448 return false;
4452 /* Emit a runtime loop:
4453 for (; buf.base != end; buf.base += sz)
4454 __builtin_clear_padding (buf.base); */
4456 static void
4457 clear_padding_emit_loop (clear_padding_struct *buf, tree type, tree end)
4459 tree l1 = create_artificial_label (buf->loc);
4460 tree l2 = create_artificial_label (buf->loc);
4461 tree l3 = create_artificial_label (buf->loc);
4462 gimple *g = gimple_build_goto (l2);
4463 gimple_set_location (g, buf->loc);
4464 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4465 g = gimple_build_label (l1);
4466 gimple_set_location (g, buf->loc);
4467 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4468 clear_padding_type (buf, type, buf->sz);
4469 clear_padding_flush (buf, true);
4470 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4471 size_int (buf->sz));
4472 gimple_set_location (g, buf->loc);
4473 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4474 g = gimple_build_label (l2);
4475 gimple_set_location (g, buf->loc);
4476 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4477 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4478 gimple_set_location (g, buf->loc);
4479 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4480 g = gimple_build_label (l3);
4481 gimple_set_location (g, buf->loc);
4482 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4485 /* Clear padding bits for TYPE. Called recursively from
4486 gimple_fold_builtin_clear_padding. */
4488 static void
4489 clear_padding_type (clear_padding_struct *buf, tree type, HOST_WIDE_INT sz)
4491 switch (TREE_CODE (type))
4493 case RECORD_TYPE:
4494 HOST_WIDE_INT cur_pos;
4495 cur_pos = 0;
4496 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4497 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4499 tree ftype = TREE_TYPE (field);
4500 if (DECL_BIT_FIELD (field))
4502 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4503 if (fldsz == 0)
4504 continue;
4505 HOST_WIDE_INT pos = int_byte_position (field);
4506 HOST_WIDE_INT bpos
4507 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4508 bpos %= BITS_PER_UNIT;
4509 HOST_WIDE_INT end
4510 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4511 if (pos + end > cur_pos)
4513 clear_padding_add_padding (buf, pos + end - cur_pos);
4514 cur_pos = pos + end;
4516 gcc_assert (cur_pos > pos
4517 && ((unsigned HOST_WIDE_INT) buf->size
4518 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4519 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4520 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4521 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4522 " in %qs", "__builtin_clear_padding");
4523 else if (BYTES_BIG_ENDIAN)
4525 /* Big endian. */
4526 if (bpos + fldsz <= BITS_PER_UNIT)
4527 *p &= ~(((1 << fldsz) - 1)
4528 << (BITS_PER_UNIT - bpos - fldsz));
4529 else
4531 if (bpos)
4533 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4534 p++;
4535 fldsz -= BITS_PER_UNIT - bpos;
4537 memset (p, 0, fldsz / BITS_PER_UNIT);
4538 p += fldsz / BITS_PER_UNIT;
4539 fldsz %= BITS_PER_UNIT;
4540 if (fldsz)
4541 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4544 else
4546 /* Little endian. */
4547 if (bpos + fldsz <= BITS_PER_UNIT)
4548 *p &= ~(((1 << fldsz) - 1) << bpos);
4549 else
4551 if (bpos)
4553 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4554 p++;
4555 fldsz -= BITS_PER_UNIT - bpos;
4557 memset (p, 0, fldsz / BITS_PER_UNIT);
4558 p += fldsz / BITS_PER_UNIT;
4559 fldsz %= BITS_PER_UNIT;
4560 if (fldsz)
4561 *p &= ~((1 << fldsz) - 1);
4565 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4567 if (ftype == error_mark_node)
4568 continue;
4569 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4570 && !COMPLETE_TYPE_P (ftype));
4571 if (!buf->clear_in_mask)
4572 error_at (buf->loc, "flexible array member %qD does not "
4573 "have well defined padding bits for %qs",
4574 field, "__builtin_clear_padding");
4576 else if (is_empty_type (TREE_TYPE (field)))
4577 continue;
4578 else
4580 HOST_WIDE_INT pos = int_byte_position (field);
4581 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4582 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4583 clear_padding_add_padding (buf, pos - cur_pos);
4584 cur_pos = pos;
4585 clear_padding_type (buf, TREE_TYPE (field), fldsz);
4586 cur_pos += fldsz;
4589 gcc_assert (sz >= cur_pos);
4590 clear_padding_add_padding (buf, sz - cur_pos);
4591 break;
4592 case ARRAY_TYPE:
4593 HOST_WIDE_INT nelts, fldsz;
4594 fldsz = int_size_in_bytes (TREE_TYPE (type));
4595 if (fldsz == 0)
4596 break;
4597 nelts = sz / fldsz;
4598 if (nelts > 1
4599 && sz > 8 * UNITS_PER_WORD
4600 && buf->union_ptr == NULL
4601 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4603 /* For a sufficiently large array of more than one element,
4604 emit a runtime loop to keep code size manageable. */
4605 tree base = buf->base;
4606 unsigned int prev_align = buf->align;
4607 HOST_WIDE_INT off = buf->off + buf->size;
4608 HOST_WIDE_INT prev_sz = buf->sz;
4609 clear_padding_flush (buf, true);
4610 tree elttype = TREE_TYPE (type);
4611 buf->base = create_tmp_var (build_pointer_type (elttype));
4612 tree end = make_ssa_name (TREE_TYPE (buf->base));
4613 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4614 base, size_int (off));
4615 gimple_set_location (g, buf->loc);
4616 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4617 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4618 size_int (sz));
4619 gimple_set_location (g, buf->loc);
4620 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4621 buf->sz = fldsz;
4622 buf->align = TYPE_ALIGN (elttype);
4623 buf->off = 0;
4624 buf->size = 0;
4625 clear_padding_emit_loop (buf, elttype, end);
4626 buf->base = base;
4627 buf->sz = prev_sz;
4628 buf->align = prev_align;
4629 buf->size = off % UNITS_PER_WORD;
4630 buf->off = off - buf->size;
4631 memset (buf->buf, 0, buf->size);
4632 break;
4634 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4635 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4636 break;
4637 case UNION_TYPE:
4638 clear_padding_union (buf, type, sz);
4639 break;
4640 case REAL_TYPE:
4641 gcc_assert ((size_t) sz <= clear_padding_unit);
4642 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4643 clear_padding_flush (buf, false);
4644 if (clear_padding_real_needs_padding_p (type))
4646 /* Use native_interpret_expr + native_encode_expr to figure out
4647 which bits are padding. */
4648 memset (buf->buf + buf->size, ~0, sz);
4649 tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
4650 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4651 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4652 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4653 for (size_t i = 0; i < (size_t) sz; i++)
4654 buf->buf[buf->size + i] ^= ~0;
4656 else
4657 memset (buf->buf + buf->size, 0, sz);
4658 buf->size += sz;
4659 break;
4660 case COMPLEX_TYPE:
4661 fldsz = int_size_in_bytes (TREE_TYPE (type));
4662 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4663 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4664 break;
4665 case VECTOR_TYPE:
4666 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4667 fldsz = int_size_in_bytes (TREE_TYPE (type));
4668 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4669 clear_padding_type (buf, TREE_TYPE (type), fldsz);
4670 break;
4671 case NULLPTR_TYPE:
4672 gcc_assert ((size_t) sz <= clear_padding_unit);
4673 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4674 clear_padding_flush (buf, false);
4675 memset (buf->buf + buf->size, ~0, sz);
4676 buf->size += sz;
4677 break;
4678 default:
4679 gcc_assert ((size_t) sz <= clear_padding_unit);
4680 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4681 clear_padding_flush (buf, false);
4682 memset (buf->buf + buf->size, 0, sz);
4683 buf->size += sz;
4684 break;
4688 /* Clear padding bits of TYPE in MASK. */
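/* MASK is expected to cover int_size_in_bytes (TYPE) bytes; on return
   every bit corresponding to a padding bit of TYPE has been cleared
   in it.  */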
4690 void
4691 clear_type_padding_in_mask (tree type, unsigned char *mask)
4693 clear_padding_struct buf;
4694 buf.loc = UNKNOWN_LOCATION;
4695 buf.clear_in_mask = true;
4696 buf.base = NULL_TREE;
4697 buf.alias_type = NULL_TREE;
4698 buf.gsi = NULL;
4699 buf.align = 0;
4700 buf.off = 0;
4701 buf.padding_bytes = 0;
4702 buf.sz = int_size_in_bytes (type);
4703 buf.size = 0;
4704 buf.union_ptr = mask;
4705 clear_padding_type (&buf, type, buf.sz);
4706 clear_padding_flush (&buf, true);
4709 /* Fold __builtin_clear_padding builtin. */
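/* The call always has two arguments: the pointer to clear, and a second
   argument that exists to carry the type to be cleared.  TYPE below is
   the pointed-to type of that second argument's pointer type.  */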
4711 static bool
4712 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4714 gimple *stmt = gsi_stmt (*gsi);
4715 gcc_assert (gimple_call_num_args (stmt) == 2);
4716 tree ptr = gimple_call_arg (stmt, 0);
4717 tree typearg = gimple_call_arg (stmt, 1);
4718 tree type = TREE_TYPE (TREE_TYPE (typearg));
4719 location_t loc = gimple_location (stmt);
4720 clear_padding_struct buf;
4721 gimple_stmt_iterator gsiprev = *gsi;
4722 /* This should be folded during the lower pass. */
4723 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4724 gcc_assert (COMPLETE_TYPE_P (type));
4725 gsi_prev (&gsiprev);
4727 buf.loc = loc;
4728 buf.clear_in_mask = false;
4729 buf.base = ptr;
4730 buf.alias_type = NULL_TREE;
4731 buf.gsi = gsi;
4732 buf.align = get_pointer_alignment (ptr);
4733 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4734 buf.align = MAX (buf.align, talign);
4735 buf.off = 0;
4736 buf.padding_bytes = 0;
4737 buf.size = 0;
4738 buf.sz = int_size_in_bytes (type);
4739 buf.union_ptr = NULL;
4740 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4741 sorry_at (loc, "%s not supported for variable length aggregates",
4742 "__builtin_clear_padding");
4743 /* The implementation currently assumes 8-bit host and target chars,
4744 which is the case for all currently supported targets and hosts and
4745 is required e.g. for the native_{encode,interpret}* APIs. */
4746 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4747 sorry_at (loc, "%s not supported on this target",
4748 "__builtin_clear_padding");
4749 else if (!clear_padding_type_may_have_padding_p (type))
4751 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4753 tree sz = TYPE_SIZE_UNIT (type);
4754 tree elttype = type;
4755 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4756 while (TREE_CODE (elttype) == ARRAY_TYPE
4757 && int_size_in_bytes (elttype) < 0)
4758 elttype = TREE_TYPE (elttype);
4759 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4760 gcc_assert (eltsz >= 0);
4761 if (eltsz)
4763 buf.base = create_tmp_var (build_pointer_type (elttype));
4764 tree end = make_ssa_name (TREE_TYPE (buf.base));
4765 gimple *g = gimple_build_assign (buf.base, ptr);
4766 gimple_set_location (g, loc);
4767 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4768 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4769 gimple_set_location (g, loc);
4770 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4771 buf.sz = eltsz;
4772 buf.align = TYPE_ALIGN (elttype);
4773 buf.alias_type = build_pointer_type (elttype);
4774 clear_padding_emit_loop (&buf, elttype, end);
4777 else
4779 if (!is_gimple_mem_ref_addr (buf.base))
4781 buf.base = make_ssa_name (TREE_TYPE (ptr));
4782 gimple *g = gimple_build_assign (buf.base, ptr);
4783 gimple_set_location (g, loc);
4784 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4786 buf.alias_type = build_pointer_type (type);
4787 clear_padding_type (&buf, type, buf.sz);
4788 clear_padding_flush (&buf, true);
4791 gimple_stmt_iterator gsiprev2 = *gsi;
4792 gsi_prev (&gsiprev2);
4793 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4794 gsi_replace (gsi, gimple_build_nop (), true);
4795 else
4797 gsi_remove (gsi, true);
4798 *gsi = gsiprev2;
4800 return true;
4803 /* Fold the non-target builtin at *GSI and return whether any simplification
4804 was made. */
4806 static bool
4807 gimple_fold_builtin (gimple_stmt_iterator *gsi)
4809 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
4810 tree callee = gimple_call_fndecl (stmt);
4812 /* Give up for always_inline inline builtins until they are
4813 inlined. */
4814 if (avoid_folding_inline_builtin (callee))
4815 return false;
4817 unsigned n = gimple_call_num_args (stmt);
4818 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
4819 switch (fcode)
4821 case BUILT_IN_BCMP:
4822 return gimple_fold_builtin_bcmp (gsi);
4823 case BUILT_IN_BCOPY:
4824 return gimple_fold_builtin_bcopy (gsi);
4825 case BUILT_IN_BZERO:
4826 return gimple_fold_builtin_bzero (gsi);
4828 case BUILT_IN_MEMSET:
4829 return gimple_fold_builtin_memset (gsi,
4830 gimple_call_arg (stmt, 1),
4831 gimple_call_arg (stmt, 2));
4832 case BUILT_IN_MEMCPY:
4833 case BUILT_IN_MEMPCPY:
4834 case BUILT_IN_MEMMOVE:
4835 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
4836 gimple_call_arg (stmt, 1), fcode);
4837 case BUILT_IN_SPRINTF_CHK:
4838 case BUILT_IN_VSPRINTF_CHK:
4839 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
4840 case BUILT_IN_STRCAT_CHK:
4841 return gimple_fold_builtin_strcat_chk (gsi);
4842 case BUILT_IN_STRNCAT_CHK:
4843 return gimple_fold_builtin_strncat_chk (gsi);
4844 case BUILT_IN_STRLEN:
4845 return gimple_fold_builtin_strlen (gsi);
4846 case BUILT_IN_STRCPY:
4847 return gimple_fold_builtin_strcpy (gsi,
4848 gimple_call_arg (stmt, 0),
4849 gimple_call_arg (stmt, 1));
4850 case BUILT_IN_STRNCPY:
4851 return gimple_fold_builtin_strncpy (gsi,
4852 gimple_call_arg (stmt, 0),
4853 gimple_call_arg (stmt, 1),
4854 gimple_call_arg (stmt, 2));
4855 case BUILT_IN_STRCAT:
4856 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
4857 gimple_call_arg (stmt, 1));
4858 case BUILT_IN_STRNCAT:
4859 return gimple_fold_builtin_strncat (gsi);
4860 case BUILT_IN_INDEX:
4861 case BUILT_IN_STRCHR:
4862 return gimple_fold_builtin_strchr (gsi, false);
4863 case BUILT_IN_RINDEX:
4864 case BUILT_IN_STRRCHR:
4865 return gimple_fold_builtin_strchr (gsi, true);
4866 case BUILT_IN_STRSTR:
4867 return gimple_fold_builtin_strstr (gsi);
4868 case BUILT_IN_STRCMP:
4869 case BUILT_IN_STRCMP_EQ:
4870 case BUILT_IN_STRCASECMP:
4871 case BUILT_IN_STRNCMP:
4872 case BUILT_IN_STRNCMP_EQ:
4873 case BUILT_IN_STRNCASECMP:
4874 return gimple_fold_builtin_string_compare (gsi);
4875 case BUILT_IN_MEMCHR:
4876 return gimple_fold_builtin_memchr (gsi);
4877 case BUILT_IN_FPUTS:
4878 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4879 gimple_call_arg (stmt, 1), false);
4880 case BUILT_IN_FPUTS_UNLOCKED:
4881 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
4882 gimple_call_arg (stmt, 1), true);
4883 case BUILT_IN_MEMCPY_CHK:
4884 case BUILT_IN_MEMPCPY_CHK:
4885 case BUILT_IN_MEMMOVE_CHK:
4886 case BUILT_IN_MEMSET_CHK:
4887 return gimple_fold_builtin_memory_chk (gsi,
4888 gimple_call_arg (stmt, 0),
4889 gimple_call_arg (stmt, 1),
4890 gimple_call_arg (stmt, 2),
4891 gimple_call_arg (stmt, 3),
4892 fcode);
4893 case BUILT_IN_STPCPY:
4894 return gimple_fold_builtin_stpcpy (gsi);
4895 case BUILT_IN_STRCPY_CHK:
4896 case BUILT_IN_STPCPY_CHK:
4897 return gimple_fold_builtin_stxcpy_chk (gsi,
4898 gimple_call_arg (stmt, 0),
4899 gimple_call_arg (stmt, 1),
4900 gimple_call_arg (stmt, 2),
4901 fcode);
4902 case BUILT_IN_STRNCPY_CHK:
4903 case BUILT_IN_STPNCPY_CHK:
4904 return gimple_fold_builtin_stxncpy_chk (gsi,
4905 gimple_call_arg (stmt, 0),
4906 gimple_call_arg (stmt, 1),
4907 gimple_call_arg (stmt, 2),
4908 gimple_call_arg (stmt, 3),
4909 fcode);
4910 case BUILT_IN_SNPRINTF_CHK:
4911 case BUILT_IN_VSNPRINTF_CHK:
4912 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
4914 case BUILT_IN_FPRINTF:
4915 case BUILT_IN_FPRINTF_UNLOCKED:
4916 case BUILT_IN_VFPRINTF:
4917 if (n == 2 || n == 3)
4918 return gimple_fold_builtin_fprintf (gsi,
4919 gimple_call_arg (stmt, 0),
4920 gimple_call_arg (stmt, 1),
4921 n == 3
4922 ? gimple_call_arg (stmt, 2)
4923 : NULL_TREE,
4924 fcode);
4925 break;
4926 case BUILT_IN_FPRINTF_CHK:
4927 case BUILT_IN_VFPRINTF_CHK:
4928 if (n == 3 || n == 4)
4929 return gimple_fold_builtin_fprintf (gsi,
4930 gimple_call_arg (stmt, 0),
4931 gimple_call_arg (stmt, 2),
4932 n == 4
4933 ? gimple_call_arg (stmt, 3)
4934 : NULL_TREE,
4935 fcode);
4936 break;
4937 case BUILT_IN_PRINTF:
4938 case BUILT_IN_PRINTF_UNLOCKED:
4939 case BUILT_IN_VPRINTF:
4940 if (n == 1 || n == 2)
4941 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
4942 n == 2
4943 ? gimple_call_arg (stmt, 1)
4944 : NULL_TREE, fcode);
4945 break;
4946 case BUILT_IN_PRINTF_CHK:
4947 case BUILT_IN_VPRINTF_CHK:
4948 if (n == 2 || n == 3)
4949 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
4950 n == 3
4951 ? gimple_call_arg (stmt, 2)
4952 : NULL_TREE, fcode);
4953 break;
4954 case BUILT_IN_ACC_ON_DEVICE:
4955 return gimple_fold_builtin_acc_on_device (gsi,
4956 gimple_call_arg (stmt, 0));
4957 case BUILT_IN_REALLOC:
4958 return gimple_fold_builtin_realloc (gsi);
4960 case BUILT_IN_CLEAR_PADDING:
4961 return gimple_fold_builtin_clear_padding (gsi);
4963 default:;
4966 /* Try the generic builtin folder. */
4967 bool ignore = (gimple_call_lhs (stmt) == NULL);
4968 tree result = fold_call_stmt (stmt, ignore);
4969 if (result)
4971 if (ignore)
4972 STRIP_NOPS (result);
4973 else
4974 result = fold_convert (gimple_call_return_type (stmt), result);
4975 if (!update_call_from_tree (gsi, result))
4976 gimplify_and_update_call_from_tree (gsi, result);
4977 return true;
4980 return false;
4983 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4984 function calls to constants, where possible. */
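/* E.g. a GOACC_DIM_SIZE call for an axis whose size is known to be 8
   folds to the constant 8, and a GOACC_DIM_POS call for an axis of
   size 1 folds to 0.  */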
4986 static tree
4987 fold_internal_goacc_dim (const gimple *call)
4989 int axis = oacc_get_ifn_dim_arg (call);
4990 int size = oacc_get_fn_dim_size (current_function_decl, axis);
4991 tree result = NULL_TREE;
4992 tree type = TREE_TYPE (gimple_call_lhs (call));
4994 switch (gimple_call_internal_fn (call))
4996 case IFN_GOACC_DIM_POS:
4997 /* If the size is 1, we know the answer. */
4998 if (size == 1)
4999 result = build_int_cst (type, 0);
5000 break;
5001 case IFN_GOACC_DIM_SIZE:
5002 /* If the size is not dynamic, we know the answer. */
5003 if (size)
5004 result = build_int_cst (type, size);
5005 break;
5006 default:
5007 break;
5010 return result;
5013 /* Return true if STMT is an __atomic_compare_exchange_N call that is
5014 suitable for conversion into ATOMIC_COMPARE_EXCHANGE when its second
5015 argument is &var and var is only addressable because of such calls. */
5017 bool
5018 optimize_atomic_compare_exchange_p (gimple *stmt)
5020 if (gimple_call_num_args (stmt) != 6
5021 || !flag_inline_atomics
5022 || !optimize
5023 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5024 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5025 || !gimple_vdef (stmt)
5026 || !gimple_vuse (stmt))
5027 return false;
5029 tree fndecl = gimple_call_fndecl (stmt);
5030 switch (DECL_FUNCTION_CODE (fndecl))
5032 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5033 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5034 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5035 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5036 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5037 break;
5038 default:
5039 return false;
5042 tree expected = gimple_call_arg (stmt, 1);
5043 if (TREE_CODE (expected) != ADDR_EXPR
5044 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5045 return false;
5047 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5048 if (!is_gimple_reg_type (etype)
5049 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5050 || TREE_THIS_VOLATILE (etype)
5051 || VECTOR_TYPE_P (etype)
5052 || TREE_CODE (etype) == COMPLEX_TYPE
5053 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5054 might not preserve all the bits. See PR71716. */
5055 || SCALAR_FLOAT_TYPE_P (etype)
5056 || maybe_ne (TYPE_PRECISION (etype),
5057 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5058 return false;
5060 tree weak = gimple_call_arg (stmt, 3);
5061 if (!integer_zerop (weak) && !integer_onep (weak))
5062 return false;
5064 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5065 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5066 machine_mode mode = TYPE_MODE (itype);
5068 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5069 == CODE_FOR_nothing
5070 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5071 return false;
5073 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5074 return false;
5076 return true;
5079 /* Fold
5080 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5081 into
5082 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5083 i = IMAGPART_EXPR <t>;
5084 r = (_Bool) i;
5085 e = REALPART_EXPR <t>; */
5087 void
5088 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5090 gimple *stmt = gsi_stmt (*gsi);
5091 tree fndecl = gimple_call_fndecl (stmt);
5092 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5093 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5094 tree ctype = build_complex_type (itype);
5095 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5096 bool throws = false;
5097 edge e = NULL;
5098 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5099 expected);
5100 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5101 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5102 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5104 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5105 build1 (VIEW_CONVERT_EXPR, itype,
5106 gimple_assign_lhs (g)));
5107 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5109 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5110 + int_size_in_bytes (itype);
5111 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5112 gimple_call_arg (stmt, 0),
5113 gimple_assign_lhs (g),
5114 gimple_call_arg (stmt, 2),
5115 build_int_cst (integer_type_node, flag),
5116 gimple_call_arg (stmt, 4),
5117 gimple_call_arg (stmt, 5));
5118 tree lhs = make_ssa_name (ctype);
5119 gimple_call_set_lhs (g, lhs);
5120 gimple_move_vops (g, stmt);
5121 tree oldlhs = gimple_call_lhs (stmt);
5122 if (stmt_can_throw_internal (cfun, stmt))
5124 throws = true;
5125 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5127 gimple_call_set_nothrow (as_a <gcall *> (g),
5128 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5129 gimple_call_set_lhs (stmt, NULL_TREE);
5130 gsi_replace (gsi, g, true);
5131 if (oldlhs)
5133 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5134 build1 (IMAGPART_EXPR, itype, lhs));
5135 if (throws)
5137 gsi_insert_on_edge_immediate (e, g);
5138 *gsi = gsi_for_stmt (g);
5140 else
5141 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5142 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5143 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5145 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5146 build1 (REALPART_EXPR, itype, lhs));
5147 if (throws && oldlhs == NULL_TREE)
5149 gsi_insert_on_edge_immediate (e, g);
5150 *gsi = gsi_for_stmt (g);
5152 else
5153 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5154 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5156 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5157 VIEW_CONVERT_EXPR,
5158 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5159 gimple_assign_lhs (g)));
5160 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5162 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5163 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5164 *gsi = gsiret;
5167 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
5168 doesn't fit into TYPE.  The test for overflow is done regardless of
5169 -fwrapv, and even for unsigned types. */
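/* For instance, for an 8-bit unsigned TYPE, 200 + 100 = 300 needs nine
   bits and therefore overflows; for an 8-bit signed TYPE so does
   100 + 100 = 200, which doesn't fit into [-128, 127].  */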
5171 bool
5172 arith_overflowed_p (enum tree_code code, const_tree type,
5173 const_tree arg0, const_tree arg1)
5175 widest2_int warg0 = widest2_int_cst (arg0);
5176 widest2_int warg1 = widest2_int_cst (arg1);
5177 widest2_int wres;
5178 switch (code)
5180 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5181 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5182 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5183 default: gcc_unreachable ();
5185 signop sign = TYPE_SIGN (type);
5186 if (sign == UNSIGNED && wi::neg_p (wres))
5187 return true;
5188 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5191 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5192 for the memory it references, otherwise return null. VECTYPE is the
5193 type of the memory vector. */
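/* E.g. an IFN_MASK_LOAD whose mask is all ones loads every element and
   is equivalent to an ordinary (suitably aligned) vector load, which is
   what the MEM_REF built here expresses.  */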
5195 static tree
5196 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5198 tree ptr = gimple_call_arg (call, 0);
5199 tree alias_align = gimple_call_arg (call, 1);
5200 tree mask = gimple_call_arg (call, 2);
5201 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5202 return NULL_TREE;
5204 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5205 if (TYPE_ALIGN (vectype) != align)
5206 vectype = build_aligned_type (vectype, align);
5207 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5208 return fold_build2 (MEM_REF, vectype, ptr, offset);
5211 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5213 static bool
5214 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5216 tree lhs = gimple_call_lhs (call);
5217 if (!lhs)
5218 return false;
5220 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5222 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5223 gimple_set_location (new_stmt, gimple_location (call));
5224 gimple_move_vops (new_stmt, call);
5225 gsi_replace (gsi, new_stmt, false);
5226 return true;
5228 return false;
5231 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5233 static bool
5234 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5236 tree rhs = gimple_call_arg (call, 3);
5237 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5239 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5240 gimple_set_location (new_stmt, gimple_location (call));
5241 gimple_move_vops (new_stmt, call);
5242 gsi_replace (gsi, new_stmt, false);
5243 return true;
5245 return false;
5248 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5249 The statement may be replaced by another statement, e.g., if the call
5250 simplifies to a constant value. Return true if any changes were made.
5251 It is assumed that the operands have been previously folded. */
5253 static bool
5254 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5256 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5257 tree callee;
5258 bool changed = false;
5259 unsigned i;
5261 /* Fold *& in call arguments. */
5262 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5263 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
5265 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
5266 if (tmp)
5268 gimple_call_set_arg (stmt, i, tmp);
5269 changed = true;
5273 /* Check for virtual calls that became direct calls. */
5274 callee = gimple_call_fn (stmt);
5275 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5277 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5279 if (dump_file && virtual_method_call_p (callee)
5280 && !possible_polymorphic_call_target_p
5281 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5282 (OBJ_TYPE_REF_EXPR (callee)))))
5284 fprintf (dump_file,
5285 "Type inheritance inconsistent devirtualization of ");
5286 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5287 fprintf (dump_file, " to ");
5288 print_generic_expr (dump_file, callee, TDF_SLIM);
5289 fprintf (dump_file, "\n");
5292 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5293 changed = true;
5295 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5297 bool final;
5298 vec <cgraph_node *>targets
5299 = possible_polymorphic_call_targets (callee, stmt, &final);
5300 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5302 tree lhs = gimple_call_lhs (stmt);
5303 if (dump_enabled_p ())
5305 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5306 "folding virtual function call to %s\n",
5307 targets.length () == 1
5308 ? targets[0]->name ()
5309 : "__builtin_unreachable");
5311 if (targets.length () == 1)
5313 tree fndecl = targets[0]->decl;
5314 gimple_call_set_fndecl (stmt, fndecl);
5315 changed = true;
5316 /* If changing the call to __cxa_pure_virtual
5317 or similar noreturn function, adjust gimple_call_fntype
5318 too. */
5319 if (gimple_call_noreturn_p (stmt)
5320 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5321 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5322 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5323 == void_type_node))
5324 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5325 /* If the call becomes noreturn, remove the lhs. */
5326 if (lhs
5327 && gimple_call_noreturn_p (stmt)
5328 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5329 || should_remove_lhs_p (lhs)))
5331 if (TREE_CODE (lhs) == SSA_NAME)
5333 tree var = create_tmp_var (TREE_TYPE (lhs));
5334 tree def = get_or_create_ssa_default_def (cfun, var);
5335 gimple *new_stmt = gimple_build_assign (lhs, def);
5336 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5338 gimple_call_set_lhs (stmt, NULL_TREE);
5340 maybe_remove_unused_call_args (cfun, stmt);
5342 else
5344 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5345 gimple *new_stmt = gimple_build_call (fndecl, 0);
5346 gimple_set_location (new_stmt, gimple_location (stmt));
5347 /* If the call had an SSA name as lhs, morph that into
5348 an uninitialized value. */
5349 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5351 tree var = create_tmp_var (TREE_TYPE (lhs));
5352 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5353 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5354 set_ssa_default_def (cfun, var, lhs);
5356 gimple_move_vops (new_stmt, stmt);
5357 gsi_replace (gsi, new_stmt, false);
5358 return true;
5364 /* Check for indirect calls that became direct calls, and then
5365 no longer require a static chain. */
5366 if (gimple_call_chain (stmt))
5368 tree fn = gimple_call_fndecl (stmt);
5369 if (fn && !DECL_STATIC_CHAIN (fn))
5371 gimple_call_set_chain (stmt, NULL);
5372 changed = true;
5374 else
5376 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
5377 if (tmp)
5379 gimple_call_set_chain (stmt, tmp);
5380 changed = true;
5385 if (inplace)
5386 return changed;
5388 /* Check for builtins that CCP can handle using information not
5389 available in the generic fold routines. */
5390 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5392 if (gimple_fold_builtin (gsi))
5393 changed = true;
5395 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5397 changed |= targetm.gimple_fold_builtin (gsi);
5399 else if (gimple_call_internal_p (stmt))
5401 enum tree_code subcode = ERROR_MARK;
5402 tree result = NULL_TREE;
5403 bool cplx_result = false;
5404 tree overflow = NULL_TREE;
5405 switch (gimple_call_internal_fn (stmt))
5407 case IFN_BUILTIN_EXPECT:
5408 result = fold_builtin_expect (gimple_location (stmt),
5409 gimple_call_arg (stmt, 0),
5410 gimple_call_arg (stmt, 1),
5411 gimple_call_arg (stmt, 2),
5412 NULL_TREE);
5413 break;
5414 case IFN_UBSAN_OBJECT_SIZE:
5416 tree offset = gimple_call_arg (stmt, 1);
5417 tree objsize = gimple_call_arg (stmt, 2);
5418 if (integer_all_onesp (objsize)
5419 || (TREE_CODE (offset) == INTEGER_CST
5420 && TREE_CODE (objsize) == INTEGER_CST
5421 && tree_int_cst_le (offset, objsize)))
5423 replace_call_with_value (gsi, NULL_TREE);
5424 return true;
5427 break;
5428 case IFN_UBSAN_PTR:
5429 if (integer_zerop (gimple_call_arg (stmt, 1)))
5431 replace_call_with_value (gsi, NULL_TREE);
5432 return true;
5434 break;
5435 case IFN_UBSAN_BOUNDS:
5437 tree index = gimple_call_arg (stmt, 1);
5438 tree bound = gimple_call_arg (stmt, 2);
5439 if (TREE_CODE (index) == INTEGER_CST
5440 && TREE_CODE (bound) == INTEGER_CST)
5442 index = fold_convert (TREE_TYPE (bound), index);
5443 if (TREE_CODE (index) == INTEGER_CST
5444 && tree_int_cst_le (index, bound))
5446 replace_call_with_value (gsi, NULL_TREE);
5447 return true;
5451 break;
5452 case IFN_GOACC_DIM_SIZE:
5453 case IFN_GOACC_DIM_POS:
5454 result = fold_internal_goacc_dim (stmt);
5455 break;
5456 case IFN_UBSAN_CHECK_ADD:
5457 subcode = PLUS_EXPR;
5458 break;
5459 case IFN_UBSAN_CHECK_SUB:
5460 subcode = MINUS_EXPR;
5461 break;
5462 case IFN_UBSAN_CHECK_MUL:
5463 subcode = MULT_EXPR;
5464 break;
5465 case IFN_ADD_OVERFLOW:
5466 subcode = PLUS_EXPR;
5467 cplx_result = true;
5468 break;
5469 case IFN_SUB_OVERFLOW:
5470 subcode = MINUS_EXPR;
5471 cplx_result = true;
5472 break;
5473 case IFN_MUL_OVERFLOW:
5474 subcode = MULT_EXPR;
5475 cplx_result = true;
5476 break;
5477 case IFN_MASK_LOAD:
5478 changed |= gimple_fold_mask_load (gsi, stmt);
5479 break;
5480 case IFN_MASK_STORE:
5481 changed |= gimple_fold_mask_store (gsi, stmt);
5482 break;
5483 default:
5484 break;
5486 if (subcode != ERROR_MARK)
5488 tree arg0 = gimple_call_arg (stmt, 0);
5489 tree arg1 = gimple_call_arg (stmt, 1);
5490 tree type = TREE_TYPE (arg0);
5491 if (cplx_result)
5493 tree lhs = gimple_call_lhs (stmt);
5494 if (lhs == NULL_TREE)
5495 type = NULL_TREE;
5496 else
5497 type = TREE_TYPE (TREE_TYPE (lhs));
5499 if (type == NULL_TREE)
5501 /* x = y + 0; x = y - 0; x = y * 0; */
5502 else if (integer_zerop (arg1))
5503 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5504 /* x = 0 + y; x = 0 * y; */
5505 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5506 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5507 /* x = y - y; */
5508 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5509 result = integer_zero_node;
5510 /* x = y * 1; x = 1 * y; */
5511 else if (subcode == MULT_EXPR && integer_onep (arg1))
5512 result = arg0;
5513 else if (subcode == MULT_EXPR && integer_onep (arg0))
5514 result = arg1;
5515 else if (TREE_CODE (arg0) == INTEGER_CST
5516 && TREE_CODE (arg1) == INTEGER_CST)
5518 if (cplx_result)
5519 result = int_const_binop (subcode, fold_convert (type, arg0),
5520 fold_convert (type, arg1));
5521 else
5522 result = int_const_binop (subcode, arg0, arg1);
5523 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5525 if (cplx_result)
5526 overflow = build_one_cst (type);
5527 else
5528 result = NULL_TREE;
5531 if (result)
5533 if (result == integer_zero_node)
5534 result = build_zero_cst (type);
5535 else if (cplx_result && TREE_TYPE (result) != type)
5537 if (TREE_CODE (result) == INTEGER_CST)
5539 if (arith_overflowed_p (PLUS_EXPR, type, result,
5540 integer_zero_node))
5541 overflow = build_one_cst (type);
5543 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5544 && TYPE_UNSIGNED (type))
5545 || (TYPE_PRECISION (type)
5546 < (TYPE_PRECISION (TREE_TYPE (result))
5547 + (TYPE_UNSIGNED (TREE_TYPE (result))
5548 && !TYPE_UNSIGNED (type)))))
5549 result = NULL_TREE;
5550 if (result)
5551 result = fold_convert (type, result);
5556 if (result)
5558 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5559 result = drop_tree_overflow (result);
5560 if (cplx_result)
5562 if (overflow == NULL_TREE)
5563 overflow = build_zero_cst (TREE_TYPE (result));
5564 tree ctype = build_complex_type (TREE_TYPE (result));
5565 if (TREE_CODE (result) == INTEGER_CST
5566 && TREE_CODE (overflow) == INTEGER_CST)
5567 result = build_complex (ctype, result, overflow);
5568 else
5569 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5570 ctype, result, overflow);
5572 if (!update_call_from_tree (gsi, result))
5573 gimplify_and_update_call_from_tree (gsi, result);
5574 changed = true;
5578 return changed;
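/* Editor's sketch (not part of the original source): the arithmetic
   identities above mean that a source-level call such as

     int r;
     bool ovf = __builtin_add_overflow (x, 0, &r);

   which GIMPLE represents as _1 = .ADD_OVERFLOW (x_2, 0), folds to the
   complex pair COMPLEX_EXPR <x_2, 0>: REALPART_EXPR of it yields the
   sum and IMAGPART_EXPR yields the (here statically zero) overflow
   flag.  */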
5582 /* Return true if NAME has a use on STMT. */
5584 static bool
5585 has_use_on_stmt (tree name, gimple *stmt)
5587 imm_use_iterator iter;
5588 use_operand_p use_p;
5589 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5590 if (USE_STMT (use_p) == stmt)
5591 return true;
5592 return false;
5595 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5596 gimple_simplify.
5598 Replaces *GSI with the simplification result in RES_OP
5599 and the associated statements in *SEQ. Does the replacement
5600 according to INPLACE and returns true if the operation succeeded. */
5602 static bool
5603 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5604 gimple_match_op *res_op,
5605 gimple_seq *seq, bool inplace)
5607 gimple *stmt = gsi_stmt (*gsi);
5608 tree *ops = res_op->ops;
5609 unsigned int num_ops = res_op->num_ops;
5611 /* Play safe and do not allow abnormals to be mentioned in
5612 newly created statements. See also maybe_push_res_to_seq.
5613 As an exception allow such uses if there was a use of the
5614 same SSA name on the old stmt. */
5615 for (unsigned int i = 0; i < num_ops; ++i)
5616 if (TREE_CODE (ops[i]) == SSA_NAME
5617 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5618 && !has_use_on_stmt (ops[i], stmt))
5619 return false;
5621 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5622 for (unsigned int i = 0; i < 2; ++i)
5623 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5624 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5625 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5626 return false;
5628 /* Don't insert new statements when INPLACE is true, even if we could
5629 reuse STMT for the final statement. */
5630 if (inplace && !gimple_seq_empty_p (*seq))
5631 return false;
5633 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5635 gcc_assert (res_op->code.is_tree_code ());
5636 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
5637 /* GIMPLE_CONDs condition may not throw. */
5638 && (!flag_exceptions
5639 || !cfun->can_throw_non_call_exceptions
5640 || !operation_could_trap_p (res_op->code,
5641 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5642 false, NULL_TREE)))
5643 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
5644 else if (res_op->code == SSA_NAME)
5645 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5646 build_zero_cst (TREE_TYPE (ops[0])));
5647 else if (res_op->code == INTEGER_CST)
5649 if (integer_zerop (ops[0]))
5650 gimple_cond_make_false (cond_stmt);
5651 else
5652 gimple_cond_make_true (cond_stmt);
5654 else if (!inplace)
5656 tree res = maybe_push_res_to_seq (res_op, seq);
5657 if (!res)
5658 return false;
5659 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5660 build_zero_cst (TREE_TYPE (res)));
5662 else
5663 return false;
5664 if (dump_file && (dump_flags & TDF_DETAILS))
5666 fprintf (dump_file, "gimple_simplified to ");
5667 if (!gimple_seq_empty_p (*seq))
5668 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5669 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5670 0, TDF_SLIM);
5672 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5673 return true;
5675 else if (is_gimple_assign (stmt)
5676 && res_op->code.is_tree_code ())
5678 if (!inplace
5679 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
5681 maybe_build_generic_op (res_op);
5682 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
5683 res_op->op_or_null (0),
5684 res_op->op_or_null (1),
5685 res_op->op_or_null (2));
5686 if (dump_file && (dump_flags & TDF_DETAILS))
5688 fprintf (dump_file, "gimple_simplified to ");
5689 if (!gimple_seq_empty_p (*seq))
5690 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5691 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5692 0, TDF_SLIM);
5694 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5695 return true;
5698 else if (res_op->code.is_fn_code ()
5699 && gimple_call_combined_fn (stmt) == res_op->code)
5701 gcc_assert (num_ops == gimple_call_num_args (stmt));
5702 for (unsigned int i = 0; i < num_ops; ++i)
5703 gimple_call_set_arg (stmt, i, ops[i]);
5704 if (dump_file && (dump_flags & TDF_DETAILS))
5706 fprintf (dump_file, "gimple_simplified to ");
5707 if (!gimple_seq_empty_p (*seq))
5708 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5709 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5711 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5712 return true;
5714 else if (!inplace)
5716 if (gimple_has_lhs (stmt))
5718 tree lhs = gimple_get_lhs (stmt);
5719 if (!maybe_push_res_to_seq (res_op, seq, lhs))
5720 return false;
5721 if (dump_file && (dump_flags & TDF_DETAILS))
5723 fprintf (dump_file, "gimple_simplified to ");
5724 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5726 gsi_replace_with_seq_vops (gsi, *seq);
5727 return true;
5729 else
5730 gcc_unreachable ();
5733 return false;
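/* Editor's sketch of the GIMPLE_COND dispatch above (assumed example,
   not in the original source):

     if (x_1 > y_2)   res_op = {LT_EXPR, y_2, x_1}  -> if (y_2 < x_1)
     if (b_3 != 0)    res_op = {SSA_NAME, b_3}      -> if (b_3 != 0)
     if (1 != 0)      res_op = {INTEGER_CST, 1}     -> gimple_cond_make_true

   Any other result is materialized into *SEQ (only when !INPLACE) and
   compared against zero.  */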
5736 /* Canonicalize MEM_REFs invariant address operand after propagation. */
5738 static bool
5739 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5741 bool res = false;
5742 tree *orig_t = t;
5744 if (TREE_CODE (*t) == ADDR_EXPR)
5745 t = &TREE_OPERAND (*t, 0);
5747 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5748 generic vector extension. The actual vector referenced is
5749 view-converted to an array type for this purpose. If the index
5750 is constant the canonical representation in the middle-end is a
5751 BIT_FIELD_REF so re-write the former to the latter here. */
5752 if (TREE_CODE (*t) == ARRAY_REF
5753 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5754 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5755 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5757 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5758 if (VECTOR_TYPE_P (vtype))
5760 tree low = array_ref_low_bound (*t);
5761 if (TREE_CODE (low) == INTEGER_CST)
5763 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5765 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5766 wi::to_widest (low));
5767 idx = wi::mul (idx, wi::to_widest
5768 (TYPE_SIZE (TREE_TYPE (*t))));
5769 widest_int ext
5770 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5771 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5773 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5774 TREE_TYPE (*t),
5775 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5776 TYPE_SIZE (TREE_TYPE (*t)),
5777 wide_int_to_tree (bitsizetype, idx));
5778 res = true;
5785 while (handled_component_p (*t))
5786 t = &TREE_OPERAND (*t, 0);
5788 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5789 of invariant addresses into a SSA name MEM_REF address. */
5790 if (TREE_CODE (*t) == MEM_REF
5791 || TREE_CODE (*t) == TARGET_MEM_REF)
5793 tree addr = TREE_OPERAND (*t, 0);
5794 if (TREE_CODE (addr) == ADDR_EXPR
5795 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5796 || handled_component_p (TREE_OPERAND (addr, 0))))
5798 tree base;
5799 poly_int64 coffset;
5800 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5801 &coffset);
5802 if (!base)
5804 if (is_debug)
5805 return false;
5806 gcc_unreachable ();
5809 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5810 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5811 TREE_OPERAND (*t, 1),
5812 size_int (coffset));
5813 res = true;
5815 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5816 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5819 /* Canonicalize back MEM_REFs to plain reference trees if the object
5820 accessed is a decl that has the same access semantics as the MEM_REF. */
5821 if (TREE_CODE (*t) == MEM_REF
5822 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5823 && integer_zerop (TREE_OPERAND (*t, 1))
5824 && MR_DEPENDENCE_CLIQUE (*t) == 0)
5826 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5827 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5828 if (/* Same volatile qualification. */
5829 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
5830 /* Same TBAA behavior with -fstrict-aliasing. */
5831 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
5832 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
5833 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
5834 /* Same alignment. */
5835 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
5836 /* We have to look out here to not drop a required conversion
5837 from the rhs to the lhs if *t appears on the lhs or vice-versa
5838 if it appears on the rhs. Thus require strict type
5839 compatibility. */
5840 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
5842 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5843 res = true;
5847 else if (TREE_CODE (*orig_t) == ADDR_EXPR
5848 && TREE_CODE (*t) == MEM_REF
5849 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
5851 tree base;
5852 poly_int64 coffset;
5853 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
5854 &coffset);
5855 if (base)
5857 gcc_assert (TREE_CODE (base) == MEM_REF);
5858 poly_int64 moffset;
5859 if (mem_ref_offset (base).to_shwi (&moffset))
5861 coffset += moffset;
5862 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
5864 coffset += moffset;
5865 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
5866 return true;
5872 /* Canonicalize TARGET_MEM_REF in particular with respect to
5873 the indexes becoming constant. */
5874 else if (TREE_CODE (*t) == TARGET_MEM_REF)
5876 tree tem = maybe_fold_tmr (*t);
5877 if (tem)
5879 *t = tem;
5880 res = true;
5884 return res;
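/* Editor's sketch of the canonicalizations above (hypothetical names,
   not from the original source); given decls 'a' of type int and a
   struct 's':

     MEM[(int *)&a, 0]    ->  a                  (same access semantics)
     MEM[&s.f.g, 4]       ->  MEM[&s, off + 4]   (off = constant byte
                                                  offset of f.g)
     &MEM[(void *)0, 16]  ->  (void *)16         (constant address)

   and a TARGET_MEM_REF whose indexes became constant is folded via
   maybe_fold_tmr.  */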
5887 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
5888 distinguishes both cases. */
5890 static bool
5891 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
5893 bool changed = false;
5894 gimple *stmt = gsi_stmt (*gsi);
5895 bool nowarning = gimple_no_warning_p (stmt);
5896 unsigned i;
5897 fold_defer_overflow_warnings ();
5899 /* First do required canonicalization of [TARGET_]MEM_REF addresses
5900 after propagation.
5901 ??? This shouldn't be done in generic folding but in the
5902 propagation helpers which also know whether an address was
5903 propagated.
5904 Also canonicalize operand order. */
5905 switch (gimple_code (stmt))
5907 case GIMPLE_ASSIGN:
5908 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
5910 tree *rhs = gimple_assign_rhs1_ptr (stmt);
5911 if ((REFERENCE_CLASS_P (*rhs)
5912 || TREE_CODE (*rhs) == ADDR_EXPR)
5913 && maybe_canonicalize_mem_ref_addr (rhs))
5914 changed = true;
5915 tree *lhs = gimple_assign_lhs_ptr (stmt);
5916 if (REFERENCE_CLASS_P (*lhs)
5917 && maybe_canonicalize_mem_ref_addr (lhs))
5918 changed = true;
5920 else
5922 /* Canonicalize operand order. */
5923 enum tree_code code = gimple_assign_rhs_code (stmt);
5924 if (TREE_CODE_CLASS (code) == tcc_comparison
5925 || commutative_tree_code (code)
5926 || commutative_ternary_tree_code (code))
5928 tree rhs1 = gimple_assign_rhs1 (stmt);
5929 tree rhs2 = gimple_assign_rhs2 (stmt);
5930 if (tree_swap_operands_p (rhs1, rhs2))
5932 gimple_assign_set_rhs1 (stmt, rhs2);
5933 gimple_assign_set_rhs2 (stmt, rhs1);
5934 if (TREE_CODE_CLASS (code) == tcc_comparison)
5935 gimple_assign_set_rhs_code (stmt,
5936 swap_tree_comparison (code));
5937 changed = true;
5941 break;
5942 case GIMPLE_CALL:
5944 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5946 tree *arg = gimple_call_arg_ptr (stmt, i);
5947 if (REFERENCE_CLASS_P (*arg)
5948 && maybe_canonicalize_mem_ref_addr (arg))
5949 changed = true;
5951 tree *lhs = gimple_call_lhs_ptr (stmt);
5952 if (*lhs
5953 && REFERENCE_CLASS_P (*lhs)
5954 && maybe_canonicalize_mem_ref_addr (lhs))
5955 changed = true;
5956 break;
5958 case GIMPLE_ASM:
5960 gasm *asm_stmt = as_a <gasm *> (stmt);
5961 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
5963 tree link = gimple_asm_output_op (asm_stmt, i);
5964 tree op = TREE_VALUE (link);
5965 if (REFERENCE_CLASS_P (op)
5966 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5967 changed = true;
5969 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5971 tree link = gimple_asm_input_op (asm_stmt, i);
5972 tree op = TREE_VALUE (link);
5973 if ((REFERENCE_CLASS_P (op)
5974 || TREE_CODE (op) == ADDR_EXPR)
5975 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
5976 changed = true;
5979 break;
5980 case GIMPLE_DEBUG:
5981 if (gimple_debug_bind_p (stmt))
5983 tree *val = gimple_debug_bind_get_value_ptr (stmt);
5984 if (*val
5985 && (REFERENCE_CLASS_P (*val)
5986 || TREE_CODE (*val) == ADDR_EXPR)
5987 && maybe_canonicalize_mem_ref_addr (val, true))
5988 changed = true;
5990 break;
5991 case GIMPLE_COND:
5993 /* Canonicalize operand order. */
5994 tree lhs = gimple_cond_lhs (stmt);
5995 tree rhs = gimple_cond_rhs (stmt);
5996 if (tree_swap_operands_p (lhs, rhs))
5998 gcond *gc = as_a <gcond *> (stmt);
5999 gimple_cond_set_lhs (gc, rhs);
6000 gimple_cond_set_rhs (gc, lhs);
6001 gimple_cond_set_code (gc,
6002 swap_tree_comparison (gimple_cond_code (gc)));
6003 changed = true;
6006 default:;
6009 /* Dispatch to pattern-based folding. */
6010 if (!inplace
6011 || is_gimple_assign (stmt)
6012 || gimple_code (stmt) == GIMPLE_COND)
6014 gimple_seq seq = NULL;
6015 gimple_match_op res_op;
6016 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6017 valueize, valueize))
6019 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6020 changed = true;
6021 else
6022 gimple_seq_discard (seq);
6026 stmt = gsi_stmt (*gsi);
6028 /* Fold the main computation performed by the statement. */
6029 switch (gimple_code (stmt))
6031 case GIMPLE_ASSIGN:
6033 /* Try to canonicalize for boolean-typed X the comparisons
6034 X == 0, X == 1, X != 0, and X != 1. */
6035 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6036 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6038 tree lhs = gimple_assign_lhs (stmt);
6039 tree op1 = gimple_assign_rhs1 (stmt);
6040 tree op2 = gimple_assign_rhs2 (stmt);
6041 tree type = TREE_TYPE (op1);
6043 /* Check whether the comparison operands are of the same boolean
6044 type as the result type is.
6045 Check that second operand is an integer-constant with value
6046 one or zero. */
6047 if (TREE_CODE (op2) == INTEGER_CST
6048 && (integer_zerop (op2) || integer_onep (op2))
6049 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6051 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6052 bool is_logical_not = false;
6054 /* X == 0 and X != 1 is a logical-not of X;
6055 X == 1 and X != 0 is X itself. */
6056 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6057 || (cmp_code == NE_EXPR && integer_onep (op2)))
6058 is_logical_not = true;
6060 if (is_logical_not == false)
6061 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6062 /* Only for X of one-bit precision is the transformation
6063 !X -> ~X valid. */
6064 else if (TYPE_PRECISION (type) == 1)
6065 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6066 /* Otherwise we use !X -> X ^ 1. */
6067 else
6068 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6069 build_int_cst (type, 1));
6070 changed = true;
6071 break;
6075 unsigned old_num_ops = gimple_num_ops (stmt);
6076 tree lhs = gimple_assign_lhs (stmt);
6077 tree new_rhs = fold_gimple_assign (gsi);
6078 if (new_rhs
6079 && !useless_type_conversion_p (TREE_TYPE (lhs),
6080 TREE_TYPE (new_rhs)))
6081 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6082 if (new_rhs
6083 && (!inplace
6084 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6086 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6087 changed = true;
6089 break;
6092 case GIMPLE_CALL:
6093 changed |= gimple_fold_call (gsi, inplace);
6094 break;
6096 case GIMPLE_ASM:
6097 /* Fold *& in asm operands. */
6099 gasm *asm_stmt = as_a <gasm *> (stmt);
6100 size_t noutputs;
6101 const char **oconstraints;
6102 const char *constraint;
6103 bool allows_mem, allows_reg;
6105 noutputs = gimple_asm_noutputs (asm_stmt);
6106 oconstraints = XALLOCAVEC (const char *, noutputs);
6108 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6110 tree link = gimple_asm_output_op (asm_stmt, i);
6111 tree op = TREE_VALUE (link);
6112 oconstraints[i]
6113 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6114 if (REFERENCE_CLASS_P (op)
6115 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
6117 TREE_VALUE (link) = op;
6118 changed = true;
6121 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6123 tree link = gimple_asm_input_op (asm_stmt, i);
6124 tree op = TREE_VALUE (link);
6125 constraint
6126 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6127 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6128 oconstraints, &allows_mem, &allows_reg);
6129 if (REFERENCE_CLASS_P (op)
6130 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
6131 != NULL_TREE)
6133 TREE_VALUE (link) = op;
6134 changed = true;
6138 break;
6140 case GIMPLE_DEBUG:
6141 if (gimple_debug_bind_p (stmt))
6143 tree val = gimple_debug_bind_get_value (stmt);
6144 if (val
6145 && REFERENCE_CLASS_P (val))
6147 tree tem = maybe_fold_reference (val, false);
6148 if (tem)
6150 gimple_debug_bind_set_value (stmt, tem);
6151 changed = true;
6154 else if (val
6155 && TREE_CODE (val) == ADDR_EXPR)
6157 tree ref = TREE_OPERAND (val, 0);
6158 tree tem = maybe_fold_reference (ref, false);
6159 if (tem)
6161 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
6162 gimple_debug_bind_set_value (stmt, tem);
6163 changed = true;
6167 break;
6169 case GIMPLE_RETURN:
6171 greturn *ret_stmt = as_a<greturn *> (stmt);
6172 tree ret = gimple_return_retval(ret_stmt);
6174 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6176 tree val = valueize (ret);
6177 if (val && val != ret
6178 && may_propagate_copy (ret, val))
6180 gimple_return_set_retval (ret_stmt, val);
6181 changed = true;
6185 break;
6187 default:;
6190 stmt = gsi_stmt (*gsi);
6192 /* Fold *& on the lhs. */
6193 if (gimple_has_lhs (stmt))
6195 tree lhs = gimple_get_lhs (stmt);
6196 if (lhs && REFERENCE_CLASS_P (lhs))
6198 tree new_lhs = maybe_fold_reference (lhs, true);
6199 if (new_lhs)
6201 gimple_set_lhs (stmt, new_lhs);
6202 changed = true;
6207 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6208 return changed;
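/* Editor's example of the boolean canonicalization above (a sketch):
   for a boolean-typed x,

     y = (x == 1);  ->  y = x;
     y = (x != 0);  ->  y = x;
     y = (x == 0);  ->  y = ~x;      (one-bit precision type)
     y = (x != 1);  ->  y = x ^ 1;   (wider boolean type)

   so equality tests of a boolean against 0/1 do not survive folding.  */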
6211 /* Valueization callback that ends up not following SSA edges. */
6213 tree
6214 no_follow_ssa_edges (tree)
6216 return NULL_TREE;
6219 /* Valueization callback that ends up following single-use SSA edges only. */
6221 tree
6222 follow_single_use_edges (tree val)
6224 if (TREE_CODE (val) == SSA_NAME
6225 && !has_single_use (val))
6226 return NULL_TREE;
6227 return val;
6230 /* Valueization callback that follows all SSA edges. */
6232 tree
6233 follow_all_ssa_edges (tree val)
6235 return val;
6238 /* Fold the statement pointed to by GSI. In some cases, this function may
6239 replace the whole statement with a new one. Returns true iff folding
6240 makes any changes.
6241 The statement pointed to by GSI should be in valid gimple form but may
6242 be in an unfolded state resulting from, for example, constant propagation,
6243 which can produce *&x = 0. */
6245 bool
6246 fold_stmt (gimple_stmt_iterator *gsi)
6248 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6251 bool
6252 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6254 return fold_stmt_1 (gsi, false, valueize);
6257 /* Perform the minimal folding on statement *GSI. Only operations like
6258 *&x created by constant propagation are handled. The statement cannot
6259 be replaced with a new one. Return true if the statement was
6260 changed, false otherwise.
6261 The statement *GSI should be in valid gimple form but may
6262 be in an unfolded state resulting from, for example, constant propagation,
6263 which can produce *&x = 0. */
6265 bool
6266 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6268 gimple *stmt = gsi_stmt (*gsi);
6269 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6270 gcc_assert (gsi_stmt (*gsi) == stmt);
6271 return changed;
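/* Usage sketch (editor's addition): a typical caller walks a basic
   block and folds each statement, optionally following single-use SSA
   edges:

     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
          !gsi_end_p (gsi); gsi_next (&gsi))
       changed |= fold_stmt (&gsi, follow_single_use_edges);

   fold_stmt may replace the statement entirely, while
   fold_stmt_inplace guarantees (and asserts) that gsi_stmt (*gsi)
   stays the same statement.  */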
6274 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6275 if EXPR is null or we don't know how.
6276 If non-null, the result always has boolean type. */
6278 static tree
6279 canonicalize_bool (tree expr, bool invert)
6281 if (!expr)
6282 return NULL_TREE;
6283 else if (invert)
6285 if (integer_nonzerop (expr))
6286 return boolean_false_node;
6287 else if (integer_zerop (expr))
6288 return boolean_true_node;
6289 else if (TREE_CODE (expr) == SSA_NAME)
6290 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6291 build_int_cst (TREE_TYPE (expr), 0));
6292 else if (COMPARISON_CLASS_P (expr))
6293 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6294 boolean_type_node,
6295 TREE_OPERAND (expr, 0),
6296 TREE_OPERAND (expr, 1));
6297 else
6298 return NULL_TREE;
6300 else
6302 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6303 return expr;
6304 if (integer_nonzerop (expr))
6305 return boolean_true_node;
6306 else if (integer_zerop (expr))
6307 return boolean_false_node;
6308 else if (TREE_CODE (expr) == SSA_NAME)
6309 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6310 build_int_cst (TREE_TYPE (expr), 0));
6311 else if (COMPARISON_CLASS_P (expr))
6312 return fold_build2 (TREE_CODE (expr),
6313 boolean_type_node,
6314 TREE_OPERAND (expr, 0),
6315 TREE_OPERAND (expr, 1));
6316 else
6317 return NULL_TREE;
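/* Editor's sketch of canonicalize_bool (not part of the original
   source): with INVERT true,

     1          ->  boolean_false_node
     0          ->  boolean_true_node
     name_1     ->  name_1 == 0
     a_2 < b_3  ->  a_2 >= b_3   (via invert_tree_comparison)

   and with INVERT false the input is normalized to boolean type, e.g.
   a non-boolean name_1 becomes name_1 != 0.  */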
6321 /* Check to see if a boolean expression EXPR is logically equivalent to the
6322 comparison (OP1 CODE OP2). Check for various identities involving
6323 SSA_NAMEs. */
6325 static bool
6326 same_bool_comparison_p (const_tree expr, enum tree_code code,
6327 const_tree op1, const_tree op2)
6329 gimple *s;
6331 /* The obvious case. */
6332 if (TREE_CODE (expr) == code
6333 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6334 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6335 return true;
6337 /* Check for EXPR being a boolean SSA_NAME equivalent to (OP1 != 0), and
6338 for the case where EXPR's defining statement matches the comparison. */
6339 if (TREE_CODE (expr) == SSA_NAME
6340 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6342 if (operand_equal_p (expr, op1, 0))
6343 return ((code == NE_EXPR && integer_zerop (op2))
6344 || (code == EQ_EXPR && integer_nonzerop (op2)));
6345 s = SSA_NAME_DEF_STMT (expr);
6346 if (is_gimple_assign (s)
6347 && gimple_assign_rhs_code (s) == code
6348 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6349 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6350 return true;
6353 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6354 of name is a comparison, recurse. */
6355 if (TREE_CODE (op1) == SSA_NAME
6356 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6358 s = SSA_NAME_DEF_STMT (op1);
6359 if (is_gimple_assign (s)
6360 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6362 enum tree_code c = gimple_assign_rhs_code (s);
6363 if ((c == NE_EXPR && integer_zerop (op2))
6364 || (c == EQ_EXPR && integer_nonzerop (op2)))
6365 return same_bool_comparison_p (expr, c,
6366 gimple_assign_rhs1 (s),
6367 gimple_assign_rhs2 (s));
6368 if ((c == EQ_EXPR && integer_zerop (op2))
6369 || (c == NE_EXPR && integer_nonzerop (op2)))
6370 return same_bool_comparison_p (expr,
6371 invert_tree_comparison (c, false),
6372 gimple_assign_rhs1 (s),
6373 gimple_assign_rhs2 (s));
6376 return false;
6379 /* Check to see if two boolean expressions OP1 and OP2 are logically
6380 equivalent. */
6382 static bool
6383 same_bool_result_p (const_tree op1, const_tree op2)
6385 /* Simple cases first. */
6386 if (operand_equal_p (op1, op2, 0))
6387 return true;
6389 /* Check the cases where at least one of the operands is a comparison.
6390 These are a bit smarter than operand_equal_p in that they apply some
6391 identities on SSA_NAMEs. */
6392 if (COMPARISON_CLASS_P (op2)
6393 && same_bool_comparison_p (op1, TREE_CODE (op2),
6394 TREE_OPERAND (op2, 0),
6395 TREE_OPERAND (op2, 1)))
6396 return true;
6397 if (COMPARISON_CLASS_P (op1)
6398 && same_bool_comparison_p (op2, TREE_CODE (op1),
6399 TREE_OPERAND (op1, 0),
6400 TREE_OPERAND (op1, 1)))
6401 return true;
6403 /* Default case. */
6404 return false;
6407 /* Forward declarations for some mutually recursive functions. */
6409 static tree
6410 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6411 enum tree_code code2, tree op2a, tree op2b);
6412 static tree
6413 and_var_with_comparison (tree type, tree var, bool invert,
6414 enum tree_code code2, tree op2a, tree op2b);
6415 static tree
6416 and_var_with_comparison_1 (tree type, gimple *stmt,
6417 enum tree_code code2, tree op2a, tree op2b);
6418 static tree
6419 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6420 enum tree_code code2, tree op2a, tree op2b);
6421 static tree
6422 or_var_with_comparison (tree, tree var, bool invert,
6423 enum tree_code code2, tree op2a, tree op2b);
6424 static tree
6425 or_var_with_comparison_1 (tree, gimple *stmt,
6426 enum tree_code code2, tree op2a, tree op2b);
6428 /* Helper function for and_comparisons_1: try to simplify the AND of the
6429 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6430 If INVERT is true, invert the value of VAR before doing the AND.
6431 Return NULL_TREE if we can't simplify this to a single expression. */
6433 static tree
6434 and_var_with_comparison (tree type, tree var, bool invert,
6435 enum tree_code code2, tree op2a, tree op2b)
6437 tree t;
6438 gimple *stmt = SSA_NAME_DEF_STMT (var);
6440 /* We can only deal with variables whose definitions are assignments. */
6441 if (!is_gimple_assign (stmt))
6442 return NULL_TREE;
6444 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6445 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6446 Then we only have to consider the simpler non-inverted cases. */
6447 if (invert)
6448 t = or_var_with_comparison_1 (type, stmt,
6449 invert_tree_comparison (code2, false),
6450 op2a, op2b);
6451 else
6452 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6453 return canonicalize_bool (t, invert);
6456 /* Try to simplify the AND of the ssa variable defined by the assignment
6457 STMT with the comparison specified by (OP2A CODE2 OP2B).
6458 Return NULL_TREE if we can't simplify this to a single expression. */
6460 static tree
6461 and_var_with_comparison_1 (tree type, gimple *stmt,
6462 enum tree_code code2, tree op2a, tree op2b)
6464 tree var = gimple_assign_lhs (stmt);
6465 tree true_test_var = NULL_TREE;
6466 tree false_test_var = NULL_TREE;
6467 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6469 /* Check for identities like (var AND (var == 0)) => false. */
6470 if (TREE_CODE (op2a) == SSA_NAME
6471 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6473 if ((code2 == NE_EXPR && integer_zerop (op2b))
6474 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6476 true_test_var = op2a;
6477 if (var == true_test_var)
6478 return var;
6480 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6481 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6483 false_test_var = op2a;
6484 if (var == false_test_var)
6485 return boolean_false_node;
6489 /* If the definition is a comparison, recurse on it. */
6490 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6492 tree t = and_comparisons_1 (type, innercode,
6493 gimple_assign_rhs1 (stmt),
6494 gimple_assign_rhs2 (stmt),
6495 code2,
6496 op2a,
6497 op2b);
6498 if (t)
6499 return t;
6502 /* If the definition is an AND or OR expression, we may be able to
6503 simplify by reassociating. */
6504 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6505 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6507 tree inner1 = gimple_assign_rhs1 (stmt);
6508 tree inner2 = gimple_assign_rhs2 (stmt);
6509 gimple *s;
6510 tree t;
6511 tree partial = NULL_TREE;
6512 bool is_and = (innercode == BIT_AND_EXPR);
6514 /* Check for boolean identities that don't require recursive examination
6515 of inner1/inner2:
6516 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6517 inner1 AND (inner1 OR inner2) => inner1
6518 !inner1 AND (inner1 AND inner2) => false
6519 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6520 Likewise for similar cases involving inner2. */
6521 if (inner1 == true_test_var)
6522 return (is_and ? var : inner1);
6523 else if (inner2 == true_test_var)
6524 return (is_and ? var : inner2);
6525 else if (inner1 == false_test_var)
6526 return (is_and
6527 ? boolean_false_node
6528 : and_var_with_comparison (type, inner2, false, code2, op2a,
6529 op2b));
6530 else if (inner2 == false_test_var)
6531 return (is_and
6532 ? boolean_false_node
6533 : and_var_with_comparison (type, inner1, false, code2, op2a,
6534 op2b));
6536 /* Next, redistribute/reassociate the AND across the inner tests.
6537 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6538 if (TREE_CODE (inner1) == SSA_NAME
6539 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6540 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6541 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6542 gimple_assign_rhs1 (s),
6543 gimple_assign_rhs2 (s),
6544 code2, op2a, op2b)))
6546 /* Handle the AND case, where we are reassociating:
6547 (inner1 AND inner2) AND (op2a code2 op2b)
6548 => (t AND inner2)
6549 If the partial result t is a constant, we win. Otherwise
6550 continue on to try reassociating with the other inner test. */
6551 if (is_and)
6553 if (integer_onep (t))
6554 return inner2;
6555 else if (integer_zerop (t))
6556 return boolean_false_node;
6559 /* Handle the OR case, where we are redistributing:
6560 (inner1 OR inner2) AND (op2a code2 op2b)
6561 => (t OR (inner2 AND (op2a code2 op2b))) */
6562 else if (integer_onep (t))
6563 return boolean_true_node;
6565 /* Save partial result for later. */
6566 partial = t;
6569 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6570 if (TREE_CODE (inner2) == SSA_NAME
6571 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6572 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6573 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6574 gimple_assign_rhs1 (s),
6575 gimple_assign_rhs2 (s),
6576 code2, op2a, op2b)))
6578 /* Handle the AND case, where we are reassociating:
6579 (inner1 AND inner2) AND (op2a code2 op2b)
6580 => (inner1 AND t) */
6581 if (is_and)
6583 if (integer_onep (t))
6584 return inner1;
6585 else if (integer_zerop (t))
6586 return boolean_false_node;
6587 /* If both are the same, we can apply the identity
6588 (x AND x) == x. */
6589 else if (partial && same_bool_result_p (t, partial))
6590 return t;
6593 /* Handle the OR case, where we are redistributing:
6594 (inner1 OR inner2) AND (op2a code2 op2b)
6595 => (t OR (inner1 AND (op2a code2 op2b)))
6596 => (t OR partial) */
6597 else
6599 if (integer_onep (t))
6600 return boolean_true_node;
6601 else if (partial)
6603 /* We already got a simplification for the other
6604 operand to the redistributed OR expression. The
6605 interesting case is when at least one is false.
6606 Or, if both are the same, we can apply the identity
6607 (x OR x) == x. */
6608 if (integer_zerop (partial))
6609 return t;
6610 else if (integer_zerop (t))
6611 return partial;
6612 else if (same_bool_result_p (t, partial))
6613 return t;
6618 return NULL_TREE;
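/* Editor's worked example of the redistribution above (a sketch,
   hypothetical SSA names):

     inner1_1 = a_2 > 5;
     inner2_3 = a_2 < 3;
     var_4 = inner1_1 | inner2_3;

   ANDing var_4 with a_2 > 7 distributes into
   (a_2 > 5 AND a_2 > 7) OR (a_2 < 3 AND a_2 > 7).  The second operand
   folds to false, so the whole expression simplifies to the first
   partial result, a_2 > 7, assuming maybe_fold_and_comparisons can
   combine each pair.  */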
6621 /* Try to simplify the AND of two comparisons defined by
6622 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6623 If this can be done without constructing an intermediate value,
6624 return the resulting tree; otherwise NULL_TREE is returned.
6625 This function is deliberately asymmetric as it recurses on SSA_DEFs
6626 in the first comparison but not the second. */
6628 static tree
6629 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6630 enum tree_code code2, tree op2a, tree op2b)
6632 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6634 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6635 if (operand_equal_p (op1a, op2a, 0)
6636 && operand_equal_p (op1b, op2b, 0))
6638 /* Result will be either NULL_TREE, or a combined comparison. */
6639 tree t = combine_comparisons (UNKNOWN_LOCATION,
6640 TRUTH_ANDIF_EXPR, code1, code2,
6641 truth_type, op1a, op1b);
6642 if (t)
6643 return t;
6646 /* Likewise the swapped case of the above. */
6647 if (operand_equal_p (op1a, op2b, 0)
6648 && operand_equal_p (op1b, op2a, 0))
6650 /* Result will be either NULL_TREE, or a combined comparison. */
6651 tree t = combine_comparisons (UNKNOWN_LOCATION,
6652 TRUTH_ANDIF_EXPR, code1,
6653 swap_tree_comparison (code2),
6654 truth_type, op1a, op1b);
6655 if (t)
6656 return t;
6659 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6660 NAME's definition is a truth value. See if there are any simplifications
6661 that can be done against the NAME's definition. */
6662 if (TREE_CODE (op1a) == SSA_NAME
6663 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6664 && (integer_zerop (op1b) || integer_onep (op1b)))
6666 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6667 || (code1 == NE_EXPR && integer_onep (op1b)));
6668 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6669 switch (gimple_code (stmt))
6671 case GIMPLE_ASSIGN:
6672 /* Try to simplify by copy-propagating the definition. */
6673 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6674 op2b);
6676 case GIMPLE_PHI:
6677 /* If every argument to the PHI produces the same result when
6678 ANDed with the second comparison, we win.
6679 Do not do this unless the type is bool since we need a bool
6680 result here anyway. */
6681 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6683 tree result = NULL_TREE;
6684 unsigned i;
6685 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6687 tree arg = gimple_phi_arg_def (stmt, i);
6689 /* If this PHI has itself as an argument, ignore it.
6690 If all the other args produce the same result,
6691 we're still OK. */
6692 if (arg == gimple_phi_result (stmt))
6693 continue;
6694 else if (TREE_CODE (arg) == INTEGER_CST)
6696 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6698 if (!result)
6699 result = boolean_false_node;
6700 else if (!integer_zerop (result))
6701 return NULL_TREE;
6703 else if (!result)
6704 result = fold_build2 (code2, boolean_type_node,
6705 op2a, op2b);
6706 else if (!same_bool_comparison_p (result,
6707 code2, op2a, op2b))
6708 return NULL_TREE;
6710 else if (TREE_CODE (arg) == SSA_NAME
6711 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6713 tree temp;
6714 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6715 /* In simple cases we can look through PHI nodes,
6716 but we have to be careful with loops.
6717 See PR49073. */
6718 if (! dom_info_available_p (CDI_DOMINATORS)
6719 || gimple_bb (def_stmt) == gimple_bb (stmt)
6720 || dominated_by_p (CDI_DOMINATORS,
6721 gimple_bb (def_stmt),
6722 gimple_bb (stmt)))
6723 return NULL_TREE;
6724 temp = and_var_with_comparison (type, arg, invert, code2,
6725 op2a, op2b);
6726 if (!temp)
6727 return NULL_TREE;
6728 else if (!result)
6729 result = temp;
6730 else if (!same_bool_result_p (result, temp))
6731 return NULL_TREE;
6733 else
6734 return NULL_TREE;
6736 return result;
6739 default:
6740 break;
6743 return NULL_TREE;
6746 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6747 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6748 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_TREE if we can't
6749 simplify this to a single expression. As we are going to lower the cost
6750 of building SSA names / gimple stmts significantly, we need to allocate
6751 them on the stack. This makes the code a bit ugly. */
6753 static tree
6754 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6755 enum tree_code code1,
6756 tree op1a, tree op1b,
6757 enum tree_code code2, tree op2a,
6758 tree op2b)
6760 /* Allocate gimple stmt1 on the stack. */
6761 gassign *stmt1
6762 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6763 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6764 gimple_assign_set_rhs_code (stmt1, code1);
6765 gimple_assign_set_rhs1 (stmt1, op1a);
6766 gimple_assign_set_rhs2 (stmt1, op1b);
6768 /* Allocate gimple stmt2 on the stack. */
6769 gassign *stmt2
6770 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6771 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6772 gimple_assign_set_rhs_code (stmt2, code2);
6773 gimple_assign_set_rhs1 (stmt2, op2a);
6774 gimple_assign_set_rhs2 (stmt2, op2b);
6776 /* Allocate SSA names(lhs1) on the stack. */
6777 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6778 memset (lhs1, 0, sizeof (tree_ssa_name));
6779 TREE_SET_CODE (lhs1, SSA_NAME);
6780 TREE_TYPE (lhs1) = type;
6781 init_ssa_name_imm_use (lhs1);
6783 /* Allocate SSA names(lhs2) on the stack. */
6784 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6785 memset (lhs2, 0, sizeof (tree_ssa_name));
6786 TREE_SET_CODE (lhs2, SSA_NAME);
6787 TREE_TYPE (lhs2) = type;
6788 init_ssa_name_imm_use (lhs2);
6790 gimple_assign_set_lhs (stmt1, lhs1);
6791 gimple_assign_set_lhs (stmt2, lhs2);
6793 gimple_match_op op (gimple_match_cond::UNCOND, code,
6794 type, gimple_assign_lhs (stmt1),
6795 gimple_assign_lhs (stmt2));
6796 if (op.resimplify (NULL, follow_all_ssa_edges))
6798 if (gimple_simplified_result_is_gimple_val (&op))
6800 tree res = op.ops[0];
6801 if (res == lhs1)
6802 return build2 (code1, type, op1a, op1b);
6803 else if (res == lhs2)
6804 return build2 (code2, type, op2a, op2b);
6805 else
6806 return res;
6808 else if (op.code.is_tree_code ()
6809 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6811 tree op0 = op.ops[0];
6812 tree op1 = op.ops[1];
6813 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6814 return NULL_TREE; /* not simple */
6816 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6820 return NULL_TREE;
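/* Editor's note (a sketch of the trick above): the alloca'd statements
   and SSA names are throw-away scaffolding so the match.pd machinery
   sees the equivalent of

     lhs1 = op1a code1 op1b;
     lhs2 = op2a code2 op2b;
     res  = lhs1 CODE lhs2;     (CODE is BIT_AND_EXPR or BIT_IOR_EXPR)

   without allocating GC memory.  If the simplified result still refers
   to lhs1 or lhs2, we either map it back to the original comparison
   operands or give up, since those stack names must not escape this
   function.  */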
6823 /* Try to simplify the AND of two comparisons, specified by
6824 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6825 If this can be simplified to a single expression (without requiring
6826 introducing more SSA variables to hold intermediate values),
6827 return the resulting tree. Otherwise return NULL_TREE.
6828 If the result expression is non-null, it has boolean type. */
6830 tree
6831 maybe_fold_and_comparisons (tree type,
6832 enum tree_code code1, tree op1a, tree op1b,
6833 enum tree_code code2, tree op2a, tree op2b)
6835 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
6836 return t;
6838 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
6839 return t;
6841 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
6842 op1a, op1b, code2, op2a,
6843 op2b))
6844 return t;
6846 return NULL_TREE;
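/* Editor's example (a sketch): for identical operand pairs,

     maybe_fold_and_comparisons (boolean_type_node,
                                 LE_EXPR, x, y, GE_EXPR, x, y)

   returns x == y, and (x < y) AND (x > y) folds to boolean_false_node,
   both via combine_comparisons in and_comparisons_1.  */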
6849 /* Helper function for or_comparisons_1: try to simplify the OR of the
6850 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6851 If INVERT is true, invert the value of VAR before doing the OR.
6852 Return NULL_TREE if we can't simplify this to a single expression. */
6854 static tree
6855 or_var_with_comparison (tree type, tree var, bool invert,
6856 enum tree_code code2, tree op2a, tree op2b)
6858 tree t;
6859 gimple *stmt = SSA_NAME_DEF_STMT (var);
6861 /* We can only deal with variables whose definitions are assignments. */
6862 if (!is_gimple_assign (stmt))
6863 return NULL_TREE;
6865 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6866 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6867 Then we only have to consider the simpler non-inverted cases. */
6868 if (invert)
6869 t = and_var_with_comparison_1 (type, stmt,
6870 invert_tree_comparison (code2, false),
6871 op2a, op2b);
6872 else
6873 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
6874 return canonicalize_bool (t, invert);
6877 /* Try to simplify the OR of the ssa variable defined by the assignment
6878 STMT with the comparison specified by (OP2A CODE2 OP2B).
6879 Return NULL_TREE if we can't simplify this to a single expression. */
6881 static tree
6882 or_var_with_comparison_1 (tree type, gimple *stmt,
6883 enum tree_code code2, tree op2a, tree op2b)
6885 tree var = gimple_assign_lhs (stmt);
6886 tree true_test_var = NULL_TREE;
6887 tree false_test_var = NULL_TREE;
6888 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6890 /* Check for identities like (var OR (var != 0)) => true . */
6891 if (TREE_CODE (op2a) == SSA_NAME
6892 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6894 if ((code2 == NE_EXPR && integer_zerop (op2b))
6895 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6897 true_test_var = op2a;
6898 if (var == true_test_var)
6899 return var;
6901 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6902 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6904 false_test_var = op2a;
6905 if (var == false_test_var)
6906 return boolean_true_node;
6910 /* If the definition is a comparison, recurse on it. */
6911 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6913 tree t = or_comparisons_1 (type, innercode,
6914 gimple_assign_rhs1 (stmt),
6915 gimple_assign_rhs2 (stmt),
6916 code2,
6917 op2a,
6918 op2b);
6919 if (t)
6920 return t;
6923 /* If the definition is an AND or OR expression, we may be able to
6924 simplify by reassociating. */
6925 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6926 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6928 tree inner1 = gimple_assign_rhs1 (stmt);
6929 tree inner2 = gimple_assign_rhs2 (stmt);
6930 gimple *s;
6931 tree t;
6932 tree partial = NULL_TREE;
6933 bool is_or = (innercode == BIT_IOR_EXPR);
6935 /* Check for boolean identities that don't require recursive examination
6936 of inner1/inner2:
6937 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
6938 inner1 OR (inner1 AND inner2) => inner1
6939 !inner1 OR (inner1 OR inner2) => true
6940 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6941 Likewise for similar cases involving inner2. */
6942 if (inner1 == true_test_var)
6943 return (is_or ? var : inner1);
6944 else if (inner2 == true_test_var)
6945 return (is_or ? var : inner2);
6946 else if (inner1 == false_test_var)
6947 return (is_or
6948 ? boolean_true_node
6949 : or_var_with_comparison (type, inner2, false, code2, op2a,
6950 op2b));
6951 else if (inner2 == false_test_var)
6952 return (is_or
6953 ? boolean_true_node
6954 : or_var_with_comparison (type, inner1, false, code2, op2a,
6955 op2b));
6957 /* Next, redistribute/reassociate the OR across the inner tests.
6958 Compute the first partial result, (inner1 OR (op2a code op2b)) */
6959 if (TREE_CODE (inner1) == SSA_NAME
6960 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6961 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6962 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6963 gimple_assign_rhs1 (s),
6964 gimple_assign_rhs2 (s),
6965 code2, op2a, op2b)))
6967 /* Handle the OR case, where we are reassociating:
6968 (inner1 OR inner2) OR (op2a code2 op2b)
6969 => (t OR inner2)
6970 If the partial result t is a constant, we win. Otherwise
6971 continue on to try reassociating with the other inner test. */
6972 if (is_or)
6974 if (integer_onep (t))
6975 return boolean_true_node;
6976 else if (integer_zerop (t))
6977 return inner2;
6980 /* Handle the AND case, where we are redistributing:
6981 (inner1 AND inner2) OR (op2a code2 op2b)
6982 => (t AND (inner2 OR (op2a code op2b))) */
6983 else if (integer_zerop (t))
6984 return boolean_false_node;
6986 /* Save partial result for later. */
6987 partial = t;
6990 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6991 if (TREE_CODE (inner2) == SSA_NAME
6992 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6993 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6994 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
6995 gimple_assign_rhs1 (s),
6996 gimple_assign_rhs2 (s),
6997 code2, op2a, op2b)))
6999 /* Handle the OR case, where we are reassociating:
7000 (inner1 OR inner2) OR (op2a code2 op2b)
7001 => (inner1 OR t)
7002 => (t OR partial) */
7003 if (is_or)
7005 if (integer_zerop (t))
7006 return inner1;
7007 else if (integer_onep (t))
7008 return boolean_true_node;
7009 /* If both are the same, we can apply the identity
7010 (x OR x) == x. */
7011 else if (partial && same_bool_result_p (t, partial))
7012 return t;
7015 /* Handle the AND case, where we are redistributing:
7016 (inner1 AND inner2) OR (op2a code2 op2b)
7017 => (t AND (inner1 OR (op2a code2 op2b)))
7018 => (t AND partial) */
7019 else
7021 if (integer_zerop (t))
7022 return boolean_false_node;
7023 else if (partial)
7025 /* We already got a simplification for the other
7026 operand to the redistributed AND expression. The
7027 interesting case is when at least one is true.
7028 Or, if both are the same, we can apply the identity
7029 (x AND x) == x. */
7030 if (integer_onep (partial))
7031 return t;
7032 else if (integer_onep (t))
7033 return partial;
7034 else if (same_bool_result_p (t, partial))
7035 return t;
7040 return NULL_TREE;
7043 /* Try to simplify the OR of two comparisons defined by
7044 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7045 If this can be done without constructing an intermediate value,
7046 return the resulting tree; otherwise NULL_TREE is returned.
7047 This function is deliberately asymmetric as it recurses on SSA_DEFs
7048 in the first comparison but not the second. */
7050 static tree
7051 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7052 enum tree_code code2, tree op2a, tree op2b)
7054 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7056 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7057 if (operand_equal_p (op1a, op2a, 0)
7058 && operand_equal_p (op1b, op2b, 0))
7060 /* Result will be either NULL_TREE, or a combined comparison. */
7061 tree t = combine_comparisons (UNKNOWN_LOCATION,
7062 TRUTH_ORIF_EXPR, code1, code2,
7063 truth_type, op1a, op1b);
7064 if (t)
7065 return t;
7068 /* Likewise the swapped case of the above. */
7069 if (operand_equal_p (op1a, op2b, 0)
7070 && operand_equal_p (op1b, op2a, 0))
7072 /* Result will be either NULL_TREE, or a combined comparison. */
7073 tree t = combine_comparisons (UNKNOWN_LOCATION,
7074 TRUTH_ORIF_EXPR, code1,
7075 swap_tree_comparison (code2),
7076 truth_type, op1a, op1b);
7077 if (t)
7078 return t;
7081 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7082 NAME's definition is a truth value. See if there are any simplifications
7083 that can be done against the NAME's definition. */
7084 if (TREE_CODE (op1a) == SSA_NAME
7085 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7086 && (integer_zerop (op1b) || integer_onep (op1b)))
7088 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7089 || (code1 == NE_EXPR && integer_onep (op1b)));
7090 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7091 switch (gimple_code (stmt))
7093 case GIMPLE_ASSIGN:
7094 /* Try to simplify by copy-propagating the definition. */
7095 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7096 op2b);
7098 case GIMPLE_PHI:
7099 /* If every argument to the PHI produces the same result when
7100 ORed with the second comparison, we win.
7101 Do not do this unless the type is bool since we need a bool
7102 result here anyway. */
7103 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7105 tree result = NULL_TREE;
7106 unsigned i;
7107 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7109 tree arg = gimple_phi_arg_def (stmt, i);
7111 /* If this PHI has itself as an argument, ignore it.
7112 If all the other args produce the same result,
7113 we're still OK. */
7114 if (arg == gimple_phi_result (stmt))
7115 continue;
7116 else if (TREE_CODE (arg) == INTEGER_CST)
7118 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7120 if (!result)
7121 result = boolean_true_node;
7122 else if (!integer_onep (result))
7123 return NULL_TREE;
7125 else if (!result)
7126 result = fold_build2 (code2, boolean_type_node,
7127 op2a, op2b);
7128 else if (!same_bool_comparison_p (result,
7129 code2, op2a, op2b))
7130 return NULL_TREE;
7132 else if (TREE_CODE (arg) == SSA_NAME
7133 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7135 tree temp;
7136 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7137 /* In simple cases we can look through PHI nodes,
7138 but we have to be careful with loops.
7139 See PR49073. */
7140 if (! dom_info_available_p (CDI_DOMINATORS)
7141 || gimple_bb (def_stmt) == gimple_bb (stmt)
7142 || dominated_by_p (CDI_DOMINATORS,
7143 gimple_bb (def_stmt),
7144 gimple_bb (stmt)))
7145 return NULL_TREE;
7146 temp = or_var_with_comparison (type, arg, invert, code2,
7147 op2a, op2b);
7148 if (!temp)
7149 return NULL_TREE;
7150 else if (!result)
7151 result = temp;
7152 else if (!same_bool_result_p (result, temp))
7153 return NULL_TREE;
7155 else
7156 return NULL_TREE;
7158 return result;
7161 default:
7162 break;
7165 return NULL_TREE;
7168 /* Try to simplify the OR of two comparisons, specified by
7169 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7170 If this can be simplified to a single expression (without requiring
7171 introducing more SSA variables to hold intermediate values),
7172 return the resulting tree. Otherwise return NULL_TREE.
7173 If the result expression is non-null, it has boolean type. */
7175 tree
7176 maybe_fold_or_comparisons (tree type,
7177 enum tree_code code1, tree op1a, tree op1b,
7178 enum tree_code code2, tree op2a, tree op2b)
7180 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
7181 return t;
7183 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
7184 return t;
7186 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7187 op1a, op1b, code2, op2a,
7188 op2b))
7189 return t;
7191 return NULL_TREE;
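/* Editor's example (a sketch), mirroring the AND case:

     maybe_fold_or_comparisons (boolean_type_node,
                                LT_EXPR, x, y, GE_EXPR, x, y)

   returns boolean_true_node for integer operands; for floating point,
   combine_comparisons must be careful about NaNs, where
   (x < y) OR (x >= y) need not hold.  */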
7194 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7196 Either NULL_TREE, a simplified but non-constant expression, or a
7197 constant is returned.
7199 ??? This should go into a gimple-fold-inline.h file to be eventually
7200 privatized with the single valueize function used in the various TUs
7201 to avoid the indirect function call overhead. */
7203 tree
7204 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7205 tree (*gvalueize) (tree))
7207 gimple_match_op res_op;
7208 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7209 edges if there are intermediate VARYING defs. For this reason
7210 do not follow SSA edges here even though SCCVN can technically
7211 deal with that just fine. */
7212 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7214 tree res = NULL_TREE;
7215 if (gimple_simplified_result_is_gimple_val (&res_op))
7216 res = res_op.ops[0];
7217 else if (mprts_hook)
7218 res = mprts_hook (&res_op);
7219 if (res)
7221 if (dump_file && dump_flags & TDF_DETAILS)
7223 fprintf (dump_file, "Match-and-simplified ");
7224 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7225 fprintf (dump_file, " to ");
7226 print_generic_expr (dump_file, res);
7227 fprintf (dump_file, "\n");
7229 return res;
7233 location_t loc = gimple_location (stmt);
7234 switch (gimple_code (stmt))
7236 case GIMPLE_ASSIGN:
7238 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7240 switch (get_gimple_rhs_class (subcode))
7242 case GIMPLE_SINGLE_RHS:
7244 tree rhs = gimple_assign_rhs1 (stmt);
7245 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7247 if (TREE_CODE (rhs) == SSA_NAME)
7249 /* If the RHS is an SSA_NAME, return its known constant value,
7250 if any. */
7251 return (*valueize) (rhs);
7253 /* Handle propagating invariant addresses into address
7254 operations. */
7255 else if (TREE_CODE (rhs) == ADDR_EXPR
7256 && !is_gimple_min_invariant (rhs))
7258 poly_int64 offset = 0;
7259 tree base;
7260 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7261 &offset,
7262 valueize);
7263 if (base
7264 && (CONSTANT_CLASS_P (base)
7265 || decl_address_invariant_p (base)))
7266 return build_invariant_address (TREE_TYPE (rhs),
7267 base, offset);
7269 else if (TREE_CODE (rhs) == CONSTRUCTOR
7270 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7271 && known_eq (CONSTRUCTOR_NELTS (rhs),
7272 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7274 unsigned i, nelts;
7275 tree val;
7277 nelts = CONSTRUCTOR_NELTS (rhs);
7278 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7279 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7281 val = (*valueize) (val);
7282 if (TREE_CODE (val) == INTEGER_CST
7283 || TREE_CODE (val) == REAL_CST
7284 || TREE_CODE (val) == FIXED_CST)
7285 vec.quick_push (val);
7286 else
7287 return NULL_TREE;
7290 return vec.build ();
7292 if (subcode == OBJ_TYPE_REF)
7294 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7295 /* If callee is constant, we can fold away the wrapper. */
7296 if (is_gimple_min_invariant (val))
7297 return val;
7300 if (kind == tcc_reference)
7302 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7303 || TREE_CODE (rhs) == REALPART_EXPR
7304 || TREE_CODE (rhs) == IMAGPART_EXPR)
7305 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7307 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7308 return fold_unary_loc (EXPR_LOCATION (rhs),
7309 TREE_CODE (rhs),
7310 TREE_TYPE (rhs), val);
7312 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7313 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7315 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7316 return fold_ternary_loc (EXPR_LOCATION (rhs),
7317 TREE_CODE (rhs),
7318 TREE_TYPE (rhs), val,
7319 TREE_OPERAND (rhs, 1),
7320 TREE_OPERAND (rhs, 2));
7322 else if (TREE_CODE (rhs) == MEM_REF
7323 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7325 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7326 if (TREE_CODE (val) == ADDR_EXPR
7327 && is_gimple_min_invariant (val))
7329 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7330 unshare_expr (val),
7331 TREE_OPERAND (rhs, 1));
7332 if (tem)
7333 rhs = tem;
7336 return fold_const_aggregate_ref_1 (rhs, valueize);
7338 else if (kind == tcc_declaration)
7339 return get_symbol_constant_value (rhs);
7340 return rhs;
7343 case GIMPLE_UNARY_RHS:
7344 return NULL_TREE;
7346 case GIMPLE_BINARY_RHS:
7347 /* Translate &x + CST into an invariant form suitable for
7348 further propagation. */
7349 if (subcode == POINTER_PLUS_EXPR)
7351 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7352 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7353 if (TREE_CODE (op0) == ADDR_EXPR
7354 && TREE_CODE (op1) == INTEGER_CST)
7356 tree off = fold_convert (ptr_type_node, op1);
7357 return build1_loc
7358 (loc, ADDR_EXPR, TREE_TYPE (op0),
7359 fold_build2 (MEM_REF,
7360 TREE_TYPE (TREE_TYPE (op0)),
7361 unshare_expr (op0), off));
7364 /* Canonicalize bool != 0 and bool == 0 appearing after
7365 valueization. While gimple_simplify handles this
7366 it can get confused by the ~X == 1 -> X == 0 transform
7367 which we can't reduce to an SSA name or a constant
7368 (and we have no way to tell gimple_simplify to not
7369 consider those transforms in the first place). */
7370 else if (subcode == EQ_EXPR
7371 || subcode == NE_EXPR)
7373 tree lhs = gimple_assign_lhs (stmt);
7374 tree op0 = gimple_assign_rhs1 (stmt);
7375 if (useless_type_conversion_p (TREE_TYPE (lhs),
7376 TREE_TYPE (op0)))
7378 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7379 op0 = (*valueize) (op0);
7380 if (TREE_CODE (op0) == INTEGER_CST)
7381 std::swap (op0, op1);
7382 if (TREE_CODE (op1) == INTEGER_CST
7383 && ((subcode == NE_EXPR && integer_zerop (op1))
7384 || (subcode == EQ_EXPR && integer_onep (op1))))
7385 return op0;
7388 return NULL_TREE;
7390 case GIMPLE_TERNARY_RHS:
7392 /* Handle ternary operators that can appear in GIMPLE form. */
7393 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7394 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7395 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7396 return fold_ternary_loc (loc, subcode,
7397 gimple_expr_type (stmt), op0, op1, op2);
7400 default:
7401 gcc_unreachable ();
7405 case GIMPLE_CALL:
7407 tree fn;
7408 gcall *call_stmt = as_a <gcall *> (stmt);
7410 if (gimple_call_internal_p (stmt))
7412 enum tree_code subcode = ERROR_MARK;
7413 switch (gimple_call_internal_fn (stmt))
7415 case IFN_UBSAN_CHECK_ADD:
7416 subcode = PLUS_EXPR;
7417 break;
7418 case IFN_UBSAN_CHECK_SUB:
7419 subcode = MINUS_EXPR;
7420 break;
7421 case IFN_UBSAN_CHECK_MUL:
7422 subcode = MULT_EXPR;
7423 break;
7424 case IFN_BUILTIN_EXPECT:
7426 tree arg0 = gimple_call_arg (stmt, 0);
7427 tree op0 = (*valueize) (arg0);
7428 if (TREE_CODE (op0) == INTEGER_CST)
7429 return op0;
7430 return NULL_TREE;
7432 default:
7433 return NULL_TREE;
7435 tree arg0 = gimple_call_arg (stmt, 0);
7436 tree arg1 = gimple_call_arg (stmt, 1);
7437 tree op0 = (*valueize) (arg0);
7438 tree op1 = (*valueize) (arg1);
7440 if (TREE_CODE (op0) != INTEGER_CST
7441 || TREE_CODE (op1) != INTEGER_CST)
7443 switch (subcode)
7445 case MULT_EXPR:
7446 /* x * 0 = 0 * x = 0 without overflow. */
7447 if (integer_zerop (op0) || integer_zerop (op1))
7448 return build_zero_cst (TREE_TYPE (arg0));
7449 break;
7450 case MINUS_EXPR:
7451 /* y - y = 0 without overflow. */
7452 if (operand_equal_p (op0, op1, 0))
7453 return build_zero_cst (TREE_TYPE (arg0));
7454 break;
7455 default:
7456 break;
7459 tree res
7460 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7461 if (res
7462 && TREE_CODE (res) == INTEGER_CST
7463 && !TREE_OVERFLOW (res))
7464 return res;
7465 return NULL_TREE;
7468 fn = (*valueize) (gimple_call_fn (stmt));
7469 if (TREE_CODE (fn) == ADDR_EXPR
7470 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7471 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7472 && gimple_builtin_call_types_compatible_p (stmt,
7473 TREE_OPERAND (fn, 0)))
7475 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7476 tree retval;
7477 unsigned i;
7478 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7479 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7480 retval = fold_builtin_call_array (loc,
7481 gimple_call_return_type (call_stmt),
7482 fn, gimple_call_num_args (stmt), args);
7483 if (retval)
7485 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7486 STRIP_NOPS (retval);
7487 retval = fold_convert (gimple_call_return_type (call_stmt),
7488 retval);
7490 return retval;
7492 return NULL_TREE;
7495 default:
7496 return NULL_TREE;
7500 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7501 Returns NULL_TREE if folding to a constant is not possible, otherwise
7502 returns a constant according to is_gimple_min_invariant. */
7504 tree
7505 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7507 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7508 if (res && is_gimple_min_invariant (res))
7509 return res;
7510 return NULL_TREE;
7514 /* The following set of functions is supposed to fold references using
7515 their constant initializers. */
7517 /* See if we can find the constructor defining the value of BASE.
7518 When we know the constructor with a constant offset (such as when
7519 BASE is array[40] and we know the constructor of the array), then
7520 BIT_OFFSET is adjusted accordingly.
7522 As a special case, return error_mark_node when the constructor
7523 is not explicitly available, but it is known to be zero,
7524 such as 'static const int a;'. */
7525 static tree
7526 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7527 tree (*valueize)(tree))
7529 poly_int64 bit_offset2, size, max_size;
7530 bool reverse;
7532 if (TREE_CODE (base) == MEM_REF)
7534 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7535 if (!boff.to_shwi (bit_offset))
7536 return NULL_TREE;
7538 if (valueize
7539 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7540 base = valueize (TREE_OPERAND (base, 0));
7541 if (!base || TREE_CODE (base) != ADDR_EXPR)
7542 return NULL_TREE;
7543 base = TREE_OPERAND (base, 0);
7545 else if (valueize
7546 && TREE_CODE (base) == SSA_NAME)
7547 base = valueize (base);
7549 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7550 DECL_INITIAL. If BASE is a nested reference into another
7551 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7552 the inner reference. */
7553 switch (TREE_CODE (base))
7555 case VAR_DECL:
7556 case CONST_DECL:
7558 tree init = ctor_for_folding (base);
7560 /* Our semantics are the exact opposite of ctor_for_folding:
7561 NULL means unknown, while error_mark_node means 0. */
7562 if (init == error_mark_node)
7563 return NULL_TREE;
7564 if (!init)
7565 return error_mark_node;
7566 return init;
7569 case VIEW_CONVERT_EXPR:
7570 return get_base_constructor (TREE_OPERAND (base, 0),
7571 bit_offset, valueize);
7573 case ARRAY_REF:
7574 case COMPONENT_REF:
7575 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7576 &reverse);
7577 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7578 return NULL_TREE;
7579 *bit_offset += bit_offset2;
7580 return get_base_constructor (base, bit_offset, valueize);
7582 case CONSTRUCTOR:
7583 return base;
7585 default:
7586 if (CONSTANT_CLASS_P (base))
7587 return base;
7589 return NULL_TREE;
7593 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7594 to the memory at bit OFFSET. When non-null, TYPE is the expected
7595 type of the reference; otherwise the type of the referenced element
7596 is used instead. When SIZE is zero, attempt to fold a reference to
7597 the entire element which OFFSET refers to. Increment *SUBOFF by
7598 the bit offset of the accessed element. */
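/* Worked example (editor's illustration, declarations invented):
   for static const int a[4] = { 1, 2, 3, 4 }, a read with TYPE int,
   OFFSET 64 and SIZE 32 (both in bits) computes ACCESS_INDEX 2,
   folds to the constant 3, and increments *SUBOFF by 64.  */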
7600 static tree
7601 fold_array_ctor_reference (tree type, tree ctor,
7602 unsigned HOST_WIDE_INT offset,
7603 unsigned HOST_WIDE_INT size,
7604 tree from_decl,
7605 unsigned HOST_WIDE_INT *suboff)
7607 offset_int low_bound;
7608 offset_int elt_size;
7609 offset_int access_index;
7610 tree domain_type = NULL_TREE;
7611 HOST_WIDE_INT inner_offset;
7613 /* Compute low bound and elt size. */
7614 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7615 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7616 if (domain_type && TYPE_MIN_VALUE (domain_type))
7618 /* Static constructors for variably sized objects make no sense. */
7619 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7620 return NULL_TREE;
7621 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7623 else
7624 low_bound = 0;
7625 /* Static constructors for variably sized objects make no sense. */
7626 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7627 return NULL_TREE;
7628 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7630 /* When TYPE is non-null, verify that it specifies a constant-sized
7631 access of a multiple of the array element size. Avoid division
7632 by zero below when ELT_SIZE is zero, such as with the result of
7633 an initializer for a zero-length array or an empty struct. */
7634 if (elt_size == 0
7635 || (type
7636 && (!TYPE_SIZE_UNIT (type)
7637 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7638 return NULL_TREE;
7640 /* Compute the array index we look for. */
7641 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7642 elt_size);
7643 access_index += low_bound;
7645 /* And offset within the access. */
7646 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7648 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7649 if (size > elt_sz * BITS_PER_UNIT)
7651 /* native_encode_expr constraints. */
7652 if (size > MAX_BITSIZE_MODE_ANY_MODE
7653 || size % BITS_PER_UNIT != 0
7654 || inner_offset % BITS_PER_UNIT != 0
7655 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7656 return NULL_TREE;
7658 unsigned ctor_idx;
7659 tree val = get_array_ctor_element_at_index (ctor, access_index,
7660 &ctor_idx);
7661 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7662 return build_zero_cst (type);
7664 /* native-encode adjacent ctor elements. */
7665 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7666 unsigned bufoff = 0;
7667 offset_int index = 0;
7668 offset_int max_index = access_index;
7669 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7670 if (!val)
7671 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7672 else if (!CONSTANT_CLASS_P (val))
7673 return NULL_TREE;
7674 if (!elt->index)
7676 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7678 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7679 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7681 else
7682 index = max_index = wi::to_offset (elt->index);
7683 index = wi::umax (index, access_index);
7686 if (bufoff + elt_sz > sizeof (buf))
7687 elt_sz = sizeof (buf) - bufoff;
7688 int len = native_encode_expr (val, buf + bufoff, elt_sz,
7689 inner_offset / BITS_PER_UNIT);
7690 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7691 return NULL_TREE;
7692 inner_offset = 0;
7693 bufoff += len;
7695 access_index += 1;
7696 if (wi::cmpu (access_index, index) == 0)
7697 val = elt->value;
7698 else if (wi::cmpu (access_index, max_index) > 0)
7700 ctor_idx++;
7701 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7703 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7704 ++max_index;
7706 else
7708 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7709 index = 0;
7710 max_index = access_index;
7711 if (!elt->index)
7713 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7715 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7716 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7718 else
7719 index = max_index = wi::to_offset (elt->index);
7720 index = wi::umax (index, access_index);
7721 if (wi::cmpu (access_index, index) == 0)
7722 val = elt->value;
7723 else
7724 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7728 while (bufoff < size / BITS_PER_UNIT);
7729 *suboff += size;
7730 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7733 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7735 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7737 /* For the final reference to the entire accessed element
7738 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7739 may be null) in favor of the type of the element, and set
7740 SIZE to the size of the accessed element. */
7741 inner_offset = 0;
7742 type = TREE_TYPE (val);
7743 size = elt_sz * BITS_PER_UNIT;
7745 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7746 && TREE_CODE (val) == CONSTRUCTOR
7747 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7748 /* If this isn't the last element in the CTOR, is itself
7749 a CTOR, and does not cover the whole object we are requesting,
7750 give up, since we're not set up for combining from multiple CTORs. */
7751 return NULL_TREE;
7753 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7754 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7755 suboff);
7758 /* Memory not explicitly mentioned in constructor is 0 (or
7759 the reference is out of range). */
7760 return type ? build_zero_cst (type) : NULL_TREE;
7763 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7764 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7765 is the expected type of the reference; otherwise the type of
7766 the referenced member is used instead. When SIZE is zero,
7767 attempt to fold a reference to the entire member which OFFSET
7768 refers to. Increment *SUBOFF by the bit offset
7769 of the accessed member. */
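/* Worked example (editor's illustration, declarations invented):
   for static const struct { int a; int b; } s = { 1, 2 }, a read
   with TYPE int, OFFSET 32 and SIZE 32 (both in bits) overlaps only
   the field B, folds to the constant 2, and increments *SUBOFF by
   B's bit offset of 32.  */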
7771 static tree
7772 fold_nonarray_ctor_reference (tree type, tree ctor,
7773 unsigned HOST_WIDE_INT offset,
7774 unsigned HOST_WIDE_INT size,
7775 tree from_decl,
7776 unsigned HOST_WIDE_INT *suboff)
7778 unsigned HOST_WIDE_INT cnt;
7779 tree cfield, cval;
7781 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7782 cval)
7784 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7785 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7786 tree field_size = DECL_SIZE (cfield);
7788 if (!field_size)
7790 /* Determine the size of the flexible array member from
7791 the size of the initializer provided for it. */
7792 field_size = TYPE_SIZE (TREE_TYPE (cval));
7795 /* Variable-sized objects in static constructors make no sense,
7796 but field_size can be NULL for flexible array members. */
7797 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7798 && TREE_CODE (byte_offset) == INTEGER_CST
7799 && (field_size != NULL_TREE
7800 ? TREE_CODE (field_size) == INTEGER_CST
7801 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7803 /* Compute bit offset of the field. */
7804 offset_int bitoffset
7805 = (wi::to_offset (field_offset)
7806 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7807 /* Compute bit offset where the field ends. */
7808 offset_int bitoffset_end;
7809 if (field_size != NULL_TREE)
7810 bitoffset_end = bitoffset + wi::to_offset (field_size);
7811 else
7812 bitoffset_end = 0;
7814 /* Compute the bit offset of the end of the desired access.
7815 As a special case, if the size of the desired access is
7816 zero, assume the access is to the entire field (and let
7817 the caller make any necessary adjustments by storing
7818 the actual bounds of the field in FIELDBOUNDS). */
7819 offset_int access_end = offset_int (offset);
7820 if (size)
7821 access_end += size;
7822 else
7823 access_end = bitoffset_end;
7825 /* Is there any overlap between the desired access at
7826 [OFFSET, OFFSET+SIZE) and the offset of the field within
7827 the object at [BITOFFSET, BITOFFSET_END)? */
7828 if (wi::cmps (access_end, bitoffset) > 0
7829 && (field_size == NULL_TREE
7830 || wi::lts_p (offset, bitoffset_end)))
7832 *suboff += bitoffset.to_uhwi ();
7834 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
7836 /* For the final reference to the entire accessed member
7837 (SIZE is zero), reset OFFSET, disregard TYPE (which may
7838 be null) in favor of the type of the member, and set
7839 SIZE to the size of the accessed member. */
7840 offset = bitoffset.to_uhwi ();
7841 type = TREE_TYPE (cval);
7842 size = (bitoffset_end - bitoffset).to_uhwi ();
7845 /* We do have overlap. Now see if the field is large enough
7846 to cover the access. Give up for accesses that extend
7847 beyond the end of the object or that span multiple fields. */
7848 if (wi::cmps (access_end, bitoffset_end) > 0)
7849 return NULL_TREE;
7850 if (offset < bitoffset)
7851 return NULL_TREE;
7853 offset_int inner_offset = offset_int (offset) - bitoffset;
7854 return fold_ctor_reference (type, cval,
7855 inner_offset.to_uhwi (), size,
7856 from_decl, suboff);
7860 if (!type)
7861 return NULL_TREE;
7863 return build_zero_cst (type);
7866 /* CTOR is the value initializing memory. Fold a reference of TYPE and
7867 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7868 is zero, attempt to fold a reference to the entire subobject
7869 which POLY_OFFSET refers to. This is used when folding accesses to
7870 string members of aggregates. When non-null, set *SUBOFF to
7871 the bit offset of the accessed subobject. */
7873 tree
7874 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
7875 const poly_uint64 &poly_size, tree from_decl,
7876 unsigned HOST_WIDE_INT *suboff /* = NULL */)
7878 tree ret;
7880 /* We found the field with an exact match. */
7881 if (type
7882 && useless_type_conversion_p (type, TREE_TYPE (ctor))
7883 && known_eq (poly_offset, 0U))
7884 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7886 /* The remaining optimizations need a constant size and offset. */
7887 unsigned HOST_WIDE_INT size, offset;
7888 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
7889 return NULL_TREE;
7891 /* We are at the end of the walk; see if we can view-convert the
7892 result. */
7893 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
7894 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
7895 && !compare_tree_int (TYPE_SIZE (type), size)
7896 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
7898 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
7899 if (ret)
7901 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
7902 if (ret)
7903 STRIP_USELESS_TYPE_CONVERSION (ret);
7905 return ret;
7907 /* For constants and byte-aligned/sized reads try to go through
7908 native_encode/interpret. */
7909 if (CONSTANT_CLASS_P (ctor)
7910 && BITS_PER_UNIT == 8
7911 && offset % BITS_PER_UNIT == 0
7912 && offset / BITS_PER_UNIT <= INT_MAX
7913 && size % BITS_PER_UNIT == 0
7914 && size <= MAX_BITSIZE_MODE_ANY_MODE
7915 && can_native_interpret_type_p (type))
7917 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7918 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
7919 offset / BITS_PER_UNIT);
7920 if (len > 0)
7921 return native_interpret_expr (type, buf, len);
7923 if (TREE_CODE (ctor) == CONSTRUCTOR)
7925 unsigned HOST_WIDE_INT dummy = 0;
7926 if (!suboff)
7927 suboff = &dummy;
7929 tree ret;
7930 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
7931 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
7932 ret = fold_array_ctor_reference (type, ctor, offset, size,
7933 from_decl, suboff);
7934 else
7935 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
7936 from_decl, suboff);
7938 /* Fall back to native_encode_initializer. Needs to be done
7939 only in the outermost fold_ctor_reference call (because it itself
7940 recurses into CONSTRUCTORs) and doesn't update suboff. */
7941 if (ret == NULL_TREE
7942 && suboff == &dummy
7943 && BITS_PER_UNIT == 8
7944 && offset % BITS_PER_UNIT == 0
7945 && offset / BITS_PER_UNIT <= INT_MAX
7946 && size % BITS_PER_UNIT == 0
7947 && size <= MAX_BITSIZE_MODE_ANY_MODE
7948 && can_native_interpret_type_p (type))
7950 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7951 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
7952 offset / BITS_PER_UNIT);
7953 if (len > 0)
7954 return native_interpret_expr (type, buf, len);
7957 return ret;
7960 return NULL_TREE;
7963 /* Return the tree representing the element referenced by T if T is an
7964 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
7965 names using VALUEIZE. Return NULL_TREE otherwise. */
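/* Illustrative example (editor's addition, declarations invented):
   given static const int a[2] = { 1, 2 }, folding the reference
   a[1] yields the constant 2, while a volatile reference is
   rejected up front and yields NULL_TREE.  */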
7967 tree
7968 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
7970 tree ctor, idx, base;
7971 poly_int64 offset, size, max_size;
7972 tree tem;
7973 bool reverse;
7975 if (TREE_THIS_VOLATILE (t))
7976 return NULL_TREE;
7978 if (DECL_P (t))
7979 return get_symbol_constant_value (t);
7981 tem = fold_read_from_constant_string (t);
7982 if (tem)
7983 return tem;
7985 switch (TREE_CODE (t))
7987 case ARRAY_REF:
7988 case ARRAY_RANGE_REF:
7989 /* Constant indexes are handled well by get_base_constructor.
7990 Only special case variable offsets.
7991 FIXME: This code can't handle nested references with variable indexes
7992 (they will be handled only by iteration of ccp). Perhaps we can bring
7993 get_ref_base_and_extent here and make it use a valueize callback. */
7994 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
7995 && valueize
7996 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
7997 && poly_int_tree_p (idx))
7999 tree low_bound, unit_size;
8001 /* If the resulting bit-offset is constant, track it. */
8002 if ((low_bound = array_ref_low_bound (t),
8003 poly_int_tree_p (low_bound))
8004 && (unit_size = array_ref_element_size (t),
8005 tree_fits_uhwi_p (unit_size)))
8007 poly_offset_int woffset
8008 = wi::sext (wi::to_poly_offset (idx)
8009 - wi::to_poly_offset (low_bound),
8010 TYPE_PRECISION (sizetype));
8011 woffset *= tree_to_uhwi (unit_size);
8012 woffset *= BITS_PER_UNIT;
8013 if (woffset.to_shwi (&offset))
8015 base = TREE_OPERAND (t, 0);
8016 ctor = get_base_constructor (base, &offset, valueize);
8017 /* Empty constructor. Always fold to 0. */
8018 if (ctor == error_mark_node)
8019 return build_zero_cst (TREE_TYPE (t));
8020 /* Out-of-bounds array access. Value is undefined,
8021 but don't fold. */
8022 if (maybe_lt (offset, 0))
8023 return NULL_TREE;
8024 /* We cannot determine ctor. */
8025 if (!ctor)
8026 return NULL_TREE;
8027 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8028 tree_to_uhwi (unit_size)
8029 * BITS_PER_UNIT,
8030 base);
8034 /* Fallthru. */
8036 case COMPONENT_REF:
8037 case BIT_FIELD_REF:
8038 case TARGET_MEM_REF:
8039 case MEM_REF:
8040 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8041 ctor = get_base_constructor (base, &offset, valueize);
8043 /* Empty constructor. Always fold to 0. */
8044 if (ctor == error_mark_node)
8045 return build_zero_cst (TREE_TYPE (t));
8046 /* We do not know precise address. */
8047 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8048 return NULL_TREE;
8049 /* We cannot determine ctor. */
8050 if (!ctor)
8051 return NULL_TREE;
8053 /* Out-of-bounds array access. Value is undefined, but don't fold. */
8054 if (maybe_lt (offset, 0))
8055 return NULL_TREE;
8057 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8058 if (tem)
8059 return tem;
8061 /* For bit field reads try to read the representative and
8062 adjust. */
8063 if (TREE_CODE (t) == COMPONENT_REF
8064 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8065 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8067 HOST_WIDE_INT csize, coffset;
8068 tree field = TREE_OPERAND (t, 1);
8069 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8070 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8071 && size.is_constant (&csize)
8072 && offset.is_constant (&coffset)
8073 && (coffset % BITS_PER_UNIT != 0
8074 || csize % BITS_PER_UNIT != 0)
8075 && !reverse
8076 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8078 poly_int64 bitoffset;
8079 poly_uint64 field_offset, repr_offset;
8080 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8081 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8082 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8083 else
8084 bitoffset = 0;
8085 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8086 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8087 HOST_WIDE_INT bitoff;
8088 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8089 - TYPE_PRECISION (TREE_TYPE (field)));
8090 if (bitoffset.is_constant (&bitoff)
8091 && bitoff >= 0
8092 && bitoff <= diff)
8094 offset -= bitoff;
8095 size = tree_to_uhwi (DECL_SIZE (repr));
8097 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8098 size, base);
8099 if (tem && TREE_CODE (tem) == INTEGER_CST)
8101 if (!BYTES_BIG_ENDIAN)
8102 tem = wide_int_to_tree (TREE_TYPE (field),
8103 wi::lrshift (wi::to_wide (tem),
8104 bitoff));
8105 else
8106 tem = wide_int_to_tree (TREE_TYPE (field),
8107 wi::lrshift (wi::to_wide (tem),
8108 diff - bitoff));
8109 return tem;
8114 break;
8116 case REALPART_EXPR:
8117 case IMAGPART_EXPR:
8119 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8120 if (c && TREE_CODE (c) == COMPLEX_CST)
8121 return fold_build1_loc (EXPR_LOCATION (t),
8122 TREE_CODE (t), TREE_TYPE (t), c);
8123 break;
8126 default:
8127 break;
8130 return NULL_TREE;
8133 tree
8134 fold_const_aggregate_ref (tree t)
8136 return fold_const_aggregate_ref_1 (t, NULL);
8139 /* Look up the virtual method with index TOKEN in a virtual table V
8140 at OFFSET.
8141 If CAN_REFER is non-NULL, set it to false if the method
8142 is not referable or if the virtual table is ill-formed (such as rewritten
8143 by a non-C++-produced symbol); otherwise just return NULL in that case. */
8145 tree
8146 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8147 tree v,
8148 unsigned HOST_WIDE_INT offset,
8149 bool *can_refer)
8151 tree vtable = v, init, fn;
8152 unsigned HOST_WIDE_INT size;
8153 unsigned HOST_WIDE_INT elt_size, access_index;
8154 tree domain_type;
8156 if (can_refer)
8157 *can_refer = true;
8159 /* First of all, double-check that we have a virtual table. */
8160 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8162 /* Pass down that we lost track of the target. */
8163 if (can_refer)
8164 *can_refer = false;
8165 return NULL_TREE;
8168 init = ctor_for_folding (v);
8170 /* The virtual tables should always be born with constructors
8171 and we should always assume that they are available for
8172 folding. At the moment we do not stream them in all cases,
8173 but it should never happen that the ctor seems unreachable. */
8174 gcc_assert (init);
8175 if (init == error_mark_node)
8177 /* Pass down that we lost track of the target. */
8178 if (can_refer)
8179 *can_refer = false;
8180 return NULL_TREE;
8182 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8183 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8184 offset *= BITS_PER_UNIT;
8185 offset += token * size;
8187 /* Look up the value in the constructor, which is assumed to be an array.
8188 This is equivalent to
8189 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8190 offset, size, NULL);
8191 but in constant time. We expect that the frontend produced a simple
8192 array without indexed initializers. */
8194 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8195 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8196 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8197 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8199 access_index = offset / BITS_PER_UNIT / elt_size;
8200 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8202 /* The C++ FE can now produce indexed fields, and we check if the indexes
8203 match. */
8204 if (access_index < CONSTRUCTOR_NELTS (init))
8206 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8207 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8208 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8209 STRIP_NOPS (fn);
8211 else
8212 fn = NULL;
8214 /* For a type-inconsistent program we may end up looking up a virtual method
8215 in a virtual table that does not contain TOKEN entries. We may overrun
8216 the virtual table and pick up a constant or RTTI info pointer.
8217 In any case the call is undefined. */
8218 if (!fn
8219 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8220 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8221 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
8222 else
8224 fn = TREE_OPERAND (fn, 0);
8226 /* When the cgraph node is missing and the function is not public, we cannot
8227 devirtualize. This can happen in WHOPR when the actual method
8228 ends up in another partition, because we found the devirtualization
8229 possibility too late. */
8230 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8232 if (can_refer)
8234 *can_refer = false;
8235 return fn;
8237 return NULL_TREE;
8241 /* Make sure we create a cgraph node for functions we'll reference.
8242 They can be non-existent if the reference comes from an entry
8243 of an external vtable for example. */
8244 cgraph_node::get_create (fn);
8246 return fn;
8249 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8250 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8251 KNOWN_BINFO carries the binfo describing the true type of
8252 OBJ_TYPE_REF_OBJECT(REF).
8253 If CAN_REFER is non-NULL, set it to false if the method
8254 is not referable or if the virtual table is ill-formed (such as rewritten
8255 by a non-C++-produced symbol); otherwise just return NULL in that case. */
8257 tree
8258 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8259 bool *can_refer)
8261 unsigned HOST_WIDE_INT offset;
8262 tree v;
8264 v = BINFO_VTABLE (known_binfo);
8265 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
8266 if (!v)
8267 return NULL_TREE;
8269 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8271 if (can_refer)
8272 *can_refer = false;
8273 return NULL_TREE;
8275 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8278 /* Given a pointer value T, return a simplified version of an
8279 indirection through T, or NULL_TREE if no simplification is
8280 possible. Note that the resulting type may differ from
8281 the type pointed to, in the sense that it is still compatible
8282 from the langhooks point of view. */
8284 tree
8285 gimple_fold_indirect_ref (tree t)
8287 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8288 tree sub = t;
8289 tree subtype;
8291 STRIP_NOPS (sub);
8292 subtype = TREE_TYPE (sub);
8293 if (!POINTER_TYPE_P (subtype)
8294 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8295 return NULL_TREE;
8297 if (TREE_CODE (sub) == ADDR_EXPR)
8299 tree op = TREE_OPERAND (sub, 0);
8300 tree optype = TREE_TYPE (op);
8301 /* *&p => p */
8302 if (useless_type_conversion_p (type, optype))
8303 return op;
8305 /* *(foo *)&fooarray => fooarray[0] */
8306 if (TREE_CODE (optype) == ARRAY_TYPE
8307 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8308 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8310 tree type_domain = TYPE_DOMAIN (optype);
8311 tree min_val = size_zero_node;
8312 if (type_domain && TYPE_MIN_VALUE (type_domain))
8313 min_val = TYPE_MIN_VALUE (type_domain);
8314 if (TREE_CODE (min_val) == INTEGER_CST)
8315 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8317 /* *(foo *)&complexfoo => __real__ complexfoo */
8318 else if (TREE_CODE (optype) == COMPLEX_TYPE
8319 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8320 return fold_build1 (REALPART_EXPR, type, op);
8321 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8322 else if (TREE_CODE (optype) == VECTOR_TYPE
8323 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8325 tree part_width = TYPE_SIZE (type);
8326 tree index = bitsize_int (0);
8327 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8331 /* *(p + CST) -> ... */
8332 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8333 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8335 tree addr = TREE_OPERAND (sub, 0);
8336 tree off = TREE_OPERAND (sub, 1);
8337 tree addrtype;
8339 STRIP_NOPS (addr);
8340 addrtype = TREE_TYPE (addr);
8342 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8343 if (TREE_CODE (addr) == ADDR_EXPR
8344 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8345 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8346 && tree_fits_uhwi_p (off))
8348 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8349 tree part_width = TYPE_SIZE (type);
8350 unsigned HOST_WIDE_INT part_widthi
8351 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8352 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8353 tree index = bitsize_int (indexi);
8354 if (known_lt (offset / part_widthi,
8355 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8356 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8357 part_width, index);
8360 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8361 if (TREE_CODE (addr) == ADDR_EXPR
8362 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8363 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8365 tree size = TYPE_SIZE_UNIT (type);
8366 if (tree_int_cst_equal (size, off))
8367 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8370 /* *(p + CST) -> MEM_REF <p, CST>. */
8371 if (TREE_CODE (addr) != ADDR_EXPR
8372 || DECL_P (TREE_OPERAND (addr, 0)))
8373 return fold_build2 (MEM_REF, type,
8374 addr,
8375 wide_int_to_tree (ptype, wi::to_wide (off)));
8378 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8379 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8380 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8381 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8383 tree type_domain;
8384 tree min_val = size_zero_node;
8385 tree osub = sub;
8386 sub = gimple_fold_indirect_ref (sub);
8387 if (! sub)
8388 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8389 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8390 if (type_domain && TYPE_MIN_VALUE (type_domain))
8391 min_val = TYPE_MIN_VALUE (type_domain);
8392 if (TREE_CODE (min_val) == INTEGER_CST)
8393 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8396 return NULL_TREE;
8399 /* Return true if CODE is an operation that when operating on signed
8400 integer types involves undefined behavior on overflow and the
8401 operation can be expressed with unsigned arithmetic. */
8403 bool
8404 arith_code_with_undefined_signed_overflow (tree_code code)
8406 switch (code)
8408 case ABS_EXPR:
8409 case PLUS_EXPR:
8410 case MINUS_EXPR:
8411 case MULT_EXPR:
8412 case NEGATE_EXPR:
8413 case POINTER_PLUS_EXPR:
8414 return true;
8415 default:
8416 return false;
8420 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8421 operation that can be transformed to unsigned arithmetic by converting
8422 its operand, carrying out the operation in the corresponding unsigned
8423 type and converting the result back to the original type.
8425 Returns a sequence of statements that replace STMT and also contain
8426 a modified form of STMT itself. */
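/* Illustrative sketch (editor's addition, SSA names invented):
   a statement
     _3 = _1 + _2;   with _1, _2 and _3 of type int
   is rewritten into the sequence
     _4 = (unsigned int) _1;
     _5 = (unsigned int) _2;
     _6 = _4 + _5;
     _3 = (int) _6;
   so the addition wraps instead of invoking undefined behavior.  */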
8428 gimple_seq
8429 rewrite_to_defined_overflow (gimple *stmt)
8431 if (dump_file && (dump_flags & TDF_DETAILS))
8433 fprintf (dump_file, "rewriting stmt with undefined signed "
8434 "overflow ");
8435 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8438 tree lhs = gimple_assign_lhs (stmt);
8439 tree type = unsigned_type_for (TREE_TYPE (lhs));
8440 gimple_seq stmts = NULL;
8441 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8442 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8443 else
8444 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8446 tree op = gimple_op (stmt, i);
8447 op = gimple_convert (&stmts, type, op);
8448 gimple_set_op (stmt, i, op);
8450 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8451 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8452 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8453 gimple_set_modified (stmt, true);
8454 gimple_seq_add_stmt (&stmts, stmt);
8455 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8456 gimple_seq_add_stmt (&stmts, cvt);
8458 return stmts;
8462 /* The valueization hook we use for the gimple_build API simplification.
8463 This makes us match fold_buildN behavior by only combining with
8464 statements in the sequence(s) we are currently building. */
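/* For example (illustrative): an SSA name whose defining statement
   was just appended to the sequence being built has no basic block
   yet, so gimple_simplify is allowed to combine with it; a name
   defined by a statement already in the IL has a basic block and
   is not looked through.  */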
8466 static tree
8467 gimple_build_valueize (tree op)
8469 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8470 return op;
8471 return NULL_TREE;
8474 /* Build the expression CODE OP0 of type TYPE with location LOC,
8475 simplifying it first if possible. Returns the built
8476 expression value and appends statements possibly defining it
8477 to SEQ. */
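/* Usage sketch (hypothetical caller):
     gimple_seq seq = NULL;
     tree res = gimple_build (&seq, loc, NEGATE_EXPR, type, op0);
   either simplifies to an existing value or appends a statement
   computing res = -op0 to SEQ.  */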
8479 tree
8480 gimple_build (gimple_seq *seq, location_t loc,
8481 enum tree_code code, tree type, tree op0)
8483 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
8484 if (!res)
8486 res = create_tmp_reg_or_ssa_name (type);
8487 gimple *stmt;
8488 if (code == REALPART_EXPR
8489 || code == IMAGPART_EXPR
8490 || code == VIEW_CONVERT_EXPR)
8491 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8492 else
8493 stmt = gimple_build_assign (res, code, op0);
8494 gimple_set_location (stmt, loc);
8495 gimple_seq_add_stmt_without_update (seq, stmt);
8497 return res;
8500 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8501 simplifying it first if possible. Returns the built
8502 expression value and appends statements possibly defining it
8503 to SEQ. */
8505 tree
8506 gimple_build (gimple_seq *seq, location_t loc,
8507 enum tree_code code, tree type, tree op0, tree op1)
8509 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
8510 if (!res)
8512 res = create_tmp_reg_or_ssa_name (type);
8513 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8514 gimple_set_location (stmt, loc);
8515 gimple_seq_add_stmt_without_update (seq, stmt);
8517 return res;
8520 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8521 simplifying it first if possible. Returns the built
8522 expression value and appends statements possibly defining it
8523 to SEQ. */
8525 tree
8526 gimple_build (gimple_seq *seq, location_t loc,
8527 enum tree_code code, tree type, tree op0, tree op1, tree op2)
8529 tree res = gimple_simplify (code, type, op0, op1, op2,
8530 seq, gimple_build_valueize);
8531 if (!res)
8533 res = create_tmp_reg_or_ssa_name (type);
8534 gimple *stmt;
8535 if (code == BIT_FIELD_REF)
8536 stmt = gimple_build_assign (res, code,
8537 build3 (code, type, op0, op1, op2));
8538 else
8539 stmt = gimple_build_assign (res, code, op0, op1, op2);
8540 gimple_set_location (stmt, loc);
8541 gimple_seq_add_stmt_without_update (seq, stmt);
8543 return res;
8546 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8547 void) with a location LOC. Returns the built expression value (or NULL_TREE
8548 if TYPE is void) and appends statements possibly defining it to SEQ. */
8550 tree
8551 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
8553 tree res = NULL_TREE;
8554 gcall *stmt;
8555 if (internal_fn_p (fn))
8556 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8557 else
8559 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8560 stmt = gimple_build_call (decl, 0);
8562 if (!VOID_TYPE_P (type))
8564 res = create_tmp_reg_or_ssa_name (type);
8565 gimple_call_set_lhs (stmt, res);
8567 gimple_set_location (stmt, loc);
8568 gimple_seq_add_stmt_without_update (seq, stmt);
8569 return res;
8572 /* Build the call FN (ARG0) with a result of type TYPE
8573 (or no result if TYPE is void) with location LOC,
8574 simplifying it first if possible. Returns the built
8575 expression value (or NULL_TREE if TYPE is void) and appends
8576 statements possibly defining it to SEQ. */
8578 tree
8579 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8580 tree type, tree arg0)
8582 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
8583 if (!res)
8585 gcall *stmt;
8586 if (internal_fn_p (fn))
8587 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8588 else
8590 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8591 stmt = gimple_build_call (decl, 1, arg0);
8593 if (!VOID_TYPE_P (type))
8595 res = create_tmp_reg_or_ssa_name (type);
8596 gimple_call_set_lhs (stmt, res);
8598 gimple_set_location (stmt, loc);
8599 gimple_seq_add_stmt_without_update (seq, stmt);
8601 return res;
8604 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8605 (or no result if TYPE is void) with location LOC,
8606 simplifying it first if possible. Returns the built
8607 expression value (or NULL_TREE if TYPE is void) and appends
8608 statements possibly defining it to SEQ. */
8610 tree
8611 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8612 tree type, tree arg0, tree arg1)
8614 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
8615 if (!res)
8617 gcall *stmt;
8618 if (internal_fn_p (fn))
8619 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8620 else
8622 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8623 stmt = gimple_build_call (decl, 2, arg0, arg1);
8625 if (!VOID_TYPE_P (type))
8627 res = create_tmp_reg_or_ssa_name (type);
8628 gimple_call_set_lhs (stmt, res);
8630 gimple_set_location (stmt, loc);
8631 gimple_seq_add_stmt_without_update (seq, stmt);
8633 return res;
8636 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8637 (or no result if TYPE is void) with location LOC,
8638 simplifying it first if possible. Returns the built
8639 expression value (or NULL_TREE if TYPE is void) and appends
8640 statements possibly defining it to SEQ. */
8642 tree
8643 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
8644 tree type, tree arg0, tree arg1, tree arg2)
8646 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8647 seq, gimple_build_valueize);
8648 if (!res)
8650 gcall *stmt;
8651 if (internal_fn_p (fn))
8652 stmt = gimple_build_call_internal (as_internal_fn (fn),
8653 3, arg0, arg1, arg2);
8654 else
8656 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8657 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8659 if (!VOID_TYPE_P (type))
8661 res = create_tmp_reg_or_ssa_name (type);
8662 gimple_call_set_lhs (stmt, res);
8664 gimple_set_location (stmt, loc);
8665 gimple_seq_add_stmt_without_update (seq, stmt);
8667 return res;
8670 /* Build the conversion (TYPE) OP with a result of type TYPE
8671 with location LOC if such conversion is necessary in GIMPLE,
8672 simplifying it first.
8673 Returns the built expression value and appends
8674 statements possibly defining it to SEQ. */
8676 tree
8677 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
8679 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8680 return op;
8681 return gimple_build (seq, loc, NOP_EXPR, type, op);
8684 /* Build the conversion (ptrofftype) OP with a result of a type
8685 compatible with ptrofftype with location LOC if such conversion
8686 is necessary in GIMPLE, simplifying it first.
8687 Returns the built expression value and appends
8688 statements possibly defining it to SEQ. */
8690 tree
8691 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
8693 if (ptrofftype_p (TREE_TYPE (op)))
8694 return op;
8695 return gimple_convert (seq, loc, sizetype, op);
8698 /* Build a vector of type TYPE in which each element has the value OP.
8699 Return a gimple value for the result, appending any new statements
8700 to SEQ. */
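/* Usage sketch (editor's illustration): for a four-element integer
   vector type and a constant OP of 1 this yields the VECTOR_CST
   { 1, 1, 1, 1 } directly; for a non-constant OP the CONSTRUCTOR
   (or, for variable-length vector types, a VEC_DUPLICATE_EXPR) is
   assigned to a fresh temporary which is returned instead.  */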
8702 tree
8703 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
8704 tree op)
8706 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
8707 && !CONSTANT_CLASS_P (op))
8708 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
8710 tree res, vec = build_vector_from_val (type, op);
8711 if (is_gimple_val (vec))
8712 return vec;
8713 if (gimple_in_ssa_p (cfun))
8714 res = make_ssa_name (type);
8715 else
8716 res = create_tmp_reg (type);
8717 gimple *stmt = gimple_build_assign (res, vec);
8718 gimple_set_location (stmt, loc);
8719 gimple_seq_add_stmt_without_update (seq, stmt);
8720 return res;
8723 /* Build a vector from BUILDER, handling the case in which some elements
8724 are non-constant. Return a gimple value for the result, appending any
8725 new instructions to SEQ.
8727 BUILDER must not have a stepped encoding on entry. This is because
8728 the function is not geared up to handle the arithmetic that would
8729 be needed in the variable case, and any code building a vector that
8730 is known to be constant should use BUILDER->build () directly. */
8732 tree
8733 gimple_build_vector (gimple_seq *seq, location_t loc,
8734 tree_vector_builder *builder)
8736 gcc_assert (builder->nelts_per_pattern () <= 2);
8737 unsigned int encoded_nelts = builder->encoded_nelts ();
8738 for (unsigned int i = 0; i < encoded_nelts; ++i)
8739 if (!CONSTANT_CLASS_P ((*builder)[i]))
8741 tree type = builder->type ();
8742 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
8743 vec<constructor_elt, va_gc> *v;
8744 vec_alloc (v, nelts);
8745 for (i = 0; i < nelts; ++i)
8746 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
8748 tree res;
8749 if (gimple_in_ssa_p (cfun))
8750 res = make_ssa_name (type);
8751 else
8752 res = create_tmp_reg (type);
8753 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
8754 gimple_set_location (stmt, loc);
8755 gimple_seq_add_stmt_without_update (seq, stmt);
8756 return res;
8758 return builder->build ();
8761 /* Emit gimple statements into SEQ that take the value given in OLD_SIZE
8762 and generate a value guaranteed to be rounded upwards to ALIGN.
8764 Return the tree node representing this size; it is of type TYPE. */
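/* Worked example (editor's illustration, constants invented): with
   OLD_SIZE 13 and ALIGN 8 the emitted computation is
   (13 + 7) & ~7 == 16; ALIGN is assumed to be a power of two, so
   the -align mask below equals ~tg_mask.  */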
8766 tree
8767 gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
8768 tree old_size, unsigned HOST_WIDE_INT align)
8770 unsigned HOST_WIDE_INT tg_mask = align - 1;
8771 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8772 gcc_assert (INTEGRAL_TYPE_P (type));
8773 tree tree_mask = build_int_cst (type, tg_mask);
8774 tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
8775 tree_mask);
8777 tree mask = build_int_cst (type, -align);
8778 return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
8781 /* Return true if the result of assignment STMT is known to be non-negative.
8782 If the return value is based on the assumption that signed overflow is
8783 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8784 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8786 static bool
8787 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8788 int depth)
8790 enum tree_code code = gimple_assign_rhs_code (stmt);
8791 switch (get_gimple_rhs_class (code))
8793 case GIMPLE_UNARY_RHS:
8794 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8795 gimple_expr_type (stmt),
8796 gimple_assign_rhs1 (stmt),
8797 strict_overflow_p, depth);
8798 case GIMPLE_BINARY_RHS:
8799 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
8800 gimple_expr_type (stmt),
8801 gimple_assign_rhs1 (stmt),
8802 gimple_assign_rhs2 (stmt),
8803 strict_overflow_p, depth);
8804 case GIMPLE_TERNARY_RHS:
8805 return false;
8806 case GIMPLE_SINGLE_RHS:
8807 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
8808 strict_overflow_p, depth);
8809 case GIMPLE_INVALID_RHS:
8810 break;
8812 gcc_unreachable ();
8815 /* Return true if return value of call STMT is known to be non-negative.
8816 If the return value is based on the assumption that signed overflow is
8817 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8818 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8820 static bool
8821 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8822 int depth)
8824 tree arg0 = gimple_call_num_args (stmt) > 0 ?
8825 gimple_call_arg (stmt, 0) : NULL_TREE;
8826 tree arg1 = gimple_call_num_args (stmt) > 1 ?
8827 gimple_call_arg (stmt, 1) : NULL_TREE;
8829 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
8830 gimple_call_combined_fn (stmt),
8831 arg0,
8832 arg1,
8833 strict_overflow_p, depth);
8836 /* Return true if return value of call STMT is known to be non-negative.
8837 If the return value is based on the assumption that signed overflow is
8838 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8839 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8841 static bool
8842 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8843 int depth)
8845 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8847 tree arg = gimple_phi_arg_def (stmt, i);
8848 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
8849 return false;
8851 return true;
8854 /* Return true if STMT is known to compute a non-negative value.
8855 If the return value is based on the assumption that signed overflow is
8856 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8857 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8859 bool
8860 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
8861 int depth)
8863 switch (gimple_code (stmt))
8865 case GIMPLE_ASSIGN:
8866 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
8867 depth);
8868 case GIMPLE_CALL:
8869 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
8870 depth);
8871 case GIMPLE_PHI:
8872 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
8873 depth);
8874 default:
8875 return false;
8879 /* Return true if the floating-point value computed by assignment STMT
8880 is known to have an integer value. We also allow +Inf, -Inf and NaN
8881 to be considered integer values. Return false for signaling NaN.
8883 DEPTH is the current nesting depth of the query. */
8885 static bool
8886 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
8888 enum tree_code code = gimple_assign_rhs_code (stmt);
8889 switch (get_gimple_rhs_class (code))
8891 case GIMPLE_UNARY_RHS:
8892 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
8893 gimple_assign_rhs1 (stmt), depth);
8894 case GIMPLE_BINARY_RHS:
8895 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
8896 gimple_assign_rhs1 (stmt),
8897 gimple_assign_rhs2 (stmt), depth);
8898 case GIMPLE_TERNARY_RHS:
8899 return false;
8900 case GIMPLE_SINGLE_RHS:
8901 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
8902 case GIMPLE_INVALID_RHS:
8903 break;
8905 gcc_unreachable ();
8908 /* Return true if the floating-point value computed by call STMT is known
8909 to have an integer value. We also allow +Inf, -Inf and NaN to be
8910 considered integer values. Return false for signaling NaN.
8912 DEPTH is the current nesting depth of the query. */
8914 static bool
8915 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
8917 tree arg0 = (gimple_call_num_args (stmt) > 0
8918 ? gimple_call_arg (stmt, 0)
8919 : NULL_TREE);
8920 tree arg1 = (gimple_call_num_args (stmt) > 1
8921 ? gimple_call_arg (stmt, 1)
8922 : NULL_TREE);
8923 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
8924 arg0, arg1, depth);
8927 /* Return true if the floating-point result of phi STMT is known to have
8928 an integer value. We also allow +Inf, -Inf and NaN to be considered
8929 integer values. Return false for signaling NaN.
8931 DEPTH is the current nesting depth of the query. */
8933 static bool
8934 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
8936 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
8938 tree arg = gimple_phi_arg_def (stmt, i);
8939 if (!integer_valued_real_single_p (arg, depth + 1))
8940 return false;
8942 return true;
8945 /* Return true if the floating-point value computed by STMT is known
8946 to have an integer value. We also allow +Inf, -Inf and NaN to be
8947 considered integer values. Return false for signaling NaN.
8949 DEPTH is the current nesting depth of the query. */
8951 bool
8952 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
8954 switch (gimple_code (stmt))
8956 case GIMPLE_ASSIGN:
8957 return gimple_assign_integer_valued_real_p (stmt, depth);
8958 case GIMPLE_CALL:
8959 return gimple_call_integer_valued_real_p (stmt, depth);
8960 case GIMPLE_PHI:
8961 return gimple_phi_integer_valued_real_p (stmt, depth);
8962 default:
8963 return false;