2018-06-09 Steven G. Kargl <kargl@gcc.gnu.org>
[official-gcc.git] / gcc / gimple-fold.c
blobc1d84420c6ef9a6d6c7644e00ee5b7373e582814
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 /* Return true when DECL can be referenced from current unit.
70    FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71    We can get declarations that are not possible to reference for various
72    reasons:
74      1) When analyzing C++ virtual tables.
75 	C++ virtual tables do have known constructors even
76 	when they are keyed to other compilation unit.
77 	Those tables can contain pointers to methods and vars
78 	in other units.  Those methods have both STATIC and EXTERNAL
79 	set.
80      2) In WHOPR mode devirtualization might lead to reference
81 	to method that was partitioned elsewhere.
82 	In this case we have static VAR_DECL or FUNCTION_DECL
83 	that has no corresponding callgraph/varpool node
84 	declaring the body.
85      3) COMDAT functions referred by external vtables that
86 	we devirtualize only during final compilation stage.
87 	At this time we already decided that we will not output
88 	the function body and thus we can't reference the symbol
89 	directly.  */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94   varpool_node *vnode;
95   struct cgraph_node *node;
96   symtab_node *snode;
98   if (DECL_ABSTRACT_P (decl))
99     return false;
101   /* We are concerned only about static/external vars and functions.  */
102   if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103       || !VAR_OR_FUNCTION_DECL_P (decl))
104     return true;
106   /* Static objects can be referred only if they were not optimized out yet.  */
107   if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109       /* Before we start optimizing unreachable code we can be sure all
110 	 static objects are defined.  */
111       if (symtab->function_flags_ready)
112 	return true;
113       snode = symtab_node::get (decl);
114       if (!snode || !snode->definition)
115 	return false;
      /* A definition inlined into its only caller cannot be referenced
	 directly anymore.  */
116       node = dyn_cast <cgraph_node *> (snode);
117       return !node || !node->global.inlined_to;
120   /* We will later output the initializer, so we can refer to it.
121      So we are concerned only when DECL comes from initializer of
122      external var or var that has been optimized out.  */
123   if (!from_decl
124       || !VAR_P (from_decl)
125       || (!DECL_EXTERNAL (from_decl)
126 	  && (vnode = varpool_node::get (from_decl)) != NULL
127 	  && vnode->definition)
128       || (flag_ltrans
129 	  && (vnode = varpool_node::get (from_decl)) != NULL
130 	  && vnode->in_other_partition))
131     return true;
132   /* We are folding reference from external vtable.  The vtable may refer
133      to a symbol keyed to other compilation unit.  The other compilation
134      unit may be in separate DSO and the symbol may be hidden.  */
135   if (DECL_VISIBILITY_SPECIFIED (decl)
136       && DECL_EXTERNAL (decl)
137       && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138       && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139     return false;
140   /* When function is public, we always can introduce new reference.
141      Exception are the COMDAT functions where introducing a direct
142      reference imply need to include function body in the current unit.  */
143   if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144     return true;
145   /* We have COMDAT.  We are going to check if we still have definition
146      or if the definition is going to be output in other partition.
147      Bypass this when gimplifying; all needed functions will be produced.
149      As observed in PR20991 for already optimized out comdat virtual functions
150      it may be tempting to not necessarily give up because the copy will be
151      output elsewhere when corresponding vtable is output.
152      This is however not possible - ABI specify that COMDATs are output in
153      units where they are used and when the other unit was compiled with LTO
154      it is possible that vtable was kept public while the function itself
155      was privatized.  */
156   if (!symtab->function_flags_ready)
157     return true;
159   snode = symtab_node::get (decl);
160   if (!snode
161       || ((!snode->definition || DECL_EXTERNAL (decl))
162 	  && (!snode->in_other_partition
163 	      || (!snode->forced_by_abi && !snode->force_output))))
164     return false;
  /* As above, an inlined-away function body cannot be referenced.  */
165   node = dyn_cast <cgraph_node *> (snode);
166   return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT.  If the current function
170    is in SSA form, a SSA name is created.  Otherwise a temporary register
171    is made.  */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
  /* In SSA form, STMT becomes the defining statement of the new name.  */
176   if (gimple_in_ssa_p (cfun))
177     return make_ssa_name (type, stmt);
178   else
179     return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
183    acceptable form for is_gimple_min_invariant.
184    FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the referenced decl
   cannot be used from the current unit.  */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189   tree orig_cval = cval;
190   STRIP_NOPS (cval);
  /* Turn  ptr p+ CST  into the equivalent  &MEM[ptr + CST]  form.  */
191   if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192       && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194       tree ptr = TREE_OPERAND (cval, 0);
195       if (is_gimple_min_invariant (ptr))
196 	cval = build1_loc (EXPR_LOCATION (cval),
197 			   ADDR_EXPR, TREE_TYPE (ptr),
198 			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 					ptr,
200 					fold_convert (ptr_type_node,
201 						      TREE_OPERAND (cval, 1))));
203   if (TREE_CODE (cval) == ADDR_EXPR)
205       tree base = NULL_TREE;
206       if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 	  if (base)
210 	    TREE_OPERAND (cval, 0) = base;
212       else
213 	base = get_base_address (TREE_OPERAND (cval, 0));
214       if (!base)
215 	return NULL_TREE;
  /* Give up when the address refers to a symbol that cannot be used
     from this translation unit (see can_refer_decl_in_current_unit_p).  */
217       if (VAR_OR_FUNCTION_DECL_P (base)
218 	  && !can_refer_decl_in_current_unit_p (base, from_decl))
219 	return NULL_TREE;
220       if (TREE_TYPE (base) == error_mark_node)
221 	return NULL_TREE;
222       if (VAR_P (base))
223 	TREE_ADDRESSABLE (base) = 1;
224       else if (TREE_CODE (base) == FUNCTION_DECL)
226 	  /* Make sure we create a cgraph node for functions we'll reference.
227 	     They can be non-existent if the reference comes from an entry
228 	     of an external vtable for example.  */
229 	  cgraph_node::get_create (base);
231       /* Fixup types in global initializers.  */
232       if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235       if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 	cval = fold_convert (TREE_TYPE (orig_cval), cval);
237       return cval;
  /* Constants with TREE_OVERFLOW set are not valid GIMPLE invariants.  */
239   if (TREE_OVERFLOW_P (cval))
240     return drop_tree_overflow (cval);
241   return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245    NULL_TREE is returned otherwise.  */
247 tree
248 get_symbol_constant_value (tree sym)
  /* ctor_for_folding returns error_mark_node when SYM's initializer
     must not be used for folding at all.  */
250   tree val = ctor_for_folding (sym);
251   if (val != error_mark_node)
253       if (val)
255 	  val = canonicalize_constructor_val (unshare_expr (val), sym);
256 	  if (val && is_gimple_min_invariant (val))
257 	    return val;
258 	  else
259 	    return NULL_TREE;
261       /* Variables declared 'const' without an initializer
262 	 have zero as the initializer if they may not be
263 	 overridden at link or run time.  */
264       if (!val
265           && is_gimple_reg_type (TREE_TYPE (sym)))
266 	return build_zero_cst (TREE_TYPE (sym));
269   return NULL_TREE;
274 /* Subroutine of fold_stmt.  We perform several simplifications of the
275    memory reference tree EXPR and make sure to re-gimplify them properly
276    after propagation of constant addresses.  IS_LHS is true if the
277    reference is supposed to be an lvalue.
   Returns the simplified replacement tree or NULL_TREE.  */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282   tree result;
  /* Unary view-changing references on constants fold directly.  */
284   if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285        || TREE_CODE (expr) == REALPART_EXPR
286        || TREE_CODE (expr) == IMAGPART_EXPR)
287       && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288     return fold_unary_loc (EXPR_LOCATION (expr),
289 			   TREE_CODE (expr),
290 			   TREE_TYPE (expr),
291 			   TREE_OPERAND (expr, 0));
292   else if (TREE_CODE (expr) == BIT_FIELD_REF
293 	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294     return fold_ternary_loc (EXPR_LOCATION (expr),
295 			     TREE_CODE (expr),
296 			     TREE_TYPE (expr),
297 			     TREE_OPERAND (expr, 0),
298 			     TREE_OPERAND (expr, 1),
299 			     TREE_OPERAND (expr, 2));
  /* An lvalue cannot be replaced by a constant; only fold loads.  */
301   if (!is_lhs
302       && (result = fold_const_aggregate_ref (expr))
303       && is_gimple_min_invariant (result))
304     return result;
306   return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI.  Returns a
311    replacement rhs for the statement or NULL_TREE if no simplification
312    could be made.  It is assumed that the operands have been previously
313    folded.  */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318   gimple *stmt = gsi_stmt (*si);
319   enum tree_code subcode = gimple_assign_rhs_code (stmt);
320   location_t loc = gimple_location (stmt);
322   tree result = NULL_TREE;
  /* Dispatch on the shape of the right-hand side.  */
324   switch (get_gimple_rhs_class (subcode))
326     case GIMPLE_SINGLE_RHS:
328 	tree rhs = gimple_assign_rhs1 (stmt);
  /* Clobbers mark end of lifetime; never touch them.  */
330 	if (TREE_CLOBBER_P (rhs))
331 	  return NULL_TREE;
333 	if (REFERENCE_CLASS_P (rhs))
334 	  return maybe_fold_reference (rhs, false);
336 	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 	    tree val = OBJ_TYPE_REF_EXPR (rhs);
339 	    if (is_gimple_min_invariant (val))
340 	      return val;
	    /* Try to devirtualize: when the set of possible targets of the
	       virtual call is complete and has at most one entry, the
	       address can be resolved statically.  */
341 	    else if (flag_devirtualize && virtual_method_call_p (rhs))
343 		bool final;
344 		vec <cgraph_node *>targets
345 		  = possible_polymorphic_call_targets (rhs, stmt, &final);
346 		if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 		    if (dump_enabled_p ())
350 			location_t loc = gimple_location_safe (stmt);
351 			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
352 					 "resolving virtual function address "
353 					 "reference to function %s\n",
354 					 targets.length () == 1
355 					 ? targets[0]->name ()
356 					 : "NULL");
358 		    if (targets.length () == 1)
360 			val = fold_convert (TREE_TYPE (val),
361 					    build_fold_addr_expr_loc
362 					      (loc, targets[0]->decl));
363 			STRIP_USELESS_TYPE_CONVERSION (val);
365 		    else
366 		      /* We can not use __builtin_unreachable here because it
367 			 can not have address taken.  */
368 		      val = build_int_cst (TREE_TYPE (val), 0);
369 		    return val;
374 	else if (TREE_CODE (rhs) == ADDR_EXPR)
376 	    tree ref = TREE_OPERAND (rhs, 0);
377 	    tree tem = maybe_fold_reference (ref, true);
	    /* &MEM[p + 0] folds to p (with a possible conversion).  */
378 	    if (tem
379 		&& TREE_CODE (tem) == MEM_REF
380 		&& integer_zerop (TREE_OPERAND (tem, 1)))
381 	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
382 	    else if (tem)
383 	      result = fold_convert (TREE_TYPE (rhs),
384 				     build_fold_addr_expr_loc (loc, tem));
385 	    else if (TREE_CODE (ref) == MEM_REF
386 		     && integer_zerop (TREE_OPERAND (ref, 1)))
387 	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
389 	    if (result)
391 		/* Strip away useless type conversions.  Both the
392 		   NON_LVALUE_EXPR that may have been added by fold, and
393 		   "useless" type conversions that might now be apparent
394 		   due to propagation.  */
395 		STRIP_USELESS_TYPE_CONVERSION (result);
397 		if (result != rhs && valid_gimple_rhs_p (result))
398 		  return result;
402 	else if (TREE_CODE (rhs) == CONSTRUCTOR
403 		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
405 	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
406 	    unsigned i;
407 	    tree val;
409 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
410 	      if (! CONSTANT_CLASS_P (val))
411 		return NULL_TREE;
413 	    return build_vector_from_ctor (TREE_TYPE (rhs),
414 					   CONSTRUCTOR_ELTS (rhs));
417 	else if (DECL_P (rhs))
418 	  return get_symbol_constant_value (rhs);
420       break;
422     case GIMPLE_UNARY_RHS:
423       break;
425     case GIMPLE_BINARY_RHS:
426       break;
428     case GIMPLE_TERNARY_RHS:
429       result = fold_ternary_loc (loc, subcode,
430 				 TREE_TYPE (gimple_assign_lhs (stmt)),
431 				 gimple_assign_rhs1 (stmt),
432 				 gimple_assign_rhs2 (stmt),
433 				 gimple_assign_rhs3 (stmt));
435       if (result)
437 	  STRIP_USELESS_TYPE_CONVERSION (result);
438 	  if (valid_gimple_rhs_p (result))
439 	    return result;
441       break;
443     case GIMPLE_INVALID_RHS:
444       gcc_unreachable ();
447   return NULL_TREE;
451 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
452    adjusting the replacement stmts location and virtual operands.
453    If the statement has a lhs the last stmt in the sequence is expected
454    to assign to that lhs.  */
456 static void
457 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
459   gimple *stmt = gsi_stmt (*si_p);
461   if (gimple_has_location (stmt))
462     annotate_all_with_location (stmts, gimple_location (stmt));
464   /* First iterate over the replacement statements backward, assigning
465      virtual operands to their defining statements.  */
466   gimple *laststore = NULL;
467   for (gimple_stmt_iterator i = gsi_last (stmts);
468        !gsi_end_p (i); gsi_prev (&i))
470       gimple *new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store or a call with
	 memory side-effects.  */
471       if ((gimple_assign_single_p (new_stmt)
472 	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
473 	  || (is_gimple_call (new_stmt)
474 	      && (gimple_call_flags (new_stmt)
475 		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
477 	  tree vdef;
	  /* The last store in the sequence inherits the replaced
	     statement's VDEF; earlier stores get fresh SSA names.  */
478 	  if (!laststore)
479 	    vdef = gimple_vdef (stmt);
480 	  else
481 	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
482 	  gimple_set_vdef (new_stmt, vdef);
483 	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
484 	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
485 	  laststore = new_stmt;
489   /* Second iterate over the statements forward, assigning virtual
490      operands to their uses.  */
491   tree reaching_vuse = gimple_vuse (stmt);
492   for (gimple_stmt_iterator i = gsi_start (stmts);
493        !gsi_end_p (i); gsi_next (&i))
495       gimple *new_stmt = gsi_stmt (i);
496       /* If the new statement possibly has a VUSE, update it with exact SSA
497 	 name we know will reach this one.  */
498       if (gimple_has_mem_ops (new_stmt))
499 	gimple_set_vuse (new_stmt, reaching_vuse);
500       gimple_set_modified (new_stmt, true);
501       if (gimple_vdef (new_stmt))
502 	reaching_vuse = gimple_vdef (new_stmt);
505   /* If the new sequence does not do a store release the virtual
506      definition of the original statement.  */
507   if (reaching_vuse
508       && reaching_vuse == gimple_vuse (stmt))
510       tree vdef = gimple_vdef (stmt);
511       if (vdef
512 	  && TREE_CODE (vdef) == SSA_NAME)
514 	  unlink_stmt_vdef (stmt);
515 	  release_ssa_name (vdef);
519   /* Finally replace the original statement with the sequence.  */
520   gsi_replace_with_seq (si_p, stmts, false);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524    RHS of an assignment.  Insert the necessary statements before
525    iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
526    is replaced.  If the call is expected to produce a result, then it
527    is replaced by an assignment of the new RHS to the result variable.
528    If the result is to be ignored, then the call is replaced by a
529    GIMPLE_NOP.  A proper VDEF chain is retained by making the first
530    VUSE and the last VDEF of the whole sequence be the same as the replaced
531    statement and using new SSA names for stores in between.  */
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
536   tree lhs;
537   gimple *stmt, *new_stmt;
538   gimple_stmt_iterator i;
539   gimple_seq stmts = NULL;
541   stmt = gsi_stmt (*si_p);
543   gcc_assert (is_gimple_call (stmt));
545   push_gimplify_context (gimple_in_ssa_p (cfun));
547   lhs = gimple_call_lhs (stmt);
548   if (lhs == NULL_TREE)
550       gimplify_and_add (expr, &stmts);
551       /* We can end up with folding a memcpy of an empty class assignment
552 	 which gets optimized away by C++ gimplification.  */
553       if (gimple_seq_empty_p (stmts))
555 	  pop_gimplify_context (NULL);
556 	  if (gimple_in_ssa_p (cfun))
558 	      unlink_stmt_vdef (stmt);
559 	      release_defs (stmt);
561 	  gsi_replace (si_p, gimple_build_nop (), false);
562 	  return;
565   else
      /* The call has a result: gimplify EXPR and append LHS = tmp.  */
567       tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
568       new_stmt = gimple_build_assign (lhs, tmp);
569       i = gsi_last (stmts);
570       gsi_insert_after_without_update (&i, new_stmt,
571 				       GSI_CONTINUE_LINKING);
574   pop_gimplify_context (NULL);
576   gsi_replace_with_seq_vops (si_p, stmts);
580 /* Replace the call at *GSI with the gimple value VAL.  */
582 void
583 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
585   gimple *stmt = gsi_stmt (*gsi);
586   tree lhs = gimple_call_lhs (stmt);
587   gimple *repl;
588   if (lhs)
590       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
591 	val = fold_convert (TREE_TYPE (lhs), val);
592       repl = gimple_build_assign (lhs, val);
594   else
595     repl = gimple_build_nop ();
  /* The replacement has no memory side-effects, so drop the call's VDEF.  */
596   tree vdef = gimple_vdef (stmt);
597   if (vdef && TREE_CODE (vdef) == SSA_NAME)
599       unlink_stmt_vdef (stmt);
600       release_ssa_name (vdef);
602   gsi_replace (gsi, repl, false);
605 /* Replace the call at *GSI with the new call REPL and fold that
606    again.  */
608 static void
609 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
611   gimple *stmt = gsi_stmt (*gsi);
  /* Carry over lhs, location and virtual operands to the new call.  */
612   gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
613   gimple_set_location (repl, gimple_location (stmt));
614   if (gimple_vdef (stmt)
615       && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
617       gimple_set_vdef (repl, gimple_vdef (stmt));
618       SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
620   if (gimple_vuse (stmt))
621     gimple_set_vuse (repl, gimple_vuse (stmt));
622   gsi_replace (gsi, repl, false);
623   fold_stmt (gsi);
626 /* Return true if VAR is a VAR_DECL or a component thereof.  */
628 static bool
629 var_decl_component_p (tree var)
  /* Strip component references (ARRAY_REF, COMPONENT_REF, ...) down to
     the underlying base object.  */
631   tree inner = var;
632   while (handled_component_p (inner))
633     inner = TREE_OPERAND (inner, 0);
634   return (DECL_P (inner)
635 	  || (TREE_CODE (inner) == MEM_REF
636 	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
639 /* If the SIZE argument representing the size of an object is in a range
640    of values of which exactly one is valid (and that is zero), return
641    true, otherwise false.  */
643 static bool
644 size_must_be_zero_p (tree size)
646   if (integer_zerop (size))
647     return true;
649   if (TREE_CODE (size) != SSA_NAME)
650     return false;
652   wide_int min, max;
653   enum value_range_type rtype = get_range_info (size, &min, &max);
  /* Only an anti-range ~[1, SSIZE_MAX-or-larger] leaves zero as the
     single valid value.  */
654   if (rtype != VR_ANTI_RANGE)
655     return false;
657   tree type = TREE_TYPE (size);
658   int prec = TYPE_PRECISION (type);
660   wide_int wone = wi::one (prec);
662   /* Compute the value of SSIZE_MAX, the largest positive value that
663      can be stored in ssize_t, the signed counterpart of size_t.  */
664   wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
666   return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
669 /* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
670    diagnose (otherwise undefined) overlapping copies without preventing
671    folding.  When folded, GCC guarantees that overlapping memcpy has
672    the same semantics as memmove.  Call to the library memcpy need not
673    provide the same guarantee.  Return false if no simplification can
674    be made.
   ENDP encodes which builtin: 0 = memcpy, 1 = mempcpy, 2 = stpcpy-style
   (return dest + len - 1), 3 = memmove -- NOTE(review): inferred from the
   endp handling at the end of this function; confirm against callers.  */
676 static bool
677 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
678 			       tree dest, tree src, int endp)
680   gimple *stmt = gsi_stmt (*gsi);
681   tree lhs = gimple_call_lhs (stmt);
682   tree len = gimple_call_arg (stmt, 2);
683   tree destvar, srcvar;
684   location_t loc = gimple_location (stmt);
686   bool nowarn = gimple_no_warning_p (stmt);
688   /* If the LEN parameter is a constant zero or in range where
689      the only valid value is zero, return DEST.  */
690   if (size_must_be_zero_p (len))
692       gimple *repl;
693       if (gimple_call_lhs (stmt))
694 	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
695       else
696 	repl = gimple_build_nop ();
697       tree vdef = gimple_vdef (stmt);
698       if (vdef && TREE_CODE (vdef) == SSA_NAME)
700 	  unlink_stmt_vdef (stmt);
701 	  release_ssa_name (vdef);
703       gsi_replace (gsi, repl, false);
704       return true;
707   /* If SRC and DEST are the same (and not volatile), return
708      DEST{,+LEN,+LEN-1}.  */
709   if (operand_equal_p (src, dest, 0))
711       /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
712 	 It's safe and may even be emitted by GCC itself (see bug
713 	 32667).  */
714       unlink_stmt_vdef (stmt);
715       if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
716 	release_ssa_name (gimple_vdef (stmt));
717       if (!lhs)
719 	  gsi_replace (gsi, gimple_build_nop (), false);
720 	  return true;
722       goto done;
724   else
726       tree srctype, desttype;
727       unsigned int src_align, dest_align;
728       tree off0;
730       /* Build accesses at offset zero with a ref-all character type.  */
731       off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
732 							 ptr_mode, true), 0);
734       /* If we can perform the copy efficiently with first doing all loads
735          and then all stores inline it that way.  Currently efficiently
736          means that we can load all the memory into a single integer
737          register which is what MOVE_MAX gives us.  */
738       src_align = get_pointer_alignment (src);
739       dest_align = get_pointer_alignment (dest);
740       if (tree_fits_uhwi_p (len)
741 	  && compare_tree_int (len, MOVE_MAX) <= 0
742 	  /* ???  Don't transform copies from strings with known length this
743 	     confuses the tree-ssa-strlen.c.  This doesn't handle
744 	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
745 	     reason.  */
746 	  && !c_strlen (src, 2))
748 	  unsigned ilen = tree_to_uhwi (len);
	  /* Only power-of-two sizes map to a single integer-mode access.  */
749 	  if (pow2p_hwi (ilen))
751 	      /* Detect invalid bounds and overlapping copies and issue
752 		 either -Warray-bounds or -Wrestrict.  */
753 	      if (!nowarn
754 		  && check_bounds_or_overlap (as_a <gcall *>(stmt),
755 					      dest, src, len, len))
756 	        gimple_set_no_warning (stmt, true);
758 	      scalar_int_mode mode;
759 	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
760 	      if (type
761 		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
762 		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
763 		  /* If the destination pointer is not aligned we must be able
764 		     to emit an unaligned store.  */
765 		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
766 		      || !targetm.slow_unaligned_access (mode, dest_align)
767 		      || (optab_handler (movmisalign_optab, mode)
768 			  != CODE_FOR_nothing)))
770 		  tree srctype = type;
771 		  tree desttype = type;
772 		  if (src_align < GET_MODE_ALIGNMENT (mode))
773 		    srctype = build_aligned_type (type, src_align);
774 		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  /* If the source is a constant, load its value directly.  */
775 		  tree tem = fold_const_aggregate_ref (srcmem);
776 		  if (tem)
777 		    srcmem = tem;
778 		  else if (src_align < GET_MODE_ALIGNMENT (mode)
779 			   && targetm.slow_unaligned_access (mode, src_align)
780 			   && (optab_handler (movmisalign_optab, mode)
781 			       == CODE_FOR_nothing))
782 		    srcmem = NULL_TREE;
783 		  if (srcmem)
785 		      gimple *new_stmt;
786 		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
788 			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
789 			  srcmem
790 			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
792 			  gimple_assign_set_lhs (new_stmt, srcmem);
793 			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
794 			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
796 		      if (dest_align < GET_MODE_ALIGNMENT (mode))
797 			desttype = build_aligned_type (type, dest_align);
798 		      new_stmt
799 			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
801 					       srcmem);
802 		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
803 		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
804 		      if (gimple_vdef (new_stmt)
805 			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
806 			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
807 		      if (!lhs)
809 			  gsi_replace (gsi, new_stmt, false);
810 			  return true;
812 		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
813 		      goto done;
819       if (endp == 3)
821 	  /* Both DEST and SRC must be pointer types.
822 	     ??? This is what old code did.  Is the testing for pointer types
823 	     really mandatory?
825 	     If either SRC is readonly or length is 1, we can use memcpy.  */
826 	  if (!dest_align || !src_align)
827 	    return false;
828 	  if (readonly_data_expr (src)
829 	      || (tree_fits_uhwi_p (len)
830 		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
831 		      >= tree_to_uhwi (len))))
833 	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
834 	      if (!fn)
835 		return false;
836 	      gimple_call_set_fndecl (stmt, fn);
837 	      gimple_call_set_arg (stmt, 0, dest);
838 	      gimple_call_set_arg (stmt, 1, src);
839 	      fold_stmt (gsi);
840 	      return true;
843 	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
844 	  if (TREE_CODE (src) == ADDR_EXPR
845 	      && TREE_CODE (dest) == ADDR_EXPR)
847 	      tree src_base, dest_base, fn;
848 	      poly_int64 src_offset = 0, dest_offset = 0;
849 	      poly_uint64 maxsize;
851 	      srcvar = TREE_OPERAND (src, 0);
852 	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
853 	      if (src_base == NULL)
854 		src_base = srcvar;
855 	      destvar = TREE_OPERAND (dest, 0);
856 	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
858 	      if (dest_base == NULL)
859 		dest_base = destvar;
860 	      if (!poly_int_tree_p (len, &maxsize))
861 		maxsize = -1;
862 	      if (SSA_VAR_P (src_base)
863 		  && SSA_VAR_P (dest_base))
		  /* Same decl: ranges must be provably disjoint.  */
865 		  if (operand_equal_p (src_base, dest_base, 0)
866 		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
868 		    return false;
870 	      else if (TREE_CODE (src_base) == MEM_REF
871 		       && TREE_CODE (dest_base) == MEM_REF)
873 		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
875 		    return false;
876 		  poly_offset_int full_src_offset
877 		    = mem_ref_offset (src_base) + src_offset;
878 		  poly_offset_int full_dest_offset
879 		    = mem_ref_offset (dest_base) + dest_offset;
880 		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
882 		    return false;
884 	      else
885 		return false;
887 	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
888 	      if (!fn)
889 		return false;
890 	      gimple_call_set_fndecl (stmt, fn);
891 	      gimple_call_set_arg (stmt, 0, dest);
892 	      gimple_call_set_arg (stmt, 1, src);
893 	      fold_stmt (gsi);
894 	      return true;
897 	  /* If the destination and source do not alias optimize into
898 	     memcpy as well.  */
899 	  if ((is_gimple_min_invariant (dest)
900 	       || TREE_CODE (dest) == SSA_NAME)
901 	      && (is_gimple_min_invariant (src)
902 		  || TREE_CODE (src) == SSA_NAME))
904 	      ao_ref destr, srcr;
905 	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
906 	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
907 	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
909 		  tree fn;
910 		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
911 		  if (!fn)
912 		    return false;
913 		  gimple_call_set_fndecl (stmt, fn);
914 		  gimple_call_set_arg (stmt, 0, dest);
915 		  gimple_call_set_arg (stmt, 1, src);
916 		  fold_stmt (gsi);
917 		  return true;
921 	  return false;
924       if (!tree_fits_shwi_p (len))
925 	return false;
926       if (!POINTER_TYPE_P (TREE_TYPE (src))
927 	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
928 	return false;
929       /* In the following try to find a type that is most natural to be
930 	 used for the memcpy source and destination and that allows
931 	 the most optimization when memcpy is turned into a plain assignment
932 	 using that type.  In theory we could always use a char[len] type
933 	 but that only gains us that the destination and source possibly
934 	 no longer will have their address taken.  */
935       srctype = TREE_TYPE (TREE_TYPE (src));
936       if (TREE_CODE (srctype) == ARRAY_TYPE
937 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
938 	srctype = TREE_TYPE (srctype);
939       desttype = TREE_TYPE (TREE_TYPE (dest));
940       if (TREE_CODE (desttype) == ARRAY_TYPE
941 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
942 	desttype = TREE_TYPE (desttype);
943       if (TREE_ADDRESSABLE (srctype)
944 	  || TREE_ADDRESSABLE (desttype))
945 	return false;
947       /* Make sure we are not copying using a floating-point mode or
948 	 a type whose size possibly does not match its precision.  */
949       if (FLOAT_MODE_P (TYPE_MODE (desttype))
950 	  || TREE_CODE (desttype) == BOOLEAN_TYPE
951 	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
952 	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
953       if (FLOAT_MODE_P (TYPE_MODE (srctype))
954 	  || TREE_CODE (srctype) == BOOLEAN_TYPE
955 	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
956 	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      /* bitwise_type_for_mode may fail; fall back to the other side's type.  */
957       if (!srctype)
958 	srctype = desttype;
959       if (!desttype)
960 	desttype = srctype;
961       if (!srctype)
962 	return false;
964       src_align = get_pointer_alignment (src);
965       dest_align = get_pointer_alignment (dest);
966       if (dest_align < TYPE_ALIGN (desttype)
967 	  || src_align < TYPE_ALIGN (srctype))
968 	return false;
970       destvar = NULL_TREE;
971       if (TREE_CODE (dest) == ADDR_EXPR
972 	  && var_decl_component_p (TREE_OPERAND (dest, 0))
973 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
974 	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
976       srcvar = NULL_TREE;
977       if (TREE_CODE (src) == ADDR_EXPR
978 	  && var_decl_component_p (TREE_OPERAND (src, 0))
979 	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
981 	  if (!destvar
982 	      || src_align >= TYPE_ALIGN (desttype))
983 	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  src, off0);
985 	  else if (!STRICT_ALIGNMENT)
987 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
989 	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
993       if (srcvar == NULL_TREE && destvar == NULL_TREE)
994 	return false;
996       if (srcvar == NULL_TREE)
998 	  if (src_align >= TYPE_ALIGN (desttype))
999 	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1000 	  else
1002 	      if (STRICT_ALIGNMENT)
1003 		return false;
1004 	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
1006 	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1009       else if (destvar == NULL_TREE)
1011 	  if (dest_align >= TYPE_ALIGN (srctype))
1012 	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1013 	  else
1015 	      if (STRICT_ALIGNMENT)
1016 		return false;
1017 	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
1019 	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1023       /* Detect invalid bounds and overlapping copies and issue either
1024 	 -Warray-bounds or -Wrestrict.  */
1025       if (!nowarn)
1026 	check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1028       gimple *new_stmt;
1029       if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1031 	  tree tem = fold_const_aggregate_ref (srcvar);
1032 	  if (tem)
1033 	    srcvar = tem;
	  /* Materialize the load into a register before the store when the
	     source is not a constant.  */
1034 	  if (! is_gimple_min_invariant (srcvar))
1036 	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1037 	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
1039 	      gimple_assign_set_lhs (new_stmt, srcvar);
1040 	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1041 	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 	  new_stmt = gimple_build_assign (destvar, srcvar);
1044 	  goto set_vop_and_replace;
1047       /* We get an aggregate copy.  Use an unsigned char[] type to
1048 	 perform the copying to preserve padding and to avoid any issues
1049 	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
1050       desttype = build_array_type_nelts (unsigned_char_type_node,
					 tree_to_uhwi (len));
1052       srctype = desttype;
1053       if (src_align > TYPE_ALIGN (srctype))
1054 	srctype = build_aligned_type (srctype, src_align);
1055       if (dest_align > TYPE_ALIGN (desttype))
1056 	desttype = build_aligned_type (desttype, dest_align);
1057       new_stmt
1058 	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
			       fold_build2 (MEM_REF, srctype, src, off0));
1060 set_vop_and_replace:
1061       gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1062       gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1063       if (gimple_vdef (new_stmt)
1064 	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1065 	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1066       if (!lhs)
1068 	  gsi_replace (gsi, new_stmt, false);
1069 	  return true;
1071       gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1074 done:
  /* Compute the value returned to the caller: DEST for memcpy/memmove,
     DEST+LEN for mempcpy (endp == 1), DEST+LEN-1 for endp == 2.  */
1075   gimple_seq stmts = NULL;
1076   if (endp == 0 || endp == 3)
1077     len = NULL_TREE;
1078   else if (endp == 2)
1079     len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
			ssize_int (1));
1081   if (endp == 2 || endp == 1)
1083       len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1084       dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
1088   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1089   gimple *repl = gimple_build_assign (lhs, dest);
1090   gsi_replace (gsi, repl, false);
1091   return true;
1094 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1095 to built-in memcmp (a, b, len). */
1097 static bool
1098 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1100 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1102 if (!fn)
1103 return false;
1105 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1107 gimple *stmt = gsi_stmt (*gsi);
1108 tree a = gimple_call_arg (stmt, 0);
1109 tree b = gimple_call_arg (stmt, 1);
1110 tree len = gimple_call_arg (stmt, 2);
1112 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1113 replace_call_with_call_and_fold (gsi, repl);
1115 return true;
1118 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1119 to built-in memmove (dest, src, len). */
1121 static bool
1122 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1124 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1126 if (!fn)
1127 return false;
1129 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1130 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1131 len) into memmove (dest, src, len). */
1133 gimple *stmt = gsi_stmt (*gsi);
1134 tree src = gimple_call_arg (stmt, 0);
1135 tree dest = gimple_call_arg (stmt, 1);
1136 tree len = gimple_call_arg (stmt, 2);
1138 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1139 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1140 replace_call_with_call_and_fold (gsi, repl);
1142 return true;
1145 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1146 to built-in memset (dest, 0, len). */
1148 static bool
1149 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1151 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1153 if (!fn)
1154 return false;
1156 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1158 gimple *stmt = gsi_stmt (*gsi);
1159 tree dest = gimple_call_arg (stmt, 0);
1160 tree len = gimple_call_arg (stmt, 1);
1162 gimple_seq seq = NULL;
1163 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1164 gimple_seq_add_stmt_without_update (&seq, repl);
1165 gsi_replace_with_seq_vops (gsi, seq);
1166 fold_stmt (gsi);
1168 return true;
1171 /* Fold function call to builtin memset or bzero at *GSI setting the
1172 memory of size LEN to VAL. Return whether a simplification was made. */
1174 static bool
1175 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1177 gimple *stmt = gsi_stmt (*gsi);
1178 tree etype;
1179 unsigned HOST_WIDE_INT length, cval;
1181 /* If the LEN parameter is zero, return DEST. */
1182 if (integer_zerop (len))
1184 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1185 return true;
1188 if (! tree_fits_uhwi_p (len))
1189 return false;
1191 if (TREE_CODE (c) != INTEGER_CST)
1192 return false;
1194 tree dest = gimple_call_arg (stmt, 0);
1195 tree var = dest;
1196 if (TREE_CODE (var) != ADDR_EXPR)
1197 return false;
1199 var = TREE_OPERAND (var, 0);
1200 if (TREE_THIS_VOLATILE (var))
1201 return false;
1203 etype = TREE_TYPE (var);
1204 if (TREE_CODE (etype) == ARRAY_TYPE)
1205 etype = TREE_TYPE (etype);
1207 if (!INTEGRAL_TYPE_P (etype)
1208 && !POINTER_TYPE_P (etype))
1209 return NULL_TREE;
1211 if (! var_decl_component_p (var))
1212 return NULL_TREE;
1214 length = tree_to_uhwi (len);
1215 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1216 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1217 return NULL_TREE;
1219 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1220 return NULL_TREE;
1222 if (integer_zerop (c))
1223 cval = 0;
1224 else
1226 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1227 return NULL_TREE;
1229 cval = TREE_INT_CST_LOW (c);
1230 cval &= 0xff;
1231 cval |= cval << 8;
1232 cval |= cval << 16;
1233 cval |= (cval << 31) << 1;
1236 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1237 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1238 gimple_set_vuse (store, gimple_vuse (stmt));
1239 tree vdef = gimple_vdef (stmt);
1240 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1242 gimple_set_vdef (store, gimple_vdef (stmt));
1243 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1245 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1246 if (gimple_call_lhs (stmt))
1248 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1249 gsi_replace (gsi, asgn, false);
1251 else
1253 gimple_stmt_iterator gsi2 = *gsi;
1254 gsi_prev (gsi);
1255 gsi_remove (&gsi2, true);
1258 return true;
1262 /* Obtain the minimum and maximum string length or minimum and maximum
1263 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1264 If ARG is an SSA name variable, follow its use-def chains. When
1265 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1266 if we are unable to determine the length or value, return false.
1267 VISITED is a bitmap of visited variables.
1268 TYPE is 0 if string length should be obtained, 1 for maximum string
1269 length and 2 for maximum value ARG can have.
1270 When FUZZY is non-zero and the length of a string cannot be determined,
1271 the function instead considers as the maximum possible length the
1272 size of a character array it may refer to. If FUZZY is 2, it will handle
1273 PHIs and COND_EXPRs optimistically, if we can determine string length
1274 minimum and maximum, it will use the minimum from the ones where it
1275 can be determined.
1276 Set *FLEXP to true if the range of the string lengths has been
1277 obtained from the upper bound of an array at the end of a struct.
1278 Such an array may hold a string that's longer than its upper bound
1279 due to it being used as a poor-man's flexible array member. */
1281 static bool
1282 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1283 int fuzzy, bool *flexp)
1285 tree var, val = NULL_TREE;
1286 gimple *def_stmt;
1288 /* The minimum and maximum length. */
1289 tree *const minlen = length;
1290 tree *const maxlen = length + 1;
1292 if (TREE_CODE (arg) != SSA_NAME)
1294 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1295 if (TREE_CODE (arg) == ADDR_EXPR
1296 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1298 tree op = TREE_OPERAND (arg, 0);
1299 if (integer_zerop (TREE_OPERAND (op, 1)))
1301 tree aop0 = TREE_OPERAND (op, 0);
1302 if (TREE_CODE (aop0) == INDIRECT_REF
1303 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1304 return get_range_strlen (TREE_OPERAND (aop0, 0),
1305 length, visited, type, fuzzy, flexp);
1307 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
1309 /* Fail if an array is the last member of a struct object
1310 since it could be treated as a (fake) flexible array
1311 member. */
1312 tree idx = TREE_OPERAND (op, 1);
1314 arg = TREE_OPERAND (op, 0);
1315 tree optype = TREE_TYPE (arg);
1316 if (tree dom = TYPE_DOMAIN (optype))
1317 if (tree bound = TYPE_MAX_VALUE (dom))
1318 if (TREE_CODE (bound) == INTEGER_CST
1319 && TREE_CODE (idx) == INTEGER_CST
1320 && tree_int_cst_lt (bound, idx))
1321 return false;
1325 if (type == 2)
1327 val = arg;
1328 if (TREE_CODE (val) != INTEGER_CST
1329 || tree_int_cst_sgn (val) < 0)
1330 return false;
1332 else
1333 val = c_strlen (arg, 1);
1335 if (!val && fuzzy)
1337 if (TREE_CODE (arg) == ADDR_EXPR)
1338 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1339 visited, type, fuzzy, flexp);
1341 if (TREE_CODE (arg) == ARRAY_REF)
1343 tree type = TREE_TYPE (TREE_OPERAND (arg, 0));
1345 /* Determine the "innermost" array type. */
1346 while (TREE_CODE (type) == ARRAY_TYPE
1347 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1348 type = TREE_TYPE (type);
1350 /* Avoid arrays of pointers. */
1351 tree eltype = TREE_TYPE (type);
1352 if (TREE_CODE (type) != ARRAY_TYPE
1353 || !INTEGRAL_TYPE_P (eltype))
1354 return false;
1356 val = TYPE_SIZE_UNIT (type);
1357 if (!val || integer_zerop (val))
1358 return false;
1360 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1361 integer_one_node);
1362 /* Set the minimum size to zero since the string in
1363 the array could have zero length. */
1364 *minlen = ssize_int (0);
1366 if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
1367 && type == TREE_TYPE (TREE_OPERAND (arg, 0))
1368 && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
1369 *flexp = true;
1371 else if (TREE_CODE (arg) == COMPONENT_REF
1372 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1373 == ARRAY_TYPE))
1375 /* Use the type of the member array to determine the upper
1376 bound on the length of the array. This may be overly
1377 optimistic if the array itself isn't NUL-terminated and
1378 the caller relies on the subsequent member to contain
1379 the NUL but that would only be considered valid if
1380 the array were the last member of a struct.
1381 Set *FLEXP to true if the array whose bound is being
1382 used is at the end of a struct. */
1383 if (array_at_struct_end_p (arg))
1384 *flexp = true;
1386 arg = TREE_OPERAND (arg, 1);
1388 tree type = TREE_TYPE (arg);
1390 while (TREE_CODE (type) == ARRAY_TYPE
1391 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
1392 type = TREE_TYPE (type);
1394 /* Fail when the array bound is unknown or zero. */
1395 val = TYPE_SIZE_UNIT (type);
1396 if (!val || integer_zerop (val))
1397 return false;
1398 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1399 integer_one_node);
1400 /* Set the minimum size to zero since the string in
1401 the array could have zero length. */
1402 *minlen = ssize_int (0);
1405 if (VAR_P (arg))
1407 tree type = TREE_TYPE (arg);
1408 if (POINTER_TYPE_P (type))
1409 type = TREE_TYPE (type);
1411 if (TREE_CODE (type) == ARRAY_TYPE)
1413 val = TYPE_SIZE_UNIT (type);
1414 if (!val
1415 || TREE_CODE (val) != INTEGER_CST
1416 || integer_zerop (val))
1417 return false;
1418 val = wide_int_to_tree (TREE_TYPE (val),
1419 wi::sub (wi::to_wide (val), 1));
1420 /* Set the minimum size to zero since the string in
1421 the array could have zero length. */
1422 *minlen = ssize_int (0);
1427 if (!val)
1428 return false;
1430 if (!*minlen
1431 || (type > 0
1432 && TREE_CODE (*minlen) == INTEGER_CST
1433 && TREE_CODE (val) == INTEGER_CST
1434 && tree_int_cst_lt (val, *minlen)))
1435 *minlen = val;
1437 if (*maxlen)
1439 if (type > 0)
1441 if (TREE_CODE (*maxlen) != INTEGER_CST
1442 || TREE_CODE (val) != INTEGER_CST)
1443 return false;
1445 if (tree_int_cst_lt (*maxlen, val))
1446 *maxlen = val;
1447 return true;
1449 else if (simple_cst_equal (val, *maxlen) != 1)
1450 return false;
1453 *maxlen = val;
1454 return true;
1457 /* If ARG is registered for SSA update we cannot look at its defining
1458 statement. */
1459 if (name_registered_for_update_p (arg))
1460 return false;
1462 /* If we were already here, break the infinite cycle. */
1463 if (!*visited)
1464 *visited = BITMAP_ALLOC (NULL);
1465 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1466 return true;
1468 var = arg;
1469 def_stmt = SSA_NAME_DEF_STMT (var);
1471 switch (gimple_code (def_stmt))
1473 case GIMPLE_ASSIGN:
1474 /* The RHS of the statement defining VAR must either have a
1475 constant length or come from another SSA_NAME with a constant
1476 length. */
1477 if (gimple_assign_single_p (def_stmt)
1478 || gimple_assign_unary_nop_p (def_stmt))
1480 tree rhs = gimple_assign_rhs1 (def_stmt);
1481 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1483 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1485 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1486 gimple_assign_rhs3 (def_stmt) };
1488 for (unsigned int i = 0; i < 2; i++)
1489 if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
1490 flexp))
1492 if (fuzzy == 2)
1493 *maxlen = build_all_ones_cst (size_type_node);
1494 else
1495 return false;
1497 return true;
1499 return false;
1501 case GIMPLE_PHI:
1502 /* All the arguments of the PHI node must have the same constant
1503 length. */
1504 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1506 tree arg = gimple_phi_arg (def_stmt, i)->def;
1508 /* If this PHI has itself as an argument, we cannot
1509 determine the string length of this argument. However,
1510 if we can find a constant string length for the other
1511 PHI args then we can still be sure that this is a
1512 constant string length. So be optimistic and just
1513 continue with the next argument. */
1514 if (arg == gimple_phi_result (def_stmt))
1515 continue;
1517 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1519 if (fuzzy == 2)
1520 *maxlen = build_all_ones_cst (size_type_node);
1521 else
1522 return false;
1525 return true;
1527 default:
1528 return false;
1532 /* Determine the minimum and maximum value or string length that ARG
1533 refers to and store each in the first two elements of MINMAXLEN.
1534 For expressions that point to strings of unknown lengths that are
1535 character arrays, use the upper bound of the array as the maximum
1536 length. For example, given an expression like 'x ? array : "xyz"'
1537 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1538 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1539 stored in array.
1540 Return true if the range of the string lengths has been obtained
1541 from the upper bound of an array at the end of a struct. Such
1542 an array may hold a string that's longer than its upper bound
1543 due to it being used as a poor-man's flexible array member.
1545 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1546 and false if PHIs and COND_EXPRs are to be handled optimistically,
1547 if we can determine string length minimum and maximum; it will use
1548 the minimum from the ones where it can be determined.
1549 STRICT false should be only used for warning code. */
1551 bool
1552 get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
1554 bitmap visited = NULL;
1556 minmaxlen[0] = NULL_TREE;
1557 minmaxlen[1] = NULL_TREE;
1559 bool flexarray = false;
1560 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1561 &flexarray))
1563 minmaxlen[0] = NULL_TREE;
1564 minmaxlen[1] = NULL_TREE;
1567 if (visited)
1568 BITMAP_FREE (visited);
1570 return flexarray;
1573 tree
1574 get_maxval_strlen (tree arg, int type)
1576 bitmap visited = NULL;
1577 tree len[2] = { NULL_TREE, NULL_TREE };
1579 bool dummy;
1580 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
1581 len[1] = NULL_TREE;
1582 if (visited)
1583 BITMAP_FREE (visited);
1585 return len[1];
1589 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1590 If LEN is not NULL, it represents the length of the string to be
1591 copied. Return NULL_TREE if no simplification can be made. */
1593 static bool
1594 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1595 tree dest, tree src)
1597 gimple *stmt = gsi_stmt (*gsi);
1598 location_t loc = gimple_location (stmt);
1599 tree fn;
1601 /* If SRC and DEST are the same (and not volatile), return DEST. */
1602 if (operand_equal_p (src, dest, 0))
1604 /* Issue -Wrestrict unless the pointers are null (those do
1605 not point to objects and so do not indicate an overlap;
1606 such calls could be the result of sanitization and jump
1607 threading). */
1608 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1610 tree func = gimple_call_fndecl (stmt);
1612 warning_at (loc, OPT_Wrestrict,
1613 "%qD source argument is the same as destination",
1614 func);
1617 replace_call_with_value (gsi, dest);
1618 return true;
1621 if (optimize_function_for_size_p (cfun))
1622 return false;
1624 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1625 if (!fn)
1626 return false;
1628 tree len = get_maxval_strlen (src, 0);
1629 if (!len)
1630 return false;
1632 len = fold_convert_loc (loc, size_type_node, len);
1633 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1634 len = force_gimple_operand_gsi (gsi, len, true,
1635 NULL_TREE, true, GSI_SAME_STMT);
1636 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1637 replace_call_with_call_and_fold (gsi, repl);
1638 return true;
1641 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1642 If SLEN is not NULL, it represents the length of the source string.
1643 Return NULL_TREE if no simplification can be made. */
1645 static bool
1646 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1647 tree dest, tree src, tree len)
1649 gimple *stmt = gsi_stmt (*gsi);
1650 location_t loc = gimple_location (stmt);
1651 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1653 /* If the LEN parameter is zero, return DEST. */
1654 if (integer_zerop (len))
1656 /* Avoid warning if the destination refers to a an array/pointer
1657 decorate with attribute nonstring. */
1658 if (!nonstring)
1660 tree fndecl = gimple_call_fndecl (stmt);
1661 gcall *call = as_a <gcall *> (stmt);
1663 /* Warn about the lack of nul termination: the result is not
1664 a (nul-terminated) string. */
1665 tree slen = get_maxval_strlen (src, 0);
1666 if (slen && !integer_zerop (slen))
1667 warning_at (loc, OPT_Wstringop_truncation,
1668 "%G%qD destination unchanged after copying no bytes "
1669 "from a string of length %E",
1670 call, fndecl, slen);
1671 else
1672 warning_at (loc, OPT_Wstringop_truncation,
1673 "%G%qD destination unchanged after copying no bytes",
1674 call, fndecl);
1677 replace_call_with_value (gsi, dest);
1678 return true;
1681 /* We can't compare slen with len as constants below if len is not a
1682 constant. */
1683 if (TREE_CODE (len) != INTEGER_CST)
1684 return false;
1686 /* Now, we must be passed a constant src ptr parameter. */
1687 tree slen = get_maxval_strlen (src, 0);
1688 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1689 return false;
1691 /* The size of the source string including the terminating nul. */
1692 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1694 /* We do not support simplification of this case, though we do
1695 support it when expanding trees into RTL. */
1696 /* FIXME: generate a call to __builtin_memset. */
1697 if (tree_int_cst_lt (ssize, len))
1698 return false;
1700 /* Diagnose truncation that leaves the copy unterminated. */
1701 maybe_diag_stxncpy_trunc (*gsi, src, len);
1703 /* OK transform into builtin memcpy. */
1704 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1705 if (!fn)
1706 return false;
1708 len = fold_convert_loc (loc, size_type_node, len);
1709 len = force_gimple_operand_gsi (gsi, len, true,
1710 NULL_TREE, true, GSI_SAME_STMT);
1711 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1712 replace_call_with_call_and_fold (gsi, repl);
1714 return true;
1717 /* Fold function call to builtin strchr or strrchr.
1718 If both arguments are constant, evaluate and fold the result,
1719 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1720 In general strlen is significantly faster than strchr
1721 due to being a simpler operation. */
1722 static bool
1723 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1725 gimple *stmt = gsi_stmt (*gsi);
1726 tree str = gimple_call_arg (stmt, 0);
1727 tree c = gimple_call_arg (stmt, 1);
1728 location_t loc = gimple_location (stmt);
1729 const char *p;
1730 char ch;
1732 if (!gimple_call_lhs (stmt))
1733 return false;
1735 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1737 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1739 if (p1 == NULL)
1741 replace_call_with_value (gsi, integer_zero_node);
1742 return true;
1745 tree len = build_int_cst (size_type_node, p1 - p);
1746 gimple_seq stmts = NULL;
1747 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1748 POINTER_PLUS_EXPR, str, len);
1749 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1750 gsi_replace_with_seq_vops (gsi, stmts);
1751 return true;
1754 if (!integer_zerop (c))
1755 return false;
1757 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1758 if (is_strrchr && optimize_function_for_size_p (cfun))
1760 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1762 if (strchr_fn)
1764 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1765 replace_call_with_call_and_fold (gsi, repl);
1766 return true;
1769 return false;
1772 tree len;
1773 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1775 if (!strlen_fn)
1776 return false;
1778 /* Create newstr = strlen (str). */
1779 gimple_seq stmts = NULL;
1780 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1781 gimple_set_location (new_stmt, loc);
1782 len = create_tmp_reg_or_ssa_name (size_type_node);
1783 gimple_call_set_lhs (new_stmt, len);
1784 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1786 /* Create (str p+ strlen (str)). */
1787 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1788 POINTER_PLUS_EXPR, str, len);
1789 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1790 gsi_replace_with_seq_vops (gsi, stmts);
1791 /* gsi now points at the assignment to the lhs, get a
1792 stmt iterator to the strlen.
1793 ??? We can't use gsi_for_stmt as that doesn't work when the
1794 CFG isn't built yet. */
1795 gimple_stmt_iterator gsi2 = *gsi;
1796 gsi_prev (&gsi2);
1797 fold_stmt (&gsi2);
1798 return true;
1801 /* Fold function call to builtin strstr.
1802 If both arguments are constant, evaluate and fold the result,
1803 additionally fold strstr (x, "") into x and strstr (x, "c")
1804 into strchr (x, 'c'). */
1805 static bool
1806 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1808 gimple *stmt = gsi_stmt (*gsi);
1809 tree haystack = gimple_call_arg (stmt, 0);
1810 tree needle = gimple_call_arg (stmt, 1);
1811 const char *p, *q;
1813 if (!gimple_call_lhs (stmt))
1814 return false;
1816 q = c_getstr (needle);
1817 if (q == NULL)
1818 return false;
1820 if ((p = c_getstr (haystack)))
1822 const char *r = strstr (p, q);
1824 if (r == NULL)
1826 replace_call_with_value (gsi, integer_zero_node);
1827 return true;
1830 tree len = build_int_cst (size_type_node, r - p);
1831 gimple_seq stmts = NULL;
1832 gimple *new_stmt
1833 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1834 haystack, len);
1835 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1836 gsi_replace_with_seq_vops (gsi, stmts);
1837 return true;
1840 /* For strstr (x, "") return x. */
1841 if (q[0] == '\0')
1843 replace_call_with_value (gsi, haystack);
1844 return true;
1847 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1848 if (q[1] == '\0')
1850 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1851 if (strchr_fn)
1853 tree c = build_int_cst (integer_type_node, q[0]);
1854 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1855 replace_call_with_call_and_fold (gsi, repl);
1856 return true;
1860 return false;
1863 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1864 to the call.
1866 Return NULL_TREE if no simplification was possible, otherwise return the
1867 simplified form of the call as a tree.
1869 The simplified form may be a constant or other expression which
1870 computes the same value, but in a more efficient manner (including
1871 calls to other builtin functions).
1873 The call may contain arguments which need to be evaluated, but
1874 which are not useful to determine the result of the call. In
1875 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1876 COMPOUND_EXPR will be an argument which must be evaluated.
1877 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1878 COMPOUND_EXPR in the chain will contain the tree for the simplified
1879 form of the builtin function call. */
1881 static bool
1882 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1884 gimple *stmt = gsi_stmt (*gsi);
1885 location_t loc = gimple_location (stmt);
1887 const char *p = c_getstr (src);
1889 /* If the string length is zero, return the dst parameter. */
1890 if (p && *p == '\0')
1892 replace_call_with_value (gsi, dst);
1893 return true;
1896 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1897 return false;
1899 /* See if we can store by pieces into (dst + strlen(dst)). */
1900 tree newdst;
1901 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1902 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1904 if (!strlen_fn || !memcpy_fn)
1905 return false;
1907 /* If the length of the source string isn't computable don't
1908 split strcat into strlen and memcpy. */
1909 tree len = get_maxval_strlen (src, 0);
1910 if (! len)
1911 return false;
1913 /* Create strlen (dst). */
1914 gimple_seq stmts = NULL, stmts2;
1915 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1916 gimple_set_location (repl, loc);
1917 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1918 gimple_call_set_lhs (repl, newdst);
1919 gimple_seq_add_stmt_without_update (&stmts, repl);
1921 /* Create (dst p+ strlen (dst)). */
1922 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1923 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1924 gimple_seq_add_seq_without_update (&stmts, stmts2);
1926 len = fold_convert_loc (loc, size_type_node, len);
1927 len = size_binop_loc (loc, PLUS_EXPR, len,
1928 build_int_cst (size_type_node, 1));
1929 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1930 gimple_seq_add_seq_without_update (&stmts, stmts2);
1932 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1933 gimple_seq_add_stmt_without_update (&stmts, repl);
1934 if (gimple_call_lhs (stmt))
1936 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1937 gimple_seq_add_stmt_without_update (&stmts, repl);
1938 gsi_replace_with_seq_vops (gsi, stmts);
1939 /* gsi now points at the assignment to the lhs, get a
1940 stmt iterator to the memcpy call.
1941 ??? We can't use gsi_for_stmt as that doesn't work when the
1942 CFG isn't built yet. */
1943 gimple_stmt_iterator gsi2 = *gsi;
1944 gsi_prev (&gsi2);
1945 fold_stmt (&gsi2);
1947 else
1949 gsi_replace_with_seq_vops (gsi, stmts);
1950 fold_stmt (gsi);
1952 return true;
1955 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1956 are the arguments to the call. */
1958 static bool
1959 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1961 gimple *stmt = gsi_stmt (*gsi);
1962 tree dest = gimple_call_arg (stmt, 0);
1963 tree src = gimple_call_arg (stmt, 1);
1964 tree size = gimple_call_arg (stmt, 2);
1965 tree fn;
1966 const char *p;
1969 p = c_getstr (src);
1970 /* If the SRC parameter is "", return DEST. */
1971 if (p && *p == '\0')
1973 replace_call_with_value (gsi, dest);
1974 return true;
1977 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1978 return false;
1980 /* If __builtin_strcat_chk is used, assume strcat is available. */
1981 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1982 if (!fn)
1983 return false;
1985 gimple *repl = gimple_build_call (fn, 2, dest, src);
1986 replace_call_with_call_and_fold (gsi, repl);
1987 return true;
1990 /* Simplify a call to the strncat builtin. */
1992 static bool
1993 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1995 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1996 tree dst = gimple_call_arg (stmt, 0);
1997 tree src = gimple_call_arg (stmt, 1);
1998 tree len = gimple_call_arg (stmt, 2);
2000 const char *p = c_getstr (src);
2002 /* If the requested length is zero, or the src parameter string
2003 length is zero, return the dst parameter. */
2004 if (integer_zerop (len) || (p && *p == '\0'))
2006 replace_call_with_value (gsi, dst);
2007 return true;
2010 if (TREE_CODE (len) != INTEGER_CST || !p)
2011 return false;
2013 unsigned srclen = strlen (p);
2015 int cmpsrc = compare_tree_int (len, srclen);
2017 /* Return early if the requested len is less than the string length.
2018 Warnings will be issued elsewhere later. */
2019 if (cmpsrc < 0)
2020 return false;
2022 unsigned HOST_WIDE_INT dstsize;
2024 bool nowarn = gimple_no_warning_p (stmt);
2026 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
2028 int cmpdst = compare_tree_int (len, dstsize);
2030 if (cmpdst >= 0)
2032 tree fndecl = gimple_call_fndecl (stmt);
2034 /* Strncat copies (at most) LEN bytes and always appends
2035 the terminating NUL so the specified bound should never
2036 be equal to (or greater than) the size of the destination.
2037 If it is, the copy could overflow. */
2038 location_t loc = gimple_location (stmt);
2039 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2040 cmpdst == 0
2041 ? G_("%G%qD specified bound %E equals "
2042 "destination size")
2043 : G_("%G%qD specified bound %E exceeds "
2044 "destination size %wu"),
2045 stmt, fndecl, len, dstsize);
2046 if (nowarn)
2047 gimple_set_no_warning (stmt, true);
2051 if (!nowarn && cmpsrc == 0)
2053 tree fndecl = gimple_call_fndecl (stmt);
2055 /* To avoid certain truncation the specified bound should also
2056 not be equal to (or less than) the length of the source. */
2057 location_t loc = gimple_location (stmt);
2058 if (warning_at (loc, OPT_Wstringop_overflow_,
2059 "%G%qD specified bound %E equals source length",
2060 stmt, fndecl, len))
2061 gimple_set_no_warning (stmt, true);
2064 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2066 /* If the replacement _DECL isn't initialized, don't do the
2067 transformation. */
2068 if (!fn)
2069 return false;
2071 /* Otherwise, emit a call to strcat. */
2072 gcall *repl = gimple_build_call (fn, 2, dst, src);
2073 replace_call_with_call_and_fold (gsi, repl);
2074 return true;
2077 /* Fold a call to the __strncat_chk builtin pointed to by GSI; the
2078 arguments DEST, SRC, LEN, and SIZE are taken from the call statement.
   Return true if the call was simplified and replaced.  */
2080 static bool
2081 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2083 gimple *stmt = gsi_stmt (*gsi);
2084 tree dest = gimple_call_arg (stmt, 0);
2085 tree src = gimple_call_arg (stmt, 1);
2086 tree len = gimple_call_arg (stmt, 2);
2087 tree size = gimple_call_arg (stmt, 3);
2088 tree fn;
2089 const char *p;
2091 p = c_getstr (src);
2092 /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
2093 if ((p && *p == '\0')
2094 || integer_zerop (len))
2096 replace_call_with_value (gsi, dest);
2097 return true;
   /* Below we rely on SIZE being a compile-time constant.  */
2100 if (! tree_fits_uhwi_p (size))
2101 return false;
   /* A SIZE of all ones means the destination object size is unknown
      (no checking requested); only in that case is the plain strncat
      at the bottom a direct 1:1 replacement.  */
2103 if (! integer_all_onesp (size))
2105 tree src_len = c_strlen (src, 1);
2106 if (src_len
2107 && tree_fits_uhwi_p (src_len)
2108 && tree_fits_uhwi_p (len)
2109 && ! tree_int_cst_lt (len, src_len))
2111 /* If LEN >= strlen (SRC), optimize into __strcat_chk.  The bound
2112 cannot truncate, so the checked unbounded variant is equivalent.  */
2112 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2113 if (!fn)
2114 return false;
2116 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2117 replace_call_with_call_and_fold (gsi, repl);
2118 return true;
2120 return false;
2123 /* If __builtin_strncat_chk is used, assume strncat is available.  */
2124 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2125 if (!fn)
2126 return false;
2128 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2129 replace_call_with_call_and_fold (gsi, repl);
2130 return true;
2133 /* Build and append gimple statements to STMTS that would load a first
2134 character of a memory location identified by STR.  LOC is location
2135 of the statement.  Return the temporary (register or SSA name)
   holding the loaded character.  */
2137 static tree
2138 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2140 tree var;
   /* Load through "const unsigned char *": the str* builtins compare
      characters as unsigned char, so the load must use that type.  */
2142 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2143 tree cst_uchar_ptr_node
2144 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2145 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
   /* *(const unsigned char *) STR, i.e. a MEM_REF at offset 0.  */
2147 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2148 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2149 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2151 gimple_assign_set_lhs (stmt, var);
2152 gimple_seq_add_stmt_without_update (stmts, stmt);
2154 return var;
2157 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2158 The builtin's function code is derived from the call's fndecl.
   Return true if the call was simplified and replaced.  */
2160 static bool
2161 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2163 gimple *stmt = gsi_stmt (*gsi);
2164 tree callee = gimple_call_fndecl (stmt);
2165 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2167 tree type = integer_type_node;
2168 tree str1 = gimple_call_arg (stmt, 0);
2169 tree str2 = gimple_call_arg (stmt, 1);
2170 tree lhs = gimple_call_lhs (stmt);
   /* LENGTH is the strncmp/strncasecmp bound; -1 means unknown or
      not applicable (two-argument variants).  */
2171 HOST_WIDE_INT length = -1;
2173 /* Handle strncmp and strncasecmp functions.  */
2174 if (gimple_call_num_args (stmt) == 3)
2176 tree len = gimple_call_arg (stmt, 2);
2177 if (tree_fits_uhwi_p (len))
2178 length = tree_to_uhwi (len);
2181 /* If the LEN parameter is zero, return zero.  */
2182 if (length == 0)
2184 replace_call_with_value (gsi, integer_zero_node);
2185 return true;
2188 /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
2189 if (operand_equal_p (str1, str2, 0))
2191 replace_call_with_value (gsi, integer_zero_node);
2192 return true;
2195 const char *p1 = c_getstr (str1);
2196 const char *p2 = c_getstr (str2);
2198 /* For known strings, return an immediate value.  */
2199 if (p1 && p2)
2201 int r = 0;
2202 bool known_result = false;
2204 switch (fcode)
2206 case BUILT_IN_STRCMP:
2207 case BUILT_IN_STRCMP_EQ:
2209 r = strcmp (p1, p2);
2210 known_result = true;
2211 break;
2213 case BUILT_IN_STRNCMP:
2214 case BUILT_IN_STRNCMP_EQ:
2216 if (length == -1)
2217 break;
2218 r = strncmp (p1, p2, length);
2219 known_result = true;
2220 break;
2222 /* Only handleable situation is where the string are equal (result 0),
2223 which is already handled by operand_equal_p case.  */
2224 case BUILT_IN_STRCASECMP:
2225 break;
2226 case BUILT_IN_STRNCASECMP:
2228 if (length == -1)
2229 break;
   /* Case-sensitive comparison is only usable here when it reports
      equality; inequality might flip under case folding.  */
2230 r = strncmp (p1, p2, length);
2231 if (r == 0)
2232 known_result = true;
2233 break;
2235 default:
2236 gcc_unreachable ();
2239 if (known_result)
2241 replace_call_with_value (gsi, build_cmp_result (type, r));
2242 return true;
   /* True if at least one character is certainly compared, so a ""
      argument determines the result from the other string's first char.  */
2246 bool nonzero_length = length >= 1
2247 || fcode == BUILT_IN_STRCMP
2248 || fcode == BUILT_IN_STRCMP_EQ
2249 || fcode == BUILT_IN_STRCASECMP;
2251 location_t loc = gimple_location (stmt);
2253 /* If the second arg is "", return *(const unsigned char*)arg1.  */
2254 if (p2 && *p2 == '\0' && nonzero_length)
2256 gimple_seq stmts = NULL;
2257 tree var = gimple_load_first_char (loc, str1, &stmts);
2258 if (lhs)
2260 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2261 gimple_seq_add_stmt_without_update (&stmts, stmt);
2264 gsi_replace_with_seq_vops (gsi, stmts);
2265 return true;
2268 /* If the first arg is "", return -*(const unsigned char*)arg2.  */
2269 if (p1 && *p1 == '\0' && nonzero_length)
2271 gimple_seq stmts = NULL;
2272 tree var = gimple_load_first_char (loc, str2, &stmts);
2274 if (lhs)
   /* Widen to int before negating so the negation is done in the
      result type, not in unsigned char.  */
2276 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2277 stmt = gimple_build_assign (c, NOP_EXPR, var);
2278 gimple_seq_add_stmt_without_update (&stmts, stmt);
2280 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2281 gimple_seq_add_stmt_without_update (&stmts, stmt);
2284 gsi_replace_with_seq_vops (gsi, stmts);
2285 return true;
2288 /* If len parameter is one, return an expression corresponding to
2289 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
2290 if (fcode == BUILT_IN_STRNCMP && length == 1)
2292 gimple_seq stmts = NULL;
2293 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2294 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2296 if (lhs)
2298 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2299 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2300 gimple_seq_add_stmt_without_update (&stmts, convert1);
2302 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2303 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2304 gimple_seq_add_stmt_without_update (&stmts, convert2);
2306 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2307 gimple_seq_add_stmt_without_update (&stmts, stmt);
2310 gsi_replace_with_seq_vops (gsi, stmts);
2311 return true;
2314 /* If length is larger than the length of one constant string,
2315 replace strncmp with corresponding strcmp: the bound then never
   takes effect because the NUL of the shorter string stops the scan.  */
2316 if (fcode == BUILT_IN_STRNCMP
2317 && length > 0
2318 && ((p2 && (size_t) length > strlen (p2))
2319 || (p1 && (size_t) length > strlen (p1))))
2321 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2322 if (!fn)
2323 return false;
2324 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2325 replace_call_with_call_and_fold (gsi, repl);
2326 return true;
2329 return false;
2332 /* Fold a call to the memchr pointed by GSI iterator.  Return true if
   the call was simplified and replaced.  */
2334 static bool
2335 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2337 gimple *stmt = gsi_stmt (*gsi);
2338 tree lhs = gimple_call_lhs (stmt);
2339 tree arg1 = gimple_call_arg (stmt, 0);
2340 tree arg2 = gimple_call_arg (stmt, 1);
2341 tree len = gimple_call_arg (stmt, 2);
2343 /* If the LEN parameter is zero, return zero.  */
2344 if (integer_zerop (len))
2346 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2347 return true;
   /* Need a constant searched-for character and constant length.  */
2350 char c;
2351 if (TREE_CODE (arg2) != INTEGER_CST
2352 || !tree_fits_uhwi_p (len)
2353 || !target_char_cst_p (arg2, &c))
2354 return false;
2356 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2357 unsigned HOST_WIDE_INT string_length;
2358 const char *p1 = c_getstr (arg1, &string_length);
2360 if (p1)
   /* Only scan the part of the buffer whose contents are known.  */
2362 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2363 if (r == NULL)
   /* Not found within the known prefix.  Only conclude "not found"
      when LEN does not extend past the known string contents.  */
2365 if (length <= string_length)
2367 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2368 return true;
2371 else
2373 unsigned HOST_WIDE_INT offset = r - p1;
2374 gimple_seq stmts = NULL;
2375 if (lhs != NULL_TREE)
   /* Found: result is ARG1 + OFFSET.  */
2377 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2378 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2379 arg1, offset_cst);
2380 gimple_seq_add_stmt_without_update (&stmts, stmt);
2382 else
   /* Result unused: replace the call with a no-op.  */
2383 gimple_seq_add_stmt_without_update (&stmts,
2384 gimple_build_nop ());
2386 gsi_replace_with_seq_vops (gsi, stmts);
2387 return true;
2391 return false;
2394 /* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
2395 to the call (the string and the stream).  UNLOCKED is true if this
2396 is actually a call to fputs_unlocked.  Return true if the call was
2397 simplified and replaced, false if no simplification was possible.
2398 (The folding is only done when the call's return value is unused,
2399 since fputc/fwrite return values differ from fputs.)  */
2401 static bool
2402 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2403 tree arg0, tree arg1,
2404 bool unlocked)
2406 gimple *stmt = gsi_stmt (*gsi);
2408 /* If we're using an unlocked function, assume the other unlocked
2409 functions exist explicitly.  */
2410 tree const fn_fputc = (unlocked
2411 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2412 : builtin_decl_implicit (BUILT_IN_FPUTC))
2413 tree const fn_fwrite = (unlocked
2414 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2415 : builtin_decl_implicit (BUILT_IN_FWRITE));
2417 /* If the return value is used, don't do the transformation.  */
2418 if (gimple_call_lhs (stmt))
2419 return false;
2421 /* Get the length of the string passed to fputs.  If the length
2422 can't be determined, punt.  */
2423 tree len = get_maxval_strlen (arg0, 0);
2424 if (!len
2425 || TREE_CODE (len) != INTEGER_CST)
2426 return false;
   /* Dispatch on LEN compared against 1: -1 => LEN < 1, 0 => LEN == 1,
      1 => LEN > 1.  */
2428 switch (compare_tree_int (len, 1))
2430 case -1: /* length is 0, delete the call entirely .  */
2431 replace_call_with_value (gsi, integer_zero_node);
2432 return true;
2434 case 0: /* length is 1, call fputc.  */
2436 const char *p = c_getstr (arg0);
2437 if (p != NULL)
2439 if (!fn_fputc)
2440 return false;
2442 gimple *repl = gimple_build_call (fn_fputc, 2,
2443 build_int_cst
2444 (integer_type_node, p[0]), arg1);
2445 replace_call_with_call_and_fold (gsi, repl);
2446 return true;
   /* String contents unknown even though length is 1: treat like the
      general case below.  */
2449 /* FALLTHROUGH */
2450 case 1: /* length is greater than 1, call fwrite.  */
2452 /* If optimizing for size keep fputs.  */
2453 if (optimize_function_for_size_p (cfun))
2454 return false;
2455 /* New argument list transforming fputs(string, stream) to
2456 fwrite(string, 1, len, stream).  */
2457 if (!fn_fwrite)
2458 return false;
2460 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2461 size_one_node, len, arg1);
2462 replace_call_with_call_and_fold (gsi, repl);
2463 return true;
2465 default:
2466 gcc_unreachable ();
2468 return false;
2471 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2472 DEST, SRC, LEN, and SIZE are the arguments to the call.
2473 FCODE is the BUILT_IN_* code of the builtin.  Return true if the
2474 call was simplified and replaced.  (The maximum length of LEN, if
2475 not constant, is recomputed here via get_maxval_strlen.)  */
2477 static bool
2478 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2479 tree dest, tree src, tree len, tree size,
2480 enum built_in_function fcode)
2482 gimple *stmt = gsi_stmt (*gsi);
2483 location_t loc = gimple_location (stmt);
2484 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2485 tree fn;
2487 /* If SRC and DEST are the same (and not volatile), return DEST
2488 (resp. DEST+LEN for __mempcpy_chk).  */
2489 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2491 if (fcode != BUILT_IN_MEMPCPY_CHK)
2493 replace_call_with_value (gsi, dest);
2494 return true;
2496 else
2498 gimple_seq stmts = NULL;
2499 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2500 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2501 TREE_TYPE (dest), dest, len);
2502 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2503 replace_call_with_value (gsi, temp);
2504 return true;
   /* Need a constant object SIZE to reason about the check.  */
2508 if (! tree_fits_uhwi_p (size))
2509 return false;
2511 tree maxlen = get_maxval_strlen (len, 2);
   /* SIZE of all ones means the object size is unknown; checking is a
      no-op then and the plain builtin can be substituted directly.  */
2512 if (! integer_all_onesp (size))
2514 if (! tree_fits_uhwi_p (len))
2516 /* If LEN is not constant, try MAXLEN too.
2517 For MAXLEN only allow optimizing into non-_ocs function
2518 if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2519 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2521 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2523 /* (void) __mempcpy_chk () can be optimized into
2524 (void) __memcpy_chk ().  */
2525 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2526 if (!fn)
2527 return false;
2529 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2530 replace_call_with_call_and_fold (gsi, repl);
2531 return true;
2533 return false;
2536 else
2537 maxlen = len;
   /* Punt unless the object is provably large enough (SIZE >= MAXLEN);
      the runtime check could legitimately fire otherwise.  */
2539 if (tree_int_cst_lt (size, maxlen))
2540 return false;
2543 fn = NULL_TREE;
2544 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2545 mem{cpy,pcpy,move,set} is available.  */
2546 switch (fcode)
2548 case BUILT_IN_MEMCPY_CHK:
2549 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2550 break;
2551 case BUILT_IN_MEMPCPY_CHK:
2552 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2553 break;
2554 case BUILT_IN_MEMMOVE_CHK:
2555 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2556 break;
2557 case BUILT_IN_MEMSET_CHK:
2558 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2559 break;
2560 default:
2561 break;
2564 if (!fn)
2565 return false;
2567 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2568 replace_call_with_call_and_fold (gsi, repl);
2569 return true;
2572 /* Fold a call to the __st[rp]cpy_chk builtin.
2573 DEST, SRC, and SIZE are the arguments to the call.
2574 FCODE is the BUILT_IN_* code of the builtin (BUILT_IN_STRCPY_CHK or
2575 BUILT_IN_STPCPY_CHK).  Return true if the call was simplified and
2576 replaced.  */
2578 static bool
2579 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2580 tree dest,
2581 tree src, tree size,
2582 enum built_in_function fcode)
2584 gimple *stmt = gsi_stmt (*gsi);
2585 location_t loc = gimple_location (stmt);
2586 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2587 tree len, fn;
2589 /* If SRC and DEST are the same (and not volatile), return DEST.  */
2590 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2592 /* Issue -Wrestrict unless the pointers are null (those do
2593 not point to objects and so do not indicate an overlap;
2594 such calls could be the result of sanitization and jump
2595 threading).  */
2596 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2598 tree func = gimple_call_fndecl (stmt);
2600 warning_at (loc, OPT_Wrestrict,
2601 "%qD source argument is the same as destination",
2602 func);
2605 replace_call_with_value (gsi, dest);
2606 return true;
   /* Need a constant object SIZE to reason about the check.  */
2609 if (! tree_fits_uhwi_p (size))
2610 return false;
2612 tree maxlen = get_maxval_strlen (src, 1);
   /* SIZE of all ones means the object size is unknown; then the
      unchecked st[rp]cpy at the bottom is a direct replacement.  */
2613 if (! integer_all_onesp (size))
2615 len = c_strlen (src, 1);
2616 if (! len || ! tree_fits_uhwi_p (len))
2618 /* If LEN is not constant, try MAXLEN too.
2619 For MAXLEN only allow optimizing into non-_ocs function
2620 if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2621 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2623 if (fcode == BUILT_IN_STPCPY_CHK)
2625 if (! ignore)
2626 return false;
2628 /* If return value of __stpcpy_chk is ignored,
2629 optimize into __strcpy_chk.  */
2630 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2631 if (!fn)
2632 return false;
2634 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2635 replace_call_with_call_and_fold (gsi, repl);
2636 return true;
2639 if (! len || TREE_SIDE_EFFECTS (len))
2640 return false;
2642 /* If c_strlen returned something, but not a constant,
2643 transform __strcpy_chk into __memcpy_chk.  */
2644 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2645 if (!fn)
2646 return false;
   /* Copy LEN + 1 bytes to include the terminating NUL.  */
2648 gimple_seq stmts = NULL;
2649 len = gimple_convert (&stmts, loc, size_type_node, len);
2650 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2651 build_int_cst (size_type_node, 1));
2652 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2653 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2654 replace_call_with_call_and_fold (gsi, repl);
2655 return true;
2658 else
2659 maxlen = len;
   /* Require SIZE > MAXLEN so the copy (including NUL) provably fits.  */
2661 if (! tree_int_cst_lt (maxlen, size))
2662 return false;
2665 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
2666 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2667 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2668 if (!fn)
2669 return false;
2671 gimple *repl = gimple_build_call (fn, 2, dest, src);
2672 replace_call_with_call_and_fold (gsi, repl);
2673 return true;
2676 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
2677 are the arguments to the call.  FCODE is the BUILT_IN_* code of the
2678 builtin (BUILT_IN_STRNCPY_CHK or BUILT_IN_STPNCPY_CHK).  Return true
2679 if the call was simplified and replaced.  */
2681 static bool
2682 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2683 tree dest, tree src,
2684 tree len, tree size,
2685 enum built_in_function fcode)
2687 gimple *stmt = gsi_stmt (*gsi);
2688 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2689 tree fn;
2691 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2693 /* If return value of __stpncpy_chk is ignored,
2694 optimize into __strncpy_chk.  */
2695 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK)
2696 if (fn)
2698 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2699 replace_call_with_call_and_fold (gsi, repl);
2700 return true;
   /* Need a constant object SIZE to reason about the check.  */
2704 if (! tree_fits_uhwi_p (size))
2705 return false;
2707 tree maxlen = get_maxval_strlen (len, 2);
   /* SIZE of all ones means the object size is unknown; only then is
      the plain st{r,p}ncpy at the bottom a direct replacement.  */
2708 if (! integer_all_onesp (size))
2710 if (! tree_fits_uhwi_p (len))
2712 /* If LEN is not constant, try MAXLEN too.
2713 For MAXLEN only allow optimizing into non-_ocs function
2714 if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2715 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2716 return false;
2718 else
2719 maxlen = len;
   /* Punt unless the write of MAXLEN bytes provably fits in SIZE.  */
2721 if (tree_int_cst_lt (size, maxlen))
2722 return false;
2725 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
2726 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2727 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2728 if (!fn)
2729 return false;
2731 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2732 replace_call_with_call_and_fold (gsi, repl);
2733 return true;
2736 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2737 Return true if the call was simplified and replaced, false otherwise.  */
2739 static bool
2740 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2742 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2743 location_t loc = gimple_location (stmt);
2744 tree dest = gimple_call_arg (stmt, 0);
2745 tree src = gimple_call_arg (stmt, 1);
2746 tree fn, len, lenp1;
2748 /* If the result is unused, replace stpcpy with strcpy.  */
2749 if (gimple_call_lhs (stmt) == NULL_TREE)
2751 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2752 if (!fn)
2753 return false;
2754 gimple_call_set_fndecl (stmt, fn);
2755 fold_stmt (gsi);
2756 return true;
2759 len = c_strlen (src, 1);
2760 if (!len
2761 || TREE_CODE (len) != INTEGER_CST)
2762 return false;
2764 if (optimize_function_for_size_p (cfun)
2765 /* If length is zero it's small enough.  */
2766 && !integer_zerop (len))
2767 return false;
2769 /* If the source has a known length replace stpcpy with memcpy.  */
2770 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2771 if (!fn)
2772 return false;
   /* Copy LEN + 1 bytes so the terminating NUL is included.  */
2774 gimple_seq stmts = NULL;
2775 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2776 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2777 tem, build_int_cst (size_type_node, 1));
2778 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2779 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
   /* The single stpcpy call becomes two statements (memcpy plus the
      result computation), so the virtual operands must be moved to the
      memcpy by hand rather than via replace_call_with_call_and_fold.  */
2780 gimple_set_vuse (repl, gimple_vuse (stmt));
2781 gimple_set_vdef (repl, gimple_vdef (stmt));
2782 if (gimple_vdef (repl)
2783 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2784 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2785 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2786 /* Replace the result with dest + len.  */
2787 stmts = NULL;
2788 tem = gimple_convert (&stmts, loc, sizetype, len);
2789 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2790 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2791 POINTER_PLUS_EXPR, dest, tem);
2792 gsi_replace (gsi, ret, false);
2793 /* Finally fold the memcpy call.  */
2794 gimple_stmt_iterator gsi2 = *gsi;
2795 gsi_prev (&gsi2);
2796 fold_stmt (&gsi2);
2797 return true;
2800 /* Fold a call to {,v}snprintf_chk pointed to by GSI.  Return false if
2801 a normal call should be emitted rather than folding the function
2802 inline, true if the call was simplified in place.  FCODE is either
2803 BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  The maximum length
2804 of LEN, if not constant, is recomputed via get_maxval_strlen.  */
2806 static bool
2807 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2808 enum built_in_function fcode)
2810 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2811 tree dest, size, len, fn, fmt, flag;
2812 const char *fmt_str;
2814 /* Verify the required arguments in the original call.  */
2815 if (gimple_call_num_args (stmt) < 5)
2816 return false;
2818 dest = gimple_call_arg (stmt, 0);
2819 len = gimple_call_arg (stmt, 1);
2820 flag = gimple_call_arg (stmt, 2);
2821 size = gimple_call_arg (stmt, 3);
2822 fmt = gimple_call_arg (stmt, 4);
   /* Need a constant object SIZE to reason about the check.  */
2824 if (! tree_fits_uhwi_p (size))
2825 return false;
   /* SIZE of all ones means the object size is unknown; the check is
      then a no-op and conversion is always safe.  */
2827 if (! integer_all_onesp (size))
2829 tree maxlen = get_maxval_strlen (len, 2);
2830 if (! tree_fits_uhwi_p (len))
2832 /* If LEN is not constant, try MAXLEN too.
2833 For MAXLEN only allow optimizing into non-_ocs function
2834 if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2835 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2836 return false;
2838 else
2839 maxlen = len;
2841 if (tree_int_cst_lt (size, maxlen))
2842 return false;
2845 if (!init_target_chars ())
2846 return false;
2848 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2849 or if format doesn't contain % chars or is "%s".  */
2850 if (! integer_zerop (flag))
2852 fmt_str = c_getstr (fmt);
2853 if (fmt_str == NULL)
2854 return false;
2855 if (strchr (fmt_str, target_percent) != NULL
2856 && strcmp (fmt_str, target_percent_s))
2857 return false;
2860 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2861 available.  */
2862 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2863 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2864 if (!fn)
2865 return false;
2867 /* Replace the called function and the first 5 argument by 3 retaining
2868 trailing varargs.  Dropping FLAG and SIZE shifts the varargs left
   by two positions.  */
2869 gimple_call_set_fndecl (stmt, fn);
2870 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2871 gimple_call_set_arg (stmt, 0, dest);
2872 gimple_call_set_arg (stmt, 1, len);
2873 gimple_call_set_arg (stmt, 2, fmt);
2874 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2875 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2876 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2877 fold_stmt (gsi);
2878 return true;
2881 /* Fold a call to __{,v}sprintf_chk pointed to by GSI.  Return false
2882 if a normal call should be emitted rather than folding the function
2883 inline, true if the call was simplified in place.  FCODE is either
2884 BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */
2886 static bool
2887 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2888 enum built_in_function fcode)
2890 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2891 tree dest, size, len, fn, fmt, flag;
2892 const char *fmt_str;
2893 unsigned nargs = gimple_call_num_args (stmt);
2895 /* Verify the required arguments in the original call.  */
2896 if (nargs < 4)
2897 return false;
2898 dest = gimple_call_arg (stmt, 0);
2899 flag = gimple_call_arg (stmt, 1);
2900 size = gimple_call_arg (stmt, 2);
2901 fmt = gimple_call_arg (stmt, 3);
   /* Need a constant object SIZE to reason about the check.  */
2903 if (! tree_fits_uhwi_p (size))
2904 return false;
2906 len = NULL_TREE;
2908 if (!init_target_chars ())
2909 return false;
2911 /* Check whether the format is a literal string constant.  */
2912 fmt_str = c_getstr (fmt);
2913 if (fmt_str != NULL)
2915 /* If the format doesn't contain % args or %%, we know the size.  */
2916 if (strchr (fmt_str, target_percent) == 0)
2918 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2919 len = build_int_cstu (size_type_node, strlen (fmt_str));
2921 /* If the format is "%s" and first ... argument is a string literal,
2922 we know the size too.  */
2923 else if (fcode == BUILT_IN_SPRINTF_CHK
2924 && strcmp (fmt_str, target_percent_s) == 0)
2926 tree arg;
2928 if (nargs == 5)
2930 arg = gimple_call_arg (stmt, 4);
2931 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2933 len = c_strlen (arg, 1);
2934 if (! len || ! tree_fits_uhwi_p (len))
2935 len = NULL_TREE;
   /* SIZE of all ones means the object size is unknown (no checking);
      otherwise the output must provably fit (LEN < SIZE, leaving room
      for the terminating NUL).  */
2941 if (! integer_all_onesp (size))
2943 if (! len || ! tree_int_cst_lt (len, size))
2944 return false;
2947 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2948 or if format doesn't contain % chars or is "%s".  */
2949 if (! integer_zerop (flag))
2951 if (fmt_str == NULL)
2952 return false;
2953 if (strchr (fmt_str, target_percent) != NULL
2954 && strcmp (fmt_str, target_percent_s))
2955 return false;
2958 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
2959 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2960 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2961 if (!fn)
2962 return false;
2964 /* Replace the called function and the first 4 argument by 2 retaining
2965 trailing varargs.  Dropping FLAG and SIZE shifts the varargs left
   by two positions.  */
2966 gimple_call_set_fndecl (stmt, fn);
2967 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2968 gimple_call_set_arg (stmt, 0, dest);
2969 gimple_call_set_arg (stmt, 1, fmt);
2970 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2971 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2972 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2973 fold_stmt (gsi);
2974 return true;
2977 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2978 ORIG may be null if this is a 2-argument call.  We don't attempt to
2979 simplify calls with more than 3 arguments.
2981 Return true if simplification was possible, otherwise false.  */
2983 bool
2984 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2986 gimple *stmt = gsi_stmt (*gsi);
2987 tree dest = gimple_call_arg (stmt, 0);
2988 tree fmt = gimple_call_arg (stmt, 1);
2989 tree orig = NULL_TREE;
2990 const char *fmt_str = NULL;
2992 /* Verify the required arguments in the original call.  We deal with two
2993 types of sprintf() calls: 'sprintf (str, fmt)' and
2994 'sprintf (dest, "%s", orig)'.  */
2995 if (gimple_call_num_args (stmt) > 3)
2996 return false;
2998 if (gimple_call_num_args (stmt) == 3)
2999 orig = gimple_call_arg (stmt, 2);
3001 /* Check whether the format is a literal string constant.  */
3002 fmt_str = c_getstr (fmt);
3003 if (fmt_str == NULL)
3004 return false;
3006 if (!init_target_chars ())
3007 return false;
3009 /* If the format doesn't contain % args or %%, use strcpy.  */
3010 if (strchr (fmt_str, target_percent) == NULL)
3012 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3014 if (!fn)
3015 return false;
3017 /* Don't optimize sprintf (buf, "abc", ptr++).  */
3018 if (orig)
3019 return false;
3021 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3022 'format' is known to contain no % formats.  */
3023 gimple_seq stmts = NULL;
3024 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3025 gimple_seq_add_stmt_without_update (&stmts, repl);
3026 if (gimple_call_lhs (stmt))
   /* sprintf returns the number of characters written, which for a
      %-free format is simply strlen (FMT).  */
3028 repl = gimple_build_assign (gimple_call_lhs (stmt),
3029 build_int_cst (integer_type_node,
3030 strlen (fmt_str)));
3031 gimple_seq_add_stmt_without_update (&stmts, repl);
3032 gsi_replace_with_seq_vops (gsi, stmts);
3033 /* gsi now points at the assignment to the lhs, get a
3034 stmt iterator to the memcpy call.
3035 ??? We can't use gsi_for_stmt as that doesn't work when the
3036 CFG isn't built yet.  */
3037 gimple_stmt_iterator gsi2 = *gsi;
3038 gsi_prev (&gsi2);
3039 fold_stmt (&gsi2);
3041 else
3043 gsi_replace_with_seq_vops (gsi, stmts);
3044 fold_stmt (gsi);
3046 return true;
3049 /* If the format is "%s", use strcpy if the result isn't used.  */
3050 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3052 tree fn;
3053 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3055 if (!fn)
3056 return false;
3058 /* Don't crash on sprintf (str1, "%s").  */
3059 if (!orig)
3060 return false;
3062 tree orig_len = NULL_TREE;
3063 if (gimple_call_lhs (stmt))
   /* The result (strlen of ORIG) is only needed when the lhs is used.  */
3065 orig_len = get_maxval_strlen (orig, 0);
3066 if (!orig_len)
3067 return false;
3070 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
3071 gimple_seq stmts = NULL;
3072 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3073 gimple_seq_add_stmt_without_update (&stmts, repl);
3074 if (gimple_call_lhs (stmt))
3076 if (!useless_type_conversion_p (integer_type_node,
3077 TREE_TYPE (orig_len)))
3078 orig_len = fold_convert (integer_type_node, orig_len);
3079 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3080 gimple_seq_add_stmt_without_update (&stmts, repl);
3081 gsi_replace_with_seq_vops (gsi, stmts);
3082 /* gsi now points at the assignment to the lhs, get a
3083 stmt iterator to the memcpy call.
3084 ??? We can't use gsi_for_stmt as that doesn't work when the
3085 CFG isn't built yet.  */
3086 gimple_stmt_iterator gsi2 = *gsi;
3087 gsi_prev (&gsi2);
3088 fold_stmt (&gsi2);
3090 else
3092 gsi_replace_with_seq_vops (gsi, stmts);
3093 fold_stmt (gsi);
3095 return true;
3097 return false;
3100 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3101 FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
3102 attempt to simplify calls with more than 4 arguments.
3104 Return true if simplification was possible, otherwise false.  */
3106 bool
3107 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3109 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3110 tree dest = gimple_call_arg (stmt, 0);
3111 tree destsize = gimple_call_arg (stmt, 1);
3112 tree fmt = gimple_call_arg (stmt, 2);
3113 tree orig = NULL_TREE;
3114 const char *fmt_str = NULL;
3116 if (gimple_call_num_args (stmt) > 4)
3117 return false;
3119 if (gimple_call_num_args (stmt) == 4)
3120 orig = gimple_call_arg (stmt, 3);
   /* Need a constant destination size to prove no truncation occurs.  */
3122 if (!tree_fits_uhwi_p (destsize))
3123 return false;
3124 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3126 /* Check whether the format is a literal string constant.  */
3127 fmt_str = c_getstr (fmt);
3128 if (fmt_str == NULL)
3129 return false;
3131 if (!init_target_chars ())
3132 return false;
3134 /* If the format doesn't contain % args or %%, use strcpy.  */
3135 if (strchr (fmt_str, target_percent) == NULL)
3137 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3138 if (!fn)
3139 return false;
3141 /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
3142 if (orig)
3143 return false;
3145 /* We could expand this as
3146 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3147 or to
3148 memcpy (str, fmt_with_nul_at_cstm1, cst);
3149 but in the former case that might increase code size
3150 and in the latter case grow .rodata section too much.
3151 So punt for now.  */
3152 size_t len = strlen (fmt_str);
3153 if (len >= destlen)
3154 return false;
3156 gimple_seq stmts = NULL;
3157 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3158 gimple_seq_add_stmt_without_update (&stmts, repl);
3159 if (gimple_call_lhs (stmt))
   /* snprintf returns the would-be output length, here strlen (FMT).  */
3161 repl = gimple_build_assign (gimple_call_lhs (stmt),
3162 build_int_cst (integer_type_node, len));
3163 gimple_seq_add_stmt_without_update (&stmts, repl);
3164 gsi_replace_with_seq_vops (gsi, stmts);
3165 /* gsi now points at the assignment to the lhs, get a
3166 stmt iterator to the memcpy call.
3167 ??? We can't use gsi_for_stmt as that doesn't work when the
3168 CFG isn't built yet.  */
3169 gimple_stmt_iterator gsi2 = *gsi;
3170 gsi_prev (&gsi2);
3171 fold_stmt (&gsi2);
3173 else
3175 gsi_replace_with_seq_vops (gsi, stmts);
3176 fold_stmt (gsi);
3178 return true;
3181 /* If the format is "%s", use strcpy if the result isn't used.  */
3182 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3184 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3185 if (!fn)
3186 return false;
3188 /* Don't crash on snprintf (str1, cst, "%s").  */
3189 if (!orig)
3190 return false;
3192 tree orig_len = get_maxval_strlen (orig, 0);
3193 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3194 return false;
3196 /* We could expand this as
3197 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3198 or to
3199 memcpy (str1, str2_with_nul_at_cstm1, cst);
3200 but in the former case that might increase code size
3201 and in the latter case grow .rodata section too much.
3202 So punt for now.  */
3203 if (compare_tree_int (orig_len, destlen) >= 0)
3204 return false;
3206 /* Convert snprintf (str1, cst, "%s", str2) into
3207 strcpy (str1, str2) if strlen (str2) < cst.  */
3208 gimple_seq stmts = NULL;
3209 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3210 gimple_seq_add_stmt_without_update (&stmts, repl);
3211 if (gimple_call_lhs (stmt))
3213 if (!useless_type_conversion_p (integer_type_node,
3214 TREE_TYPE (orig_len)))
3215 orig_len = fold_convert (integer_type_node, orig_len);
3216 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3217 gimple_seq_add_stmt_without_update (&stmts, repl);
3218 gsi_replace_with_seq_vops (gsi, stmts);
3219 /* gsi now points at the assignment to the lhs, get a
3220 stmt iterator to the memcpy call.
3221 ??? We can't use gsi_for_stmt as that doesn't work when the
3222 CFG isn't built yet.  */
3223 gimple_stmt_iterator gsi2 = *gsi;
3224 gsi_prev (&gsi2);
3225 fold_stmt (&gsi2);
3227 else
3229 gsi_replace_with_seq_vops (gsi, stmts);
3230 fold_stmt (gsi);
3232 return true;
3234 return false;
 3237 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
 3238 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
 3239 more than 3 arguments, and ARG may be null in the 2-argument case.
 3241 Return true if the call was simplified (the statement at *GSI is
 3242 replaced), false otherwise. FCODE is the BUILT_IN_*
 3243 code of the function to be simplified. */
 3245 static bool
 3246 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
 3247 tree fp, tree fmt, tree arg,
 3248 enum built_in_function fcode)
 3250 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
 3251 tree fn_fputc, fn_fputs;
 3252 const char *fmt_str = NULL;
 3254 /* If the return value is used, don't do the transformation.  fputs/fputc
    do not return the character count fprintf would.  */
 3255 if (gimple_call_lhs (stmt) != NULL_TREE)
 3256 return false;
 3258 /* Check whether the format is a literal string constant. */
 3259 fmt_str = c_getstr (fmt);
 3260 if (fmt_str == NULL)
 3261 return false;
 3263 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
 3265 /* If we're using an unlocked function, assume the other
 3266 unlocked functions exist explicitly. */
 3267 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
 3268 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
 3270 else
 3272 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
 3273 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
 3276 if (!init_target_chars ())
 3277 return false;
 3279 /* If the format doesn't contain % args or %%, use strcpy. */
 3280 if (strchr (fmt_str, target_percent) == NULL)
 3282 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
 3283 && arg)
 3284 return false;
 3286 /* If the format specifier was "", fprintf does nothing. */
 3287 if (fmt_str[0] == '\0')
 3289 replace_call_with_value (gsi, NULL_TREE);
 3290 return true;
 3293 /* When "string" doesn't contain %, replace all cases of
 3294 fprintf (fp, string) with fputs (string, fp). The fputs
 3295 builtin will take care of special cases like length == 1. */
 3296 if (fn_fputs)
 3298 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
 3299 replace_call_with_call_and_fold (gsi, repl);
 3300 return true;
 3304 /* The other optimizations can be done only on the non-va_list variants. */
 3305 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
 3306 return false;
 3308 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
 3309 else if (strcmp (fmt_str, target_percent_s) == 0)
 3311 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
 3312 return false;
 3313 if (fn_fputs)
 3315 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
 3316 replace_call_with_call_and_fold (gsi, repl);
 3317 return true;
 3321 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
 3322 else if (strcmp (fmt_str, target_percent_c) == 0)
 3324 if (!arg
 3325 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
 3326 return false;
 3327 if (fn_fputc)
 3329 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
 3330 replace_call_with_call_and_fold (gsi, repl);
 3331 return true;
 3335 return false;
 3338 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
 3339 FMT and ARG are the arguments to the call; we don't fold cases with
 3340 more than 2 arguments, and ARG may be null if this is a 1-argument case.
 3342 Return true if the call was simplified (the statement at *GSI is
 3343 replaced), false otherwise. FCODE is the BUILT_IN_*
 3344 code of the function to be simplified. */
 3346 static bool
 3347 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
 3348 tree arg, enum built_in_function fcode)
 3350 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
 3351 tree fn_putchar, fn_puts, newarg;
 3352 const char *fmt_str = NULL;
 3354 /* If the return value is used, don't do the transformation.  puts/putchar
    do not return the character count printf would.  */
 3355 if (gimple_call_lhs (stmt) != NULL_TREE)
 3356 return false;
 3358 /* Check whether the format is a literal string constant. */
 3359 fmt_str = c_getstr (fmt);
 3360 if (fmt_str == NULL)
 3361 return false;
 3363 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
 3365 /* If we're using an unlocked function, assume the other
 3366 unlocked functions exist explicitly. */
 3367 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
 3368 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
 3370 else
 3372 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
 3373 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
 3376 if (!init_target_chars ())
 3377 return false;
 3379 /* Handle printf ("%s", str) and printf (str-without-%) uniformly:
    in both cases the output is a known string STR.  */
 3380 if (strcmp (fmt_str, target_percent_s) == 0
 3381 || strchr (fmt_str, target_percent) == NULL)
 3382 const char *str;
 3384 if (strcmp (fmt_str, target_percent_s) == 0)
 3386 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
 3387 return false;
 3389 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
 3390 return false;
 3392 str = c_getstr (arg);
 3393 if (str == NULL)
 3394 return false;
 3396 else
 3398 /* The format specifier doesn't contain any '%' characters. */
 3399 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
 3400 && arg)
 3401 return false;
 3402 str = fmt_str;
 3405 /* If the string was "", printf does nothing. */
 3406 if (str[0] == '\0')
 3408 replace_call_with_value (gsi, NULL_TREE);
 3409 return true;
 3412 /* If the string has length of 1, call putchar. */
 3413 if (str[1] == '\0')
 3415 /* Given printf("c"), (where c is any one character,)
 3416 convert "c"[0] to an int and pass that to the replacement
 3417 function. */
 3418 newarg = build_int_cst (integer_type_node, str[0]);
 3419 if (fn_putchar)
 3421 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
 3422 replace_call_with_call_and_fold (gsi, repl);
 3423 return true;
 3426 else
 3428 /* If the string was "string\n", call puts("string").  The
    (size_t) (int) len == len check guards against strings whose
    length does not fit in int.  */
 3429 size_t len = strlen (str);
 3430 if ((unsigned char)str[len - 1] == target_newline
 3431 && (size_t) (int) len == len
 3432 && (int) len > 0)
 3434 char *newstr;
 3435 tree offset_node, string_cst;
 3437 /* Create a NUL-terminated string that's one char shorter
 3438 than the original, stripping off the trailing '\n'. */
 3439 newarg = build_string_literal (len, str);
 3440 string_cst = string_constant (newarg, &offset_node);
 3441 gcc_checking_assert (string_cst
 3442 && (TREE_STRING_LENGTH (string_cst)
 3443 == (int) len)
 3444 && integer_zerop (offset_node)
 3445 && (unsigned char)
 3446 TREE_STRING_POINTER (string_cst)[len - 1]
 3447 == target_newline);
 3448 /* build_string_literal creates a new STRING_CST,
 3449 modify it in place to avoid double copying. */
 3450 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
 3451 newstr[len - 1] = '\0';
 3452 if (fn_puts)
 3454 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
 3455 replace_call_with_call_and_fold (gsi, repl);
 3456 return true;
 3459 else
 3460 /* We'd like to arrange to call fputs(string,stdout) here,
 3461 but we need stdout and don't have a way to get it yet. */
 3462 return false;
 3466 /* The other optimizations can be done only on the non-va_list variants. */
 3467 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
 3468 return false;
 3470 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
 3471 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
 3473 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
 3474 return false;
 3475 if (fn_puts)
 3477 gcall *repl = gimple_build_call (fn_puts, 1, arg);
 3478 replace_call_with_call_and_fold (gsi, repl);
 3479 return true;
 3483 /* If the format specifier was "%c", call __builtin_putchar(arg). */
 3484 else if (strcmp (fmt_str, target_percent_c) == 0)
 3486 if (!arg || ! useless_type_conversion_p (integer_type_node,
 3487 TREE_TYPE (arg)))
 3488 return false;
 3489 if (fn_putchar)
 3491 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
 3492 replace_call_with_call_and_fold (gsi, repl);
 3493 return true;
 3497 return false;
 3502 /* Fold a call to __builtin_strlen.  If the length of the argument is
    known exactly, replace the call with that constant and return true.
    Otherwise record the computed [min, max] length range on the call's
    lhs (when it is an SSA name) and return false.  */
 3504 static bool
 3505 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
 3507 gimple *stmt = gsi_stmt (*gsi);
 3509 wide_int minlen;
 3510 wide_int maxlen;
 3512 tree lenrange[2];
 3513 /* NOTE(review): the negation looks intentional — get_range_strlen
    apparently returns true for ranges that should not be relied on
    here (and false on the usable case, filling LENRANGE) — confirm
    against its definition earlier in this file.  */
 3513 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
 3514 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
 3515 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
 3517 /* The range of lengths refers to either a single constant
 3518 string or to the longest and shortest constant string
 3519 referenced by the argument of the strlen() call, or to
 3520 the strings that can possibly be stored in the arrays
 3521 the argument refers to. */
 3522 minlen = wi::to_wide (lenrange[0]);
 3523 maxlen = wi::to_wide (lenrange[1]);
 3525 else
 3527 unsigned prec = TYPE_PRECISION (sizetype);
 3529 minlen = wi::shwi (0, prec);
 3530 /* No useful range: assume anything from 0 up to the largest
    possible object size minus the terminating NUL.  */
 3530 maxlen = wi::to_wide (max_object_size (), prec) - 2;
 3533 if (minlen == maxlen)
 3535 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
 3536 true, GSI_SAME_STMT)ₕ;
 3537 replace_call_with_value (gsi, lenrange[0]);
 3538 return true;
 3541 tree lhs = gimple_call_lhs (stmt);
 3542 if (lhs && TREE_CODE (lhs) == SSA_NAME)
 3543 set_range_info (lhs, VR_RANGE, minlen, maxlen);
 3545 return false;
 3548 /* Fold a call to __builtin_acc_on_device.  Replaces the call with the
    boolean expression (ARG0 == VAL_HOST) | (ARG0 == VAL_DEV), where the
    two constants depend on whether this is the host or the accelerator
    compiler.  Returns true if the call was replaced.  */
 3550 static bool
 3551 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
 3553 /* Defer folding until we know which compiler we're in. */
 3554 if (symtab->state != EXPANSION)
 3555 return false;
 3557 unsigned val_host = GOMP_DEVICE_HOST;
 3558 unsigned val_dev = GOMP_DEVICE_NONE;
 3560 #ifdef ACCEL_COMPILER
 3561 val_host = GOMP_DEVICE_NOT_HOST;
 3562 val_dev = ACCEL_COMPILER_acc_device;
 3563 #endif
 3565 location_t loc = gimple_location (gsi_stmt (*gsi));
 3567 /* host_eq = (arg0 == val_host)  */
 3567 tree host_eq = make_ssa_name (boolean_type_node);
 3568 gimple *host_ass = gimple_build_assign
 3569 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
 3570 gimple_set_location (host_ass, loc);
 3571 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
 3573 /* dev_eq = (arg0 == val_dev)  */
 3573 tree dev_eq = make_ssa_name (boolean_type_node);
 3574 gimple *dev_ass = gimple_build_assign
 3575 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
 3576 gimple_set_location (dev_ass, loc);
 3577 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
 3579 /* result = host_eq | dev_eq  */
 3579 tree result = make_ssa_name (boolean_type_node);
 3580 gimple *result_ass = gimple_build_assign
 3581 (result, BIT_IOR_EXPR, host_eq, dev_eq);
 3582 gimple_set_location (result_ass, loc);
 3583 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
 3585 replace_call_with_value (gsi, result);
 3587 return true;
3590 /* Fold realloc (0, n) -> malloc (n). */
3592 static bool
3593 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3595 gimple *stmt = gsi_stmt (*gsi);
3596 tree arg = gimple_call_arg (stmt, 0);
3597 tree size = gimple_call_arg (stmt, 1);
3599 if (operand_equal_p (arg, null_pointer_node, 0))
3601 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3602 if (fn_malloc)
3604 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3605 replace_call_with_call_and_fold (gsi, repl);
3606 return true;
3609 return false;
 3612 /* Fold the non-target builtin at *GSI and return whether any simplification
 3613 was made.  Dispatches on the builtin's function code to the specific
    gimple_fold_builtin_* helpers above, then falls back to the generic
    tree-level folder.  */
 3615 static bool
 3616 gimple_fold_builtin (gimple_stmt_iterator *gsi)
 3618 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
 3619 tree callee = gimple_call_fndecl (stmt);
 3621 /* Give up for always_inline inline builtins until they are
 3622 inlined. */
 3623 if (avoid_folding_inline_builtin (callee))
 3624 return false;
 3626 unsigned n = gimple_call_num_args (stmt);
 3627 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
 3628 switch (fcode)
 3630 case BUILT_IN_BCMP:
 3631 return gimple_fold_builtin_bcmp (gsi);
 3632 case BUILT_IN_BCOPY:
 3633 return gimple_fold_builtin_bcopy (gsi);
 3634 case BUILT_IN_BZERO:
 3635 return gimple_fold_builtin_bzero (gsi);
 3637 case BUILT_IN_MEMSET:
 3638 return gimple_fold_builtin_memset (gsi,
 3639 gimple_call_arg (stmt, 1),
 3640 gimple_call_arg (stmt, 2));
 3641 case BUILT_IN_MEMCPY:
 3642 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
 3643 gimple_call_arg (stmt, 1), 0);
 3644 case BUILT_IN_MEMPCPY:
 3645 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
 3646 gimple_call_arg (stmt, 1), 1);
 3647 case BUILT_IN_MEMMOVE:
 3648 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
 3649 gimple_call_arg (stmt, 1), 3);
 3650 case BUILT_IN_SPRINTF_CHK:
 3651 case BUILT_IN_VSPRINTF_CHK:
 3652 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
 3653 case BUILT_IN_STRCAT_CHK:
 3654 return gimple_fold_builtin_strcat_chk (gsi);
 3655 case BUILT_IN_STRNCAT_CHK:
 3656 return gimple_fold_builtin_strncat_chk (gsi);
 3657 case BUILT_IN_STRLEN:
 3658 return gimple_fold_builtin_strlen (gsi);
 3659 case BUILT_IN_STRCPY:
 3660 return gimple_fold_builtin_strcpy (gsi,
 3661 gimple_call_arg (stmt, 0),
 3662 gimple_call_arg (stmt, 1));
 3663 case BUILT_IN_STRNCPY:
 3664 return gimple_fold_builtin_strncpy (gsi,
 3665 gimple_call_arg (stmt, 0),
 3666 gimple_call_arg (stmt, 1),
 3667 gimple_call_arg (stmt, 2));
 3668 case BUILT_IN_STRCAT:
 3669 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
 3670 gimple_call_arg (stmt, 1));
 3671 case BUILT_IN_STRNCAT:
 3672 return gimple_fold_builtin_strncat (gsi);
 3673 case BUILT_IN_INDEX:
 3674 case BUILT_IN_STRCHR:
 3675 return gimple_fold_builtin_strchr (gsi, false);
 3676 case BUILT_IN_RINDEX:
 3677 case BUILT_IN_STRRCHR:
 3678 return gimple_fold_builtin_strchr (gsi, true);
 3679 case BUILT_IN_STRSTR:
 3680 return gimple_fold_builtin_strstr (gsi);
 3681 case BUILT_IN_STRCMP:
 3682 case BUILT_IN_STRCMP_EQ:
 3683 case BUILT_IN_STRCASECMP:
 3684 case BUILT_IN_STRNCMP:
 3685 case BUILT_IN_STRNCMP_EQ:
 3686 case BUILT_IN_STRNCASECMP:
 3687 return gimple_fold_builtin_string_compare (gsi);
 3688 case BUILT_IN_MEMCHR:
 3689 return gimple_fold_builtin_memchr (gsi);
 3690 case BUILT_IN_FPUTS:
 3691 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
 3692 gimple_call_arg (stmt, 1), false);
 3693 case BUILT_IN_FPUTS_UNLOCKED:
 3694 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
 3695 gimple_call_arg (stmt, 1), true);
 3696 case BUILT_IN_MEMCPY_CHK:
 3697 case BUILT_IN_MEMPCPY_CHK:
 3698 case BUILT_IN_MEMMOVE_CHK:
 3699 case BUILT_IN_MEMSET_CHK:
 3700 return gimple_fold_builtin_memory_chk (gsi,
 3701 gimple_call_arg (stmt, 0),
 3702 gimple_call_arg (stmt, 1),
 3703 gimple_call_arg (stmt, 2),
 3704 gimple_call_arg (stmt, 3),
 3705 fcode);
 3706 case BUILT_IN_STPCPY:
 3707 return gimple_fold_builtin_stpcpy (gsi);
 3708 case BUILT_IN_STRCPY_CHK:
 3709 case BUILT_IN_STPCPY_CHK:
 3710 return gimple_fold_builtin_stxcpy_chk (gsi,
 3711 gimple_call_arg (stmt, 0),
 3712 gimple_call_arg (stmt, 1),
 3713 gimple_call_arg (stmt, 2),
 3714 fcode);
 3715 case BUILT_IN_STRNCPY_CHK:
 3716 case BUILT_IN_STPNCPY_CHK:
 3717 return gimple_fold_builtin_stxncpy_chk (gsi,
 3718 gimple_call_arg (stmt, 0),
 3719 gimple_call_arg (stmt, 1),
 3720 gimple_call_arg (stmt, 2),
 3721 gimple_call_arg (stmt, 3),
 3722 fcode);
 3723 case BUILT_IN_SNPRINTF_CHK:
 3724 case BUILT_IN_VSNPRINTF_CHK:
 3725 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
 3727 /* The printf-family helpers only handle a limited number of
    arguments; ARG is the optional trailing argument.  */
 3727 case BUILT_IN_FPRINTF:
 3728 case BUILT_IN_FPRINTF_UNLOCKED:
 3729 case BUILT_IN_VFPRINTF:
 3730 if (n == 2 || n == 3)
 3731 return gimple_fold_builtin_fprintf (gsi,
 3732 gimple_call_arg (stmt, 0),
 3733 gimple_call_arg (stmt, 1),
 3734 n == 3
 3735 ? gimple_call_arg (stmt, 2)
 3736 : NULL_TREE,
 3737 fcode);
 3738 break;
 3739 case BUILT_IN_FPRINTF_CHK:
 3740 case BUILT_IN_VFPRINTF_CHK:
 3741 if (n == 3 || n == 4)
 3742 return gimple_fold_builtin_fprintf (gsi,
 3743 gimple_call_arg (stmt, 0),
 3744 gimple_call_arg (stmt, 2),
 3745 n == 4
 3746 ? gimple_call_arg (stmt, 3)
 3747 : NULL_TREE,
 3748 fcode);
 3749 break;
 3750 case BUILT_IN_PRINTF:
 3751 case BUILT_IN_PRINTF_UNLOCKED:
 3752 case BUILT_IN_VPRINTF:
 3753 if (n == 1 || n == 2)
 3754 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
 3755 n == 2
 3756 ? gimple_call_arg (stmt, 1)
 3757 : NULL_TREE, fcode);
 3758 break;
 3759 case BUILT_IN_PRINTF_CHK:
 3760 case BUILT_IN_VPRINTF_CHK:
 3761 if (n == 2 || n == 3)
 3762 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
 3763 n == 3
 3764 ? gimple_call_arg (stmt, 2)
 3765 : NULL_TREE, fcode);
 3766 break;
 3767 case BUILT_IN_ACC_ON_DEVICE:
 3768 return gimple_fold_builtin_acc_on_device (gsi,
 3769 gimple_call_arg (stmt, 0));
 3770 case BUILT_IN_REALLOC:
 3771 return gimple_fold_builtin_realloc (gsi);
 3773 default:;
 3776 /* Try the generic builtin folder. */
 3777 bool ignore = (gimple_call_lhs (stmt) == NULL);
 3778 tree result = fold_call_stmt (stmt, ignore);
 3779 if (result)
 3781 if (ignore)
 3782 STRIP_NOPS (result);
 3783 else
 3784 result = fold_convert (gimple_call_return_type (stmt), result);
 3785 if (!update_call_from_tree (gsi, result))
 3786 gimplify_and_update_call_from_tree (gsi, result);
 3787 return true;
 3790 return false;
3793 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3794 function calls to constants, where possible. */
3796 static tree
3797 fold_internal_goacc_dim (const gimple *call)
3799 int axis = oacc_get_ifn_dim_arg (call);
3800 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3801 tree result = NULL_TREE;
3802 tree type = TREE_TYPE (gimple_call_lhs (call));
3804 switch (gimple_call_internal_fn (call))
3806 case IFN_GOACC_DIM_POS:
3807 /* If the size is 1, we know the answer. */
3808 if (size == 1)
3809 result = build_int_cst (type, 0);
3810 break;
3811 case IFN_GOACC_DIM_SIZE:
3812 /* If the size is not dynamic, we know the answer. */
3813 if (size)
3814 result = build_int_cst (type, size);
3815 break;
3816 default:
3817 break;
3820 return result;
 3823 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
 3824 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
 3825 &var where var is only addressable because of such calls. */
 3827 bool
 3828 optimize_atomic_compare_exchange_p (gimple *stmt)
 3830 /* Basic shape checks: the optimization only applies with -finline-atomics
    at -O1+, not under tsan/asan, and only to a well-formed builtin call
    that participates in the virtual operand chain.  */
 3830 if (gimple_call_num_args (stmt) != 6
 3831 || !flag_inline_atomics
 3832 || !optimize
 3833 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
 3834 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
 3835 || !gimple_vdef (stmt)
 3836 || !gimple_vuse (stmt))
 3837 return false;
 3839 tree fndecl = gimple_call_fndecl (stmt);
 3840 switch (DECL_FUNCTION_CODE (fndecl))
 3842 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
 3843 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
 3844 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
 3845 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
 3846 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
 3847 break;
 3848 default:
 3849 return false;
 3852 /* The "expected" argument must be the address of a local variable.  */
 3852 tree expected = gimple_call_arg (stmt, 1);
 3853 if (TREE_CODE (expected) != ADDR_EXPR
 3854 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
 3855 return false;
 3857 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
 3858 if (!is_gimple_reg_type (etype)
 3859 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
 3860 || TREE_THIS_VOLATILE (etype)
 3861 || VECTOR_TYPE_P (etype)
 3862 || TREE_CODE (etype) == COMPLEX_TYPE
 3863 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
 3864 might not preserve all the bits. See PR71716. */
 3865 || SCALAR_FLOAT_TYPE_P (etype)
 3866 || maybe_ne (TYPE_PRECISION (etype),
 3867 GET_MODE_BITSIZE (TYPE_MODE (etype))))
 3868 return false;
 3870 /* The "weak" argument must be a literal 0 or 1.  */
 3870 tree weak = gimple_call_arg (stmt, 3);
 3871 if (!integer_zerop (weak) && !integer_onep (weak))
 3872 return false;
 3874 /* ITYPE is the type of the builtin's third parameter (the desired
    value), which determines the machine mode of the exchange.  */
 3874 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
 3875 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
 3876 machine_mode mode = TYPE_MODE (itype);
 3878 /* The target must provide a compare-and-swap pattern for this mode.  */
 3878 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
 3879 == CODE_FOR_nothing
 3880 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
 3881 return false;
 3883 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
 3884 return false;
 3886 return true;
 3889 /* Fold
 3890 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
 3891 into
 3892 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
 3893 i = IMAGPART_EXPR <t>;
 3894 r = (_Bool) i;
 3895 e = REALPART_EXPR <t>;
    so that the "expected" variable E no longer needs to be addressable.
    Only called when optimize_atomic_compare_exchange_p returned true.  */
 3897 void
 3898 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
 3900 gimple *stmt = gsi_stmt (*gsi);
 3901 tree fndecl = gimple_call_fndecl (stmt);
 3902 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
 3903 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
 3904 tree ctype = build_complex_type (itype);
 3905 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
 3906 bool throws = false;
 3907 edge e = NULL;
 3908 /* Load the expected value into an SSA name so it can be passed by
    value to the internal function.  */
 3908 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
 3909 expected);
 3910 gsi_insert_before (gsi, g, GSI_SAME_STMT);
 3911 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
 3912 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
 3914 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
 3915 build1 (VIEW_CONVERT_EXPR, itype,
 3916 gimple_assign_lhs (g)));
 3917 gsi_insert_before (gsi, g, GSI_SAME_STMT);
 3919 /* Encode the weak flag and the byte size into one argument, as
    documented for IFN_ATOMIC_COMPARE_EXCHANGE: weak * 256 + N.  */
 3919 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
 3920 + int_size_in_bytes (itype);
 3921 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
 3922 gimple_call_arg (stmt, 0),
 3923 gimple_assign_lhs (g),
 3924 gimple_call_arg (stmt, 2),
 3925 build_int_cst (integer_type_node, flag),
 3926 gimple_call_arg (stmt, 4),
 3927 gimple_call_arg (stmt, 5));
 3928 tree lhs = make_ssa_name (ctype);
 3929 gimple_call_set_lhs (g, lhs);
 3930 /* Transfer the virtual operands from the old call to the new one.  */
 3930 gimple_set_vdef (g, gimple_vdef (stmt));
 3931 gimple_set_vuse (g, gimple_vuse (stmt));
 3932 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
 3933 tree oldlhs = gimple_call_lhs (stmt);
 3934 if (stmt_can_throw_internal (stmt))
 3936 throws = true;
 3937 /* The follow-up extractions must go on the fallthru edge when the
    call can throw, since the call ends its basic block.  */
 3937 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
 3939 gimple_call_set_nothrow (as_a <gcall *> (g),
 3940 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
 3941 gimple_call_set_lhs (stmt, NULL_TREE);
 3942 gsi_replace (gsi, g, true);
 3943 if (oldlhs)
 3945 /* r = (_Bool) IMAGPART_EXPR <t>;  */
 3945 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
 3946 build1 (IMAGPART_EXPR, itype, lhs));
 3947 if (throws)
 3949 gsi_insert_on_edge_immediate (e, g);
 3950 *gsi = gsi_for_stmt (g);
 3952 else
 3953 gsi_insert_after (gsi, g, GSI_NEW_STMT);
 3954 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
 3955 gsi_insert_after (gsi, g, GSI_NEW_STMT);
 3957 /* e = REALPART_EXPR <t>;  */
 3957 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
 3958 build1 (REALPART_EXPR, itype, lhs));
 3959 if (throws && oldlhs == NULL_TREE)
 3961 gsi_insert_on_edge_immediate (e, g);
 3962 *gsi = gsi_for_stmt (g);
 3964 else
 3965 gsi_insert_after (gsi, g, GSI_NEW_STMT);
 3966 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
 3968 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
 3969 VIEW_CONVERT_EXPR,
 3970 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
 3971 gimple_assign_lhs (g)));
 3972 gsi_insert_after (gsi, g, GSI_NEW_STMT);
 3974 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
 3975 gsi_insert_after (gsi, g, GSI_NEW_STMT);
 3976 /* Leave *GSI pointing at the initial load of EXPECTED.  */
 3976 *gsi = gsiret;
3979 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3980 doesn't fit into TYPE. The test for overflow should be regardless of
3981 -fwrapv, and even for unsigned types. */
3983 bool
3984 arith_overflowed_p (enum tree_code code, const_tree type,
3985 const_tree arg0, const_tree arg1)
3987 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3988 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3989 widest2_int_cst;
3990 widest2_int warg0 = widest2_int_cst (arg0);
3991 widest2_int warg1 = widest2_int_cst (arg1);
3992 widest2_int wres;
3993 switch (code)
3995 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3996 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3997 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3998 default: gcc_unreachable ();
4000 signop sign = TYPE_SIGN (type);
4001 if (sign == UNSIGNED && wi::neg_p (wres))
4002 return true;
4003 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4006 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4007 The statement may be replaced by another statement, e.g., if the call
4008 simplifies to a constant value. Return true if any changes were made.
4009 It is assumed that the operands have been previously folded. */
4011 static bool
4012 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4014 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4015 tree callee;
4016 bool changed = false;
4017 unsigned i;
4019 /* Fold *& in call arguments. */
4020 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4021 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4023 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4024 if (tmp)
4026 gimple_call_set_arg (stmt, i, tmp);
4027 changed = true;
4031 /* Check for virtual calls that became direct calls. */
4032 callee = gimple_call_fn (stmt);
4033 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4035 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4037 if (dump_file && virtual_method_call_p (callee)
4038 && !possible_polymorphic_call_target_p
4039 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4040 (OBJ_TYPE_REF_EXPR (callee)))))
4042 fprintf (dump_file,
4043 "Type inheritance inconsistent devirtualization of ");
4044 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4045 fprintf (dump_file, " to ");
4046 print_generic_expr (dump_file, callee, TDF_SLIM);
4047 fprintf (dump_file, "\n");
4050 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4051 changed = true;
4053 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4055 bool final;
4056 vec <cgraph_node *>targets
4057 = possible_polymorphic_call_targets (callee, stmt, &final);
4058 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4060 tree lhs = gimple_call_lhs (stmt);
4061 if (dump_enabled_p ())
4063 location_t loc = gimple_location_safe (stmt);
4064 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4065 "folding virtual function call to %s\n",
4066 targets.length () == 1
4067 ? targets[0]->name ()
4068 : "__builtin_unreachable");
4070 if (targets.length () == 1)
4072 tree fndecl = targets[0]->decl;
4073 gimple_call_set_fndecl (stmt, fndecl);
4074 changed = true;
4075 /* If changing the call to __cxa_pure_virtual
4076 or similar noreturn function, adjust gimple_call_fntype
4077 too. */
4078 if (gimple_call_noreturn_p (stmt)
4079 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4080 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4081 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4082 == void_type_node))
4083 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4084 /* If the call becomes noreturn, remove the lhs. */
4085 if (lhs
4086 && gimple_call_noreturn_p (stmt)
4087 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4088 || should_remove_lhs_p (lhs)))
4090 if (TREE_CODE (lhs) == SSA_NAME)
4092 tree var = create_tmp_var (TREE_TYPE (lhs));
4093 tree def = get_or_create_ssa_default_def (cfun, var);
4094 gimple *new_stmt = gimple_build_assign (lhs, def);
4095 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4097 gimple_call_set_lhs (stmt, NULL_TREE);
4099 maybe_remove_unused_call_args (cfun, stmt);
4101 else
4103 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4104 gimple *new_stmt = gimple_build_call (fndecl, 0);
4105 gimple_set_location (new_stmt, gimple_location (stmt));
4106 /* If the call had a SSA name as lhs morph that into
4107 an uninitialized value. */
4108 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4110 tree var = create_tmp_var (TREE_TYPE (lhs));
4111 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4112 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4113 set_ssa_default_def (cfun, var, lhs);
4115 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4116 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4117 gsi_replace (gsi, new_stmt, false);
4118 return true;
4124 /* Check for indirect calls that became direct calls, and then
4125 no longer require a static chain. */
4126 if (gimple_call_chain (stmt))
4128 tree fn = gimple_call_fndecl (stmt);
4129 if (fn && !DECL_STATIC_CHAIN (fn))
4131 gimple_call_set_chain (stmt, NULL);
4132 changed = true;
4134 else
4136 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4137 if (tmp)
4139 gimple_call_set_chain (stmt, tmp);
4140 changed = true;
4145 if (inplace)
4146 return changed;
4148 /* Check for builtins that CCP can handle using information not
4149 available in the generic fold routines. */
4150 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4152 if (gimple_fold_builtin (gsi))
4153 changed = true;
4155 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4157 changed |= targetm.gimple_fold_builtin (gsi);
4159 else if (gimple_call_internal_p (stmt))
4161 enum tree_code subcode = ERROR_MARK;
4162 tree result = NULL_TREE;
4163 bool cplx_result = false;
4164 tree overflow = NULL_TREE;
4165 switch (gimple_call_internal_fn (stmt))
4167 case IFN_BUILTIN_EXPECT:
4168 result = fold_builtin_expect (gimple_location (stmt),
4169 gimple_call_arg (stmt, 0),
4170 gimple_call_arg (stmt, 1),
4171 gimple_call_arg (stmt, 2));
4172 break;
4173 case IFN_UBSAN_OBJECT_SIZE:
4175 tree offset = gimple_call_arg (stmt, 1);
4176 tree objsize = gimple_call_arg (stmt, 2);
4177 if (integer_all_onesp (objsize)
4178 || (TREE_CODE (offset) == INTEGER_CST
4179 && TREE_CODE (objsize) == INTEGER_CST
4180 && tree_int_cst_le (offset, objsize)))
4182 replace_call_with_value (gsi, NULL_TREE);
4183 return true;
4186 break;
4187 case IFN_UBSAN_PTR:
4188 if (integer_zerop (gimple_call_arg (stmt, 1)))
4190 replace_call_with_value (gsi, NULL_TREE);
4191 return true;
4193 break;
4194 case IFN_UBSAN_BOUNDS:
4196 tree index = gimple_call_arg (stmt, 1);
4197 tree bound = gimple_call_arg (stmt, 2);
4198 if (TREE_CODE (index) == INTEGER_CST
4199 && TREE_CODE (bound) == INTEGER_CST)
4201 index = fold_convert (TREE_TYPE (bound), index);
4202 if (TREE_CODE (index) == INTEGER_CST
4203 && tree_int_cst_le (index, bound))
4205 replace_call_with_value (gsi, NULL_TREE);
4206 return true;
4210 break;
4211 case IFN_GOACC_DIM_SIZE:
4212 case IFN_GOACC_DIM_POS:
4213 result = fold_internal_goacc_dim (stmt);
4214 break;
4215 case IFN_UBSAN_CHECK_ADD:
4216 subcode = PLUS_EXPR;
4217 break;
4218 case IFN_UBSAN_CHECK_SUB:
4219 subcode = MINUS_EXPR;
4220 break;
4221 case IFN_UBSAN_CHECK_MUL:
4222 subcode = MULT_EXPR;
4223 break;
4224 case IFN_ADD_OVERFLOW:
4225 subcode = PLUS_EXPR;
4226 cplx_result = true;
4227 break;
4228 case IFN_SUB_OVERFLOW:
4229 subcode = MINUS_EXPR;
4230 cplx_result = true;
4231 break;
4232 case IFN_MUL_OVERFLOW:
4233 subcode = MULT_EXPR;
4234 cplx_result = true;
4235 break;
4236 default:
4237 break;
4239 if (subcode != ERROR_MARK)
4241 tree arg0 = gimple_call_arg (stmt, 0);
4242 tree arg1 = gimple_call_arg (stmt, 1);
4243 tree type = TREE_TYPE (arg0);
4244 if (cplx_result)
4246 tree lhs = gimple_call_lhs (stmt);
4247 if (lhs == NULL_TREE)
4248 type = NULL_TREE;
4249 else
4250 type = TREE_TYPE (TREE_TYPE (lhs));
4252 if (type == NULL_TREE)
4254 /* x = y + 0; x = y - 0; x = y * 0; */
4255 else if (integer_zerop (arg1))
4256 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4257 /* x = 0 + y; x = 0 * y; */
4258 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4259 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4260 /* x = y - y; */
4261 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4262 result = integer_zero_node;
4263 /* x = y * 1; x = 1 * y; */
4264 else if (subcode == MULT_EXPR && integer_onep (arg1))
4265 result = arg0;
4266 else if (subcode == MULT_EXPR && integer_onep (arg0))
4267 result = arg1;
4268 else if (TREE_CODE (arg0) == INTEGER_CST
4269 && TREE_CODE (arg1) == INTEGER_CST)
4271 if (cplx_result)
4272 result = int_const_binop (subcode, fold_convert (type, arg0),
4273 fold_convert (type, arg1));
4274 else
4275 result = int_const_binop (subcode, arg0, arg1);
4276 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4278 if (cplx_result)
4279 overflow = build_one_cst (type);
4280 else
4281 result = NULL_TREE;
4284 if (result)
4286 if (result == integer_zero_node)
4287 result = build_zero_cst (type);
4288 else if (cplx_result && TREE_TYPE (result) != type)
4290 if (TREE_CODE (result) == INTEGER_CST)
4292 if (arith_overflowed_p (PLUS_EXPR, type, result,
4293 integer_zero_node))
4294 overflow = build_one_cst (type);
4296 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4297 && TYPE_UNSIGNED (type))
4298 || (TYPE_PRECISION (type)
4299 < (TYPE_PRECISION (TREE_TYPE (result))
4300 + (TYPE_UNSIGNED (TREE_TYPE (result))
4301 && !TYPE_UNSIGNED (type)))))
4302 result = NULL_TREE;
4303 if (result)
4304 result = fold_convert (type, result);
4309 if (result)
4311 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4312 result = drop_tree_overflow (result);
4313 if (cplx_result)
4315 if (overflow == NULL_TREE)
4316 overflow = build_zero_cst (TREE_TYPE (result));
4317 tree ctype = build_complex_type (TREE_TYPE (result));
4318 if (TREE_CODE (result) == INTEGER_CST
4319 && TREE_CODE (overflow) == INTEGER_CST)
4320 result = build_complex (ctype, result, overflow);
4321 else
4322 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4323 ctype, result, overflow);
4325 if (!update_call_from_tree (gsi, result))
4326 gimplify_and_update_call_from_tree (gsi, result);
4327 changed = true;
4331 return changed;
4335 /* Return true whether NAME has a use on STMT. */
4337 static bool
4338 has_use_on_stmt (tree name, gimple *stmt)
4340 imm_use_iterator iter;
4341 use_operand_p use_p;
4342 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4343 if (USE_STMT (use_p) == stmt)
4344 return true;
4345 return false;
4348 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4349 gimple_simplify.
4351 Replaces *GSI with the simplification result in RCODE and OPS
4352 and the associated statements in *SEQ. Does the replacement
4353 according to INPLACE and returns true if the operation succeeded. */
4355 static bool
4356 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4357 gimple_match_op *res_op,
4358 gimple_seq *seq, bool inplace)
4360 gimple *stmt = gsi_stmt (*gsi);
4361 tree *ops = res_op->ops;
4362 unsigned int num_ops = res_op->num_ops;
4364 /* Play safe and do not allow abnormals to be mentioned in
4365 newly created statements. See also maybe_push_res_to_seq.
4366 As an exception allow such uses if there was a use of the
4367 same SSA name on the old stmt. */
4368 for (unsigned int i = 0; i < num_ops; ++i)
4369 if (TREE_CODE (ops[i]) == SSA_NAME
4370 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4371 && !has_use_on_stmt (ops[i], stmt))
4372 return false;
/* Likewise check the operands of a comparison tree wrapped as the
   first result operand.  */
4374 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4375 for (unsigned int i = 0; i < 2; ++i)
4376 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4377 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4378 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4379 return false;
4381 /* Don't insert new statements when INPLACE is true, even if we could
4382 reuse STMT for the final statement. */
4383 if (inplace && !gimple_seq_empty_p (*seq))
4384 return false;
/* A GIMPLE_COND can be updated in place: install a simplified
   comparison, test an SSA name or constant against zero, or, when
   not INPLACE, materialize the result into *SEQ first.  */
4386 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4388 gcc_assert (res_op->code.is_tree_code ());
4389 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4390 /* GIMPLE_CONDs condition may not throw. */
4391 && (!flag_exceptions
4392 || !cfun->can_throw_non_call_exceptions
4393 || !operation_could_trap_p (res_op->code,
4394 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4395 false, NULL_TREE)))
4396 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4397 else if (res_op->code == SSA_NAME)
4398 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4399 build_zero_cst (TREE_TYPE (ops[0])));
4400 else if (res_op->code == INTEGER_CST)
4402 if (integer_zerop (ops[0]))
4403 gimple_cond_make_false (cond_stmt);
4404 else
4405 gimple_cond_make_true (cond_stmt);
4407 else if (!inplace)
4409 tree res = maybe_push_res_to_seq (res_op, seq);
4410 if (!res)
4411 return false;
4412 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4413 build_zero_cst (TREE_TYPE (res)));
4415 else
4416 return false;
4417 if (dump_file && (dump_flags & TDF_DETAILS))
4419 fprintf (dump_file, "gimple_simplified to ");
4420 if (!gimple_seq_empty_p (*seq))
4421 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4422 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4423 0, TDF_SLIM);
4425 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4426 return true;
/* A GIMPLE_ASSIGN with a tree-code result: rewrite the RHS in place,
   unless doing so in place would need more operand slots than STMT
   has.  */
4428 else if (is_gimple_assign (stmt)
4429 && res_op->code.is_tree_code ())
4431 if (!inplace
4432 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4434 maybe_build_generic_op (res_op);
4435 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4436 res_op->op_or_null (0),
4437 res_op->op_or_null (1),
4438 res_op->op_or_null (2));
4439 if (dump_file && (dump_flags & TDF_DETAILS))
4441 fprintf (dump_file, "gimple_simplified to ");
4442 if (!gimple_seq_empty_p (*seq))
4443 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4444 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4445 0, TDF_SLIM);
4447 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4448 return true;
/* A call that simplified to the same combined function: only the
   argument values changed, so update them in place.  */
4451 else if (res_op->code.is_fn_code ()
4452 && gimple_call_combined_fn (stmt) == res_op->code)
4454 gcc_assert (num_ops == gimple_call_num_args (stmt));
4455 for (unsigned int i = 0; i < num_ops; ++i)
4456 gimple_call_set_arg (stmt, i, ops[i]);
4457 if (dump_file && (dump_flags & TDF_DETAILS))
4459 fprintf (dump_file, "gimple_simplified to ");
4460 if (!gimple_seq_empty_p (*seq))
4461 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4462 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4464 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4465 return true;
/* Otherwise, when not INPLACE, build the full replacement sequence for
   STMT's lhs and substitute it for the statement wholesale.  */
4467 else if (!inplace)
4469 if (gimple_has_lhs (stmt))
4471 tree lhs = gimple_get_lhs (stmt);
4472 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4473 return false;
4474 if (dump_file && (dump_flags & TDF_DETAILS))
4476 fprintf (dump_file, "gimple_simplified to ");
4477 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4479 gsi_replace_with_seq_vops (gsi, *seq);
4480 return true;
4482 else
4483 gcc_unreachable ();
4486 return false;
4489 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4491 static bool
4492 maybe_canonicalize_mem_ref_addr (tree *t)
4494 bool res = false;
/* Look through an outer ADDR_EXPR so the canonicalizations below apply
   to the addressed object.  */
4496 if (TREE_CODE (*t) == ADDR_EXPR)
4497 t = &TREE_OPERAND (*t, 0);
4499 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4500 generic vector extension. The actual vector referenced is
4501 view-converted to an array type for this purpose. If the index
4502 is constant the canonical representation in the middle-end is a
4503 BIT_FIELD_REF so re-write the former to the latter here. */
4504 if (TREE_CODE (*t) == ARRAY_REF
4505 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4506 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4507 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4509 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4510 if (VECTOR_TYPE_P (vtype))
4512 tree low = array_ref_low_bound (*t);
4513 if (TREE_CODE (low) == INTEGER_CST)
4515 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the bit offset of the indexed element and only rewrite
   when the access lies fully within the vector.  */
4517 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4518 wi::to_widest (low));
4519 idx = wi::mul (idx, wi::to_widest
4520 (TYPE_SIZE (TREE_TYPE (*t))));
4521 widest_int ext
4522 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4523 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4525 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4526 TREE_TYPE (*t),
4527 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4528 TYPE_SIZE (TREE_TYPE (*t)),
4529 wide_int_to_tree (bitsizetype, idx));
4530 res = true;
/* Strip handled components to get at the base of the reference.  */
4537 while (handled_component_p (*t))
4538 t = &TREE_OPERAND (*t, 0);
4540 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4541 of invariant addresses into a SSA name MEM_REF address. */
4542 if (TREE_CODE (*t) == MEM_REF
4543 || TREE_CODE (*t) == TARGET_MEM_REF)
4545 tree addr = TREE_OPERAND (*t, 0);
4546 if (TREE_CODE (addr) == ADDR_EXPR
4547 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4548 || handled_component_p (TREE_OPERAND (addr, 0))))
4550 tree base;
4551 poly_int64 coffset;
4552 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4553 &coffset);
4554 if (!base)
4555 gcc_unreachable ();
/* Fold the component offset into the MEM_REF's constant offset
   operand and take the address of the new base.  */
4557 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4558 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4559 TREE_OPERAND (*t, 1),
4560 size_int (coffset));
4561 res = true;
4563 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4564 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4567 /* Canonicalize back MEM_REFs to plain reference trees if the object
4568 accessed is a decl that has the same access semantics as the MEM_REF. */
4569 if (TREE_CODE (*t) == MEM_REF
4570 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4571 && integer_zerop (TREE_OPERAND (*t, 1))
4572 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4574 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4575 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4576 if (/* Same volatile qualification. */
4577 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4578 /* Same TBAA behavior with -fstrict-aliasing. */
4579 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4580 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4581 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4582 /* Same alignment. */
4583 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4584 /* We have to look out here to not drop a required conversion
4585 from the rhs to the lhs if *t appears on the lhs or vice-versa
4586 if it appears on the rhs. Thus require strict type
4587 compatibility. */
4588 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4590 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4591 res = true;
4595 /* Canonicalize TARGET_MEM_REF in particular with respect to
4596 the indexes becoming constant. */
4597 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4599 tree tem = maybe_fold_tmr (*t);
4600 if (tem)
4602 *t = tem;
4603 res = true;
4607 return res;
4610 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4611 distinguishes both cases. */
4613 static bool
4614 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4616 bool changed = false;
4617 gimple *stmt = gsi_stmt (*gsi);
4618 bool nowarning = gimple_no_warning_p (stmt);
4619 unsigned i;
/* Queue overflow warnings until we know whether the statement was
   actually changed; paired with fold_undefer_overflow_warnings at the
   end of this function.  */
4620 fold_defer_overflow_warnings ();
4622 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4623 after propagation.
4624 ??? This shouldn't be done in generic folding but in the
4625 propagation helpers which also know whether an address was
4626 propagated.
4627 Also canonicalize operand order. */
4628 switch (gimple_code (stmt))
4630 case GIMPLE_ASSIGN:
4631 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4633 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4634 if ((REFERENCE_CLASS_P (*rhs)
4635 || TREE_CODE (*rhs) == ADDR_EXPR)
4636 && maybe_canonicalize_mem_ref_addr (rhs))
4637 changed = true;
4638 tree *lhs = gimple_assign_lhs_ptr (stmt);
4639 if (REFERENCE_CLASS_P (*lhs)
4640 && maybe_canonicalize_mem_ref_addr (lhs))
4641 changed = true;
4643 else
4645 /* Canonicalize operand order. */
4646 enum tree_code code = gimple_assign_rhs_code (stmt);
4647 if (TREE_CODE_CLASS (code) == tcc_comparison
4648 || commutative_tree_code (code)
4649 || commutative_ternary_tree_code (code))
4651 tree rhs1 = gimple_assign_rhs1 (stmt);
4652 tree rhs2 = gimple_assign_rhs2 (stmt);
4653 if (tree_swap_operands_p (rhs1, rhs2))
4655 gimple_assign_set_rhs1 (stmt, rhs2);
4656 gimple_assign_set_rhs2 (stmt, rhs1);
/* Swapping comparison operands requires mirroring the
   comparison code as well.  */
4657 if (TREE_CODE_CLASS (code) == tcc_comparison)
4658 gimple_assign_set_rhs_code (stmt,
4659 swap_tree_comparison (code));
4660 changed = true;
4664 break;
4665 case GIMPLE_CALL:
4667 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4669 tree *arg = gimple_call_arg_ptr (stmt, i);
4670 if (REFERENCE_CLASS_P (*arg)
4671 && maybe_canonicalize_mem_ref_addr (arg))
4672 changed = true;
4674 tree *lhs = gimple_call_lhs_ptr (stmt);
4675 if (*lhs
4676 && REFERENCE_CLASS_P (*lhs)
4677 && maybe_canonicalize_mem_ref_addr (lhs))
4678 changed = true;
4679 break;
4681 case GIMPLE_ASM:
/* Canonicalize addresses in asm output and input operands.  */
4683 gasm *asm_stmt = as_a <gasm *> (stmt);
4684 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4686 tree link = gimple_asm_output_op (asm_stmt, i);
4687 tree op = TREE_VALUE (link);
4688 if (REFERENCE_CLASS_P (op)
4689 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4690 changed = true;
4692 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4694 tree link = gimple_asm_input_op (asm_stmt, i);
4695 tree op = TREE_VALUE (link);
4696 if ((REFERENCE_CLASS_P (op)
4697 || TREE_CODE (op) == ADDR_EXPR)
4698 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4699 changed = true;
4702 break;
4703 case GIMPLE_DEBUG:
4704 if (gimple_debug_bind_p (stmt))
4706 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4707 if (*val
4708 && (REFERENCE_CLASS_P (*val)
4709 || TREE_CODE (*val) == ADDR_EXPR)
4710 && maybe_canonicalize_mem_ref_addr (val))
4711 changed = true;
4713 break;
4714 case GIMPLE_COND:
4716 /* Canonicalize operand order. */
4717 tree lhs = gimple_cond_lhs (stmt);
4718 tree rhs = gimple_cond_rhs (stmt);
4719 if (tree_swap_operands_p (lhs, rhs))
4721 gcond *gc = as_a <gcond *> (stmt);
4722 gimple_cond_set_lhs (gc, rhs);
4723 gimple_cond_set_rhs (gc, lhs);
4724 gimple_cond_set_code (gc,
4725 swap_tree_comparison (gimple_cond_code (gc)));
4726 changed = true;
4729 default:;
4732 /* Dispatch to pattern-based folding. */
4733 if (!inplace
4734 || is_gimple_assign (stmt)
4735 || gimple_code (stmt) == GIMPLE_COND)
4737 gimple_seq seq = NULL;
4738 gimple_match_op res_op;
/* With INPLACE no extra statements may be emitted, so pass a NULL
   sequence to gimple_simplify in that case.  */
4739 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4740 valueize, valueize))
4742 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4743 changed = true;
4744 else
4745 gimple_seq_discard (seq);
/* The simplification above may have replaced the statement;
   re-fetch it from the iterator.  */
4749 stmt = gsi_stmt (*gsi);
4751 /* Fold the main computation performed by the statement. */
4752 switch (gimple_code (stmt))
4754 case GIMPLE_ASSIGN:
4756 /* Try to canonicalize for boolean-typed X the comparisons
4757 X == 0, X == 1, X != 0, and X != 1. */
4758 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4759 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4761 tree lhs = gimple_assign_lhs (stmt);
4762 tree op1 = gimple_assign_rhs1 (stmt);
4763 tree op2 = gimple_assign_rhs2 (stmt);
4764 tree type = TREE_TYPE (op1);
4766 /* Check whether the comparison operands are of the same boolean
4767 type as the result type is.
4768 Check that second operand is an integer-constant with value
4769 one or zero. */
4770 if (TREE_CODE (op2) == INTEGER_CST
4771 && (integer_zerop (op2) || integer_onep (op2))
4772 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4774 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4775 bool is_logical_not = false;
4777 /* X == 0 and X != 1 is a logical-not of X
4778 X == 1 and X != 0 is X */
4779 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4780 || (cmp_code == NE_EXPR && integer_onep (op2)))
4781 is_logical_not = true;
4783 if (is_logical_not == false)
4784 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4785 /* Only for one-bit precision typed X the transformation
4786 !X -> ~X is valid. */
4787 else if (TYPE_PRECISION (type) == 1)
4788 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4789 /* Otherwise we use !X -> X ^ 1. */
4790 else
4791 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4792 build_int_cst (type, 1));
4793 changed = true;
4794 break;
/* Generic RHS folding; only installed when not INPLACE or when the
   new RHS needs no more operand slots than the old one.  */
4798 unsigned old_num_ops = gimple_num_ops (stmt);
4799 tree lhs = gimple_assign_lhs (stmt);
4800 tree new_rhs = fold_gimple_assign (gsi);
4801 if (new_rhs
4802 && !useless_type_conversion_p (TREE_TYPE (lhs),
4803 TREE_TYPE (new_rhs)))
4804 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4805 if (new_rhs
4806 && (!inplace
4807 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4809 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4810 changed = true;
4812 break;
4815 case GIMPLE_CALL:
4816 changed |= gimple_fold_call (gsi, inplace);
4817 break;
4819 case GIMPLE_ASM:
4820 /* Fold *& in asm operands. */
4822 gasm *asm_stmt = as_a <gasm *> (stmt);
4823 size_t noutputs;
4824 const char **oconstraints;
4825 const char *constraint;
4826 bool allows_mem, allows_reg;
4828 noutputs = gimple_asm_noutputs (asm_stmt);
4829 oconstraints = XALLOCAVEC (const char *, noutputs);
4831 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4833 tree link = gimple_asm_output_op (asm_stmt, i);
4834 tree op = TREE_VALUE (link);
4835 oconstraints[i]
4836 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4837 if (REFERENCE_CLASS_P (op)
4838 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4840 TREE_VALUE (link) = op;
4841 changed = true;
4844 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4846 tree link = gimple_asm_input_op (asm_stmt, i);
4847 tree op = TREE_VALUE (link);
4848 constraint
4849 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Whether the input may be folded as memory depends on its
   constraint, so parse it first.  */
4850 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4851 oconstraints, &allows_mem, &allows_reg);
4852 if (REFERENCE_CLASS_P (op)
4853 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4854 != NULL_TREE)
4856 TREE_VALUE (link) = op;
4857 changed = true;
4861 break;
4863 case GIMPLE_DEBUG:
4864 if (gimple_debug_bind_p (stmt))
4866 tree val = gimple_debug_bind_get_value (stmt);
4867 if (val
4868 && REFERENCE_CLASS_P (val))
4870 tree tem = maybe_fold_reference (val, false);
4871 if (tem)
4873 gimple_debug_bind_set_value (stmt, tem);
4874 changed = true;
4877 else if (val
4878 && TREE_CODE (val) == ADDR_EXPR)
4880 tree ref = TREE_OPERAND (val, 0);
4881 tree tem = maybe_fold_reference (ref, false);
4882 if (tem)
4884 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4885 gimple_debug_bind_set_value (stmt, tem);
4886 changed = true;
4890 break;
4892 case GIMPLE_RETURN:
4894 greturn *ret_stmt = as_a<greturn *> (stmt);
4895 tree ret = gimple_return_retval(ret_stmt);
/* Propagate the valueized return value if copy propagation of it
   is legal.  */
4897 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4899 tree val = valueize (ret);
4900 if (val && val != ret
4901 && may_propagate_copy (ret, val))
4903 gimple_return_set_retval (ret_stmt, val);
4904 changed = true;
4908 break;
4910 default:;
4913 stmt = gsi_stmt (*gsi);
4915 /* Fold *& on the lhs. */
4916 if (gimple_has_lhs (stmt))
4918 tree lhs = gimple_get_lhs (stmt);
4919 if (lhs && REFERENCE_CLASS_P (lhs))
4921 tree new_lhs = maybe_fold_reference (lhs, true);
4922 if (new_lhs)
4924 gimple_set_lhs (stmt, new_lhs);
4925 changed = true;
4930 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4931 return changed;
4934 /* Valueziation callback that ends up not following SSA edges. */
4936 tree
4937 no_follow_ssa_edges (tree)
4939 return NULL_TREE;
4942 /* Valueization callback that ends up following single-use SSA edges only. */
4944 tree
4945 follow_single_use_edges (tree val)
4947 if (TREE_CODE (val) == SSA_NAME
4948 && !has_single_use (val))
4949 return NULL_TREE;
4950 return val;
4953 /* Valueization callback that follows all SSA edges. */
4955 tree
4956 follow_all_ssa_edges (tree val)
4958 return val;
4961 /* Fold the statement pointed to by GSI. In some cases, this function may
4962 replace the whole statement with a new one. Returns true iff folding
4963 makes any changes.
4964 The statement pointed to by GSI should be in valid gimple form but may
4965 be in unfolded state as resulting from for example constant propagation
4966 which can produce *&x = 0. */
4968 bool
4969 fold_stmt (gimple_stmt_iterator *gsi)
4971 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4974 bool
4975 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4977 return fold_stmt_1 (gsi, false, valueize);
4980 /* Perform the minimal folding on statement *GSI. Only operations like
4981 *&x created by constant propagation are handled. The statement cannot
4982 be replaced with a new one. Return true if the statement was
4983 changed, false otherwise.
4984 The statement *GSI should be in valid gimple form but may
4985 be in unfolded state as resulting from for example constant propagation
4986 which can produce *&x = 0. */
4988 bool
4989 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4991 gimple *stmt = gsi_stmt (*gsi);
4992 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4993 gcc_assert (gsi_stmt (*gsi) == stmt);
4994 return changed;
4997 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4998 if EXPR is null or we don't know how.
4999 If non-null, the result always has boolean type. */
5001 static tree
5002 canonicalize_bool (tree expr, bool invert)
5004 if (!expr)
5005 return NULL_TREE;
5006 else if (invert)
5008 if (integer_nonzerop (expr))
5009 return boolean_false_node;
5010 else if (integer_zerop (expr))
5011 return boolean_true_node;
5012 else if (TREE_CODE (expr) == SSA_NAME)
5013 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5014 build_int_cst (TREE_TYPE (expr), 0));
5015 else if (COMPARISON_CLASS_P (expr))
5016 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5017 boolean_type_node,
5018 TREE_OPERAND (expr, 0),
5019 TREE_OPERAND (expr, 1));
5020 else
5021 return NULL_TREE;
5023 else
5025 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5026 return expr;
5027 if (integer_nonzerop (expr))
5028 return boolean_true_node;
5029 else if (integer_zerop (expr))
5030 return boolean_false_node;
5031 else if (TREE_CODE (expr) == SSA_NAME)
5032 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5033 build_int_cst (TREE_TYPE (expr), 0));
5034 else if (COMPARISON_CLASS_P (expr))
5035 return fold_build2 (TREE_CODE (expr),
5036 boolean_type_node,
5037 TREE_OPERAND (expr, 0),
5038 TREE_OPERAND (expr, 1));
5039 else
5040 return NULL_TREE;
5044 /* Check to see if a boolean expression EXPR is logically equivalent to the
5045 comparison (OP1 CODE OP2). Check for various identities involving
5046 SSA_NAMEs. */
5048 static bool
5049 same_bool_comparison_p (const_tree expr, enum tree_code code,
5050 const_tree op1, const_tree op2)
5052 gimple *s;
5054 /* The obvious case. */
5055 if (TREE_CODE (expr) == code
5056 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5057 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5058 return true;
5060 /* Check for comparing (name, name != 0) and the case where expr
5061 is an SSA_NAME with a definition matching the comparison. */
5062 if (TREE_CODE (expr) == SSA_NAME
5063 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5065 if (operand_equal_p (expr, op1, 0))
5066 return ((code == NE_EXPR && integer_zerop (op2))
5067 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise look at EXPR's defining statement and compare it against
   the given comparison directly.  */
5068 s = SSA_NAME_DEF_STMT (expr);
5069 if (is_gimple_assign (s)
5070 && gimple_assign_rhs_code (s) == code
5071 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5072 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5073 return true;
5076 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5077 of name is a comparison, recurse. */
5078 if (TREE_CODE (op1) == SSA_NAME
5079 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5081 s = SSA_NAME_DEF_STMT (op1);
5082 if (is_gimple_assign (s)
5083 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5085 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) or (name == 1): recurse with the defining comparison
   itself.  */
5086 if ((c == NE_EXPR && integer_zerop (op2))
5087 || (c == EQ_EXPR && integer_nonzerop (op2)))
5088 return same_bool_comparison_p (expr, c,
5089 gimple_assign_rhs1 (s),
5090 gimple_assign_rhs2 (s));
/* (name == 0) or (name != 1): recurse with the inverted comparison.  */
5091 if ((c == EQ_EXPR && integer_zerop (op2))
5092 || (c == NE_EXPR && integer_nonzerop (op2)))
5093 return same_bool_comparison_p (expr,
5094 invert_tree_comparison (c, false),
5095 gimple_assign_rhs1 (s),
5096 gimple_assign_rhs2 (s));
5099 return false;
5102 /* Check to see if two boolean expressions OP1 and OP2 are logically
5103 equivalent. */
5105 static bool
5106 same_bool_result_p (const_tree op1, const_tree op2)
5108 /* Simple cases first. */
5109 if (operand_equal_p (op1, op2, 0))
5110 return true;
5112 /* Check the cases where at least one of the operands is a comparison.
5113 These are a bit smarter than operand_equal_p in that they apply some
5114 identifies on SSA_NAMEs. */
5115 if (COMPARISON_CLASS_P (op2)
5116 && same_bool_comparison_p (op1, TREE_CODE (op2),
5117 TREE_OPERAND (op2, 0),
5118 TREE_OPERAND (op2, 1)))
5119 return true;
5120 if (COMPARISON_CLASS_P (op1)
5121 && same_bool_comparison_p (op2, TREE_CODE (op1),
5122 TREE_OPERAND (op1, 0),
5123 TREE_OPERAND (op1, 1)))
5124 return true;
5126 /* Default case. */
5127 return false;
5130 /* Forward declarations for some mutually recursive functions. */
5132 static tree
5133 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5134 enum tree_code code2, tree op2a, tree op2b);
5135 static tree
5136 and_var_with_comparison (tree var, bool invert,
5137 enum tree_code code2, tree op2a, tree op2b);
5138 static tree
5139 and_var_with_comparison_1 (gimple *stmt,
5140 enum tree_code code2, tree op2a, tree op2b);
5141 static tree
5142 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5143 enum tree_code code2, tree op2a, tree op2b);
5144 static tree
5145 or_var_with_comparison (tree var, bool invert,
5146 enum tree_code code2, tree op2a, tree op2b);
5147 static tree
5148 or_var_with_comparison_1 (gimple *stmt,
5149 enum tree_code code2, tree op2a, tree op2b);
5151 /* Helper function for and_comparisons_1: try to simplify the AND of the
5152 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5153 If INVERT is true, invert the value of the VAR before doing the AND.
5154 Return NULL_EXPR if we can't simplify this to a single expression. */
5156 static tree
5157 and_var_with_comparison (tree var, bool invert,
5158 enum tree_code code2, tree op2a, tree op2b)
5160 tree t;
5161 gimple *stmt = SSA_NAME_DEF_STMT (var);
5163 /* We can only deal with variables whose definitions are assignments. */
5164 if (!is_gimple_assign (stmt))
5165 return NULL_TREE;
5167 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5168 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5169 Then we only have to consider the simpler non-inverted cases. */
5170 if (invert)
5171 t = or_var_with_comparison_1 (stmt,
5172 invert_tree_comparison (code2, false),
5173 op2a, op2b);
5174 else
5175 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5176 return canonicalize_bool (t, invert);
5179 /* Try to simplify the AND of the ssa variable defined by the assignment
5180 STMT with the comparison specified by (OP2A CODE2 OP2B).
5181 Return NULL_EXPR if we can't simplify this to a single expression. */
5183 static tree
5184 and_var_with_comparison_1 (gimple *stmt,
5185 enum tree_code code2, tree op2a, tree op2b)
5187 tree var = gimple_assign_lhs (stmt);
5188 tree true_test_var = NULL_TREE;
5189 tree false_test_var = NULL_TREE;
5190 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5192 /* Check for identities like (var AND (var == 0)) => false. */
5193 if (TREE_CODE (op2a) == SSA_NAME
5194 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
/* Classify the comparison as a truth test of op2a: (op2a != 0) /
   (op2a == 1) tests for true, (op2a == 0) / (op2a != 1) for false.  */
5196 if ((code2 == NE_EXPR && integer_zerop (op2b))
5197 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5199 true_test_var = op2a;
5200 if (var == true_test_var)
5201 return var;
5203 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5204 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5206 false_test_var = op2a;
5207 if (var == false_test_var)
5208 return boolean_false_node;
5212 /* If the definition is a comparison, recurse on it. */
5213 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5215 tree t = and_comparisons_1 (innercode,
5216 gimple_assign_rhs1 (stmt),
5217 gimple_assign_rhs2 (stmt),
5218 code2,
5219 op2a,
5220 op2b);
5221 if (t)
5222 return t;
5225 /* If the definition is an AND or OR expression, we may be able to
5226 simplify by reassociating. */
5227 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5228 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5230 tree inner1 = gimple_assign_rhs1 (stmt);
5231 tree inner2 = gimple_assign_rhs2 (stmt);
5232 gimple *s;
5233 tree t;
5234 tree partial = NULL_TREE;
5235 bool is_and = (innercode == BIT_AND_EXPR);
5237 /* Check for boolean identities that don't require recursive examination
5238 of inner1/inner2:
5239 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5240 inner1 AND (inner1 OR inner2) => inner1
5241 !inner1 AND (inner1 AND inner2) => false
5242 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5243 Likewise for similar cases involving inner2. */
5244 if (inner1 == true_test_var)
5245 return (is_and ? var : inner1);
5246 else if (inner2 == true_test_var)
5247 return (is_and ? var : inner2);
5248 else if (inner1 == false_test_var)
5249 return (is_and
5250 ? boolean_false_node
5251 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5252 else if (inner2 == false_test_var)
5253 return (is_and
5254 ? boolean_false_node
5255 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5257 /* Next, redistribute/reassociate the AND across the inner tests.
5258 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5259 if (TREE_CODE (inner1) == SSA_NAME
5260 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5261 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5262 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5263 gimple_assign_rhs1 (s),
5264 gimple_assign_rhs2 (s),
5265 code2, op2a, op2b)))
5267 /* Handle the AND case, where we are reassociating:
5268 (inner1 AND inner2) AND (op2a code2 op2b)
5269 => (t AND inner2)
5270 If the partial result t is a constant, we win. Otherwise
5271 continue on to try reassociating with the other inner test. */
5272 if (is_and)
5274 if (integer_onep (t))
5275 return inner2;
5276 else if (integer_zerop (t))
5277 return boolean_false_node;
5280 /* Handle the OR case, where we are redistributing:
5281 (inner1 OR inner2) AND (op2a code2 op2b)
5282 => (t OR (inner2 AND (op2a code2 op2b))) */
5283 else if (integer_onep (t))
5284 return boolean_true_node;
5286 /* Save partial result for later. */
5287 partial = t;
5290 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5291 if (TREE_CODE (inner2) == SSA_NAME
5292 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5293 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5294 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5295 gimple_assign_rhs1 (s),
5296 gimple_assign_rhs2 (s),
5297 code2, op2a, op2b)))
5299 /* Handle the AND case, where we are reassociating:
5300 (inner1 AND inner2) AND (op2a code2 op2b)
5301 => (inner1 AND t) */
5302 if (is_and)
5304 if (integer_onep (t))
5305 return inner1;
5306 else if (integer_zerop (t))
5307 return boolean_false_node;
5308 /* If both are the same, we can apply the identity
5309 (x AND x) == x. */
5310 else if (partial && same_bool_result_p (t, partial))
5311 return t;
5314 /* Handle the OR case. where we are redistributing:
5315 (inner1 OR inner2) AND (op2a code2 op2b)
5316 => (t OR (inner1 AND (op2a code2 op2b)))
5317 => (t OR partial) */
5318 else
5320 if (integer_onep (t))
5321 return boolean_true_node;
5322 else if (partial)
5324 /* We already got a simplification for the other
5325 operand to the redistributed OR expression. The
5326 interesting case is when at least one is false.
5327 Or, if both are the same, we can apply the identity
5328 (x OR x) == x. */
5329 if (integer_zerop (partial))
5330 return t;
5331 else if (integer_zerop (t))
5332 return partial;
5333 else if (same_bool_result_p (t, partial))
5334 return t;
/* No single-expression simplification found.  */
5339 return NULL_TREE;
5342 /* Try to simplify the AND of two comparisons defined by
5343 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5344 If this can be done without constructing an intermediate value,
5345 return the resulting tree; otherwise NULL_TREE is returned.
5346 This function is deliberately asymmetric as it recurses on SSA_DEFs
5347 in the first comparison but not the second. */
5349 static tree
5350 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5351 enum tree_code code2, tree op2a, tree op2b)
5353 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5355 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5356 if (operand_equal_p (op1a, op2a, 0)
5357 && operand_equal_p (op1b, op2b, 0))
5359 /* Result will be either NULL_TREE, or a combined comparison. */
5360 tree t = combine_comparisons (UNKNOWN_LOCATION,
5361 TRUTH_ANDIF_EXPR, code1, code2,
5362 truth_type, op1a, op1b);
5363 if (t)
5364 return t;
5367 /* Likewise the swapped case of the above. */
5368 if (operand_equal_p (op1a, op2b, 0)
5369 && operand_equal_p (op1b, op2a, 0))
5371 /* Result will be either NULL_TREE, or a combined comparison. */
5372 tree t = combine_comparisons (UNKNOWN_LOCATION,
5373 TRUTH_ANDIF_EXPR, code1,
5374 swap_tree_comparison (code2),
5375 truth_type, op1a, op1b);
5376 if (t)
5377 return t;
5380 /* If both comparisons are of the same value against constants, we might
5381 be able to merge them. */
5382 if (operand_equal_p (op1a, op2a, 0)
5383 && TREE_CODE (op1b) == INTEGER_CST
5384 && TREE_CODE (op2b) == INTEGER_CST)
5386 int cmp = tree_int_cst_compare (op1b, op2b);
/* CMP is negative, zero, or positive according as OP1B is less than,
   equal to, or greater than OP2B.  The case tables below are all keyed
   off this three-way result.  */
5388 /* If we have (op1a == op1b), we should either be able to
5389 return that or FALSE, depending on whether the constant op1b
5390 also satisfies the other comparison against op2b. */
5391 if (code1 == EQ_EXPR)
5393 bool done = true;
5394 bool val;
5395 switch (code2)
5397 case EQ_EXPR: val = (cmp == 0); break;
5398 case NE_EXPR: val = (cmp != 0); break;
5399 case LT_EXPR: val = (cmp < 0); break;
5400 case GT_EXPR: val = (cmp > 0); break;
5401 case LE_EXPR: val = (cmp <= 0); break;
5402 case GE_EXPR: val = (cmp >= 0); break;
5403 default: done = false;
5405 if (done)
5407 if (val)
5408 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5409 else
5410 return boolean_false_node;
5413 /* Likewise if the second comparison is an == comparison. */
5414 else if (code2 == EQ_EXPR)
5416 bool done = true;
5417 bool val;
5418 switch (code1)
5420 case EQ_EXPR: val = (cmp == 0); break;
5421 case NE_EXPR: val = (cmp != 0); break;
5422 case LT_EXPR: val = (cmp > 0); break;
5423 case GT_EXPR: val = (cmp < 0); break;
5424 case LE_EXPR: val = (cmp >= 0); break;
5425 case GE_EXPR: val = (cmp <= 0); break;
5426 default: done = false;
5428 if (done)
5430 if (val)
5431 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5432 else
5433 return boolean_false_node;
5437 /* Same business with inequality tests. */
5438 else if (code1 == NE_EXPR)
5440 bool val;
5441 switch (code2)
5443 case EQ_EXPR: val = (cmp != 0); break;
5444 case NE_EXPR: val = (cmp == 0); break;
5445 case LT_EXPR: val = (cmp >= 0); break;
5446 case GT_EXPR: val = (cmp <= 0); break;
5447 case LE_EXPR: val = (cmp > 0); break;
5448 case GE_EXPR: val = (cmp < 0); break;
5449 default:
5450 val = false;
5452 if (val)
5453 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5455 else if (code2 == NE_EXPR)
5457 bool val;
5458 switch (code1)
5460 case EQ_EXPR: val = (cmp == 0); break;
5461 case NE_EXPR: val = (cmp != 0); break;
5462 case LT_EXPR: val = (cmp <= 0); break;
5463 case GT_EXPR: val = (cmp >= 0); break;
5464 case LE_EXPR: val = (cmp < 0); break;
5465 case GE_EXPR: val = (cmp > 0); break;
5466 default:
5467 val = false;
5469 if (val)
5470 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5473 /* Choose the more restrictive of two < or <= comparisons. */
5474 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5475 && (code2 == LT_EXPR || code2 == LE_EXPR))
5477 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5478 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5479 else
5480 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5483 /* Likewise choose the more restrictive of two > or >= comparisons. */
5484 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5485 && (code2 == GT_EXPR || code2 == GE_EXPR))
5487 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5488 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5489 else
5490 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5493 /* Check for singleton ranges: (x <= c AND x >= c) folds to x == c. */
5494 else if (cmp == 0
5495 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5496 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5497 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5499 /* Check for disjoint ranges. */
5500 else if (cmp <= 0
5501 && (code1 == LT_EXPR || code1 == LE_EXPR)
5502 && (code2 == GT_EXPR || code2 == GE_EXPR))
5503 return boolean_false_node;
5504 else if (cmp >= 0
5505 && (code1 == GT_EXPR || code1 == GE_EXPR)
5506 && (code2 == LT_EXPR || code2 == LE_EXPR))
5507 return boolean_false_node;
5510 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5511 NAME's definition is a truth value. See if there are any simplifications
5512 that can be done against the NAME's definition. */
5513 if (TREE_CODE (op1a) == SSA_NAME
5514 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5515 && (integer_zerop (op1b) || integer_onep (op1b)))
5517 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5518 || (code1 == NE_EXPR && integer_onep (op1b)));
5519 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5520 switch (gimple_code (stmt))
5522 case GIMPLE_ASSIGN:
5523 /* Try to simplify by copy-propagating the definition. */
5524 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5526 case GIMPLE_PHI:
5527 /* If every argument to the PHI produces the same result when
5528 ANDed with the second comparison, we win.
5529 Do not do this unless the type is bool since we need a bool
5530 result here anyway. */
5531 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5533 tree result = NULL_TREE;
5534 unsigned i;
5535 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5537 tree arg = gimple_phi_arg_def (stmt, i);
5539 /* If this PHI has itself as an argument, ignore it.
5540 If all the other args produce the same result,
5541 we're still OK. */
5542 if (arg == gimple_phi_result (stmt))
5543 continue;
5544 else if (TREE_CODE (arg) == INTEGER_CST)
5546 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5548 if (!result)
5549 result = boolean_false_node;
5550 else if (!integer_zerop (result))
5551 return NULL_TREE;
5553 else if (!result)
5554 result = fold_build2 (code2, boolean_type_node,
5555 op2a, op2b);
5556 else if (!same_bool_comparison_p (result,
5557 code2, op2a, op2b))
5558 return NULL_TREE;
5560 else if (TREE_CODE (arg) == SSA_NAME
5561 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5563 tree temp;
5564 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5565 /* In simple cases we can look through PHI nodes,
5566 but we have to be careful with loops.
5567 See PR49073. */
5568 if (! dom_info_available_p (CDI_DOMINATORS)
5569 || gimple_bb (def_stmt) == gimple_bb (stmt)
5570 || dominated_by_p (CDI_DOMINATORS,
5571 gimple_bb (def_stmt),
5572 gimple_bb (stmt)))
5573 return NULL_TREE;
5574 temp = and_var_with_comparison (arg, invert, code2,
5575 op2a, op2b);
5576 if (!temp)
5577 return NULL_TREE;
5578 else if (!result)
5579 result = temp;
5580 else if (!same_bool_result_p (result, temp))
5581 return NULL_TREE;
5583 else
5584 return NULL_TREE;
5586 return result;
5589 default:
5590 break;
5593 return NULL_TREE;
5596 /* Try to simplify the AND of two comparisons, specified by
5597 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5598 If this can be simplified to a single expression (without requiring
5599 introducing more SSA variables to hold intermediate values),
5600 return the resulting tree. Otherwise return NULL_TREE.
5601 If the result expression is non-null, it has boolean type. */
5603 tree
5604 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5605 enum tree_code code2, tree op2a, tree op2b)
5607 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5608 if (t)
5609 return t;
5610 else
5611 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5614 /* Helper function for or_comparisons_1: try to simplify the OR of the
5615 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5616 If INVERT is true, invert the value of VAR before doing the OR.
5617 Return NULL_EXPR if we can't simplify this to a single expression. */
5619 static tree
5620 or_var_with_comparison (tree var, bool invert,
5621 enum tree_code code2, tree op2a, tree op2b)
5623 tree t;
5624 gimple *stmt = SSA_NAME_DEF_STMT (var);
5626 /* We can only deal with variables whose definitions are assignments. */
5627 if (!is_gimple_assign (stmt))
5628 return NULL_TREE;
5630 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5631 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5632 Then we only have to consider the simpler non-inverted cases. */
5633 if (invert)
5634 t = and_var_with_comparison_1 (stmt,
5635 invert_tree_comparison (code2, false),
5636 op2a, op2b);
5637 else
5638 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5639 return canonicalize_bool (t, invert);
5642 /* Try to simplify the OR of the ssa variable defined by the assignment
5643 STMT with the comparison specified by (OP2A CODE2 OP2B).
5644 Return NULL_TREE if we can't simplify this to a single expression. */
5646 static tree
5647 or_var_with_comparison_1 (gimple *stmt,
5648 enum tree_code code2, tree op2a, tree op2b)
5650 tree var = gimple_assign_lhs (stmt);
5651 tree true_test_var = NULL_TREE;
5652 tree false_test_var = NULL_TREE;
5653 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5655 /* Check for identities like (var OR (var != 0)) => true . */
5656 if (TREE_CODE (op2a) == SSA_NAME
5657 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5659 if ((code2 == NE_EXPR && integer_zerop (op2b))
5660 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5662 true_test_var = op2a;
5663 if (var == true_test_var)
5664 return var;
5666 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5667 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5669 false_test_var = op2a;
5670 if (var == false_test_var)
5671 return boolean_true_node;
5675 /* If the definition is a comparison, recurse on it. */
5676 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5678 tree t = or_comparisons_1 (innercode,
5679 gimple_assign_rhs1 (stmt),
5680 gimple_assign_rhs2 (stmt),
5681 code2,
5682 op2a,
5683 op2b);
5684 if (t)
5685 return t;
5688 /* If the definition is an AND or OR expression, we may be able to
5689 simplify by reassociating. */
5690 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5691 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5693 tree inner1 = gimple_assign_rhs1 (stmt);
5694 tree inner2 = gimple_assign_rhs2 (stmt);
5695 gimple *s;
5696 tree t;
5697 tree partial = NULL_TREE;
5698 bool is_or = (innercode == BIT_IOR_EXPR);
5700 /* Check for boolean identities that don't require recursive examination
5701 of inner1/inner2:
5702 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5703 inner1 OR (inner1 AND inner2) => inner1
5704 !inner1 OR (inner1 OR inner2) => true
5705 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
   */
5707 if (inner1 == true_test_var)
5708 return (is_or ? var : inner1);
5709 else if (inner2 == true_test_var)
5710 return (is_or ? var : inner2);
5711 else if (inner1 == false_test_var)
5712 return (is_or
5713 ? boolean_true_node
5714 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5715 else if (inner2 == false_test_var)
5716 return (is_or
5717 ? boolean_true_node
5718 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5720 /* Next, redistribute/reassociate the OR across the inner tests.
5721 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5722 if (TREE_CODE (inner1) == SSA_NAME
5723 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5724 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5725 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5726 gimple_assign_rhs1 (s),
5727 gimple_assign_rhs2 (s),
5728 code2, op2a, op2b)))
5730 /* Handle the OR case, where we are reassociating:
5731 (inner1 OR inner2) OR (op2a code2 op2b)
5732 => (t OR inner2)
5733 If the partial result t is a constant, we win. Otherwise
5734 continue on to try reassociating with the other inner test. */
5735 if (is_or)
5737 if (integer_onep (t))
5738 return boolean_true_node;
5739 else if (integer_zerop (t))
5740 return inner2;
5743 /* Handle the AND case, where we are redistributing:
5744 (inner1 AND inner2) OR (op2a code2 op2b)
5745 => (t AND (inner2 OR (op2a code op2b))) */
5746 else if (integer_zerop (t))
5747 return boolean_false_node;
5749 /* Save partial result for later. */
5750 partial = t;
5753 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5754 if (TREE_CODE (inner2) == SSA_NAME
5755 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5756 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5757 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5758 gimple_assign_rhs1 (s),
5759 gimple_assign_rhs2 (s),
5760 code2, op2a, op2b)))
5762 /* Handle the OR case, where we are reassociating:
5763 (inner1 OR inner2) OR (op2a code2 op2b)
5764 => (inner1 OR t)
5765 => (t OR partial) */
5766 if (is_or)
5768 if (integer_zerop (t))
5769 return inner1;
5770 else if (integer_onep (t))
5771 return boolean_true_node;
5772 /* If both are the same, we can apply the identity
5773 (x OR x) == x. */
5774 else if (partial && same_bool_result_p (t, partial))
5775 return t;
5778 /* Handle the AND case, where we are redistributing:
5779 (inner1 AND inner2) OR (op2a code2 op2b)
5780 => (t AND (inner1 OR (op2a code2 op2b)))
5781 => (t AND partial) */
5782 else
5784 if (integer_zerop (t))
5785 return boolean_false_node;
5786 else if (partial)
5788 /* We already got a simplification for the other
5789 operand to the redistributed AND expression. The
5790 interesting case is when at least one is true.
5791 Or, if both are the same, we can apply the identity
5792 (x AND x) == x. */
5793 if (integer_onep (partial))
5794 return t;
5795 else if (integer_onep (t))
5796 return partial;
5797 else if (same_bool_result_p (t, partial))
5798 return t;
5803 return NULL_TREE;
5806 /* Try to simplify the OR of two comparisons defined by
5807 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5808 If this can be done without constructing an intermediate value,
5809 return the resulting tree; otherwise NULL_TREE is returned.
5810 This function is deliberately asymmetric as it recurses on SSA_DEFs
5811 in the first comparison but not the second. */
5813 static tree
5814 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5815 enum tree_code code2, tree op2a, tree op2b)
5817 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5819 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5820 if (operand_equal_p (op1a, op2a, 0)
5821 && operand_equal_p (op1b, op2b, 0))
5823 /* Result will be either NULL_TREE, or a combined comparison. */
5824 tree t = combine_comparisons (UNKNOWN_LOCATION,
5825 TRUTH_ORIF_EXPR, code1, code2,
5826 truth_type, op1a, op1b);
5827 if (t)
5828 return t;
5831 /* Likewise the swapped case of the above. */
5832 if (operand_equal_p (op1a, op2b, 0)
5833 && operand_equal_p (op1b, op2a, 0))
5835 /* Result will be either NULL_TREE, or a combined comparison. */
5836 tree t = combine_comparisons (UNKNOWN_LOCATION,
5837 TRUTH_ORIF_EXPR, code1,
5838 swap_tree_comparison (code2),
5839 truth_type, op1a, op1b);
5840 if (t)
5841 return t;
5844 /* If both comparisons are of the same value against constants, we might
5845 be able to merge them. */
5846 if (operand_equal_p (op1a, op2a, 0)
5847 && TREE_CODE (op1b) == INTEGER_CST
5848 && TREE_CODE (op2b) == INTEGER_CST)
5850 int cmp = tree_int_cst_compare (op1b, op2b);
/* CMP is negative, zero, or positive according as OP1B is less than,
   equal to, or greater than OP2B.  The case tables below are all keyed
   off this three-way result.  */
5852 /* If we have (op1a != op1b), we should either be able to
5853 return that or TRUE, depending on whether the constant op1b
5854 also satisfies the other comparison against op2b. */
5855 if (code1 == NE_EXPR)
5857 bool done = true;
5858 bool val;
5859 switch (code2)
5861 case EQ_EXPR: val = (cmp == 0); break;
5862 case NE_EXPR: val = (cmp != 0); break;
5863 case LT_EXPR: val = (cmp < 0); break;
5864 case GT_EXPR: val = (cmp > 0); break;
5865 case LE_EXPR: val = (cmp <= 0); break;
5866 case GE_EXPR: val = (cmp >= 0); break;
5867 default: done = false;
5869 if (done)
5871 if (val)
5872 return boolean_true_node;
5873 else
5874 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5877 /* Likewise if the second comparison is a != comparison. */
5878 else if (code2 == NE_EXPR)
5880 bool done = true;
5881 bool val;
5882 switch (code1)
5884 case EQ_EXPR: val = (cmp == 0); break;
5885 case NE_EXPR: val = (cmp != 0); break;
5886 case LT_EXPR: val = (cmp > 0); break;
5887 case GT_EXPR: val = (cmp < 0); break;
5888 case LE_EXPR: val = (cmp >= 0); break;
5889 case GE_EXPR: val = (cmp <= 0); break;
5890 default: done = false;
5892 if (done)
5894 if (val)
5895 return boolean_true_node;
5896 else
5897 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5901 /* See if an equality test is redundant with the other comparison. */
5902 else if (code1 == EQ_EXPR)
5904 bool val;
5905 switch (code2)
5907 case EQ_EXPR: val = (cmp == 0); break;
5908 case NE_EXPR: val = (cmp != 0); break;
5909 case LT_EXPR: val = (cmp < 0); break;
5910 case GT_EXPR: val = (cmp > 0); break;
5911 case LE_EXPR: val = (cmp <= 0); break;
5912 case GE_EXPR: val = (cmp >= 0); break;
5913 default:
5914 val = false;
5916 if (val)
5917 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5919 else if (code2 == EQ_EXPR)
5921 bool val;
5922 switch (code1)
5924 case EQ_EXPR: val = (cmp == 0); break;
5925 case NE_EXPR: val = (cmp != 0); break;
5926 case LT_EXPR: val = (cmp > 0); break;
5927 case GT_EXPR: val = (cmp < 0); break;
5928 case LE_EXPR: val = (cmp >= 0); break;
5929 case GE_EXPR: val = (cmp <= 0); break;
5930 default:
5931 val = false;
5933 if (val)
5934 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5937 /* Choose the less restrictive of two < or <= comparisons. */
5938 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5939 && (code2 == LT_EXPR || code2 == LE_EXPR))
5941 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5942 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5943 else
5944 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5947 /* Likewise choose the less restrictive of two > or >= comparisons. */
5948 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5949 && (code2 == GT_EXPR || code2 == GE_EXPR))
5951 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5952 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5953 else
5954 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5957 /* Check for singleton ranges: (x < c OR x > c) folds to x != c. */
5958 else if (cmp == 0
5959 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5960 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5961 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5963 /* Check for less/greater pairs that don't restrict the range at all. */
5964 else if (cmp >= 0
5965 && (code1 == LT_EXPR || code1 == LE_EXPR)
5966 && (code2 == GT_EXPR || code2 == GE_EXPR))
5967 return boolean_true_node;
5968 else if (cmp <= 0
5969 && (code1 == GT_EXPR || code1 == GE_EXPR)
5970 && (code2 == LT_EXPR || code2 == LE_EXPR))
5971 return boolean_true_node;
5974 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5975 NAME's definition is a truth value. See if there are any simplifications
5976 that can be done against the NAME's definition. */
5977 if (TREE_CODE (op1a) == SSA_NAME
5978 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5979 && (integer_zerop (op1b) || integer_onep (op1b)))
5981 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5982 || (code1 == NE_EXPR && integer_onep (op1b)));
5983 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5984 switch (gimple_code (stmt))
5986 case GIMPLE_ASSIGN:
5987 /* Try to simplify by copy-propagating the definition. */
5988 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5990 case GIMPLE_PHI:
5991 /* If every argument to the PHI produces the same result when
5992 ORed with the second comparison, we win.
5993 Do not do this unless the type is bool since we need a bool
5994 result here anyway. */
5995 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5997 tree result = NULL_TREE;
5998 unsigned i;
5999 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6001 tree arg = gimple_phi_arg_def (stmt, i);
6003 /* If this PHI has itself as an argument, ignore it.
6004 If all the other args produce the same result,
6005 we're still OK. */
6006 if (arg == gimple_phi_result (stmt))
6007 continue;
6008 else if (TREE_CODE (arg) == INTEGER_CST)
6010 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6012 if (!result)
6013 result = boolean_true_node;
6014 else if (!integer_onep (result))
6015 return NULL_TREE;
6017 else if (!result)
6018 result = fold_build2 (code2, boolean_type_node,
6019 op2a, op2b);
6020 else if (!same_bool_comparison_p (result,
6021 code2, op2a, op2b))
6022 return NULL_TREE;
6024 else if (TREE_CODE (arg) == SSA_NAME
6025 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6027 tree temp;
6028 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6029 /* In simple cases we can look through PHI nodes,
6030 but we have to be careful with loops.
6031 See PR49073. */
6032 if (! dom_info_available_p (CDI_DOMINATORS)
6033 || gimple_bb (def_stmt) == gimple_bb (stmt)
6034 || dominated_by_p (CDI_DOMINATORS,
6035 gimple_bb (def_stmt),
6036 gimple_bb (stmt)))
6037 return NULL_TREE;
6038 temp = or_var_with_comparison (arg, invert, code2,
6039 op2a, op2b);
6040 if (!temp)
6041 return NULL_TREE;
6042 else if (!result)
6043 result = temp;
6044 else if (!same_bool_result_p (result, temp))
6045 return NULL_TREE;
6047 else
6048 return NULL_TREE;
6050 return result;
6053 default:
6054 break;
6057 return NULL_TREE;
6060 /* Try to simplify the OR of two comparisons, specified by
6061 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6062 If this can be simplified to a single expression (without requiring
6063 introducing more SSA variables to hold intermediate values),
6064 return the resulting tree. Otherwise return NULL_TREE.
6065 If the result expression is non-null, it has boolean type. */
6067 tree
6068 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6069 enum tree_code code2, tree op2a, tree op2b)
6071 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6072 if (t)
6073 return t;
6074 else
6075 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6079 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6081 Either NULL_TREE, a simplified but non-constant or a constant
6082 is returned.
6084 ??? This should go into a gimple-fold-inline.h file to be eventually
6085 privatized with the single valueize function used in the various TUs
6086 to avoid the indirect function call overhead. */
6088 tree
6089 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6090 tree (*gvalueize) (tree))
6092 gimple_match_op res_op;
/* NOTE(review): GVALUEIZE is handed to gimple_simplify as the top-level
   valueization hook while VALUEIZE is used for inner operands; presumably
   GVALUEIZE may follow SSA edges more aggressively -- confirm against
   callers that pass distinct functions.  */
6093 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6094 edges if there are intermediate VARYING defs. For this reason
6095 do not follow SSA edges here even though SCCVN can technically
6096 just deal fine with that. */
6097 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6099 tree res = NULL_TREE;
6100 if (gimple_simplified_result_is_gimple_val (&res_op))
6101 res = res_op.ops[0];
6102 else if (mprts_hook)
6103 res = mprts_hook (&res_op);
6104 if (res)
6106 if (dump_file && dump_flags & TDF_DETAILS)
6108 fprintf (dump_file, "Match-and-simplified ");
6109 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6110 fprintf (dump_file, " to ");
6111 print_generic_expr (dump_file, res);
6112 fprintf (dump_file, "\n");
6114 return res;
6118 location_t loc = gimple_location (stmt);
6119 switch (gimple_code (stmt))
6121 case GIMPLE_ASSIGN:
6123 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6125 switch (get_gimple_rhs_class (subcode))
6127 case GIMPLE_SINGLE_RHS:
6129 tree rhs = gimple_assign_rhs1 (stmt);
6130 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6132 if (TREE_CODE (rhs) == SSA_NAME)
6134 /* If the RHS is an SSA_NAME, return its known constant value,
6135 if any. */
6136 return (*valueize) (rhs);
6138 /* Handle propagating invariant addresses into address
6139 operations. */
6140 else if (TREE_CODE (rhs) == ADDR_EXPR
6141 && !is_gimple_min_invariant (rhs))
6143 poly_int64 offset = 0;
6144 tree base;
6145 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6146 &offset,
6147 valueize);
6148 if (base
6149 && (CONSTANT_CLASS_P (base)
6150 || decl_address_invariant_p (base)))
6151 return build_invariant_address (TREE_TYPE (rhs),
6152 base, offset);
6154 else if (TREE_CODE (rhs) == CONSTRUCTOR
6155 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6156 && known_eq (CONSTRUCTOR_NELTS (rhs),
6157 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6159 unsigned i, nelts;
6160 tree val;
6162 nelts = CONSTRUCTOR_NELTS (rhs);
6163 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6164 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6166 val = (*valueize) (val);
6167 if (TREE_CODE (val) == INTEGER_CST
6168 || TREE_CODE (val) == REAL_CST
6169 || TREE_CODE (val) == FIXED_CST)
6170 vec.quick_push (val);
6171 else
6172 return NULL_TREE;
6175 return vec.build ();
6177 if (subcode == OBJ_TYPE_REF)
6179 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6180 /* If callee is constant, we can fold away the wrapper. */
6181 if (is_gimple_min_invariant (val))
6182 return val;
6185 if (kind == tcc_reference)
6187 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6188 || TREE_CODE (rhs) == REALPART_EXPR
6189 || TREE_CODE (rhs) == IMAGPART_EXPR)
6190 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6192 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6193 return fold_unary_loc (EXPR_LOCATION (rhs),
6194 TREE_CODE (rhs),
6195 TREE_TYPE (rhs), val);
6197 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6198 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6200 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6201 return fold_ternary_loc (EXPR_LOCATION (rhs),
6202 TREE_CODE (rhs),
6203 TREE_TYPE (rhs), val,
6204 TREE_OPERAND (rhs, 1),
6205 TREE_OPERAND (rhs, 2));
6207 else if (TREE_CODE (rhs) == MEM_REF
6208 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6210 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6211 if (TREE_CODE (val) == ADDR_EXPR
6212 && is_gimple_min_invariant (val))
6214 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6215 unshare_expr (val),
6216 TREE_OPERAND (rhs, 1));
6217 if (tem)
6218 rhs = tem;
6221 return fold_const_aggregate_ref_1 (rhs, valueize);
6223 else if (kind == tcc_declaration)
6224 return get_symbol_constant_value (rhs);
6225 return rhs;
6228 case GIMPLE_UNARY_RHS:
6229 return NULL_TREE;
6231 case GIMPLE_BINARY_RHS:
6232 /* Translate &x + CST into an invariant form suitable for
6233 further propagation. */
6234 if (subcode == POINTER_PLUS_EXPR)
6236 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6237 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6238 if (TREE_CODE (op0) == ADDR_EXPR
6239 && TREE_CODE (op1) == INTEGER_CST)
6241 tree off = fold_convert (ptr_type_node, op1);
6242 return build_fold_addr_expr_loc
6243 (loc,
6244 fold_build2 (MEM_REF,
6245 TREE_TYPE (TREE_TYPE (op0)),
6246 unshare_expr (op0), off));
6249 /* Canonicalize bool != 0 and bool == 0 appearing after
6250 valueization. While gimple_simplify handles this
6251 it can get confused by the ~X == 1 -> X == 0 transform
6252 which we can't reduce to a SSA name or a constant
6253 (and we have no way to tell gimple_simplify to not
6254 consider those transforms in the first place). */
6255 else if (subcode == EQ_EXPR
6256 || subcode == NE_EXPR)
6258 tree lhs = gimple_assign_lhs (stmt);
6259 tree op0 = gimple_assign_rhs1 (stmt);
6260 if (useless_type_conversion_p (TREE_TYPE (lhs),
6261 TREE_TYPE (op0)))
6263 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6264 op0 = (*valueize) (op0);
6265 if (TREE_CODE (op0) == INTEGER_CST)
6266 std::swap (op0, op1);
6267 if (TREE_CODE (op1) == INTEGER_CST
6268 && ((subcode == NE_EXPR && integer_zerop (op1))
6269 || (subcode == EQ_EXPR && integer_onep (op1))))
6270 return op0;
6273 return NULL_TREE;
6275 case GIMPLE_TERNARY_RHS:
6277 /* Handle ternary operators that can appear in GIMPLE form. */
6278 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6279 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6280 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6281 return fold_ternary_loc (loc, subcode,
6282 gimple_expr_type (stmt), op0, op1, op2);
6285 default:
6286 gcc_unreachable ();
6290 case GIMPLE_CALL:
6292 tree fn;
6293 gcall *call_stmt = as_a <gcall *> (stmt);
6295 if (gimple_call_internal_p (stmt))
6297 enum tree_code subcode = ERROR_MARK;
6298 switch (gimple_call_internal_fn (stmt))
6300 case IFN_UBSAN_CHECK_ADD:
6301 subcode = PLUS_EXPR;
6302 break;
6303 case IFN_UBSAN_CHECK_SUB:
6304 subcode = MINUS_EXPR;
6305 break;
6306 case IFN_UBSAN_CHECK_MUL:
6307 subcode = MULT_EXPR;
6308 break;
6309 case IFN_BUILTIN_EXPECT:
6311 tree arg0 = gimple_call_arg (stmt, 0);
6312 tree op0 = (*valueize) (arg0);
6313 if (TREE_CODE (op0) == INTEGER_CST)
6314 return op0;
6315 return NULL_TREE;
6317 default:
6318 return NULL_TREE;
/* A UBSAN_CHECK_{ADD,SUB,MUL} call: fold the underlying arithmetic
   only when the result is provably free of overflow.  */
6320 tree arg0 = gimple_call_arg (stmt, 0);
6321 tree arg1 = gimple_call_arg (stmt, 1);
6322 tree op0 = (*valueize) (arg0);
6323 tree op1 = (*valueize) (arg1);
6325 if (TREE_CODE (op0) != INTEGER_CST
6326 || TREE_CODE (op1) != INTEGER_CST)
6328 switch (subcode)
6330 case MULT_EXPR:
6331 /* x * 0 = 0 * x = 0 without overflow. */
6332 if (integer_zerop (op0) || integer_zerop (op1))
6333 return build_zero_cst (TREE_TYPE (arg0));
6334 break;
6335 case MINUS_EXPR:
6336 /* y - y = 0 without overflow. */
6337 if (operand_equal_p (op0, op1, 0))
6338 return build_zero_cst (TREE_TYPE (arg0));
6339 break;
6340 default:
6341 break;
6344 tree res
6345 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6346 if (res
6347 && TREE_CODE (res) == INTEGER_CST
6348 && !TREE_OVERFLOW (res))
6349 return res;
6350 return NULL_TREE;
6353 fn = (*valueize) (gimple_call_fn (stmt));
6354 if (TREE_CODE (fn) == ADDR_EXPR
6355 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6356 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6357 && gimple_builtin_call_types_compatible_p (stmt,
6358 TREE_OPERAND (fn, 0)))
6360 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6361 tree retval;
6362 unsigned i;
6363 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6364 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6365 retval = fold_builtin_call_array (loc,
6366 gimple_call_return_type (call_stmt),
6367 fn, gimple_call_num_args (stmt), args);
6368 if (retval)
6370 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6371 STRIP_NOPS (retval);
6372 retval = fold_convert (gimple_call_return_type (call_stmt),
6373 retval);
6375 return retval;
6377 return NULL_TREE;
6380 default:
6381 return NULL_TREE;
6385 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6386 Returns NULL_TREE if folding to a constant is not possible, otherwise
6387 returns a constant according to is_gimple_min_invariant. */
6389 tree
6390 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6392 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6393 if (res && is_gimple_min_invariant (res))
6394 return res;
6395 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into BIT_OFFSET; bail out
	 if the combined offset does not fit a signed HWI.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      /* Only a known &decl can lead us to a constructor.  */
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not move bits; look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Give up when the access position within the base is not exactly
	 known (variable index or variable-sized access).  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  FROM_DECL is the declaration the
   constructor came from, used for canonicalization of the result.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of the array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is a CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is the declaration the constructor came from.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk all initialized fields looking for one that covers the access.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  A flexible array member
	 (NULL field_size) notionally extends to the end of the object.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size POLY_SIZE to the memory at bit POLY_OFFSET.  FROM_DECL is the
   declaration CTOR belongs to, used when canonicalizing the result.
   Returns NULL_TREE when the reference cannot be folded.  */

tree
fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
		     poly_uint64 poly_size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of the walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  /* Recurse into CONSTRUCTORs, dispatching on the aggregate kind.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold away volatile accesses.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Sign-extend the zero-based index in the index type's
		 precision so negative (out-of-bound) indexes stay
		 negative.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know the precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the requested part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_const_aggregate_ref_1, but without valueization of SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup the virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have a virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the entry selected by TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index the
     array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual method
     in a virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public, we
	 cannot devirtualize.  This can happen in WHOPR when the actual method
	 ends up in another partition, because we found the devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6898 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6899 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6900 KNOWN_BINFO carries the binfo describing the true type of
6901 OBJ_TYPE_REF_OBJECT(REF).
6902 Set CAN_REFER if non-NULL to false if method
6903 is not referable or if the virtual table is ill-formed (such as rewriten
6904 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6906 tree
6907 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6908 bool *can_refer)
6910 unsigned HOST_WIDE_INT offset;
6911 tree v;
6913 v = BINFO_VTABLE (known_binfo);
6914 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6915 if (!v)
6916 return NULL_TREE;
6918 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6920 if (can_refer)
6921 *can_refer = false;
6922 return NULL_TREE;
6924 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Only ordinary pointers can be folded; may-alias-all references
     must keep the explicit indirection.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the access stays within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must select exactly the imaginary part.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7048 /* Return true if CODE is an operation that when operating on signed
7049 integer types involves undefined behavior on overflow and the
7050 operation can be expressed with unsigned arithmetic. */
7052 bool
7053 arith_code_with_undefined_signed_overflow (tree_code code)
7055 switch (code)
7057 case PLUS_EXPR:
7058 case MINUS_EXPR:
7059 case MULT_EXPR:
7060 case NEGATE_EXPR:
7061 case POINTER_PLUS_EXPR:
7062 return true;
7063 default:
7064 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand to the unsigned type, accumulating the
     conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Redirect STMT to a fresh unsigned SSA name; the original LHS is
     reassigned via a conversion below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type of LHS.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7106 /* The valueization hook we use for the gimple_build API simplification.
7107 This makes us match fold_buildN behavior by only combining with
7108 statements in the sequence(s) we are currently building. */
7110 static tree
7111 gimple_build_valueize (tree op)
7113 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7114 return op;
7115 return NULL_TREE;
7118 /* Build the expression CODE OP0 of type TYPE with location LOC,
7119 simplifying it first if possible. Returns the built
7120 expression value and appends statements possibly defining it
7121 to SEQ. */
7123 tree
7124 gimple_build (gimple_seq *seq, location_t loc,
7125 enum tree_code code, tree type, tree op0)
7127 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7128 if (!res)
7130 res = create_tmp_reg_or_ssa_name (type);
7131 gimple *stmt;
7132 if (code == REALPART_EXPR
7133 || code == IMAGPART_EXPR
7134 || code == VIEW_CONVERT_EXPR)
7135 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7136 else
7137 stmt = gimple_build_assign (res, code, op0);
7138 gimple_set_location (stmt, loc);
7139 gimple_seq_add_stmt_without_update (seq, stmt);
7141 return res;
7144 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7145 simplifying it first if possible. Returns the built
7146 expression value and appends statements possibly defining it
7147 to SEQ. */
7149 tree
7150 gimple_build (gimple_seq *seq, location_t loc,
7151 enum tree_code code, tree type, tree op0, tree op1)
7153 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7154 if (!res)
7156 res = create_tmp_reg_or_ssa_name (type);
7157 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7158 gimple_set_location (stmt, loc);
7159 gimple_seq_add_stmt_without_update (seq, stmt);
7161 return res;
7164 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7165 simplifying it first if possible. Returns the built
7166 expression value and appends statements possibly defining it
7167 to SEQ. */
7169 tree
7170 gimple_build (gimple_seq *seq, location_t loc,
7171 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7173 tree res = gimple_simplify (code, type, op0, op1, op2,
7174 seq, gimple_build_valueize);
7175 if (!res)
7177 res = create_tmp_reg_or_ssa_name (type);
7178 gimple *stmt;
7179 if (code == BIT_FIELD_REF)
7180 stmt = gimple_build_assign (res, code,
7181 build3 (code, type, op0, op1, op2));
7182 else
7183 stmt = gimple_build_assign (res, code, op0, op1, op2);
7184 gimple_set_location (stmt, loc);
7185 gimple_seq_add_stmt_without_update (seq, stmt);
7187 return res;
7190 /* Build the call FN (ARG0) with a result of type TYPE
7191 (or no result if TYPE is void) with location LOC,
7192 simplifying it first if possible. Returns the built
7193 expression value (or NULL_TREE if TYPE is void) and appends
7194 statements possibly defining it to SEQ. */
7196 tree
7197 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7198 tree type, tree arg0)
7200 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7201 if (!res)
7203 gcall *stmt;
7204 if (internal_fn_p (fn))
7205 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7206 else
7208 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7209 stmt = gimple_build_call (decl, 1, arg0);
7211 if (!VOID_TYPE_P (type))
7213 res = create_tmp_reg_or_ssa_name (type);
7214 gimple_call_set_lhs (stmt, res);
7216 gimple_set_location (stmt, loc);
7217 gimple_seq_add_stmt_without_update (seq, stmt);
7219 return res;
7222 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7223 (or no result if TYPE is void) with location LOC,
7224 simplifying it first if possible. Returns the built
7225 expression value (or NULL_TREE if TYPE is void) and appends
7226 statements possibly defining it to SEQ. */
7228 tree
7229 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7230 tree type, tree arg0, tree arg1)
7232 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7233 if (!res)
7235 gcall *stmt;
7236 if (internal_fn_p (fn))
7237 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7238 else
7240 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7241 stmt = gimple_build_call (decl, 2, arg0, arg1);
7243 if (!VOID_TYPE_P (type))
7245 res = create_tmp_reg_or_ssa_name (type);
7246 gimple_call_set_lhs (stmt, res);
7248 gimple_set_location (stmt, loc);
7249 gimple_seq_add_stmt_without_update (seq, stmt);
7251 return res;
7254 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7255 (or no result if TYPE is void) with location LOC,
7256 simplifying it first if possible. Returns the built
7257 expression value (or NULL_TREE if TYPE is void) and appends
7258 statements possibly defining it to SEQ. */
7260 tree
7261 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7262 tree type, tree arg0, tree arg1, tree arg2)
7264 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7265 seq, gimple_build_valueize);
7266 if (!res)
7268 gcall *stmt;
7269 if (internal_fn_p (fn))
7270 stmt = gimple_build_call_internal (as_internal_fn (fn),
7271 3, arg0, arg1, arg2);
7272 else
7274 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7275 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7277 if (!VOID_TYPE_P (type))
7279 res = create_tmp_reg_or_ssa_name (type);
7280 gimple_call_set_lhs (stmt, res);
7282 gimple_set_location (stmt, loc);
7283 gimple_seq_add_stmt_without_update (seq, stmt);
7285 return res;
7288 /* Build the conversion (TYPE) OP with a result of type TYPE
7289 with location LOC if such conversion is neccesary in GIMPLE,
7290 simplifying it first.
7291 Returns the built expression value and appends
7292 statements possibly defining it to SEQ. */
7294 tree
7295 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7297 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7298 return op;
7299 return gimple_build (seq, loc, NOP_EXPR, type, op);
7302 /* Build the conversion (ptrofftype) OP with a result of a type
7303 compatible with ptrofftype with location LOC if such conversion
7304 is neccesary in GIMPLE, simplifying it first.
7305 Returns the built expression value and appends
7306 statements possibly defining it to SEQ. */
7308 tree
7309 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7311 if (ptrofftype_p (TREE_TYPE (op)))
7312 return op;
7313 return gimple_convert (seq, loc, sizetype, op);
7316 /* Build a vector of type TYPE in which each element has the value OP.
7317 Return a gimple value for the result, appending any new statements
7318 to SEQ. */
7320 tree
7321 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7322 tree op)
7324 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7325 && !CONSTANT_CLASS_P (op))
7326 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7328 tree res, vec = build_vector_from_val (type, op);
7329 if (is_gimple_val (vec))
7330 return vec;
7331 if (gimple_in_ssa_p (cfun))
7332 res = make_ssa_name (type);
7333 else
7334 res = create_tmp_reg (type);
7335 gimple *stmt = gimple_build_assign (res, vec);
7336 gimple_set_location (stmt, loc);
7337 gimple_seq_add_stmt_without_update (seq, stmt);
7338 return res;
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Checking the encoded elements is sufficient: with at most two
     elements per pattern, every element is one of the encoded ones.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	/* At least one element is non-constant: emit a CONSTRUCTOR
	   assignment covering all elements instead.  */
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* Note: reuses the outer loop variable I; we return before the
	   outer loop ever resumes.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All elements constant: build the VECTOR_CST directly.  */
  return builder->build ();
}
7379 /* Return true if the result of assignment STMT is known to be non-negative.
7380 If the return value is based on the assumption that signed overflow is
7381 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7382 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7384 static bool
7385 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7386 int depth)
7388 enum tree_code code = gimple_assign_rhs_code (stmt);
7389 switch (get_gimple_rhs_class (code))
7391 case GIMPLE_UNARY_RHS:
7392 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7393 gimple_expr_type (stmt),
7394 gimple_assign_rhs1 (stmt),
7395 strict_overflow_p, depth);
7396 case GIMPLE_BINARY_RHS:
7397 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7398 gimple_expr_type (stmt),
7399 gimple_assign_rhs1 (stmt),
7400 gimple_assign_rhs2 (stmt),
7401 strict_overflow_p, depth);
7402 case GIMPLE_TERNARY_RHS:
7403 return false;
7404 case GIMPLE_SINGLE_RHS:
7405 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7406 strict_overflow_p, depth);
7407 case GIMPLE_INVALID_RHS:
7408 break;
7410 gcc_unreachable ();
7413 /* Return true if return value of call STMT is known to be non-negative.
7414 If the return value is based on the assumption that signed overflow is
7415 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7416 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7418 static bool
7419 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7420 int depth)
7422 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7423 gimple_call_arg (stmt, 0) : NULL_TREE;
7424 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7425 gimple_call_arg (stmt, 1) : NULL_TREE;
7427 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7428 gimple_call_combined_fn (stmt),
7429 arg0,
7430 arg1,
7431 strict_overflow_p, depth);
7434 /* Return true if return value of call STMT is known to be non-negative.
7435 If the return value is based on the assumption that signed overflow is
7436 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7437 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7439 static bool
7440 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7441 int depth)
7443 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7445 tree arg = gimple_phi_arg_def (stmt, i);
7446 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7447 return false;
7449 return true;
7452 /* Return true if STMT is known to compute a non-negative value.
7453 If the return value is based on the assumption that signed overflow is
7454 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7455 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7457 bool
7458 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7459 int depth)
7461 switch (gimple_code (stmt))
7463 case GIMPLE_ASSIGN:
7464 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7465 depth);
7466 case GIMPLE_CALL:
7467 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7468 depth);
7469 case GIMPLE_PHI:
7470 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7471 depth);
7472 default:
7473 return false;
7477 /* Return true if the floating-point value computed by assignment STMT
7478 is known to have an integer value. We also allow +Inf, -Inf and NaN
7479 to be considered integer values. Return false for signaling NaN.
7481 DEPTH is the current nesting depth of the query. */
7483 static bool
7484 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7486 enum tree_code code = gimple_assign_rhs_code (stmt);
7487 switch (get_gimple_rhs_class (code))
7489 case GIMPLE_UNARY_RHS:
7490 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7491 gimple_assign_rhs1 (stmt), depth);
7492 case GIMPLE_BINARY_RHS:
7493 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7494 gimple_assign_rhs1 (stmt),
7495 gimple_assign_rhs2 (stmt), depth);
7496 case GIMPLE_TERNARY_RHS:
7497 return false;
7498 case GIMPLE_SINGLE_RHS:
7499 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7500 case GIMPLE_INVALID_RHS:
7501 break;
7503 gcc_unreachable ();
7506 /* Return true if the floating-point value computed by call STMT is known
7507 to have an integer value. We also allow +Inf, -Inf and NaN to be
7508 considered integer values. Return false for signaling NaN.
7510 DEPTH is the current nesting depth of the query. */
7512 static bool
7513 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7515 tree arg0 = (gimple_call_num_args (stmt) > 0
7516 ? gimple_call_arg (stmt, 0)
7517 : NULL_TREE);
7518 tree arg1 = (gimple_call_num_args (stmt) > 1
7519 ? gimple_call_arg (stmt, 1)
7520 : NULL_TREE);
7521 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7522 arg0, arg1, depth);
7525 /* Return true if the floating-point result of phi STMT is known to have
7526 an integer value. We also allow +Inf, -Inf and NaN to be considered
7527 integer values. Return false for signaling NaN.
7529 DEPTH is the current nesting depth of the query. */
7531 static bool
7532 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7534 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7536 tree arg = gimple_phi_arg_def (stmt, i);
7537 if (!integer_valued_real_single_p (arg, depth + 1))
7538 return false;
7540 return true;
7543 /* Return true if the floating-point value computed by STMT is known
7544 to have an integer value. We also allow +Inf, -Inf and NaN to be
7545 considered integer values. Return false for signaling NaN.
7547 DEPTH is the current nesting depth of the query. */
7549 bool
7550 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7552 switch (gimple_code (stmt))
7554 case GIMPLE_ASSIGN:
7555 return gimple_assign_integer_valued_real_p (stmt, depth);
7556 case GIMPLE_CALL:
7557 return gimple_call_integer_valued_real_p (stmt, depth);
7558 case GIMPLE_PHI:
7559 return gimple_phi_integer_valued_real_p (stmt, depth);
7560 default:
7561 return false;