pa.c (pa_som_asm_init_sections): Fix comment.
[official-gcc.git] / gcc / gimple-fold.c
blob87ce3d887ce94aa6776744adbff6c716d5628975
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 location_t loc = gimple_location_safe (stmt);
351 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
352 "resolving virtual function address "
353 "reference to function %s\n",
354 targets.length () == 1
355 ? targets[0]->name ()
356 : "NULL");
358 if (targets.length () == 1)
360 val = fold_convert (TREE_TYPE (val),
361 build_fold_addr_expr_loc
362 (loc, targets[0]->decl));
363 STRIP_USELESS_TYPE_CONVERSION (val);
365 else
366 /* We can not use __builtin_unreachable here because it
367 can not have address taken. */
368 val = build_int_cst (TREE_TYPE (val), 0);
369 return val;
374 else if (TREE_CODE (rhs) == ADDR_EXPR)
376 tree ref = TREE_OPERAND (rhs, 0);
377 tree tem = maybe_fold_reference (ref, true);
378 if (tem
379 && TREE_CODE (tem) == MEM_REF
380 && integer_zerop (TREE_OPERAND (tem, 1)))
381 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
382 else if (tem)
383 result = fold_convert (TREE_TYPE (rhs),
384 build_fold_addr_expr_loc (loc, tem));
385 else if (TREE_CODE (ref) == MEM_REF
386 && integer_zerop (TREE_OPERAND (ref, 1)))
387 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
389 if (result)
391 /* Strip away useless type conversions. Both the
392 NON_LVALUE_EXPR that may have been added by fold, and
393 "useless" type conversions that might now be apparent
394 due to propagation. */
395 STRIP_USELESS_TYPE_CONVERSION (result);
397 if (result != rhs && valid_gimple_rhs_p (result))
398 return result;
402 else if (TREE_CODE (rhs) == CONSTRUCTOR
403 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
405 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
406 unsigned i;
407 tree val;
409 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
410 if (! CONSTANT_CLASS_P (val))
411 return NULL_TREE;
413 return build_vector_from_ctor (TREE_TYPE (rhs),
414 CONSTRUCTOR_ELTS (rhs));
417 else if (DECL_P (rhs))
418 return get_symbol_constant_value (rhs);
420 break;
422 case GIMPLE_UNARY_RHS:
423 break;
425 case GIMPLE_BINARY_RHS:
426 break;
428 case GIMPLE_TERNARY_RHS:
429 result = fold_ternary_loc (loc, subcode,
430 TREE_TYPE (gimple_assign_lhs (stmt)),
431 gimple_assign_rhs1 (stmt),
432 gimple_assign_rhs2 (stmt),
433 gimple_assign_rhs3 (stmt));
435 if (result)
437 STRIP_USELESS_TYPE_CONVERSION (result);
438 if (valid_gimple_rhs_p (result))
439 return result;
441 break;
443 case GIMPLE_INVALID_RHS:
444 gcc_unreachable ();
447 return NULL_TREE;
451 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
452 adjusting the replacement stmts location and virtual operands.
453 If the statement has a lhs the last stmt in the sequence is expected
454 to assign to that lhs. */
456 static void
457 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
459 gimple *stmt = gsi_stmt (*si_p);
461 if (gimple_has_location (stmt))
462 annotate_all_with_location (stmts, gimple_location (stmt));
464 /* First iterate over the replacement statements backward, assigning
465 virtual operands to their defining statements. */
466 gimple *laststore = NULL;
467 for (gimple_stmt_iterator i = gsi_last (stmts);
468 !gsi_end_p (i); gsi_prev (&i))
470 gimple *new_stmt = gsi_stmt (i);
471 if ((gimple_assign_single_p (new_stmt)
472 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
473 || (is_gimple_call (new_stmt)
474 && (gimple_call_flags (new_stmt)
475 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
477 tree vdef;
478 if (!laststore)
479 vdef = gimple_vdef (stmt);
480 else
481 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
482 gimple_set_vdef (new_stmt, vdef);
483 if (vdef && TREE_CODE (vdef) == SSA_NAME)
484 SSA_NAME_DEF_STMT (vdef) = new_stmt;
485 laststore = new_stmt;
489 /* Second iterate over the statements forward, assigning virtual
490 operands to their uses. */
491 tree reaching_vuse = gimple_vuse (stmt);
492 for (gimple_stmt_iterator i = gsi_start (stmts);
493 !gsi_end_p (i); gsi_next (&i))
495 gimple *new_stmt = gsi_stmt (i);
496 /* If the new statement possibly has a VUSE, update it with exact SSA
497 name we know will reach this one. */
498 if (gimple_has_mem_ops (new_stmt))
499 gimple_set_vuse (new_stmt, reaching_vuse);
500 gimple_set_modified (new_stmt, true);
501 if (gimple_vdef (new_stmt))
502 reaching_vuse = gimple_vdef (new_stmt);
505 /* If the new sequence does not do a store release the virtual
506 definition of the original statement. */
507 if (reaching_vuse
508 && reaching_vuse == gimple_vuse (stmt))
510 tree vdef = gimple_vdef (stmt);
511 if (vdef
512 && TREE_CODE (vdef) == SSA_NAME)
514 unlink_stmt_vdef (stmt);
515 release_ssa_name (vdef);
519 /* Finally replace the original statement with the sequence. */
520 gsi_replace_with_seq (si_p, stmts, false);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produces a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
536 tree lhs;
537 gimple *stmt, *new_stmt;
538 gimple_stmt_iterator i;
539 gimple_seq stmts = NULL;
541 stmt = gsi_stmt (*si_p);
543 gcc_assert (is_gimple_call (stmt));
545 push_gimplify_context (gimple_in_ssa_p (cfun));
547 lhs = gimple_call_lhs (stmt);
548 if (lhs == NULL_TREE)
550 gimplify_and_add (expr, &stmts);
551 /* We can end up with folding a memcpy of an empty class assignment
552 which gets optimized away by C++ gimplification. */
553 if (gimple_seq_empty_p (stmts))
555 pop_gimplify_context (NULL);
556 if (gimple_in_ssa_p (cfun))
558 unlink_stmt_vdef (stmt);
559 release_defs (stmt);
561 gsi_replace (si_p, gimple_build_nop (), false);
562 return;
565 else
567 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
568 new_stmt = gimple_build_assign (lhs, tmp);
569 i = gsi_last (stmts);
570 gsi_insert_after_without_update (&i, new_stmt,
571 GSI_CONTINUE_LINKING);
574 pop_gimplify_context (NULL);
576 gsi_replace_with_seq_vops (si_p, stmts);
580 /* Replace the call at *GSI with the gimple value VAL. */
582 void
583 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
585 gimple *stmt = gsi_stmt (*gsi);
586 tree lhs = gimple_call_lhs (stmt);
587 gimple *repl;
588 if (lhs)
590 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
591 val = fold_convert (TREE_TYPE (lhs), val);
592 repl = gimple_build_assign (lhs, val);
594 else
595 repl = gimple_build_nop ();
596 tree vdef = gimple_vdef (stmt);
597 if (vdef && TREE_CODE (vdef) == SSA_NAME)
599 unlink_stmt_vdef (stmt);
600 release_ssa_name (vdef);
602 gsi_replace (gsi, repl, false);
605 /* Replace the call at *GSI with the new call REPL and fold that
606 again. */
608 static void
609 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
611 gimple *stmt = gsi_stmt (*gsi);
612 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
613 gimple_set_location (repl, gimple_location (stmt));
614 if (gimple_vdef (stmt)
615 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
617 gimple_set_vdef (repl, gimple_vdef (stmt));
618 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
620 if (gimple_vuse (stmt))
621 gimple_set_vuse (repl, gimple_vuse (stmt));
622 gsi_replace (gsi, repl, false);
623 fold_stmt (gsi);
626 /* Return true if VAR is a VAR_DECL or a component thereof. */
628 static bool
629 var_decl_component_p (tree var)
631 tree inner = var;
632 while (handled_component_p (inner))
633 inner = TREE_OPERAND (inner, 0);
634 return SSA_VAR_P (inner);
637 /* If the SIZE argument representing the size of an object is in a range
638 of values of which exactly one is valid (and that is zero), return
639 true, otherwise false. */
641 static bool
642 size_must_be_zero_p (tree size)
644 if (integer_zerop (size))
645 return true;
647 if (TREE_CODE (size) != SSA_NAME)
648 return false;
650 wide_int min, max;
651 enum value_range_type rtype = get_range_info (size, &min, &max);
652 if (rtype != VR_ANTI_RANGE)
653 return false;
655 tree type = TREE_TYPE (size);
656 int prec = TYPE_PRECISION (type);
658 wide_int wone = wi::one (prec);
660 /* Compute the value of SSIZE_MAX, the largest positive value that
661 can be stored in ssize_t, the signed counterpart of size_t. */
662 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
664 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
667 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
668 diagnose (otherwise undefined) overlapping copies without preventing
669 folding. When folded, GCC guarantees that overlapping memcpy has
670 the same semantics as memmove. Call to the library memcpy need not
671 provide the same guarantee. Return false if no simplification can
672 be made. */
674 static bool
675 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
676 tree dest, tree src, int endp)
678 gimple *stmt = gsi_stmt (*gsi);
679 tree lhs = gimple_call_lhs (stmt);
680 tree len = gimple_call_arg (stmt, 2);
681 tree destvar, srcvar;
682 location_t loc = gimple_location (stmt);
684 tree func = gimple_call_fndecl (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
686 bool check_overlap = (DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE
687 && DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE_CHK
688 && !nowarn);
690 /* If the LEN parameter is a constant zero or in range where
691 the only valid value is zero, return DEST. */
692 if (size_must_be_zero_p (len))
694 gimple *repl;
695 if (gimple_call_lhs (stmt))
696 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
697 else
698 repl = gimple_build_nop ();
699 tree vdef = gimple_vdef (stmt);
700 if (vdef && TREE_CODE (vdef) == SSA_NAME)
702 unlink_stmt_vdef (stmt);
703 release_ssa_name (vdef);
705 gsi_replace (gsi, repl, false);
706 return true;
709 /* If SRC and DEST are the same (and not volatile), return
710 DEST{,+LEN,+LEN-1}. */
711 if (operand_equal_p (src, dest, 0))
713 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
714 It's safe and may even be emitted by GCC itself (see bug
715 32667). However, diagnose it in explicit calls to the memcpy
716 function. */
717 if (check_overlap && *IDENTIFIER_POINTER (DECL_NAME (func)) != '_')
718 warning_at (loc, OPT_Wrestrict,
719 "%qD source argument is the same as destination",
720 func);
722 unlink_stmt_vdef (stmt);
723 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
724 release_ssa_name (gimple_vdef (stmt));
725 if (!lhs)
727 gsi_replace (gsi, gimple_build_nop (), false);
728 return true;
730 goto done;
732 else
734 tree srctype, desttype;
735 unsigned int src_align, dest_align;
736 tree off0;
738 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
739 pointers as wide integer) and also may result in huge function
740 size because of inlined bounds copy. Thus don't inline for
741 functions we want to instrument. */
742 if (flag_check_pointer_bounds
743 && chkp_instrumentable_p (cfun->decl)
744 /* Even if data may contain pointers we can inline if copy
745 less than a pointer size. */
746 && (!tree_fits_uhwi_p (len)
747 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
748 return false;
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
754 /* If we can perform the copy efficiently with first doing all loads
755 and then all stores inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* ??? Don't transform copies from strings with known length this
763 confuses the tree-ssa-strlen.c. This doesn't handle
764 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
765 reason. */
766 && !c_strlen (src, 2))
768 unsigned ilen = tree_to_uhwi (len);
769 if (pow2p_hwi (ilen))
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
778 scalar_int_mode mode;
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
786 || !targetm.slow_unaligned_access (mode, dest_align)
787 || (optab_handler (movmisalign_optab, mode)
788 != CODE_FOR_nothing)))
790 tree srctype = type;
791 tree desttype = type;
792 if (src_align < GET_MODE_ALIGNMENT (mode))
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
798 else if (src_align < GET_MODE_ALIGNMENT (mode)
799 && targetm.slow_unaligned_access (mode, src_align)
800 && (optab_handler (movmisalign_optab, mode)
801 == CODE_FOR_nothing))
802 srcmem = NULL_TREE;
803 if (srcmem)
805 gimple *new_stmt;
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
816 if (dest_align < GET_MODE_ALIGNMENT (mode))
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
829 gsi_replace (gsi, new_stmt, false);
830 return true;
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
839 if (endp == 3)
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
867 tree src_base, dest_base, fn;
868 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
869 HOST_WIDE_INT maxsize;
871 srcvar = TREE_OPERAND (src, 0);
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
875 destvar = TREE_OPERAND (dest, 0);
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
880 if (tree_fits_uhwi_p (len))
881 maxsize = tree_to_uhwi (len);
882 else
883 maxsize = -1;
884 if (SSA_VAR_P (src_base)
885 && SSA_VAR_P (dest_base))
887 if (operand_equal_p (src_base, dest_base, 0)
888 && ranges_overlap_p (src_offset, maxsize,
889 dest_offset, maxsize))
890 return false;
892 else if (TREE_CODE (src_base) == MEM_REF
893 && TREE_CODE (dest_base) == MEM_REF)
895 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
896 TREE_OPERAND (dest_base, 0), 0))
897 return false;
898 offset_int off = mem_ref_offset (src_base) + src_offset;
899 if (!wi::fits_shwi_p (off))
900 return false;
901 src_offset = off.to_shwi ();
903 off = mem_ref_offset (dest_base) + dest_offset;
904 if (!wi::fits_shwi_p (off))
905 return false;
906 dest_offset = off.to_shwi ();
907 if (ranges_overlap_p (src_offset, maxsize,
908 dest_offset, maxsize))
909 return false;
911 else
912 return false;
914 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
915 if (!fn)
916 return false;
917 gimple_call_set_fndecl (stmt, fn);
918 gimple_call_set_arg (stmt, 0, dest);
919 gimple_call_set_arg (stmt, 1, src);
920 fold_stmt (gsi);
921 return true;
924 /* If the destination and source do not alias optimize into
925 memcpy as well. */
926 if ((is_gimple_min_invariant (dest)
927 || TREE_CODE (dest) == SSA_NAME)
928 && (is_gimple_min_invariant (src)
929 || TREE_CODE (src) == SSA_NAME))
931 ao_ref destr, srcr;
932 ao_ref_init_from_ptr_and_size (&destr, dest, len);
933 ao_ref_init_from_ptr_and_size (&srcr, src, len);
934 if (!refs_may_alias_p_1 (&destr, &srcr, false))
936 tree fn;
937 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
938 if (!fn)
939 return false;
940 gimple_call_set_fndecl (stmt, fn);
941 gimple_call_set_arg (stmt, 0, dest);
942 gimple_call_set_arg (stmt, 1, src);
943 fold_stmt (gsi);
944 return true;
948 return false;
951 if (!tree_fits_shwi_p (len))
952 return false;
953 if (!POINTER_TYPE_P (TREE_TYPE (src))
954 || !POINTER_TYPE_P (TREE_TYPE (dest)))
955 return false;
956 /* In the following try to find a type that is most natural to be
957 used for the memcpy source and destination and that allows
958 the most optimization when memcpy is turned into a plain assignment
959 using that type. In theory we could always use a char[len] type
960 but that only gains us that the destination and source possibly
961 no longer will have their address taken. */
962 srctype = TREE_TYPE (TREE_TYPE (src));
963 if (TREE_CODE (srctype) == ARRAY_TYPE
964 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
965 srctype = TREE_TYPE (srctype);
966 desttype = TREE_TYPE (TREE_TYPE (dest));
967 if (TREE_CODE (desttype) == ARRAY_TYPE
968 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
969 desttype = TREE_TYPE (desttype);
970 if (TREE_ADDRESSABLE (srctype)
971 || TREE_ADDRESSABLE (desttype))
972 return false;
974 /* Make sure we are not copying using a floating-point mode or
975 a type whose size possibly does not match its precision. */
976 if (FLOAT_MODE_P (TYPE_MODE (desttype))
977 || TREE_CODE (desttype) == BOOLEAN_TYPE
978 || TREE_CODE (desttype) == ENUMERAL_TYPE)
979 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
980 if (FLOAT_MODE_P (TYPE_MODE (srctype))
981 || TREE_CODE (srctype) == BOOLEAN_TYPE
982 || TREE_CODE (srctype) == ENUMERAL_TYPE)
983 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
984 if (!srctype)
985 srctype = desttype;
986 if (!desttype)
987 desttype = srctype;
988 if (!srctype)
989 return false;
991 src_align = get_pointer_alignment (src);
992 dest_align = get_pointer_alignment (dest);
993 if (dest_align < TYPE_ALIGN (desttype)
994 || src_align < TYPE_ALIGN (srctype))
995 return false;
997 destvar = NULL_TREE;
998 if (TREE_CODE (dest) == ADDR_EXPR
999 && var_decl_component_p (TREE_OPERAND (dest, 0))
1000 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1001 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1003 srcvar = NULL_TREE;
1004 if (TREE_CODE (src) == ADDR_EXPR
1005 && var_decl_component_p (TREE_OPERAND (src, 0))
1006 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1008 if (!destvar
1009 || src_align >= TYPE_ALIGN (desttype))
1010 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1011 src, off0);
1012 else if (!STRICT_ALIGNMENT)
1014 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1015 src_align);
1016 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1020 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1021 return false;
1023 if (srcvar == NULL_TREE)
1025 if (src_align >= TYPE_ALIGN (desttype))
1026 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1027 else
1029 if (STRICT_ALIGNMENT)
1030 return false;
1031 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1032 src_align);
1033 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1036 else if (destvar == NULL_TREE)
1038 if (dest_align >= TYPE_ALIGN (srctype))
1039 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1040 else
1042 if (STRICT_ALIGNMENT)
1043 return false;
1044 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1045 dest_align);
1046 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1050 /* Detect invalid bounds and overlapping copies and issue either
1051 -Warray-bounds or -Wrestrict. */
1052 if (!nowarn)
1053 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1055 gimple *new_stmt;
1056 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1058 tree tem = fold_const_aggregate_ref (srcvar);
1059 if (tem)
1060 srcvar = tem;
1061 if (! is_gimple_min_invariant (srcvar))
1063 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1064 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1065 new_stmt);
1066 gimple_assign_set_lhs (new_stmt, srcvar);
1067 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1068 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1070 new_stmt = gimple_build_assign (destvar, srcvar);
1071 goto set_vop_and_replace;
1074 /* We get an aggregate copy. Use an unsigned char[] type to
1075 perform the copying to preserve padding and to avoid any issues
1076 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1077 desttype = build_array_type_nelts (unsigned_char_type_node,
1078 tree_to_uhwi (len));
1079 srctype = desttype;
1080 if (src_align > TYPE_ALIGN (srctype))
1081 srctype = build_aligned_type (srctype, src_align);
1082 if (dest_align > TYPE_ALIGN (desttype))
1083 desttype = build_aligned_type (desttype, dest_align);
1084 new_stmt
1085 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1086 fold_build2 (MEM_REF, srctype, src, off0));
1087 set_vop_and_replace:
1088 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1089 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1090 if (gimple_vdef (new_stmt)
1091 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1092 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1093 if (!lhs)
1095 gsi_replace (gsi, new_stmt, false);
1096 return true;
1098 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1101 done:
1102 gimple_seq stmts = NULL;
1103 if (endp == 0 || endp == 3)
1104 len = NULL_TREE;
1105 else if (endp == 2)
1106 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1107 ssize_int (1));
1108 if (endp == 2 || endp == 1)
1110 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1111 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1112 TREE_TYPE (dest), dest, len);
1115 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1116 gimple *repl = gimple_build_assign (lhs, dest);
1117 gsi_replace (gsi, repl, false);
1118 return true;
1121 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1122 to built-in memcmp (a, b, len). */
1124 static bool
1125 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1127 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1129 if (!fn)
1130 return false;
1132 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1134 gimple *stmt = gsi_stmt (*gsi);
1135 tree a = gimple_call_arg (stmt, 0);
1136 tree b = gimple_call_arg (stmt, 1);
1137 tree len = gimple_call_arg (stmt, 2);
1139 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1140 replace_call_with_call_and_fold (gsi, repl);
1142 return true;
1145 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1146 to built-in memmove (dest, src, len). */
1148 static bool
1149 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1151 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1153 if (!fn)
1154 return false;
1156 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1157 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1158 len) into memmove (dest, src, len). */
1160 gimple *stmt = gsi_stmt (*gsi);
1161 tree src = gimple_call_arg (stmt, 0);
1162 tree dest = gimple_call_arg (stmt, 1);
1163 tree len = gimple_call_arg (stmt, 2);
1165 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1166 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1167 replace_call_with_call_and_fold (gsi, repl);
1169 return true;
1172 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1173 to built-in memset (dest, 0, len). */
1175 static bool
1176 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1178 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1180 if (!fn)
1181 return false;
1183 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1185 gimple *stmt = gsi_stmt (*gsi);
1186 tree dest = gimple_call_arg (stmt, 0);
1187 tree len = gimple_call_arg (stmt, 1);
1189 gimple_seq seq = NULL;
1190 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1191 gimple_seq_add_stmt_without_update (&seq, repl);
1192 gsi_replace_with_seq_vops (gsi, seq);
1193 fold_stmt (gsi);
1195 return true;
1198 /* Fold function call to builtin memset or bzero at *GSI setting the
1199 memory of size LEN to VAL. Return whether a simplification was made. */
1201 static bool
1202 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1204 gimple *stmt = gsi_stmt (*gsi);
1205 tree etype;
1206 unsigned HOST_WIDE_INT length, cval;
1208 /* If the LEN parameter is zero, return DEST. */
1209 if (integer_zerop (len))
1211 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1212 return true;
1215 if (! tree_fits_uhwi_p (len))
1216 return false;
1218 if (TREE_CODE (c) != INTEGER_CST)
1219 return false;
1221 tree dest = gimple_call_arg (stmt, 0);
1222 tree var = dest;
1223 if (TREE_CODE (var) != ADDR_EXPR)
1224 return false;
1226 var = TREE_OPERAND (var, 0);
1227 if (TREE_THIS_VOLATILE (var))
1228 return false;
1230 etype = TREE_TYPE (var);
1231 if (TREE_CODE (etype) == ARRAY_TYPE)
1232 etype = TREE_TYPE (etype);
1234 if (!INTEGRAL_TYPE_P (etype)
1235 && !POINTER_TYPE_P (etype))
1236 return NULL_TREE;
1238 if (! var_decl_component_p (var))
1239 return NULL_TREE;
1241 length = tree_to_uhwi (len);
1242 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1243 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1244 return NULL_TREE;
1246 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1247 return NULL_TREE;
1249 if (integer_zerop (c))
1250 cval = 0;
1251 else
1253 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1254 return NULL_TREE;
1256 cval = TREE_INT_CST_LOW (c);
1257 cval &= 0xff;
1258 cval |= cval << 8;
1259 cval |= cval << 16;
1260 cval |= (cval << 31) << 1;
1263 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1264 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1265 gimple_set_vuse (store, gimple_vuse (stmt));
1266 tree vdef = gimple_vdef (stmt);
1267 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1269 gimple_set_vdef (store, gimple_vdef (stmt));
1270 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1272 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1273 if (gimple_call_lhs (stmt))
1275 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1276 gsi_replace (gsi, asgn, false);
1278 else
1280 gimple_stmt_iterator gsi2 = *gsi;
1281 gsi_prev (gsi);
1282 gsi_remove (&gsi2, true);
1285 return true;
1289 /* Obtain the minimum and maximum string length or minimum and maximum
1290 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1291 If ARG is an SSA name variable, follow its use-def chains. When
1292 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1293 if we are unable to determine the length or value, return False.
1294 VISITED is a bitmap of visited variables.
1295 TYPE is 0 if string length should be obtained, 1 for maximum string
1296 length and 2 for maximum value ARG can have.
1297 When FUZZY is set and the length of a string cannot be determined,
1298 the function instead considers as the maximum possible length the
1299 size of a character array it may refer to.
1300 Set *FLEXP to true if the range of the string lengths has been
1301 obtained from the upper bound of an array at the end of a struct.
1302 Such an array may hold a string that's longer than its upper bound
1303 due to it being used as a poor-man's flexible array member. */
1305 static bool
1306 get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
1307 bool fuzzy, bool *flexp)
1309 tree var, val;
1310 gimple *def_stmt;
1312 /* The minimum and maximum length. The MAXLEN pointer stays unchanged
1313 but MINLEN may be cleared during the execution of the function. */
1314 tree *minlen = length;
1315 tree *const maxlen = length + 1;
1317 if (TREE_CODE (arg) != SSA_NAME)
1319 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1320 if (TREE_CODE (arg) == ADDR_EXPR
1321 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1322 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1324 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1325 if (TREE_CODE (aop0) == INDIRECT_REF
1326 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1327 return get_range_strlen (TREE_OPERAND (aop0, 0),
1328 length, visited, type, fuzzy, flexp);
1331 if (type == 2)
1333 val = arg;
1334 if (TREE_CODE (val) != INTEGER_CST
1335 || tree_int_cst_sgn (val) < 0)
1336 return false;
1338 else
1339 val = c_strlen (arg, 1);
1341 if (!val && fuzzy)
1343 if (TREE_CODE (arg) == ADDR_EXPR)
1344 return get_range_strlen (TREE_OPERAND (arg, 0), length,
1345 visited, type, fuzzy, flexp);
1347 if (TREE_CODE (arg) == COMPONENT_REF
1348 && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
1350 /* Use the type of the member array to determine the upper
1351 bound on the length of the array. This may be overly
1352 optimistic if the array itself isn't NUL-terminated and
1353 the caller relies on the subsequent member to contain
1354 the NUL.
1355 Set *FLEXP to true if the array whose bound is being
1356 used is at the end of a struct. */
1357 if (array_at_struct_end_p (arg))
1358 *flexp = true;
1360 arg = TREE_OPERAND (arg, 1);
1361 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1362 if (!val || integer_zerop (val))
1363 return false;
1364 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1365 integer_one_node);
1366 /* Set the minimum size to zero since the string in
1367 the array could have zero length. */
1368 *minlen = ssize_int (0);
1371 if (VAR_P (arg)
1372 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
1374 val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
1375 if (!val || TREE_CODE (val) != INTEGER_CST || integer_zerop (val))
1376 return false;
1377 val = wide_int_to_tree (TREE_TYPE (val),
1378 wi::sub(wi::to_wide (val), 1));
1379 /* Set the minimum size to zero since the string in
1380 the array could have zero length. */
1381 *minlen = ssize_int (0);
1385 if (!val)
1386 return false;
1388 if (minlen
1389 && (!*minlen
1390 || (type > 0
1391 && TREE_CODE (*minlen) == INTEGER_CST
1392 && TREE_CODE (val) == INTEGER_CST
1393 && tree_int_cst_lt (val, *minlen))))
1394 *minlen = val;
1396 if (*maxlen)
1398 if (type > 0)
1400 if (TREE_CODE (*maxlen) != INTEGER_CST
1401 || TREE_CODE (val) != INTEGER_CST)
1402 return false;
1404 if (tree_int_cst_lt (*maxlen, val))
1405 *maxlen = val;
1406 return true;
1408 else if (simple_cst_equal (val, *maxlen) != 1)
1409 return false;
1412 *maxlen = val;
1413 return true;
1416 /* If ARG is registered for SSA update we cannot look at its defining
1417 statement. */
1418 if (name_registered_for_update_p (arg))
1419 return false;
1421 /* If we were already here, break the infinite cycle. */
1422 if (!*visited)
1423 *visited = BITMAP_ALLOC (NULL);
1424 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
1425 return true;
1427 var = arg;
1428 def_stmt = SSA_NAME_DEF_STMT (var);
1430 switch (gimple_code (def_stmt))
1432 case GIMPLE_ASSIGN:
1433 /* The RHS of the statement defining VAR must either have a
1434 constant length or come from another SSA_NAME with a constant
1435 length. */
1436 if (gimple_assign_single_p (def_stmt)
1437 || gimple_assign_unary_nop_p (def_stmt))
1439 tree rhs = gimple_assign_rhs1 (def_stmt);
1440 return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
1442 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1444 tree op2 = gimple_assign_rhs2 (def_stmt);
1445 tree op3 = gimple_assign_rhs3 (def_stmt);
1446 return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
1447 && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
1449 return false;
1451 case GIMPLE_PHI:
1453 /* All the arguments of the PHI node must have the same constant
1454 length. */
1455 unsigned i;
1457 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1459 tree arg = gimple_phi_arg (def_stmt, i)->def;
1461 /* If this PHI has itself as an argument, we cannot
1462 determine the string length of this argument. However,
1463 if we can find a constant string length for the other
1464 PHI args then we can still be sure that this is a
1465 constant string length. So be optimistic and just
1466 continue with the next argument. */
1467 if (arg == gimple_phi_result (def_stmt))
1468 continue;
1470 if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
1472 if (fuzzy)
1473 *maxlen = build_all_ones_cst (size_type_node);
1474 else
1475 return false;
1479 return true;
1481 default:
1482 return false;
1486 /* Determine the minimum and maximum value or string length that ARG
1487 refers to and store each in the first two elements of MINMAXLEN.
1488 For expressions that point to strings of unknown lengths that are
1489 character arrays, use the upper bound of the array as the maximum
1490 length. For example, given an expression like 'x ? array : "xyz"'
1491 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1492 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1493 stored in array.
1494 Return true if the range of the string lengths has been obtained
1495 from the upper bound of an array at the end of a struct. Such
1496 an array may hold a string that's longer than its upper bound
1497 due to it being used as a poor-man's flexible array member. */
1499 bool
1500 get_range_strlen (tree arg, tree minmaxlen[2])
1502 bitmap visited = NULL;
1504 minmaxlen[0] = NULL_TREE;
1505 minmaxlen[1] = NULL_TREE;
1507 bool flexarray = false;
1508 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1510 if (visited)
1511 BITMAP_FREE (visited);
1513 return flexarray;
1516 tree
1517 get_maxval_strlen (tree arg, int type)
1519 bitmap visited = NULL;
1520 tree len[2] = { NULL_TREE, NULL_TREE };
1522 bool dummy;
1523 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1524 len[1] = NULL_TREE;
1525 if (visited)
1526 BITMAP_FREE (visited);
1528 return len[1];
1532 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1533 If LEN is not NULL, it represents the length of the string to be
1534 copied. Return NULL_TREE if no simplification can be made. */
1536 static bool
1537 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1538 tree dest, tree src)
1540 gimple *stmt = gsi_stmt (*gsi);
1541 location_t loc = gimple_location (stmt);
1542 tree fn;
1544 /* If SRC and DEST are the same (and not volatile), return DEST. */
1545 if (operand_equal_p (src, dest, 0))
1547 tree func = gimple_call_fndecl (stmt);
1549 warning_at (loc, OPT_Wrestrict,
1550 "%qD source argument is the same as destination",
1551 func);
1553 replace_call_with_value (gsi, dest);
1554 return true;
1557 if (optimize_function_for_size_p (cfun))
1558 return false;
1560 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1561 if (!fn)
1562 return false;
1564 tree len = get_maxval_strlen (src, 0);
1565 if (!len)
1566 return false;
1568 len = fold_convert_loc (loc, size_type_node, len);
1569 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1570 len = force_gimple_operand_gsi (gsi, len, true,
1571 NULL_TREE, true, GSI_SAME_STMT);
1572 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1573 replace_call_with_call_and_fold (gsi, repl);
1574 return true;
1577 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1578 If SLEN is not NULL, it represents the length of the source string.
1579 Return NULL_TREE if no simplification can be made. */
1581 static bool
1582 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1583 tree dest, tree src, tree len)
1585 gimple *stmt = gsi_stmt (*gsi);
1586 location_t loc = gimple_location (stmt);
1587 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
1589 /* If the LEN parameter is zero, return DEST. */
1590 if (integer_zerop (len))
1592 /* Avoid warning if the destination refers to a an array/pointer
1593 decorate with attribute nonstring. */
1594 if (!nonstring)
1596 tree fndecl = gimple_call_fndecl (stmt);
1597 gcall *call = as_a <gcall *> (stmt);
1599 /* Warn about the lack of nul termination: the result is not
1600 a (nul-terminated) string. */
1601 tree slen = get_maxval_strlen (src, 0);
1602 if (slen && !integer_zerop (slen))
1603 warning_at (loc, OPT_Wstringop_truncation,
1604 "%G%qD destination unchanged after copying no bytes "
1605 "from a string of length %E",
1606 call, fndecl, slen);
1607 else
1608 warning_at (loc, OPT_Wstringop_truncation,
1609 "%G%qD destination unchanged after copying no bytes",
1610 call, fndecl);
1613 replace_call_with_value (gsi, dest);
1614 return true;
1617 /* We can't compare slen with len as constants below if len is not a
1618 constant. */
1619 if (TREE_CODE (len) != INTEGER_CST)
1620 return false;
1622 /* Now, we must be passed a constant src ptr parameter. */
1623 tree slen = get_maxval_strlen (src, 0);
1624 if (!slen || TREE_CODE (slen) != INTEGER_CST)
1625 return false;
1627 /* The size of the source string including the terminating nul. */
1628 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1630 /* We do not support simplification of this case, though we do
1631 support it when expanding trees into RTL. */
1632 /* FIXME: generate a call to __builtin_memset. */
1633 if (tree_int_cst_lt (ssize, len))
1634 return false;
1636 if (!nonstring)
1638 if (tree_int_cst_lt (len, slen))
1640 tree fndecl = gimple_call_fndecl (stmt);
1641 gcall *call = as_a <gcall *> (stmt);
1643 warning_at (loc, OPT_Wstringop_truncation,
1644 (tree_int_cst_equal (size_one_node, len)
1645 ? G_("%G%qD output truncated copying %E byte "
1646 "from a string of length %E")
1647 : G_("%G%qD output truncated copying %E bytes "
1648 "from a string of length %E")),
1649 call, fndecl, len, slen);
1651 else if (tree_int_cst_equal (len, slen))
1653 tree fndecl = gimple_call_fndecl (stmt);
1654 gcall *call = as_a <gcall *> (stmt);
1656 warning_at (loc, OPT_Wstringop_truncation,
1657 (tree_int_cst_equal (size_one_node, len)
1658 ? G_("%G%qD output truncated before terminating nul "
1659 "copying %E byte from a string of the same "
1660 "length")
1661 : G_("%G%qD output truncated before terminating nul "
1662 "copying %E bytes from a string of the same "
1663 "length")),
1664 call, fndecl, len);
1668 /* OK transform into builtin memcpy. */
1669 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1670 if (!fn)
1671 return false;
1673 len = fold_convert_loc (loc, size_type_node, len);
1674 len = force_gimple_operand_gsi (gsi, len, true,
1675 NULL_TREE, true, GSI_SAME_STMT);
1676 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1677 replace_call_with_call_and_fold (gsi, repl);
1679 return true;
1682 /* Fold function call to builtin strchr or strrchr.
1683 If both arguments are constant, evaluate and fold the result,
1684 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1685 In general strlen is significantly faster than strchr
1686 due to being a simpler operation. */
1687 static bool
1688 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1690 gimple *stmt = gsi_stmt (*gsi);
1691 tree str = gimple_call_arg (stmt, 0);
1692 tree c = gimple_call_arg (stmt, 1);
1693 location_t loc = gimple_location (stmt);
1694 const char *p;
1695 char ch;
1697 if (!gimple_call_lhs (stmt))
1698 return false;
1700 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1702 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1704 if (p1 == NULL)
1706 replace_call_with_value (gsi, integer_zero_node);
1707 return true;
1710 tree len = build_int_cst (size_type_node, p1 - p);
1711 gimple_seq stmts = NULL;
1712 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1713 POINTER_PLUS_EXPR, str, len);
1714 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1715 gsi_replace_with_seq_vops (gsi, stmts);
1716 return true;
1719 if (!integer_zerop (c))
1720 return false;
1722 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1723 if (is_strrchr && optimize_function_for_size_p (cfun))
1725 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1727 if (strchr_fn)
1729 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1730 replace_call_with_call_and_fold (gsi, repl);
1731 return true;
1734 return false;
1737 tree len;
1738 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1740 if (!strlen_fn)
1741 return false;
1743 /* Create newstr = strlen (str). */
1744 gimple_seq stmts = NULL;
1745 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1746 gimple_set_location (new_stmt, loc);
1747 len = create_tmp_reg_or_ssa_name (size_type_node);
1748 gimple_call_set_lhs (new_stmt, len);
1749 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1751 /* Create (str p+ strlen (str)). */
1752 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1753 POINTER_PLUS_EXPR, str, len);
1754 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1755 gsi_replace_with_seq_vops (gsi, stmts);
1756 /* gsi now points at the assignment to the lhs, get a
1757 stmt iterator to the strlen.
1758 ??? We can't use gsi_for_stmt as that doesn't work when the
1759 CFG isn't built yet. */
1760 gimple_stmt_iterator gsi2 = *gsi;
1761 gsi_prev (&gsi2);
1762 fold_stmt (&gsi2);
1763 return true;
1766 /* Fold function call to builtin strstr.
1767 If both arguments are constant, evaluate and fold the result,
1768 additionally fold strstr (x, "") into x and strstr (x, "c")
1769 into strchr (x, 'c'). */
1770 static bool
1771 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1773 gimple *stmt = gsi_stmt (*gsi);
1774 tree haystack = gimple_call_arg (stmt, 0);
1775 tree needle = gimple_call_arg (stmt, 1);
1776 const char *p, *q;
1778 if (!gimple_call_lhs (stmt))
1779 return false;
1781 q = c_getstr (needle);
1782 if (q == NULL)
1783 return false;
1785 if ((p = c_getstr (haystack)))
1787 const char *r = strstr (p, q);
1789 if (r == NULL)
1791 replace_call_with_value (gsi, integer_zero_node);
1792 return true;
1795 tree len = build_int_cst (size_type_node, r - p);
1796 gimple_seq stmts = NULL;
1797 gimple *new_stmt
1798 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1799 haystack, len);
1800 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1801 gsi_replace_with_seq_vops (gsi, stmts);
1802 return true;
1805 /* For strstr (x, "") return x. */
1806 if (q[0] == '\0')
1808 replace_call_with_value (gsi, haystack);
1809 return true;
1812 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1813 if (q[1] == '\0')
1815 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1816 if (strchr_fn)
1818 tree c = build_int_cst (integer_type_node, q[0]);
1819 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1820 replace_call_with_call_and_fold (gsi, repl);
1821 return true;
1825 return false;
1828 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1829 to the call.
1831 Return NULL_TREE if no simplification was possible, otherwise return the
1832 simplified form of the call as a tree.
1834 The simplified form may be a constant or other expression which
1835 computes the same value, but in a more efficient manner (including
1836 calls to other builtin functions).
1838 The call may contain arguments which need to be evaluated, but
1839 which are not useful to determine the result of the call. In
1840 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1841 COMPOUND_EXPR will be an argument which must be evaluated.
1842 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1843 COMPOUND_EXPR in the chain will contain the tree for the simplified
1844 form of the builtin function call. */
1846 static bool
1847 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
1849 gimple *stmt = gsi_stmt (*gsi);
1850 location_t loc = gimple_location (stmt);
1852 const char *p = c_getstr (src);
1854 /* If the string length is zero, return the dst parameter. */
1855 if (p && *p == '\0')
1857 replace_call_with_value (gsi, dst);
1858 return true;
1861 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1862 return false;
1864 /* See if we can store by pieces into (dst + strlen(dst)). */
1865 tree newdst;
1866 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1867 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1869 if (!strlen_fn || !memcpy_fn)
1870 return false;
1872 /* If the length of the source string isn't computable don't
1873 split strcat into strlen and memcpy. */
1874 tree len = get_maxval_strlen (src, 0);
1875 if (! len)
1876 return false;
1878 /* Create strlen (dst). */
1879 gimple_seq stmts = NULL, stmts2;
1880 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
1881 gimple_set_location (repl, loc);
1882 newdst = create_tmp_reg_or_ssa_name (size_type_node);
1883 gimple_call_set_lhs (repl, newdst);
1884 gimple_seq_add_stmt_without_update (&stmts, repl);
1886 /* Create (dst p+ strlen (dst)). */
1887 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1888 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1889 gimple_seq_add_seq_without_update (&stmts, stmts2);
1891 len = fold_convert_loc (loc, size_type_node, len);
1892 len = size_binop_loc (loc, PLUS_EXPR, len,
1893 build_int_cst (size_type_node, 1));
1894 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1895 gimple_seq_add_seq_without_update (&stmts, stmts2);
1897 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1898 gimple_seq_add_stmt_without_update (&stmts, repl);
1899 if (gimple_call_lhs (stmt))
1901 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1902 gimple_seq_add_stmt_without_update (&stmts, repl);
1903 gsi_replace_with_seq_vops (gsi, stmts);
1904 /* gsi now points at the assignment to the lhs, get a
1905 stmt iterator to the memcpy call.
1906 ??? We can't use gsi_for_stmt as that doesn't work when the
1907 CFG isn't built yet. */
1908 gimple_stmt_iterator gsi2 = *gsi;
1909 gsi_prev (&gsi2);
1910 fold_stmt (&gsi2);
1912 else
1914 gsi_replace_with_seq_vops (gsi, stmts);
1915 fold_stmt (gsi);
1917 return true;
1920 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1921 are the arguments to the call. */
1923 static bool
1924 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1926 gimple *stmt = gsi_stmt (*gsi);
1927 tree dest = gimple_call_arg (stmt, 0);
1928 tree src = gimple_call_arg (stmt, 1);
1929 tree size = gimple_call_arg (stmt, 2);
1930 tree fn;
1931 const char *p;
1934 p = c_getstr (src);
1935 /* If the SRC parameter is "", return DEST. */
1936 if (p && *p == '\0')
1938 replace_call_with_value (gsi, dest);
1939 return true;
1942 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1943 return false;
1945 /* If __builtin_strcat_chk is used, assume strcat is available. */
1946 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1947 if (!fn)
1948 return false;
1950 gimple *repl = gimple_build_call (fn, 2, dest, src);
1951 replace_call_with_call_and_fold (gsi, repl);
1952 return true;
1955 /* Simplify a call to the strncat builtin. */
1957 static bool
1958 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1960 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1961 tree dst = gimple_call_arg (stmt, 0);
1962 tree src = gimple_call_arg (stmt, 1);
1963 tree len = gimple_call_arg (stmt, 2);
1965 const char *p = c_getstr (src);
1967 /* If the requested length is zero, or the src parameter string
1968 length is zero, return the dst parameter. */
1969 if (integer_zerop (len) || (p && *p == '\0'))
1971 replace_call_with_value (gsi, dst);
1972 return true;
1975 if (TREE_CODE (len) != INTEGER_CST || !p)
1976 return false;
1978 unsigned srclen = strlen (p);
1980 int cmpsrc = compare_tree_int (len, srclen);
1982 /* Return early if the requested len is less than the string length.
1983 Warnings will be issued elsewhere later. */
1984 if (cmpsrc < 0)
1985 return false;
1987 unsigned HOST_WIDE_INT dstsize;
1989 bool nowarn = gimple_no_warning_p (stmt);
1991 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
1993 int cmpdst = compare_tree_int (len, dstsize);
1995 if (cmpdst >= 0)
1997 tree fndecl = gimple_call_fndecl (stmt);
1999 /* Strncat copies (at most) LEN bytes and always appends
2000 the terminating NUL so the specified bound should never
2001 be equal to (or greater than) the size of the destination.
2002 If it is, the copy could overflow. */
2003 location_t loc = gimple_location (stmt);
2004 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2005 cmpdst == 0
2006 ? G_("%G%qD specified bound %E equals "
2007 "destination size")
2008 : G_("%G%qD specified bound %E exceeds "
2009 "destination size %wu"),
2010 stmt, fndecl, len, dstsize);
2011 if (nowarn)
2012 gimple_set_no_warning (stmt, true);
2016 if (!nowarn && cmpsrc == 0)
2018 tree fndecl = gimple_call_fndecl (stmt);
2020 /* To avoid certain truncation the specified bound should also
2021 not be equal to (or less than) the length of the source. */
2022 location_t loc = gimple_location (stmt);
2023 if (warning_at (loc, OPT_Wstringop_overflow_,
2024 "%G%qD specified bound %E equals source length",
2025 stmt, fndecl, len))
2026 gimple_set_no_warning (stmt, true);
2029 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2031 /* If the replacement _DECL isn't initialized, don't do the
2032 transformation. */
2033 if (!fn)
2034 return false;
2036 /* Otherwise, emit a call to strcat. */
2037 gcall *repl = gimple_build_call (fn, 2, dst, src);
2038 replace_call_with_call_and_fold (gsi, repl);
2039 return true;
2042 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2043 LEN, and SIZE. */
2045 static bool
2046 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2048 gimple *stmt = gsi_stmt (*gsi);
2049 tree dest = gimple_call_arg (stmt, 0);
2050 tree src = gimple_call_arg (stmt, 1);
2051 tree len = gimple_call_arg (stmt, 2);
2052 tree size = gimple_call_arg (stmt, 3);
2053 tree fn;
2054 const char *p;
2056 p = c_getstr (src);
2057 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2058 if ((p && *p == '\0')
2059 || integer_zerop (len))
2061 replace_call_with_value (gsi, dest);
2062 return true;
2065 if (! tree_fits_uhwi_p (size))
2066 return false;
2068 if (! integer_all_onesp (size))
2070 tree src_len = c_strlen (src, 1);
2071 if (src_len
2072 && tree_fits_uhwi_p (src_len)
2073 && tree_fits_uhwi_p (len)
2074 && ! tree_int_cst_lt (len, src_len))
2076 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2077 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2078 if (!fn)
2079 return false;
2081 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2082 replace_call_with_call_and_fold (gsi, repl);
2083 return true;
2085 return false;
2088 /* If __builtin_strncat_chk is used, assume strncat is available. */
2089 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2090 if (!fn)
2091 return false;
2093 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2094 replace_call_with_call_and_fold (gsi, repl);
2095 return true;
2098 /* Build and append gimple statements to STMTS that would load a first
2099 character of a memory location identified by STR. LOC is location
2100 of the statement. */
2102 static tree
2103 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2105 tree var;
2107 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2108 tree cst_uchar_ptr_node
2109 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2110 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2112 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2113 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2114 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2116 gimple_assign_set_lhs (stmt, var);
2117 gimple_seq_add_stmt_without_update (stmts, stmt);
2119 return var;
2122 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2123 FCODE is the name of the builtin. */
2125 static bool
2126 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2128 gimple *stmt = gsi_stmt (*gsi);
2129 tree callee = gimple_call_fndecl (stmt);
2130 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2132 tree type = integer_type_node;
2133 tree str1 = gimple_call_arg (stmt, 0);
2134 tree str2 = gimple_call_arg (stmt, 1);
2135 tree lhs = gimple_call_lhs (stmt);
2136 HOST_WIDE_INT length = -1;
2138 /* Handle strncmp and strncasecmp functions. */
2139 if (gimple_call_num_args (stmt) == 3)
2141 tree len = gimple_call_arg (stmt, 2);
2142 if (tree_fits_uhwi_p (len))
2143 length = tree_to_uhwi (len);
2146 /* If the LEN parameter is zero, return zero. */
2147 if (length == 0)
2149 replace_call_with_value (gsi, integer_zero_node);
2150 return true;
2153 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2154 if (operand_equal_p (str1, str2, 0))
2156 replace_call_with_value (gsi, integer_zero_node);
2157 return true;
2160 const char *p1 = c_getstr (str1);
2161 const char *p2 = c_getstr (str2);
2163 /* For known strings, return an immediate value. */
2164 if (p1 && p2)
2166 int r = 0;
2167 bool known_result = false;
2169 switch (fcode)
2171 case BUILT_IN_STRCMP:
2173 r = strcmp (p1, p2);
2174 known_result = true;
2175 break;
2177 case BUILT_IN_STRNCMP:
2179 if (length == -1)
2180 break;
2181 r = strncmp (p1, p2, length);
2182 known_result = true;
2183 break;
2185 /* Only handleable situation is where the string are equal (result 0),
2186 which is already handled by operand_equal_p case. */
2187 case BUILT_IN_STRCASECMP:
2188 break;
2189 case BUILT_IN_STRNCASECMP:
2191 if (length == -1)
2192 break;
2193 r = strncmp (p1, p2, length);
2194 if (r == 0)
2195 known_result = true;
2196 break;
2198 default:
2199 gcc_unreachable ();
2202 if (known_result)
2204 replace_call_with_value (gsi, build_cmp_result (type, r));
2205 return true;
2209 bool nonzero_length = length >= 1
2210 || fcode == BUILT_IN_STRCMP
2211 || fcode == BUILT_IN_STRCASECMP;
2213 location_t loc = gimple_location (stmt);
2215 /* If the second arg is "", return *(const unsigned char*)arg1. */
2216 if (p2 && *p2 == '\0' && nonzero_length)
2218 gimple_seq stmts = NULL;
2219 tree var = gimple_load_first_char (loc, str1, &stmts);
2220 if (lhs)
2222 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2223 gimple_seq_add_stmt_without_update (&stmts, stmt);
2226 gsi_replace_with_seq_vops (gsi, stmts);
2227 return true;
2230 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2231 if (p1 && *p1 == '\0' && nonzero_length)
2233 gimple_seq stmts = NULL;
2234 tree var = gimple_load_first_char (loc, str2, &stmts);
2236 if (lhs)
2238 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2239 stmt = gimple_build_assign (c, NOP_EXPR, var);
2240 gimple_seq_add_stmt_without_update (&stmts, stmt);
2242 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2243 gimple_seq_add_stmt_without_update (&stmts, stmt);
2246 gsi_replace_with_seq_vops (gsi, stmts);
2247 return true;
2250 /* If len parameter is one, return an expression corresponding to
2251 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2252 if (fcode == BUILT_IN_STRNCMP && length == 1)
2254 gimple_seq stmts = NULL;
2255 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2256 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2258 if (lhs)
2260 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2261 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2262 gimple_seq_add_stmt_without_update (&stmts, convert1);
2264 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2265 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2266 gimple_seq_add_stmt_without_update (&stmts, convert2);
2268 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2269 gimple_seq_add_stmt_without_update (&stmts, stmt);
2272 gsi_replace_with_seq_vops (gsi, stmts);
2273 return true;
2276 /* If length is larger than the length of one constant string,
2277 replace strncmp with corresponding strcmp */
2278 if (fcode == BUILT_IN_STRNCMP
2279 && length > 0
2280 && ((p2 && (size_t) length > strlen (p2))
2281 || (p1 && (size_t) length > strlen (p1))))
2283 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2284 if (!fn)
2285 return false;
2286 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2287 replace_call_with_call_and_fold (gsi, repl);
2288 return true;
2291 return false;
2294 /* Fold a call to the memchr pointed by GSI iterator. */
2296 static bool
2297 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2299 gimple *stmt = gsi_stmt (*gsi);
2300 tree lhs = gimple_call_lhs (stmt);
2301 tree arg1 = gimple_call_arg (stmt, 0);
2302 tree arg2 = gimple_call_arg (stmt, 1);
2303 tree len = gimple_call_arg (stmt, 2);
2305 /* If the LEN parameter is zero, return zero. */
2306 if (integer_zerop (len))
2308 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2309 return true;
2312 char c;
2313 if (TREE_CODE (arg2) != INTEGER_CST
2314 || !tree_fits_uhwi_p (len)
2315 || !target_char_cst_p (arg2, &c))
2316 return false;
2318 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2319 unsigned HOST_WIDE_INT string_length;
2320 const char *p1 = c_getstr (arg1, &string_length);
2322 if (p1)
2324 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2325 if (r == NULL)
2327 if (length <= string_length)
2329 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2330 return true;
2333 else
2335 unsigned HOST_WIDE_INT offset = r - p1;
2336 gimple_seq stmts = NULL;
2337 if (lhs != NULL_TREE)
2339 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2340 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2341 arg1, offset_cst);
2342 gimple_seq_add_stmt_without_update (&stmts, stmt);
2344 else
2345 gimple_seq_add_stmt_without_update (&stmts,
2346 gimple_build_nop ());
2348 gsi_replace_with_seq_vops (gsi, stmts);
2349 return true;
2353 return false;
2356 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2357 to the call. IGNORE is true if the value returned
2358 by the builtin will be ignored. UNLOCKED is true is true if this
2359 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2360 the known length of the string. Return NULL_TREE if no simplification
2361 was possible. */
2363 static bool
2364 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2365 tree arg0, tree arg1,
2366 bool unlocked)
2368 gimple *stmt = gsi_stmt (*gsi);
2370 /* If we're using an unlocked function, assume the other unlocked
2371 functions exist explicitly. */
2372 tree const fn_fputc = (unlocked
2373 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2374 : builtin_decl_implicit (BUILT_IN_FPUTC));
2375 tree const fn_fwrite = (unlocked
2376 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2377 : builtin_decl_implicit (BUILT_IN_FWRITE));
2379 /* If the return value is used, don't do the transformation. */
2380 if (gimple_call_lhs (stmt))
2381 return false;
2383 /* Get the length of the string passed to fputs. If the length
2384 can't be determined, punt. */
2385 tree len = get_maxval_strlen (arg0, 0);
2386 if (!len
2387 || TREE_CODE (len) != INTEGER_CST)
2388 return false;
2390 switch (compare_tree_int (len, 1))
2392 case -1: /* length is 0, delete the call entirely . */
2393 replace_call_with_value (gsi, integer_zero_node);
2394 return true;
2396 case 0: /* length is 1, call fputc. */
2398 const char *p = c_getstr (arg0);
2399 if (p != NULL)
2401 if (!fn_fputc)
2402 return false;
2404 gimple *repl = gimple_build_call (fn_fputc, 2,
2405 build_int_cst
2406 (integer_type_node, p[0]), arg1);
2407 replace_call_with_call_and_fold (gsi, repl);
2408 return true;
2411 /* FALLTHROUGH */
2412 case 1: /* length is greater than 1, call fwrite. */
2414 /* If optimizing for size keep fputs. */
2415 if (optimize_function_for_size_p (cfun))
2416 return false;
2417 /* New argument list transforming fputs(string, stream) to
2418 fwrite(string, 1, len, stream). */
2419 if (!fn_fwrite)
2420 return false;
2422 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2423 size_one_node, len, arg1);
2424 replace_call_with_call_and_fold (gsi, repl);
2425 return true;
2427 default:
2428 gcc_unreachable ();
2430 return false;
2433 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2434 DEST, SRC, LEN, and SIZE are the arguments to the call.
2435 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2436 code of the builtin. If MAXLEN is not NULL, it is maximum length
2437 passed as third argument. */
2439 static bool
2440 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2441 tree dest, tree src, tree len, tree size,
2442 enum built_in_function fcode)
2444 gimple *stmt = gsi_stmt (*gsi);
2445 location_t loc = gimple_location (stmt);
2446 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2447 tree fn;
2449 /* If SRC and DEST are the same (and not volatile), return DEST
2450 (resp. DEST+LEN for __mempcpy_chk). */
2451 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2453 if (fcode != BUILT_IN_MEMMOVE && fcode != BUILT_IN_MEMMOVE_CHK)
2455 tree func = gimple_call_fndecl (stmt);
2457 warning_at (loc, OPT_Wrestrict,
2458 "%qD source argument is the same as destination",
2459 func);
2462 if (fcode != BUILT_IN_MEMPCPY_CHK)
2464 replace_call_with_value (gsi, dest);
2465 return true;
2467 else
2469 gimple_seq stmts = NULL;
2470 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2471 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2472 TREE_TYPE (dest), dest, len);
2473 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2474 replace_call_with_value (gsi, temp);
2475 return true;
2479 if (! tree_fits_uhwi_p (size))
2480 return false;
2482 tree maxlen = get_maxval_strlen (len, 2);
2483 if (! integer_all_onesp (size))
2485 if (! tree_fits_uhwi_p (len))
2487 /* If LEN is not constant, try MAXLEN too.
2488 For MAXLEN only allow optimizing into non-_ocs function
2489 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2490 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2492 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2494 /* (void) __mempcpy_chk () can be optimized into
2495 (void) __memcpy_chk (). */
2496 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2497 if (!fn)
2498 return false;
2500 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2501 replace_call_with_call_and_fold (gsi, repl);
2502 return true;
2504 return false;
2507 else
2508 maxlen = len;
2510 if (tree_int_cst_lt (size, maxlen))
2511 return false;
2514 fn = NULL_TREE;
2515 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2516 mem{cpy,pcpy,move,set} is available. */
2517 switch (fcode)
2519 case BUILT_IN_MEMCPY_CHK:
2520 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2521 break;
2522 case BUILT_IN_MEMPCPY_CHK:
2523 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2524 break;
2525 case BUILT_IN_MEMMOVE_CHK:
2526 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2527 break;
2528 case BUILT_IN_MEMSET_CHK:
2529 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2530 break;
2531 default:
2532 break;
2535 if (!fn)
2536 return false;
2538 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2539 replace_call_with_call_and_fold (gsi, repl);
2540 return true;
2543 /* Fold a call to the __st[rp]cpy_chk builtin.
2544 DEST, SRC, and SIZE are the arguments to the call.
2545 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2546 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2547 strings passed as second argument. */
2549 static bool
2550 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2551 tree dest,
2552 tree src, tree size,
2553 enum built_in_function fcode)
2555 gimple *stmt = gsi_stmt (*gsi);
2556 location_t loc = gimple_location (stmt);
2557 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2558 tree len, fn;
2560 /* If SRC and DEST are the same (and not volatile), return DEST. */
2561 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2563 tree func = gimple_call_fndecl (stmt);
2565 warning_at (loc, OPT_Wrestrict,
2566 "%qD source argument is the same as destination",
2567 func);
2569 replace_call_with_value (gsi, dest);
2570 return true;
2573 if (! tree_fits_uhwi_p (size))
2574 return false;
2576 tree maxlen = get_maxval_strlen (src, 1);
2577 if (! integer_all_onesp (size))
2579 len = c_strlen (src, 1);
2580 if (! len || ! tree_fits_uhwi_p (len))
2582 /* If LEN is not constant, try MAXLEN too.
2583 For MAXLEN only allow optimizing into non-_ocs function
2584 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2585 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2587 if (fcode == BUILT_IN_STPCPY_CHK)
2589 if (! ignore)
2590 return false;
2592 /* If return value of __stpcpy_chk is ignored,
2593 optimize into __strcpy_chk. */
2594 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2595 if (!fn)
2596 return false;
2598 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2599 replace_call_with_call_and_fold (gsi, repl);
2600 return true;
2603 if (! len || TREE_SIDE_EFFECTS (len))
2604 return false;
2606 /* If c_strlen returned something, but not a constant,
2607 transform __strcpy_chk into __memcpy_chk. */
2608 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2609 if (!fn)
2610 return false;
2612 gimple_seq stmts = NULL;
2613 len = gimple_convert (&stmts, loc, size_type_node, len);
2614 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2615 build_int_cst (size_type_node, 1));
2616 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2617 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2618 replace_call_with_call_and_fold (gsi, repl);
2619 return true;
2622 else
2623 maxlen = len;
2625 if (! tree_int_cst_lt (maxlen, size))
2626 return false;
2629 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2630 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2631 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2632 if (!fn)
2633 return false;
2635 gimple *repl = gimple_build_call (fn, 2, dest, src);
2636 replace_call_with_call_and_fold (gsi, repl);
2637 return true;
2640 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2641 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2642 length passed as third argument. IGNORE is true if return value can be
2643 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2645 static bool
2646 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2647 tree dest, tree src,
2648 tree len, tree size,
2649 enum built_in_function fcode)
2651 gimple *stmt = gsi_stmt (*gsi);
2652 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2653 tree fn;
2655 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2657 /* If return value of __stpncpy_chk is ignored,
2658 optimize into __strncpy_chk. */
2659 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2660 if (fn)
2662 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2663 replace_call_with_call_and_fold (gsi, repl);
2664 return true;
2668 if (! tree_fits_uhwi_p (size))
2669 return false;
2671 tree maxlen = get_maxval_strlen (len, 2);
2672 if (! integer_all_onesp (size))
2674 if (! tree_fits_uhwi_p (len))
2676 /* If LEN is not constant, try MAXLEN too.
2677 For MAXLEN only allow optimizing into non-_ocs function
2678 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2679 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2680 return false;
2682 else
2683 maxlen = len;
2685 if (tree_int_cst_lt (size, maxlen))
2686 return false;
2689 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2690 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2691 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2692 if (!fn)
2693 return false;
2695 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2696 replace_call_with_call_and_fold (gsi, repl);
2697 return true;
2700 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2701 Return NULL_TREE if no simplification can be made. */
2703 static bool
2704 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2706 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2707 location_t loc = gimple_location (stmt);
2708 tree dest = gimple_call_arg (stmt, 0);
2709 tree src = gimple_call_arg (stmt, 1);
2710 tree fn, len, lenp1;
2712 /* If the result is unused, replace stpcpy with strcpy. */
2713 if (gimple_call_lhs (stmt) == NULL_TREE)
2715 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2716 if (!fn)
2717 return false;
2718 gimple_call_set_fndecl (stmt, fn);
2719 fold_stmt (gsi);
2720 return true;
2723 len = c_strlen (src, 1);
2724 if (!len
2725 || TREE_CODE (len) != INTEGER_CST)
2726 return false;
2728 if (optimize_function_for_size_p (cfun)
2729 /* If length is zero it's small enough. */
2730 && !integer_zerop (len))
2731 return false;
2733 /* If the source has a known length replace stpcpy with memcpy. */
2734 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2735 if (!fn)
2736 return false;
2738 gimple_seq stmts = NULL;
2739 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2740 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2741 tem, build_int_cst (size_type_node, 1));
2742 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2743 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2744 gimple_set_vuse (repl, gimple_vuse (stmt));
2745 gimple_set_vdef (repl, gimple_vdef (stmt));
2746 if (gimple_vdef (repl)
2747 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2748 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2749 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2750 /* Replace the result with dest + len. */
2751 stmts = NULL;
2752 tem = gimple_convert (&stmts, loc, sizetype, len);
2753 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2754 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2755 POINTER_PLUS_EXPR, dest, tem);
2756 gsi_replace (gsi, ret, false);
2757 /* Finally fold the memcpy call. */
2758 gimple_stmt_iterator gsi2 = *gsi;
2759 gsi_prev (&gsi2);
2760 fold_stmt (&gsi2);
2761 return true;
2764 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2765 NULL_TREE if a normal call should be emitted rather than expanding
2766 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2767 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2768 passed as second argument. */
2770 static bool
2771 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2772 enum built_in_function fcode)
2774 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2775 tree dest, size, len, fn, fmt, flag;
2776 const char *fmt_str;
2778 /* Verify the required arguments in the original call. */
2779 if (gimple_call_num_args (stmt) < 5)
2780 return false;
2782 dest = gimple_call_arg (stmt, 0);
2783 len = gimple_call_arg (stmt, 1);
2784 flag = gimple_call_arg (stmt, 2);
2785 size = gimple_call_arg (stmt, 3);
2786 fmt = gimple_call_arg (stmt, 4);
2788 if (! tree_fits_uhwi_p (size))
2789 return false;
2791 if (! integer_all_onesp (size))
2793 tree maxlen = get_maxval_strlen (len, 2);
2794 if (! tree_fits_uhwi_p (len))
2796 /* If LEN is not constant, try MAXLEN too.
2797 For MAXLEN only allow optimizing into non-_ocs function
2798 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2799 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2800 return false;
2802 else
2803 maxlen = len;
2805 if (tree_int_cst_lt (size, maxlen))
2806 return false;
2809 if (!init_target_chars ())
2810 return false;
2812 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2813 or if format doesn't contain % chars or is "%s". */
2814 if (! integer_zerop (flag))
2816 fmt_str = c_getstr (fmt);
2817 if (fmt_str == NULL)
2818 return false;
2819 if (strchr (fmt_str, target_percent) != NULL
2820 && strcmp (fmt_str, target_percent_s))
2821 return false;
2824 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2825 available. */
2826 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2827 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2828 if (!fn)
2829 return false;
2831 /* Replace the called function and the first 5 argument by 3 retaining
2832 trailing varargs. */
2833 gimple_call_set_fndecl (stmt, fn);
2834 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2835 gimple_call_set_arg (stmt, 0, dest);
2836 gimple_call_set_arg (stmt, 1, len);
2837 gimple_call_set_arg (stmt, 2, fmt);
2838 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2839 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2840 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2841 fold_stmt (gsi);
2842 return true;
2845 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2846 Return NULL_TREE if a normal call should be emitted rather than
2847 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2848 or BUILT_IN_VSPRINTF_CHK. */
2850 static bool
2851 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2852 enum built_in_function fcode)
2854 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2855 tree dest, size, len, fn, fmt, flag;
2856 const char *fmt_str;
2857 unsigned nargs = gimple_call_num_args (stmt);
2859 /* Verify the required arguments in the original call. */
2860 if (nargs < 4)
2861 return false;
2862 dest = gimple_call_arg (stmt, 0);
2863 flag = gimple_call_arg (stmt, 1);
2864 size = gimple_call_arg (stmt, 2);
2865 fmt = gimple_call_arg (stmt, 3);
2867 if (! tree_fits_uhwi_p (size))
2868 return false;
2870 len = NULL_TREE;
2872 if (!init_target_chars ())
2873 return false;
2875 /* Check whether the format is a literal string constant. */
2876 fmt_str = c_getstr (fmt);
2877 if (fmt_str != NULL)
2879 /* If the format doesn't contain % args or %%, we know the size. */
2880 if (strchr (fmt_str, target_percent) == 0)
2882 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2883 len = build_int_cstu (size_type_node, strlen (fmt_str));
2885 /* If the format is "%s" and first ... argument is a string literal,
2886 we know the size too. */
2887 else if (fcode == BUILT_IN_SPRINTF_CHK
2888 && strcmp (fmt_str, target_percent_s) == 0)
2890 tree arg;
2892 if (nargs == 5)
2894 arg = gimple_call_arg (stmt, 4);
2895 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2897 len = c_strlen (arg, 1);
2898 if (! len || ! tree_fits_uhwi_p (len))
2899 len = NULL_TREE;
2905 if (! integer_all_onesp (size))
2907 if (! len || ! tree_int_cst_lt (len, size))
2908 return false;
2911 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2912 or if format doesn't contain % chars or is "%s". */
2913 if (! integer_zerop (flag))
2915 if (fmt_str == NULL)
2916 return false;
2917 if (strchr (fmt_str, target_percent) != NULL
2918 && strcmp (fmt_str, target_percent_s))
2919 return false;
2922 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2923 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2924 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2925 if (!fn)
2926 return false;
2928 /* Replace the called function and the first 4 argument by 2 retaining
2929 trailing varargs. */
2930 gimple_call_set_fndecl (stmt, fn);
2931 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2932 gimple_call_set_arg (stmt, 0, dest);
2933 gimple_call_set_arg (stmt, 1, fmt);
2934 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2935 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2936 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2937 fold_stmt (gsi);
2938 return true;
2941 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2942 ORIG may be null if this is a 2-argument call. We don't attempt to
2943 simplify calls with more than 3 arguments.
2945 Return true if simplification was possible, otherwise false. */
2947 bool
2948 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2950 gimple *stmt = gsi_stmt (*gsi);
2951 tree dest = gimple_call_arg (stmt, 0);
2952 tree fmt = gimple_call_arg (stmt, 1);
2953 tree orig = NULL_TREE;
2954 const char *fmt_str = NULL;
2956 /* Verify the required arguments in the original call. We deal with two
2957 types of sprintf() calls: 'sprintf (str, fmt)' and
2958 'sprintf (dest, "%s", orig)'. */
2959 if (gimple_call_num_args (stmt) > 3)
2960 return false;
2962 if (gimple_call_num_args (stmt) == 3)
2963 orig = gimple_call_arg (stmt, 2);
2965 /* Check whether the format is a literal string constant. */
2966 fmt_str = c_getstr (fmt);
2967 if (fmt_str == NULL)
2968 return false;
2970 if (!init_target_chars ())
2971 return false;
2973 /* If the format doesn't contain % args or %%, use strcpy. */
2974 if (strchr (fmt_str, target_percent) == NULL)
2976 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2978 if (!fn)
2979 return false;
2981 /* Don't optimize sprintf (buf, "abc", ptr++). */
2982 if (orig)
2983 return false;
2985 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2986 'format' is known to contain no % formats. */
2987 gimple_seq stmts = NULL;
2988 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
2989 gimple_seq_add_stmt_without_update (&stmts, repl);
2990 if (gimple_call_lhs (stmt))
2992 repl = gimple_build_assign (gimple_call_lhs (stmt),
2993 build_int_cst (integer_type_node,
2994 strlen (fmt_str)));
2995 gimple_seq_add_stmt_without_update (&stmts, repl);
2996 gsi_replace_with_seq_vops (gsi, stmts);
2997 /* gsi now points at the assignment to the lhs, get a
2998 stmt iterator to the memcpy call.
2999 ??? We can't use gsi_for_stmt as that doesn't work when the
3000 CFG isn't built yet. */
3001 gimple_stmt_iterator gsi2 = *gsi;
3002 gsi_prev (&gsi2);
3003 fold_stmt (&gsi2);
3005 else
3007 gsi_replace_with_seq_vops (gsi, stmts);
3008 fold_stmt (gsi);
3010 return true;
3013 /* If the format is "%s", use strcpy if the result isn't used. */
3014 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3016 tree fn;
3017 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3019 if (!fn)
3020 return false;
3022 /* Don't crash on sprintf (str1, "%s"). */
3023 if (!orig)
3024 return false;
3026 tree orig_len = NULL_TREE;
3027 if (gimple_call_lhs (stmt))
3029 orig_len = get_maxval_strlen (orig, 0);
3030 if (!orig_len)
3031 return false;
3034 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3035 gimple_seq stmts = NULL;
3036 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3037 gimple_seq_add_stmt_without_update (&stmts, repl);
3038 if (gimple_call_lhs (stmt))
3040 if (!useless_type_conversion_p (integer_type_node,
3041 TREE_TYPE (orig_len)))
3042 orig_len = fold_convert (integer_type_node, orig_len);
3043 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3044 gimple_seq_add_stmt_without_update (&stmts, repl);
3045 gsi_replace_with_seq_vops (gsi, stmts);
3046 /* gsi now points at the assignment to the lhs, get a
3047 stmt iterator to the memcpy call.
3048 ??? We can't use gsi_for_stmt as that doesn't work when the
3049 CFG isn't built yet. */
3050 gimple_stmt_iterator gsi2 = *gsi;
3051 gsi_prev (&gsi2);
3052 fold_stmt (&gsi2);
3054 else
3056 gsi_replace_with_seq_vops (gsi, stmts);
3057 fold_stmt (gsi);
3059 return true;
3061 return false;
3064 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3065 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3066 attempt to simplify calls with more than 4 arguments.
3068 Return true if simplification was possible, otherwise false. */
3070 bool
3071 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3073 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3074 tree dest = gimple_call_arg (stmt, 0);
3075 tree destsize = gimple_call_arg (stmt, 1);
3076 tree fmt = gimple_call_arg (stmt, 2);
3077 tree orig = NULL_TREE;
3078 const char *fmt_str = NULL;
3080 if (gimple_call_num_args (stmt) > 4)
3081 return false;
3083 if (gimple_call_num_args (stmt) == 4)
3084 orig = gimple_call_arg (stmt, 3);
3086 if (!tree_fits_uhwi_p (destsize))
3087 return false;
3088 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3090 /* Check whether the format is a literal string constant. */
3091 fmt_str = c_getstr (fmt);
3092 if (fmt_str == NULL)
3093 return false;
3095 if (!init_target_chars ())
3096 return false;
3098 /* If the format doesn't contain % args or %%, use strcpy. */
3099 if (strchr (fmt_str, target_percent) == NULL)
3101 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3102 if (!fn)
3103 return false;
3105 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3106 if (orig)
3107 return false;
3109 /* We could expand this as
3110 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3111 or to
3112 memcpy (str, fmt_with_nul_at_cstm1, cst);
3113 but in the former case that might increase code size
3114 and in the latter case grow .rodata section too much.
3115 So punt for now. */
3116 size_t len = strlen (fmt_str);
3117 if (len >= destlen)
3118 return false;
3120 gimple_seq stmts = NULL;
3121 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3122 gimple_seq_add_stmt_without_update (&stmts, repl);
3123 if (gimple_call_lhs (stmt))
3125 repl = gimple_build_assign (gimple_call_lhs (stmt),
3126 build_int_cst (integer_type_node, len));
3127 gimple_seq_add_stmt_without_update (&stmts, repl);
3128 gsi_replace_with_seq_vops (gsi, stmts);
3129 /* gsi now points at the assignment to the lhs, get a
3130 stmt iterator to the memcpy call.
3131 ??? We can't use gsi_for_stmt as that doesn't work when the
3132 CFG isn't built yet. */
3133 gimple_stmt_iterator gsi2 = *gsi;
3134 gsi_prev (&gsi2);
3135 fold_stmt (&gsi2);
3137 else
3139 gsi_replace_with_seq_vops (gsi, stmts);
3140 fold_stmt (gsi);
3142 return true;
3145 /* If the format is "%s", use strcpy if the result isn't used. */
3146 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3148 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3149 if (!fn)
3150 return false;
3152 /* Don't crash on snprintf (str1, cst, "%s"). */
3153 if (!orig)
3154 return false;
3156 tree orig_len = get_maxval_strlen (orig, 0);
3157 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3158 return false;
3160 /* We could expand this as
3161 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3162 or to
3163 memcpy (str1, str2_with_nul_at_cstm1, cst);
3164 but in the former case that might increase code size
3165 and in the latter case grow .rodata section too much.
3166 So punt for now. */
3167 if (compare_tree_int (orig_len, destlen) >= 0)
3168 return false;
3170 /* Convert snprintf (str1, cst, "%s", str2) into
3171 strcpy (str1, str2) if strlen (str2) < cst. */
3172 gimple_seq stmts = NULL;
3173 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3174 gimple_seq_add_stmt_without_update (&stmts, repl);
3175 if (gimple_call_lhs (stmt))
3177 if (!useless_type_conversion_p (integer_type_node,
3178 TREE_TYPE (orig_len)))
3179 orig_len = fold_convert (integer_type_node, orig_len);
3180 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3181 gimple_seq_add_stmt_without_update (&stmts, repl);
3182 gsi_replace_with_seq_vops (gsi, stmts);
3183 /* gsi now points at the assignment to the lhs, get a
3184 stmt iterator to the memcpy call.
3185 ??? We can't use gsi_for_stmt as that doesn't work when the
3186 CFG isn't built yet. */
3187 gimple_stmt_iterator gsi2 = *gsi;
3188 gsi_prev (&gsi2);
3189 fold_stmt (&gsi2);
3191 else
3193 gsi_replace_with_seq_vops (gsi, stmts);
3194 fold_stmt (gsi);
3196 return true;
3198 return false;
3201 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3202 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3203 more than 3 arguments, and ARG may be null in the 2-argument case.
3205 Return NULL_TREE if no simplification was possible, otherwise return the
3206 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3207 code of the function to be simplified. */
3209 static bool
3210 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3211 tree fp, tree fmt, tree arg,
3212 enum built_in_function fcode)
3214 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3215 tree fn_fputc, fn_fputs;
3216 const char *fmt_str = NULL;
3218 /* If the return value is used, don't do the transformation. */
3219 if (gimple_call_lhs (stmt) != NULL_TREE)
3220 return false;
3222 /* Check whether the format is a literal string constant. */
3223 fmt_str = c_getstr (fmt);
3224 if (fmt_str == NULL)
3225 return false;
3227 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3229 /* If we're using an unlocked function, assume the other
3230 unlocked functions exist explicitly. */
3231 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3232 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3234 else
3236 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3237 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3240 if (!init_target_chars ())
3241 return false;
3243 /* If the format doesn't contain % args or %%, use strcpy. */
3244 if (strchr (fmt_str, target_percent) == NULL)
3246 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3247 && arg)
3248 return false;
3250 /* If the format specifier was "", fprintf does nothing. */
3251 if (fmt_str[0] == '\0')
3253 replace_call_with_value (gsi, NULL_TREE);
3254 return true;
3257 /* When "string" doesn't contain %, replace all cases of
3258 fprintf (fp, string) with fputs (string, fp). The fputs
3259 builtin will take care of special cases like length == 1. */
3260 if (fn_fputs)
3262 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3263 replace_call_with_call_and_fold (gsi, repl);
3264 return true;
3268 /* The other optimizations can be done only on the non-va_list variants. */
3269 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3270 return false;
3272 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3273 else if (strcmp (fmt_str, target_percent_s) == 0)
3275 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3276 return false;
3277 if (fn_fputs)
3279 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3280 replace_call_with_call_and_fold (gsi, repl);
3281 return true;
3285 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3286 else if (strcmp (fmt_str, target_percent_c) == 0)
3288 if (!arg
3289 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3290 return false;
3291 if (fn_fputc)
3293 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3294 replace_call_with_call_and_fold (gsi, repl);
3295 return true;
3299 return false;
3302 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3303 FMT and ARG are the arguments to the call; we don't fold cases with
3304 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3306 Return NULL_TREE if no simplification was possible, otherwise return the
3307 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3308 code of the function to be simplified. */
3310 static bool
3311 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3312 tree arg, enum built_in_function fcode)
3314 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3315 tree fn_putchar, fn_puts, newarg;
3316 const char *fmt_str = NULL;
3318 /* If the return value is used, don't do the transformation. */
3319 if (gimple_call_lhs (stmt) != NULL_TREE)
3320 return false;
3322 /* Check whether the format is a literal string constant. */
3323 fmt_str = c_getstr (fmt);
3324 if (fmt_str == NULL)
3325 return false;
3327 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3329 /* If we're using an unlocked function, assume the other
3330 unlocked functions exist explicitly. */
3331 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3332 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3334 else
3336 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3337 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3340 if (!init_target_chars ())
3341 return false;
3343 if (strcmp (fmt_str, target_percent_s) == 0
3344 || strchr (fmt_str, target_percent) == NULL)
3346 const char *str;
3348 if (strcmp (fmt_str, target_percent_s) == 0)
3350 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3351 return false;
3353 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3354 return false;
3356 str = c_getstr (arg);
3357 if (str == NULL)
3358 return false;
3360 else
3362 /* The format specifier doesn't contain any '%' characters. */
3363 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3364 && arg)
3365 return false;
3366 str = fmt_str;
3369 /* If the string was "", printf does nothing. */
3370 if (str[0] == '\0')
3372 replace_call_with_value (gsi, NULL_TREE);
3373 return true;
3376 /* If the string has length of 1, call putchar. */
3377 if (str[1] == '\0')
3379 /* Given printf("c"), (where c is any one character,)
3380 convert "c"[0] to an int and pass that to the replacement
3381 function. */
3382 newarg = build_int_cst (integer_type_node, str[0]);
3383 if (fn_putchar)
3385 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3386 replace_call_with_call_and_fold (gsi, repl);
3387 return true;
3390 else
3392 /* If the string was "string\n", call puts("string"). */
3393 size_t len = strlen (str);
3394 if ((unsigned char)str[len - 1] == target_newline
3395 && (size_t) (int) len == len
3396 && (int) len > 0)
3398 char *newstr;
3399 tree offset_node, string_cst;
3401 /* Create a NUL-terminated string that's one char shorter
3402 than the original, stripping off the trailing '\n'. */
3403 newarg = build_string_literal (len, str);
3404 string_cst = string_constant (newarg, &offset_node);
3405 gcc_checking_assert (string_cst
3406 && (TREE_STRING_LENGTH (string_cst)
3407 == (int) len)
3408 && integer_zerop (offset_node)
3409 && (unsigned char)
3410 TREE_STRING_POINTER (string_cst)[len - 1]
3411 == target_newline);
3412 /* build_string_literal creates a new STRING_CST,
3413 modify it in place to avoid double copying. */
3414 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3415 newstr[len - 1] = '\0';
3416 if (fn_puts)
3418 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3419 replace_call_with_call_and_fold (gsi, repl);
3420 return true;
3423 else
3424 /* We'd like to arrange to call fputs(string,stdout) here,
3425 but we need stdout and don't have a way to get it yet. */
3426 return false;
3430 /* The other optimizations can be done only on the non-va_list variants. */
3431 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3432 return false;
3434 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3435 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3437 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3438 return false;
3439 if (fn_puts)
3441 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3442 replace_call_with_call_and_fold (gsi, repl);
3443 return true;
3447 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3448 else if (strcmp (fmt_str, target_percent_c) == 0)
3450 if (!arg || ! useless_type_conversion_p (integer_type_node,
3451 TREE_TYPE (arg)))
3452 return false;
3453 if (fn_putchar)
3455 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3456 replace_call_with_call_and_fold (gsi, repl);
3457 return true;
3461 return false;
3466 /* Fold a call to __builtin_strlen with known length LEN. */
3468 static bool
3469 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3471 gimple *stmt = gsi_stmt (*gsi);
3472 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
3473 if (!len)
3474 return false;
3475 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
3476 replace_call_with_value (gsi, len);
3477 return true;
3480 /* Fold a call to __builtin_acc_on_device. */
3482 static bool
3483 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3485 /* Defer folding until we know which compiler we're in. */
3486 if (symtab->state != EXPANSION)
3487 return false;
3489 unsigned val_host = GOMP_DEVICE_HOST;
3490 unsigned val_dev = GOMP_DEVICE_NONE;
3492 #ifdef ACCEL_COMPILER
3493 val_host = GOMP_DEVICE_NOT_HOST;
3494 val_dev = ACCEL_COMPILER_acc_device;
3495 #endif
3497 location_t loc = gimple_location (gsi_stmt (*gsi));
3499 tree host_eq = make_ssa_name (boolean_type_node);
3500 gimple *host_ass = gimple_build_assign
3501 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3502 gimple_set_location (host_ass, loc);
3503 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3505 tree dev_eq = make_ssa_name (boolean_type_node);
3506 gimple *dev_ass = gimple_build_assign
3507 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3508 gimple_set_location (dev_ass, loc);
3509 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3511 tree result = make_ssa_name (boolean_type_node);
3512 gimple *result_ass = gimple_build_assign
3513 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3514 gimple_set_location (result_ass, loc);
3515 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3517 replace_call_with_value (gsi, result);
3519 return true;
3522 /* Fold realloc (0, n) -> malloc (n). */
3524 static bool
3525 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3527 gimple *stmt = gsi_stmt (*gsi);
3528 tree arg = gimple_call_arg (stmt, 0);
3529 tree size = gimple_call_arg (stmt, 1);
3531 if (operand_equal_p (arg, null_pointer_node, 0))
3533 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3534 if (fn_malloc)
3536 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3537 replace_call_with_call_and_fold (gsi, repl);
3538 return true;
3541 return false;
3544 /* Fold the non-target builtin at *GSI and return whether any simplification
3545 was made. */
3547 static bool
3548 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3550 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3551 tree callee = gimple_call_fndecl (stmt);
3553 /* Give up for always_inline inline builtins until they are
3554 inlined. */
3555 if (avoid_folding_inline_builtin (callee))
3556 return false;
3558 unsigned n = gimple_call_num_args (stmt);
3559 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3560 switch (fcode)
3562 case BUILT_IN_BCMP:
3563 return gimple_fold_builtin_bcmp (gsi);
3564 case BUILT_IN_BCOPY:
3565 return gimple_fold_builtin_bcopy (gsi);
3566 case BUILT_IN_BZERO:
3567 return gimple_fold_builtin_bzero (gsi);
3569 case BUILT_IN_MEMSET:
3570 return gimple_fold_builtin_memset (gsi,
3571 gimple_call_arg (stmt, 1),
3572 gimple_call_arg (stmt, 2));
3573 case BUILT_IN_MEMCPY:
3574 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3575 gimple_call_arg (stmt, 1), 0);
3576 case BUILT_IN_MEMPCPY:
3577 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3578 gimple_call_arg (stmt, 1), 1);
3579 case BUILT_IN_MEMMOVE:
3580 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3581 gimple_call_arg (stmt, 1), 3);
3582 case BUILT_IN_SPRINTF_CHK:
3583 case BUILT_IN_VSPRINTF_CHK:
3584 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3585 case BUILT_IN_STRCAT_CHK:
3586 return gimple_fold_builtin_strcat_chk (gsi);
3587 case BUILT_IN_STRNCAT_CHK:
3588 return gimple_fold_builtin_strncat_chk (gsi);
3589 case BUILT_IN_STRLEN:
3590 return gimple_fold_builtin_strlen (gsi);
3591 case BUILT_IN_STRCPY:
3592 return gimple_fold_builtin_strcpy (gsi,
3593 gimple_call_arg (stmt, 0),
3594 gimple_call_arg (stmt, 1));
3595 case BUILT_IN_STRNCPY:
3596 return gimple_fold_builtin_strncpy (gsi,
3597 gimple_call_arg (stmt, 0),
3598 gimple_call_arg (stmt, 1),
3599 gimple_call_arg (stmt, 2));
3600 case BUILT_IN_STRCAT:
3601 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3602 gimple_call_arg (stmt, 1));
3603 case BUILT_IN_STRNCAT:
3604 return gimple_fold_builtin_strncat (gsi);
3605 case BUILT_IN_INDEX:
3606 case BUILT_IN_STRCHR:
3607 return gimple_fold_builtin_strchr (gsi, false);
3608 case BUILT_IN_RINDEX:
3609 case BUILT_IN_STRRCHR:
3610 return gimple_fold_builtin_strchr (gsi, true);
3611 case BUILT_IN_STRSTR:
3612 return gimple_fold_builtin_strstr (gsi);
3613 case BUILT_IN_STRCMP:
3614 case BUILT_IN_STRCASECMP:
3615 case BUILT_IN_STRNCMP:
3616 case BUILT_IN_STRNCASECMP:
3617 return gimple_fold_builtin_string_compare (gsi);
3618 case BUILT_IN_MEMCHR:
3619 return gimple_fold_builtin_memchr (gsi);
3620 case BUILT_IN_FPUTS:
3621 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3622 gimple_call_arg (stmt, 1), false);
3623 case BUILT_IN_FPUTS_UNLOCKED:
3624 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3625 gimple_call_arg (stmt, 1), true);
3626 case BUILT_IN_MEMCPY_CHK:
3627 case BUILT_IN_MEMPCPY_CHK:
3628 case BUILT_IN_MEMMOVE_CHK:
3629 case BUILT_IN_MEMSET_CHK:
3630 return gimple_fold_builtin_memory_chk (gsi,
3631 gimple_call_arg (stmt, 0),
3632 gimple_call_arg (stmt, 1),
3633 gimple_call_arg (stmt, 2),
3634 gimple_call_arg (stmt, 3),
3635 fcode);
3636 case BUILT_IN_STPCPY:
3637 return gimple_fold_builtin_stpcpy (gsi);
3638 case BUILT_IN_STRCPY_CHK:
3639 case BUILT_IN_STPCPY_CHK:
3640 return gimple_fold_builtin_stxcpy_chk (gsi,
3641 gimple_call_arg (stmt, 0),
3642 gimple_call_arg (stmt, 1),
3643 gimple_call_arg (stmt, 2),
3644 fcode);
3645 case BUILT_IN_STRNCPY_CHK:
3646 case BUILT_IN_STPNCPY_CHK:
3647 return gimple_fold_builtin_stxncpy_chk (gsi,
3648 gimple_call_arg (stmt, 0),
3649 gimple_call_arg (stmt, 1),
3650 gimple_call_arg (stmt, 2),
3651 gimple_call_arg (stmt, 3),
3652 fcode);
3653 case BUILT_IN_SNPRINTF_CHK:
3654 case BUILT_IN_VSNPRINTF_CHK:
3655 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3657 case BUILT_IN_FPRINTF:
3658 case BUILT_IN_FPRINTF_UNLOCKED:
3659 case BUILT_IN_VFPRINTF:
3660 if (n == 2 || n == 3)
3661 return gimple_fold_builtin_fprintf (gsi,
3662 gimple_call_arg (stmt, 0),
3663 gimple_call_arg (stmt, 1),
3664 n == 3
3665 ? gimple_call_arg (stmt, 2)
3666 : NULL_TREE,
3667 fcode);
3668 break;
3669 case BUILT_IN_FPRINTF_CHK:
3670 case BUILT_IN_VFPRINTF_CHK:
3671 if (n == 3 || n == 4)
3672 return gimple_fold_builtin_fprintf (gsi,
3673 gimple_call_arg (stmt, 0),
3674 gimple_call_arg (stmt, 2),
3675 n == 4
3676 ? gimple_call_arg (stmt, 3)
3677 : NULL_TREE,
3678 fcode);
3679 break;
3680 case BUILT_IN_PRINTF:
3681 case BUILT_IN_PRINTF_UNLOCKED:
3682 case BUILT_IN_VPRINTF:
3683 if (n == 1 || n == 2)
3684 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3685 n == 2
3686 ? gimple_call_arg (stmt, 1)
3687 : NULL_TREE, fcode);
3688 break;
3689 case BUILT_IN_PRINTF_CHK:
3690 case BUILT_IN_VPRINTF_CHK:
3691 if (n == 2 || n == 3)
3692 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3693 n == 3
3694 ? gimple_call_arg (stmt, 2)
3695 : NULL_TREE, fcode);
3696 break;
3697 case BUILT_IN_ACC_ON_DEVICE:
3698 return gimple_fold_builtin_acc_on_device (gsi,
3699 gimple_call_arg (stmt, 0));
3700 case BUILT_IN_REALLOC:
3701 return gimple_fold_builtin_realloc (gsi);
3703 default:;
3706 /* Try the generic builtin folder. */
3707 bool ignore = (gimple_call_lhs (stmt) == NULL);
3708 tree result = fold_call_stmt (stmt, ignore);
3709 if (result)
3711 if (ignore)
3712 STRIP_NOPS (result);
3713 else
3714 result = fold_convert (gimple_call_return_type (stmt), result);
3715 if (!update_call_from_tree (gsi, result))
3716 gimplify_and_update_call_from_tree (gsi, result);
3717 return true;
3720 return false;
3723 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3724 function calls to constants, where possible. */
3726 static tree
3727 fold_internal_goacc_dim (const gimple *call)
3729 int axis = oacc_get_ifn_dim_arg (call);
3730 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3731 bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
3732 tree result = NULL_TREE;
3734 /* If the size is 1, or we only want the size and it is not dynamic,
3735 we know the answer. */
3736 if (size == 1 || (!is_pos && size))
3738 tree type = TREE_TYPE (gimple_call_lhs (call));
3739 result = build_int_cst (type, size - is_pos);
3742 return result;
3745 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3746 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3747 &var where var is only addressable because of such calls. */
3749 bool
3750 optimize_atomic_compare_exchange_p (gimple *stmt)
3752 if (gimple_call_num_args (stmt) != 6
3753 || !flag_inline_atomics
3754 || !optimize
3755 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3756 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3757 || !gimple_vdef (stmt)
3758 || !gimple_vuse (stmt))
3759 return false;
3761 tree fndecl = gimple_call_fndecl (stmt);
3762 switch (DECL_FUNCTION_CODE (fndecl))
3764 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3765 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3766 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3767 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3768 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3769 break;
3770 default:
3771 return false;
3774 tree expected = gimple_call_arg (stmt, 1);
3775 if (TREE_CODE (expected) != ADDR_EXPR
3776 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3777 return false;
3779 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3780 if (!is_gimple_reg_type (etype)
3781 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3782 || TREE_THIS_VOLATILE (etype)
3783 || VECTOR_TYPE_P (etype)
3784 || TREE_CODE (etype) == COMPLEX_TYPE
3785 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3786 might not preserve all the bits. See PR71716. */
3787 || SCALAR_FLOAT_TYPE_P (etype)
3788 || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
3789 return false;
3791 tree weak = gimple_call_arg (stmt, 3);
3792 if (!integer_zerop (weak) && !integer_onep (weak))
3793 return false;
3795 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3796 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3797 machine_mode mode = TYPE_MODE (itype);
3799 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3800 == CODE_FOR_nothing
3801 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3802 return false;
3804 if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
3805 return false;
3807 return true;
3810 /* Fold
3811 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3812 into
3813 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3814 i = IMAGPART_EXPR <t>;
3815 r = (_Bool) i;
3816 e = REALPART_EXPR <t>; */
3818 void
3819 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3821 gimple *stmt = gsi_stmt (*gsi);
3822 tree fndecl = gimple_call_fndecl (stmt);
3823 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3824 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3825 tree ctype = build_complex_type (itype);
3826 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3827 bool throws = false;
3828 edge e = NULL;
3829 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3830 expected);
3831 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3832 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3833 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3835 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3836 build1 (VIEW_CONVERT_EXPR, itype,
3837 gimple_assign_lhs (g)));
3838 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3840 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3841 + int_size_in_bytes (itype);
3842 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3843 gimple_call_arg (stmt, 0),
3844 gimple_assign_lhs (g),
3845 gimple_call_arg (stmt, 2),
3846 build_int_cst (integer_type_node, flag),
3847 gimple_call_arg (stmt, 4),
3848 gimple_call_arg (stmt, 5));
3849 tree lhs = make_ssa_name (ctype);
3850 gimple_call_set_lhs (g, lhs);
3851 gimple_set_vdef (g, gimple_vdef (stmt));
3852 gimple_set_vuse (g, gimple_vuse (stmt));
3853 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3854 tree oldlhs = gimple_call_lhs (stmt);
3855 if (stmt_can_throw_internal (stmt))
3857 throws = true;
3858 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3860 gimple_call_set_nothrow (as_a <gcall *> (g),
3861 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3862 gimple_call_set_lhs (stmt, NULL_TREE);
3863 gsi_replace (gsi, g, true);
3864 if (oldlhs)
3866 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3867 build1 (IMAGPART_EXPR, itype, lhs));
3868 if (throws)
3870 gsi_insert_on_edge_immediate (e, g);
3871 *gsi = gsi_for_stmt (g);
3873 else
3874 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3875 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3876 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3878 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3879 build1 (REALPART_EXPR, itype, lhs));
3880 if (throws && oldlhs == NULL_TREE)
3882 gsi_insert_on_edge_immediate (e, g);
3883 *gsi = gsi_for_stmt (g);
3885 else
3886 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3887 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3889 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3890 VIEW_CONVERT_EXPR,
3891 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3892 gimple_assign_lhs (g)));
3893 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3895 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3896 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3897 *gsi = gsiret;
3900 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3901 doesn't fit into TYPE. The test for overflow should be regardless of
3902 -fwrapv, and even for unsigned types. */
3904 bool
3905 arith_overflowed_p (enum tree_code code, const_tree type,
3906 const_tree arg0, const_tree arg1)
3908 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3909 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3910 widest2_int_cst;
3911 widest2_int warg0 = widest2_int_cst (arg0);
3912 widest2_int warg1 = widest2_int_cst (arg1);
3913 widest2_int wres;
3914 switch (code)
3916 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3917 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3918 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
3919 default: gcc_unreachable ();
3921 signop sign = TYPE_SIGN (type);
3922 if (sign == UNSIGNED && wi::neg_p (wres))
3923 return true;
3924 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
3927 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3928 The statement may be replaced by another statement, e.g., if the call
3929 simplifies to a constant value. Return true if any changes were made.
3930 It is assumed that the operands have been previously folded. */
3932 static bool
3933 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
3935 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3936 tree callee;
3937 bool changed = false;
3938 unsigned i;
3940 /* Fold *& in call arguments. */
3941 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3942 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
3944 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
3945 if (tmp)
3947 gimple_call_set_arg (stmt, i, tmp);
3948 changed = true;
3952 /* Check for virtual calls that became direct calls. */
3953 callee = gimple_call_fn (stmt);
3954 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
3956 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
3958 if (dump_file && virtual_method_call_p (callee)
3959 && !possible_polymorphic_call_target_p
3960 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
3961 (OBJ_TYPE_REF_EXPR (callee)))))
3963 fprintf (dump_file,
3964 "Type inheritance inconsistent devirtualization of ");
3965 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
3966 fprintf (dump_file, " to ");
3967 print_generic_expr (dump_file, callee, TDF_SLIM);
3968 fprintf (dump_file, "\n");
3971 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
3972 changed = true;
3974 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
3976 bool final;
3977 vec <cgraph_node *>targets
3978 = possible_polymorphic_call_targets (callee, stmt, &final);
3979 if (final && targets.length () <= 1 && dbg_cnt (devirt))
3981 tree lhs = gimple_call_lhs (stmt);
3982 if (dump_enabled_p ())
3984 location_t loc = gimple_location_safe (stmt);
3985 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
3986 "folding virtual function call to %s\n",
3987 targets.length () == 1
3988 ? targets[0]->name ()
3989 : "__builtin_unreachable");
3991 if (targets.length () == 1)
3993 tree fndecl = targets[0]->decl;
3994 gimple_call_set_fndecl (stmt, fndecl);
3995 changed = true;
3996 /* If changing the call to __cxa_pure_virtual
3997 or similar noreturn function, adjust gimple_call_fntype
3998 too. */
3999 if (gimple_call_noreturn_p (stmt)
4000 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4001 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4002 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4003 == void_type_node))
4004 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4005 /* If the call becomes noreturn, remove the lhs. */
4006 if (lhs
4007 && gimple_call_noreturn_p (stmt)
4008 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4009 || should_remove_lhs_p (lhs)))
4011 if (TREE_CODE (lhs) == SSA_NAME)
4013 tree var = create_tmp_var (TREE_TYPE (lhs));
4014 tree def = get_or_create_ssa_default_def (cfun, var);
4015 gimple *new_stmt = gimple_build_assign (lhs, def);
4016 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4018 gimple_call_set_lhs (stmt, NULL_TREE);
4020 maybe_remove_unused_call_args (cfun, stmt);
4022 else
4024 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4025 gimple *new_stmt = gimple_build_call (fndecl, 0);
4026 gimple_set_location (new_stmt, gimple_location (stmt));
4027 /* If the call had a SSA name as lhs morph that into
4028 an uninitialized value. */
4029 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4031 tree var = create_tmp_var (TREE_TYPE (lhs));
4032 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4033 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4034 set_ssa_default_def (cfun, var, lhs);
4036 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4037 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4038 gsi_replace (gsi, new_stmt, false);
4039 return true;
4045 /* Check for indirect calls that became direct calls, and then
4046 no longer require a static chain. */
4047 if (gimple_call_chain (stmt))
4049 tree fn = gimple_call_fndecl (stmt);
4050 if (fn && !DECL_STATIC_CHAIN (fn))
4052 gimple_call_set_chain (stmt, NULL);
4053 changed = true;
4055 else
4057 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4058 if (tmp)
4060 gimple_call_set_chain (stmt, tmp);
4061 changed = true;
4066 if (inplace)
4067 return changed;
4069 /* Check for builtins that CCP can handle using information not
4070 available in the generic fold routines. */
4071 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4073 if (gimple_fold_builtin (gsi))
4074 changed = true;
4076 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4078 changed |= targetm.gimple_fold_builtin (gsi);
4080 else if (gimple_call_internal_p (stmt))
4082 enum tree_code subcode = ERROR_MARK;
4083 tree result = NULL_TREE;
4084 bool cplx_result = false;
4085 tree overflow = NULL_TREE;
4086 switch (gimple_call_internal_fn (stmt))
4088 case IFN_BUILTIN_EXPECT:
4089 result = fold_builtin_expect (gimple_location (stmt),
4090 gimple_call_arg (stmt, 0),
4091 gimple_call_arg (stmt, 1),
4092 gimple_call_arg (stmt, 2));
4093 break;
4094 case IFN_UBSAN_OBJECT_SIZE:
4096 tree offset = gimple_call_arg (stmt, 1);
4097 tree objsize = gimple_call_arg (stmt, 2);
4098 if (integer_all_onesp (objsize)
4099 || (TREE_CODE (offset) == INTEGER_CST
4100 && TREE_CODE (objsize) == INTEGER_CST
4101 && tree_int_cst_le (offset, objsize)))
4103 replace_call_with_value (gsi, NULL_TREE);
4104 return true;
4107 break;
4108 case IFN_UBSAN_PTR:
4109 if (integer_zerop (gimple_call_arg (stmt, 1)))
4111 replace_call_with_value (gsi, NULL_TREE);
4112 return true;
4114 break;
4115 case IFN_UBSAN_BOUNDS:
4117 tree index = gimple_call_arg (stmt, 1);
4118 tree bound = gimple_call_arg (stmt, 2);
4119 if (TREE_CODE (index) == INTEGER_CST
4120 && TREE_CODE (bound) == INTEGER_CST)
4122 index = fold_convert (TREE_TYPE (bound), index);
4123 if (TREE_CODE (index) == INTEGER_CST
4124 && tree_int_cst_le (index, bound))
4126 replace_call_with_value (gsi, NULL_TREE);
4127 return true;
4131 break;
4132 case IFN_GOACC_DIM_SIZE:
4133 case IFN_GOACC_DIM_POS:
4134 result = fold_internal_goacc_dim (stmt);
4135 break;
4136 case IFN_UBSAN_CHECK_ADD:
4137 subcode = PLUS_EXPR;
4138 break;
4139 case IFN_UBSAN_CHECK_SUB:
4140 subcode = MINUS_EXPR;
4141 break;
4142 case IFN_UBSAN_CHECK_MUL:
4143 subcode = MULT_EXPR;
4144 break;
4145 case IFN_ADD_OVERFLOW:
4146 subcode = PLUS_EXPR;
4147 cplx_result = true;
4148 break;
4149 case IFN_SUB_OVERFLOW:
4150 subcode = MINUS_EXPR;
4151 cplx_result = true;
4152 break;
4153 case IFN_MUL_OVERFLOW:
4154 subcode = MULT_EXPR;
4155 cplx_result = true;
4156 break;
4157 default:
4158 break;
4160 if (subcode != ERROR_MARK)
4162 tree arg0 = gimple_call_arg (stmt, 0);
4163 tree arg1 = gimple_call_arg (stmt, 1);
4164 tree type = TREE_TYPE (arg0);
4165 if (cplx_result)
4167 tree lhs = gimple_call_lhs (stmt);
4168 if (lhs == NULL_TREE)
4169 type = NULL_TREE;
4170 else
4171 type = TREE_TYPE (TREE_TYPE (lhs));
4173 if (type == NULL_TREE)
4175 /* x = y + 0; x = y - 0; x = y * 0; */
4176 else if (integer_zerop (arg1))
4177 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4178 /* x = 0 + y; x = 0 * y; */
4179 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4180 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4181 /* x = y - y; */
4182 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4183 result = integer_zero_node;
4184 /* x = y * 1; x = 1 * y; */
4185 else if (subcode == MULT_EXPR && integer_onep (arg1))
4186 result = arg0;
4187 else if (subcode == MULT_EXPR && integer_onep (arg0))
4188 result = arg1;
4189 else if (TREE_CODE (arg0) == INTEGER_CST
4190 && TREE_CODE (arg1) == INTEGER_CST)
4192 if (cplx_result)
4193 result = int_const_binop (subcode, fold_convert (type, arg0),
4194 fold_convert (type, arg1));
4195 else
4196 result = int_const_binop (subcode, arg0, arg1);
4197 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4199 if (cplx_result)
4200 overflow = build_one_cst (type);
4201 else
4202 result = NULL_TREE;
4205 if (result)
4207 if (result == integer_zero_node)
4208 result = build_zero_cst (type);
4209 else if (cplx_result && TREE_TYPE (result) != type)
4211 if (TREE_CODE (result) == INTEGER_CST)
4213 if (arith_overflowed_p (PLUS_EXPR, type, result,
4214 integer_zero_node))
4215 overflow = build_one_cst (type);
4217 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4218 && TYPE_UNSIGNED (type))
4219 || (TYPE_PRECISION (type)
4220 < (TYPE_PRECISION (TREE_TYPE (result))
4221 + (TYPE_UNSIGNED (TREE_TYPE (result))
4222 && !TYPE_UNSIGNED (type)))))
4223 result = NULL_TREE;
4224 if (result)
4225 result = fold_convert (type, result);
4230 if (result)
4232 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4233 result = drop_tree_overflow (result);
4234 if (cplx_result)
4236 if (overflow == NULL_TREE)
4237 overflow = build_zero_cst (TREE_TYPE (result));
4238 tree ctype = build_complex_type (TREE_TYPE (result));
4239 if (TREE_CODE (result) == INTEGER_CST
4240 && TREE_CODE (overflow) == INTEGER_CST)
4241 result = build_complex (ctype, result, overflow);
4242 else
4243 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4244 ctype, result, overflow);
4246 if (!update_call_from_tree (gsi, result))
4247 gimplify_and_update_call_from_tree (gsi, result);
4248 changed = true;
4252 return changed;
4256 /* Return true whether NAME has a use on STMT. */
4258 static bool
4259 has_use_on_stmt (tree name, gimple *stmt)
4261 imm_use_iterator iter;
4262 use_operand_p use_p;
4263 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4264 if (USE_STMT (use_p) == stmt)
4265 return true;
4266 return false;
4269 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4270 gimple_simplify.
4272 Replaces *GSI with the simplification result in RCODE and OPS
4273 and the associated statements in *SEQ. Does the replacement
4274 according to INPLACE and returns true if the operation succeeded. */
4276 static bool
4277 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4278 code_helper rcode, tree *ops,
4279 gimple_seq *seq, bool inplace)
4281 gimple *stmt = gsi_stmt (*gsi);
4283 /* Play safe and do not allow abnormals to be mentioned in
4284 newly created statements. See also maybe_push_res_to_seq.
4285 As an exception allow such uses if there was a use of the
4286 same SSA name on the old stmt. */
4287 if ((TREE_CODE (ops[0]) == SSA_NAME
4288 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4289 && !has_use_on_stmt (ops[0], stmt))
4290 || (ops[1]
4291 && TREE_CODE (ops[1]) == SSA_NAME
4292 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4293 && !has_use_on_stmt (ops[1], stmt))
4294 || (ops[2]
4295 && TREE_CODE (ops[2]) == SSA_NAME
4296 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4297 && !has_use_on_stmt (ops[2], stmt))
4298 || (COMPARISON_CLASS_P (ops[0])
4299 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4300 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4301 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4302 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4303 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4304 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4305 return false;
4307 /* Don't insert new statements when INPLACE is true, even if we could
4308 reuse STMT for the final statement. */
4309 if (inplace && !gimple_seq_empty_p (*seq))
4310 return false;
4312 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4314 gcc_assert (rcode.is_tree_code ());
4315 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4316 /* GIMPLE_CONDs condition may not throw. */
4317 && (!flag_exceptions
4318 || !cfun->can_throw_non_call_exceptions
4319 || !operation_could_trap_p (rcode,
4320 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4321 false, NULL_TREE)))
4322 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
4323 else if (rcode == SSA_NAME)
4324 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4325 build_zero_cst (TREE_TYPE (ops[0])));
4326 else if (rcode == INTEGER_CST)
4328 if (integer_zerop (ops[0]))
4329 gimple_cond_make_false (cond_stmt);
4330 else
4331 gimple_cond_make_true (cond_stmt);
4333 else if (!inplace)
4335 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4336 ops, seq);
4337 if (!res)
4338 return false;
4339 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4340 build_zero_cst (TREE_TYPE (res)));
4342 else
4343 return false;
4344 if (dump_file && (dump_flags & TDF_DETAILS))
4346 fprintf (dump_file, "gimple_simplified to ");
4347 if (!gimple_seq_empty_p (*seq))
4348 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4349 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4350 0, TDF_SLIM);
4352 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4353 return true;
4355 else if (is_gimple_assign (stmt)
4356 && rcode.is_tree_code ())
4358 if (!inplace
4359 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4361 maybe_build_generic_op (rcode,
4362 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4363 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4364 if (dump_file && (dump_flags & TDF_DETAILS))
4366 fprintf (dump_file, "gimple_simplified to ");
4367 if (!gimple_seq_empty_p (*seq))
4368 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4369 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4370 0, TDF_SLIM);
4372 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4373 return true;
4376 else if (rcode.is_fn_code ()
4377 && gimple_call_combined_fn (stmt) == rcode)
4379 unsigned i;
4380 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4382 gcc_assert (ops[i] != NULL_TREE);
4383 gimple_call_set_arg (stmt, i, ops[i]);
4385 if (i < 3)
4386 gcc_assert (ops[i] == NULL_TREE);
4387 if (dump_file && (dump_flags & TDF_DETAILS))
4389 fprintf (dump_file, "gimple_simplified to ");
4390 if (!gimple_seq_empty_p (*seq))
4391 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4392 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4394 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4395 return true;
4397 else if (!inplace)
4399 if (gimple_has_lhs (stmt))
4401 tree lhs = gimple_get_lhs (stmt);
4402 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4403 ops, seq, lhs))
4404 return false;
4405 if (dump_file && (dump_flags & TDF_DETAILS))
4407 fprintf (dump_file, "gimple_simplified to ");
4408 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4410 gsi_replace_with_seq_vops (gsi, *seq);
4411 return true;
4413 else
4414 gcc_unreachable ();
4417 return false;
4420 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4422 static bool
4423 maybe_canonicalize_mem_ref_addr (tree *t)
4425 bool res = false;
4427 if (TREE_CODE (*t) == ADDR_EXPR)
4428 t = &TREE_OPERAND (*t, 0);
4430 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4431 generic vector extension. The actual vector referenced is
4432 view-converted to an array type for this purpose. If the index
4433 is constant the canonical representation in the middle-end is a
4434 BIT_FIELD_REF so re-write the former to the latter here. */
4435 if (TREE_CODE (*t) == ARRAY_REF
4436 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4437 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4438 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4440 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4441 if (VECTOR_TYPE_P (vtype))
4443 tree low = array_ref_low_bound (*t);
4444 if (TREE_CODE (low) == INTEGER_CST)
4446 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
4448 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4449 wi::to_widest (low));
4450 idx = wi::mul (idx, wi::to_widest
4451 (TYPE_SIZE (TREE_TYPE (*t))));
4452 widest_int ext
4453 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4454 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4456 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4457 TREE_TYPE (*t),
4458 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4459 TYPE_SIZE (TREE_TYPE (*t)),
4460 wide_int_to_tree (bitsizetype, idx));
4461 res = true;
4468 while (handled_component_p (*t))
4469 t = &TREE_OPERAND (*t, 0);
4471 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4472 of invariant addresses into a SSA name MEM_REF address. */
4473 if (TREE_CODE (*t) == MEM_REF
4474 || TREE_CODE (*t) == TARGET_MEM_REF)
4476 tree addr = TREE_OPERAND (*t, 0);
4477 if (TREE_CODE (addr) == ADDR_EXPR
4478 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4479 || handled_component_p (TREE_OPERAND (addr, 0))))
4481 tree base;
4482 HOST_WIDE_INT coffset;
4483 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4484 &coffset);
4485 if (!base)
4486 gcc_unreachable ();
4488 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4489 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4490 TREE_OPERAND (*t, 1),
4491 size_int (coffset));
4492 res = true;
4494 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4495 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4498 /* Canonicalize back MEM_REFs to plain reference trees if the object
4499 accessed is a decl that has the same access semantics as the MEM_REF. */
4500 if (TREE_CODE (*t) == MEM_REF
4501 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4502 && integer_zerop (TREE_OPERAND (*t, 1))
4503 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4505 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4506 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4507 if (/* Same volatile qualification. */
4508 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4509 /* Same TBAA behavior with -fstrict-aliasing. */
4510 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4511 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4512 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4513 /* Same alignment. */
4514 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4515 /* We have to look out here to not drop a required conversion
4516 from the rhs to the lhs if *t appears on the lhs or vice-versa
4517 if it appears on the rhs. Thus require strict type
4518 compatibility. */
4519 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4521 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4522 res = true;
4526 /* Canonicalize TARGET_MEM_REF in particular with respect to
4527 the indexes becoming constant. */
4528 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4530 tree tem = maybe_fold_tmr (*t);
4531 if (tem)
4533 *t = tem;
4534 res = true;
4538 return res;
4541 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4542 distinguishes both cases. */
4544 static bool
4545 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4547 bool changed = false;
4548 gimple *stmt = gsi_stmt (*gsi);
4549 bool nowarning = gimple_no_warning_p (stmt);
4550 unsigned i;
4551 fold_defer_overflow_warnings ();
4553 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4554 after propagation.
4555 ??? This shouldn't be done in generic folding but in the
4556 propagation helpers which also know whether an address was
4557 propagated.
4558 Also canonicalize operand order. */
4559 switch (gimple_code (stmt))
4561 case GIMPLE_ASSIGN:
4562 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4564 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4565 if ((REFERENCE_CLASS_P (*rhs)
4566 || TREE_CODE (*rhs) == ADDR_EXPR)
4567 && maybe_canonicalize_mem_ref_addr (rhs))
4568 changed = true;
4569 tree *lhs = gimple_assign_lhs_ptr (stmt);
4570 if (REFERENCE_CLASS_P (*lhs)
4571 && maybe_canonicalize_mem_ref_addr (lhs))
4572 changed = true;
4574 else
4576 /* Canonicalize operand order. */
4577 enum tree_code code = gimple_assign_rhs_code (stmt);
4578 if (TREE_CODE_CLASS (code) == tcc_comparison
4579 || commutative_tree_code (code)
4580 || commutative_ternary_tree_code (code))
4582 tree rhs1 = gimple_assign_rhs1 (stmt);
4583 tree rhs2 = gimple_assign_rhs2 (stmt);
4584 if (tree_swap_operands_p (rhs1, rhs2))
4586 gimple_assign_set_rhs1 (stmt, rhs2);
4587 gimple_assign_set_rhs2 (stmt, rhs1);
4588 if (TREE_CODE_CLASS (code) == tcc_comparison)
4589 gimple_assign_set_rhs_code (stmt,
4590 swap_tree_comparison (code));
4591 changed = true;
4595 break;
4596 case GIMPLE_CALL:
4598 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4600 tree *arg = gimple_call_arg_ptr (stmt, i);
4601 if (REFERENCE_CLASS_P (*arg)
4602 && maybe_canonicalize_mem_ref_addr (arg))
4603 changed = true;
4605 tree *lhs = gimple_call_lhs_ptr (stmt);
4606 if (*lhs
4607 && REFERENCE_CLASS_P (*lhs)
4608 && maybe_canonicalize_mem_ref_addr (lhs))
4609 changed = true;
4610 break;
4612 case GIMPLE_ASM:
4614 gasm *asm_stmt = as_a <gasm *> (stmt);
4615 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4617 tree link = gimple_asm_output_op (asm_stmt, i);
4618 tree op = TREE_VALUE (link);
4619 if (REFERENCE_CLASS_P (op)
4620 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4621 changed = true;
4623 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4625 tree link = gimple_asm_input_op (asm_stmt, i);
4626 tree op = TREE_VALUE (link);
4627 if ((REFERENCE_CLASS_P (op)
4628 || TREE_CODE (op) == ADDR_EXPR)
4629 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4630 changed = true;
4633 break;
4634 case GIMPLE_DEBUG:
4635 if (gimple_debug_bind_p (stmt))
4637 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4638 if (*val
4639 && (REFERENCE_CLASS_P (*val)
4640 || TREE_CODE (*val) == ADDR_EXPR)
4641 && maybe_canonicalize_mem_ref_addr (val))
4642 changed = true;
4644 break;
4645 case GIMPLE_COND:
4647 /* Canonicalize operand order. */
4648 tree lhs = gimple_cond_lhs (stmt);
4649 tree rhs = gimple_cond_rhs (stmt);
4650 if (tree_swap_operands_p (lhs, rhs))
4652 gcond *gc = as_a <gcond *> (stmt);
4653 gimple_cond_set_lhs (gc, rhs);
4654 gimple_cond_set_rhs (gc, lhs);
4655 gimple_cond_set_code (gc,
4656 swap_tree_comparison (gimple_cond_code (gc)));
4657 changed = true;
4660 default:;
4663 /* Dispatch to pattern-based folding. */
4664 if (!inplace
4665 || is_gimple_assign (stmt)
4666 || gimple_code (stmt) == GIMPLE_COND)
4668 gimple_seq seq = NULL;
4669 code_helper rcode;
4670 tree ops[3] = {};
4671 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4672 valueize, valueize))
4674 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4675 changed = true;
4676 else
4677 gimple_seq_discard (seq);
4681 stmt = gsi_stmt (*gsi);
4683 /* Fold the main computation performed by the statement. */
4684 switch (gimple_code (stmt))
4686 case GIMPLE_ASSIGN:
4688 /* Try to canonicalize for boolean-typed X the comparisons
4689 X == 0, X == 1, X != 0, and X != 1. */
4690 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4691 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4693 tree lhs = gimple_assign_lhs (stmt);
4694 tree op1 = gimple_assign_rhs1 (stmt);
4695 tree op2 = gimple_assign_rhs2 (stmt);
4696 tree type = TREE_TYPE (op1);
4698 /* Check whether the comparison operands are of the same boolean
4699 type as the result type is.
4700 Check that second operand is an integer-constant with value
4701 one or zero. */
4702 if (TREE_CODE (op2) == INTEGER_CST
4703 && (integer_zerop (op2) || integer_onep (op2))
4704 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4706 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4707 bool is_logical_not = false;
4709 /* X == 0 and X != 1 is a logical-not.of X
4710 X == 1 and X != 0 is X */
4711 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4712 || (cmp_code == NE_EXPR && integer_onep (op2)))
4713 is_logical_not = true;
4715 if (is_logical_not == false)
4716 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4717 /* Only for one-bit precision typed X the transformation
4718 !X -> ~X is valied. */
4719 else if (TYPE_PRECISION (type) == 1)
4720 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4721 /* Otherwise we use !X -> X ^ 1. */
4722 else
4723 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4724 build_int_cst (type, 1));
4725 changed = true;
4726 break;
4730 unsigned old_num_ops = gimple_num_ops (stmt);
4731 tree lhs = gimple_assign_lhs (stmt);
4732 tree new_rhs = fold_gimple_assign (gsi);
4733 if (new_rhs
4734 && !useless_type_conversion_p (TREE_TYPE (lhs),
4735 TREE_TYPE (new_rhs)))
4736 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4737 if (new_rhs
4738 && (!inplace
4739 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4741 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4742 changed = true;
4744 break;
4747 case GIMPLE_CALL:
4748 changed |= gimple_fold_call (gsi, inplace);
4749 break;
4751 case GIMPLE_ASM:
4752 /* Fold *& in asm operands. */
4754 gasm *asm_stmt = as_a <gasm *> (stmt);
4755 size_t noutputs;
4756 const char **oconstraints;
4757 const char *constraint;
4758 bool allows_mem, allows_reg;
4760 noutputs = gimple_asm_noutputs (asm_stmt);
4761 oconstraints = XALLOCAVEC (const char *, noutputs);
4763 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4765 tree link = gimple_asm_output_op (asm_stmt, i);
4766 tree op = TREE_VALUE (link);
4767 oconstraints[i]
4768 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4769 if (REFERENCE_CLASS_P (op)
4770 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4772 TREE_VALUE (link) = op;
4773 changed = true;
4776 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4778 tree link = gimple_asm_input_op (asm_stmt, i);
4779 tree op = TREE_VALUE (link);
4780 constraint
4781 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4782 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4783 oconstraints, &allows_mem, &allows_reg);
4784 if (REFERENCE_CLASS_P (op)
4785 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4786 != NULL_TREE)
4788 TREE_VALUE (link) = op;
4789 changed = true;
4793 break;
4795 case GIMPLE_DEBUG:
4796 if (gimple_debug_bind_p (stmt))
4798 tree val = gimple_debug_bind_get_value (stmt);
4799 if (val
4800 && REFERENCE_CLASS_P (val))
4802 tree tem = maybe_fold_reference (val, false);
4803 if (tem)
4805 gimple_debug_bind_set_value (stmt, tem);
4806 changed = true;
4809 else if (val
4810 && TREE_CODE (val) == ADDR_EXPR)
4812 tree ref = TREE_OPERAND (val, 0);
4813 tree tem = maybe_fold_reference (ref, false);
4814 if (tem)
4816 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4817 gimple_debug_bind_set_value (stmt, tem);
4818 changed = true;
4822 break;
4824 case GIMPLE_RETURN:
4826 greturn *ret_stmt = as_a<greturn *> (stmt);
4827 tree ret = gimple_return_retval(ret_stmt);
4829 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4831 tree val = valueize (ret);
4832 if (val && val != ret
4833 && may_propagate_copy (ret, val))
4835 gimple_return_set_retval (ret_stmt, val);
4836 changed = true;
4840 break;
4842 default:;
4845 stmt = gsi_stmt (*gsi);
4847 /* Fold *& on the lhs. */
4848 if (gimple_has_lhs (stmt))
4850 tree lhs = gimple_get_lhs (stmt);
4851 if (lhs && REFERENCE_CLASS_P (lhs))
4853 tree new_lhs = maybe_fold_reference (lhs, true);
4854 if (new_lhs)
4856 gimple_set_lhs (stmt, new_lhs);
4857 changed = true;
4862 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4863 return changed;
4866 /* Valueziation callback that ends up not following SSA edges. */
4868 tree
4869 no_follow_ssa_edges (tree)
4871 return NULL_TREE;
4874 /* Valueization callback that ends up following single-use SSA edges only. */
4876 tree
4877 follow_single_use_edges (tree val)
4879 if (TREE_CODE (val) == SSA_NAME
4880 && !has_single_use (val))
4881 return NULL_TREE;
4882 return val;
4885 /* Fold the statement pointed to by GSI. In some cases, this function may
4886 replace the whole statement with a new one. Returns true iff folding
4887 makes any changes.
4888 The statement pointed to by GSI should be in valid gimple form but may
4889 be in unfolded state as resulting from for example constant propagation
4890 which can produce *&x = 0. */
4892 bool
4893 fold_stmt (gimple_stmt_iterator *gsi)
4895 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4898 bool
4899 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4901 return fold_stmt_1 (gsi, false, valueize);
4904 /* Perform the minimal folding on statement *GSI. Only operations like
4905 *&x created by constant propagation are handled. The statement cannot
4906 be replaced with a new one. Return true if the statement was
4907 changed, false otherwise.
4908 The statement *GSI should be in valid gimple form but may
4909 be in unfolded state as resulting from for example constant propagation
4910 which can produce *&x = 0. */
4912 bool
4913 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4915 gimple *stmt = gsi_stmt (*gsi);
4916 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4917 gcc_assert (gsi_stmt (*gsi) == stmt);
4918 return changed;
4921 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4922 if EXPR is null or we don't know how.
4923 If non-null, the result always has boolean type. */
4925 static tree
4926 canonicalize_bool (tree expr, bool invert)
4928 if (!expr)
4929 return NULL_TREE;
4930 else if (invert)
4932 if (integer_nonzerop (expr))
4933 return boolean_false_node;
4934 else if (integer_zerop (expr))
4935 return boolean_true_node;
4936 else if (TREE_CODE (expr) == SSA_NAME)
4937 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
4938 build_int_cst (TREE_TYPE (expr), 0));
4939 else if (COMPARISON_CLASS_P (expr))
4940 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
4941 boolean_type_node,
4942 TREE_OPERAND (expr, 0),
4943 TREE_OPERAND (expr, 1));
4944 else
4945 return NULL_TREE;
4947 else
4949 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4950 return expr;
4951 if (integer_nonzerop (expr))
4952 return boolean_true_node;
4953 else if (integer_zerop (expr))
4954 return boolean_false_node;
4955 else if (TREE_CODE (expr) == SSA_NAME)
4956 return fold_build2 (NE_EXPR, boolean_type_node, expr,
4957 build_int_cst (TREE_TYPE (expr), 0));
4958 else if (COMPARISON_CLASS_P (expr))
4959 return fold_build2 (TREE_CODE (expr),
4960 boolean_type_node,
4961 TREE_OPERAND (expr, 0),
4962 TREE_OPERAND (expr, 1));
4963 else
4964 return NULL_TREE;
4968 /* Check to see if a boolean expression EXPR is logically equivalent to the
4969 comparison (OP1 CODE OP2). Check for various identities involving
4970 SSA_NAMEs. */
4972 static bool
4973 same_bool_comparison_p (const_tree expr, enum tree_code code,
4974 const_tree op1, const_tree op2)
4976 gimple *s;
4978 /* The obvious case. */
4979 if (TREE_CODE (expr) == code
4980 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
4981 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
4982 return true;
4984 /* Check for comparing (name, name != 0) and the case where expr
4985 is an SSA_NAME with a definition matching the comparison. */
4986 if (TREE_CODE (expr) == SSA_NAME
4987 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
4989 if (operand_equal_p (expr, op1, 0))
4990 return ((code == NE_EXPR && integer_zerop (op2))
4991 || (code == EQ_EXPR && integer_nonzerop (op2)));
4992 s = SSA_NAME_DEF_STMT (expr);
4993 if (is_gimple_assign (s)
4994 && gimple_assign_rhs_code (s) == code
4995 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
4996 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
4997 return true;
5000 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5001 of name is a comparison, recurse. */
5002 if (TREE_CODE (op1) == SSA_NAME
5003 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5005 s = SSA_NAME_DEF_STMT (op1);
5006 if (is_gimple_assign (s)
5007 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5009 enum tree_code c = gimple_assign_rhs_code (s);
5010 if ((c == NE_EXPR && integer_zerop (op2))
5011 || (c == EQ_EXPR && integer_nonzerop (op2)))
5012 return same_bool_comparison_p (expr, c,
5013 gimple_assign_rhs1 (s),
5014 gimple_assign_rhs2 (s));
5015 if ((c == EQ_EXPR && integer_zerop (op2))
5016 || (c == NE_EXPR && integer_nonzerop (op2)))
5017 return same_bool_comparison_p (expr,
5018 invert_tree_comparison (c, false),
5019 gimple_assign_rhs1 (s),
5020 gimple_assign_rhs2 (s));
5023 return false;
5026 /* Check to see if two boolean expressions OP1 and OP2 are logically
5027 equivalent. */
5029 static bool
5030 same_bool_result_p (const_tree op1, const_tree op2)
5032 /* Simple cases first. */
5033 if (operand_equal_p (op1, op2, 0))
5034 return true;
5036 /* Check the cases where at least one of the operands is a comparison.
5037 These are a bit smarter than operand_equal_p in that they apply some
5038 identifies on SSA_NAMEs. */
5039 if (COMPARISON_CLASS_P (op2)
5040 && same_bool_comparison_p (op1, TREE_CODE (op2),
5041 TREE_OPERAND (op2, 0),
5042 TREE_OPERAND (op2, 1)))
5043 return true;
5044 if (COMPARISON_CLASS_P (op1)
5045 && same_bool_comparison_p (op2, TREE_CODE (op1),
5046 TREE_OPERAND (op1, 0),
5047 TREE_OPERAND (op1, 1)))
5048 return true;
5050 /* Default case. */
5051 return false;
5054 /* Forward declarations for some mutually recursive functions. */
5056 static tree
5057 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5058 enum tree_code code2, tree op2a, tree op2b);
5059 static tree
5060 and_var_with_comparison (tree var, bool invert,
5061 enum tree_code code2, tree op2a, tree op2b);
5062 static tree
5063 and_var_with_comparison_1 (gimple *stmt,
5064 enum tree_code code2, tree op2a, tree op2b);
5065 static tree
5066 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5067 enum tree_code code2, tree op2a, tree op2b);
5068 static tree
5069 or_var_with_comparison (tree var, bool invert,
5070 enum tree_code code2, tree op2a, tree op2b);
5071 static tree
5072 or_var_with_comparison_1 (gimple *stmt,
5073 enum tree_code code2, tree op2a, tree op2b);
5075 /* Helper function for and_comparisons_1: try to simplify the AND of the
5076 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5077 If INVERT is true, invert the value of the VAR before doing the AND.
5078 Return NULL_EXPR if we can't simplify this to a single expression. */
5080 static tree
5081 and_var_with_comparison (tree var, bool invert,
5082 enum tree_code code2, tree op2a, tree op2b)
5084 tree t;
5085 gimple *stmt = SSA_NAME_DEF_STMT (var);
5087 /* We can only deal with variables whose definitions are assignments. */
5088 if (!is_gimple_assign (stmt))
5089 return NULL_TREE;
5091 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5092 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5093 Then we only have to consider the simpler non-inverted cases. */
5094 if (invert)
5095 t = or_var_with_comparison_1 (stmt,
5096 invert_tree_comparison (code2, false),
5097 op2a, op2b);
5098 else
5099 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5100 return canonicalize_bool (t, invert);
5103 /* Try to simplify the AND of the ssa variable defined by the assignment
5104 STMT with the comparison specified by (OP2A CODE2 OP2B).
5105 Return NULL_EXPR if we can't simplify this to a single expression. */
5107 static tree
5108 and_var_with_comparison_1 (gimple *stmt,
5109 enum tree_code code2, tree op2a, tree op2b)
5111 tree var = gimple_assign_lhs (stmt);
5112 tree true_test_var = NULL_TREE;
5113 tree false_test_var = NULL_TREE;
5114 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5116 /* Check for identities like (var AND (var == 0)) => false. */
5117 if (TREE_CODE (op2a) == SSA_NAME
5118 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5120 if ((code2 == NE_EXPR && integer_zerop (op2b))
5121 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5123 true_test_var = op2a;
5124 if (var == true_test_var)
5125 return var;
5127 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5128 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5130 false_test_var = op2a;
5131 if (var == false_test_var)
5132 return boolean_false_node;
5136 /* If the definition is a comparison, recurse on it. */
5137 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5139 tree t = and_comparisons_1 (innercode,
5140 gimple_assign_rhs1 (stmt),
5141 gimple_assign_rhs2 (stmt),
5142 code2,
5143 op2a,
5144 op2b);
5145 if (t)
5146 return t;
5149 /* If the definition is an AND or OR expression, we may be able to
5150 simplify by reassociating. */
5151 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5152 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5154 tree inner1 = gimple_assign_rhs1 (stmt);
5155 tree inner2 = gimple_assign_rhs2 (stmt);
5156 gimple *s;
5157 tree t;
5158 tree partial = NULL_TREE;
5159 bool is_and = (innercode == BIT_AND_EXPR);
5161 /* Check for boolean identities that don't require recursive examination
5162 of inner1/inner2:
5163 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5164 inner1 AND (inner1 OR inner2) => inner1
5165 !inner1 AND (inner1 AND inner2) => false
5166 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5167 Likewise for similar cases involving inner2. */
5168 if (inner1 == true_test_var)
5169 return (is_and ? var : inner1);
5170 else if (inner2 == true_test_var)
5171 return (is_and ? var : inner2);
5172 else if (inner1 == false_test_var)
5173 return (is_and
5174 ? boolean_false_node
5175 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5176 else if (inner2 == false_test_var)
5177 return (is_and
5178 ? boolean_false_node
5179 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5181 /* Next, redistribute/reassociate the AND across the inner tests.
5182 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5183 if (TREE_CODE (inner1) == SSA_NAME
5184 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5185 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5186 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5187 gimple_assign_rhs1 (s),
5188 gimple_assign_rhs2 (s),
5189 code2, op2a, op2b)))
5191 /* Handle the AND case, where we are reassociating:
5192 (inner1 AND inner2) AND (op2a code2 op2b)
5193 => (t AND inner2)
5194 If the partial result t is a constant, we win. Otherwise
5195 continue on to try reassociating with the other inner test. */
5196 if (is_and)
5198 if (integer_onep (t))
5199 return inner2;
5200 else if (integer_zerop (t))
5201 return boolean_false_node;
5204 /* Handle the OR case, where we are redistributing:
5205 (inner1 OR inner2) AND (op2a code2 op2b)
5206 => (t OR (inner2 AND (op2a code2 op2b))) */
5207 else if (integer_onep (t))
5208 return boolean_true_node;
5210 /* Save partial result for later. */
5211 partial = t;
5214 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5215 if (TREE_CODE (inner2) == SSA_NAME
5216 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5217 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5218 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5219 gimple_assign_rhs1 (s),
5220 gimple_assign_rhs2 (s),
5221 code2, op2a, op2b)))
5223 /* Handle the AND case, where we are reassociating:
5224 (inner1 AND inner2) AND (op2a code2 op2b)
5225 => (inner1 AND t) */
5226 if (is_and)
5228 if (integer_onep (t))
5229 return inner1;
5230 else if (integer_zerop (t))
5231 return boolean_false_node;
5232 /* If both are the same, we can apply the identity
5233 (x AND x) == x. */
5234 else if (partial && same_bool_result_p (t, partial))
5235 return t;
5238 /* Handle the OR case. where we are redistributing:
5239 (inner1 OR inner2) AND (op2a code2 op2b)
5240 => (t OR (inner1 AND (op2a code2 op2b)))
5241 => (t OR partial) */
5242 else
5244 if (integer_onep (t))
5245 return boolean_true_node;
5246 else if (partial)
5248 /* We already got a simplification for the other
5249 operand to the redistributed OR expression. The
5250 interesting case is when at least one is false.
5251 Or, if both are the same, we can apply the identity
5252 (x OR x) == x. */
5253 if (integer_zerop (partial))
5254 return t;
5255 else if (integer_zerop (t))
5256 return partial;
5257 else if (same_bool_result_p (t, partial))
5258 return t;
5263 return NULL_TREE;
5266 /* Try to simplify the AND of two comparisons defined by
5267 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5268 If this can be done without constructing an intermediate value,
5269 return the resulting tree; otherwise NULL_TREE is returned.
5270 This function is deliberately asymmetric as it recurses on SSA_DEFs
5271 in the first comparison but not the second. */
5273 static tree
5274 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5275 enum tree_code code2, tree op2a, tree op2b)
5277 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5279 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5280 if (operand_equal_p (op1a, op2a, 0)
5281 && operand_equal_p (op1b, op2b, 0))
5283 /* Result will be either NULL_TREE, or a combined comparison. */
5284 tree t = combine_comparisons (UNKNOWN_LOCATION,
5285 TRUTH_ANDIF_EXPR, code1, code2,
5286 truth_type, op1a, op1b);
5287 if (t)
5288 return t;
5291 /* Likewise the swapped case of the above. */
5292 if (operand_equal_p (op1a, op2b, 0)
5293 && operand_equal_p (op1b, op2a, 0))
5295 /* Result will be either NULL_TREE, or a combined comparison. */
5296 tree t = combine_comparisons (UNKNOWN_LOCATION,
5297 TRUTH_ANDIF_EXPR, code1,
5298 swap_tree_comparison (code2),
5299 truth_type, op1a, op1b);
5300 if (t)
5301 return t;
5304 /* If both comparisons are of the same value against constants, we might
5305 be able to merge them. */
5306 if (operand_equal_p (op1a, op2a, 0)
5307 && TREE_CODE (op1b) == INTEGER_CST
5308 && TREE_CODE (op2b) == INTEGER_CST)
5310 int cmp = tree_int_cst_compare (op1b, op2b);
5312 /* If we have (op1a == op1b), we should either be able to
5313 return that or FALSE, depending on whether the constant op1b
5314 also satisfies the other comparison against op2b. */
5315 if (code1 == EQ_EXPR)
5317 bool done = true;
5318 bool val;
5319 switch (code2)
5321 case EQ_EXPR: val = (cmp == 0); break;
5322 case NE_EXPR: val = (cmp != 0); break;
5323 case LT_EXPR: val = (cmp < 0); break;
5324 case GT_EXPR: val = (cmp > 0); break;
5325 case LE_EXPR: val = (cmp <= 0); break;
5326 case GE_EXPR: val = (cmp >= 0); break;
5327 default: done = false;
5329 if (done)
5331 if (val)
5332 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5333 else
5334 return boolean_false_node;
5337 /* Likewise if the second comparison is an == comparison. */
5338 else if (code2 == EQ_EXPR)
5340 bool done = true;
5341 bool val;
5342 switch (code1)
5344 case EQ_EXPR: val = (cmp == 0); break;
5345 case NE_EXPR: val = (cmp != 0); break;
5346 case LT_EXPR: val = (cmp > 0); break;
5347 case GT_EXPR: val = (cmp < 0); break;
5348 case LE_EXPR: val = (cmp >= 0); break;
5349 case GE_EXPR: val = (cmp <= 0); break;
5350 default: done = false;
5352 if (done)
5354 if (val)
5355 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5356 else
5357 return boolean_false_node;
5361 /* Same business with inequality tests. */
5362 else if (code1 == NE_EXPR)
5364 bool val;
5365 switch (code2)
5367 case EQ_EXPR: val = (cmp != 0); break;
5368 case NE_EXPR: val = (cmp == 0); break;
5369 case LT_EXPR: val = (cmp >= 0); break;
5370 case GT_EXPR: val = (cmp <= 0); break;
5371 case LE_EXPR: val = (cmp > 0); break;
5372 case GE_EXPR: val = (cmp < 0); break;
5373 default:
5374 val = false;
5376 if (val)
5377 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5379 else if (code2 == NE_EXPR)
5381 bool val;
5382 switch (code1)
5384 case EQ_EXPR: val = (cmp == 0); break;
5385 case NE_EXPR: val = (cmp != 0); break;
5386 case LT_EXPR: val = (cmp <= 0); break;
5387 case GT_EXPR: val = (cmp >= 0); break;
5388 case LE_EXPR: val = (cmp < 0); break;
5389 case GE_EXPR: val = (cmp > 0); break;
5390 default:
5391 val = false;
5393 if (val)
5394 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5397 /* Chose the more restrictive of two < or <= comparisons. */
5398 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5399 && (code2 == LT_EXPR || code2 == LE_EXPR))
5401 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5402 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5403 else
5404 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5407 /* Likewise chose the more restrictive of two > or >= comparisons. */
5408 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5409 && (code2 == GT_EXPR || code2 == GE_EXPR))
5411 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5412 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5413 else
5414 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5417 /* Check for singleton ranges. */
5418 else if (cmp == 0
5419 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5420 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5421 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5423 /* Check for disjoint ranges. */
5424 else if (cmp <= 0
5425 && (code1 == LT_EXPR || code1 == LE_EXPR)
5426 && (code2 == GT_EXPR || code2 == GE_EXPR))
5427 return boolean_false_node;
5428 else if (cmp >= 0
5429 && (code1 == GT_EXPR || code1 == GE_EXPR)
5430 && (code2 == LT_EXPR || code2 == LE_EXPR))
5431 return boolean_false_node;
5434 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5435 NAME's definition is a truth value. See if there are any simplifications
5436 that can be done against the NAME's definition. */
5437 if (TREE_CODE (op1a) == SSA_NAME
5438 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5439 && (integer_zerop (op1b) || integer_onep (op1b)))
5441 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5442 || (code1 == NE_EXPR && integer_onep (op1b)));
5443 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5444 switch (gimple_code (stmt))
5446 case GIMPLE_ASSIGN:
5447 /* Try to simplify by copy-propagating the definition. */
5448 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5450 case GIMPLE_PHI:
5451 /* If every argument to the PHI produces the same result when
5452 ANDed with the second comparison, we win.
5453 Do not do this unless the type is bool since we need a bool
5454 result here anyway. */
5455 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5457 tree result = NULL_TREE;
5458 unsigned i;
5459 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5461 tree arg = gimple_phi_arg_def (stmt, i);
5463 /* If this PHI has itself as an argument, ignore it.
5464 If all the other args produce the same result,
5465 we're still OK. */
5466 if (arg == gimple_phi_result (stmt))
5467 continue;
5468 else if (TREE_CODE (arg) == INTEGER_CST)
5470 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5472 if (!result)
5473 result = boolean_false_node;
5474 else if (!integer_zerop (result))
5475 return NULL_TREE;
5477 else if (!result)
5478 result = fold_build2 (code2, boolean_type_node,
5479 op2a, op2b);
5480 else if (!same_bool_comparison_p (result,
5481 code2, op2a, op2b))
5482 return NULL_TREE;
5484 else if (TREE_CODE (arg) == SSA_NAME
5485 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5487 tree temp;
5488 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5489 /* In simple cases we can look through PHI nodes,
5490 but we have to be careful with loops.
5491 See PR49073. */
5492 if (! dom_info_available_p (CDI_DOMINATORS)
5493 || gimple_bb (def_stmt) == gimple_bb (stmt)
5494 || dominated_by_p (CDI_DOMINATORS,
5495 gimple_bb (def_stmt),
5496 gimple_bb (stmt)))
5497 return NULL_TREE;
5498 temp = and_var_with_comparison (arg, invert, code2,
5499 op2a, op2b);
5500 if (!temp)
5501 return NULL_TREE;
5502 else if (!result)
5503 result = temp;
5504 else if (!same_bool_result_p (result, temp))
5505 return NULL_TREE;
5507 else
5508 return NULL_TREE;
5510 return result;
5513 default:
5514 break;
5517 return NULL_TREE;
5520 /* Try to simplify the AND of two comparisons, specified by
5521 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5522 If this can be simplified to a single expression (without requiring
5523 introducing more SSA variables to hold intermediate values),
5524 return the resulting tree. Otherwise return NULL_TREE.
5525 If the result expression is non-null, it has boolean type. */
5527 tree
5528 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5529 enum tree_code code2, tree op2a, tree op2b)
5531 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5532 if (t)
5533 return t;
5534 else
5535 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5538 /* Helper function for or_comparisons_1: try to simplify the OR of the
5539 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5540 If INVERT is true, invert the value of VAR before doing the OR.
5541 Return NULL_EXPR if we can't simplify this to a single expression. */
5543 static tree
5544 or_var_with_comparison (tree var, bool invert,
5545 enum tree_code code2, tree op2a, tree op2b)
5547 tree t;
5548 gimple *stmt = SSA_NAME_DEF_STMT (var);
5550 /* We can only deal with variables whose definitions are assignments. */
5551 if (!is_gimple_assign (stmt))
5552 return NULL_TREE;
5554 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5555 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5556 Then we only have to consider the simpler non-inverted cases. */
5557 if (invert)
5558 t = and_var_with_comparison_1 (stmt,
5559 invert_tree_comparison (code2, false),
5560 op2a, op2b);
5561 else
5562 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5563 return canonicalize_bool (t, invert);
5566 /* Try to simplify the OR of the ssa variable defined by the assignment
5567 STMT with the comparison specified by (OP2A CODE2 OP2B).
5568 Return NULL_EXPR if we can't simplify this to a single expression. */
5570 static tree
5571 or_var_with_comparison_1 (gimple *stmt,
5572 enum tree_code code2, tree op2a, tree op2b)
5574 tree var = gimple_assign_lhs (stmt);
5575 tree true_test_var = NULL_TREE;
5576 tree false_test_var = NULL_TREE;
5577 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5579 /* Check for identities like (var OR (var != 0)) => true . */
5580 if (TREE_CODE (op2a) == SSA_NAME
5581 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5583 if ((code2 == NE_EXPR && integer_zerop (op2b))
5584 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5586 true_test_var = op2a;
5587 if (var == true_test_var)
5588 return var;
5590 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5591 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5593 false_test_var = op2a;
5594 if (var == false_test_var)
5595 return boolean_true_node;
5599 /* If the definition is a comparison, recurse on it. */
5600 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5602 tree t = or_comparisons_1 (innercode,
5603 gimple_assign_rhs1 (stmt),
5604 gimple_assign_rhs2 (stmt),
5605 code2,
5606 op2a,
5607 op2b);
5608 if (t)
5609 return t;
5612 /* If the definition is an AND or OR expression, we may be able to
5613 simplify by reassociating. */
5614 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5615 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5617 tree inner1 = gimple_assign_rhs1 (stmt);
5618 tree inner2 = gimple_assign_rhs2 (stmt);
5619 gimple *s;
5620 tree t;
5621 tree partial = NULL_TREE;
5622 bool is_or = (innercode == BIT_IOR_EXPR);
5624 /* Check for boolean identities that don't require recursive examination
5625 of inner1/inner2:
5626 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5627 inner1 OR (inner1 AND inner2) => inner1
5628 !inner1 OR (inner1 OR inner2) => true
5629 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5631 if (inner1 == true_test_var)
5632 return (is_or ? var : inner1);
5633 else if (inner2 == true_test_var)
5634 return (is_or ? var : inner2);
5635 else if (inner1 == false_test_var)
5636 return (is_or
5637 ? boolean_true_node
5638 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5639 else if (inner2 == false_test_var)
5640 return (is_or
5641 ? boolean_true_node
5642 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5644 /* Next, redistribute/reassociate the OR across the inner tests.
5645 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5646 if (TREE_CODE (inner1) == SSA_NAME
5647 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5648 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5649 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5650 gimple_assign_rhs1 (s),
5651 gimple_assign_rhs2 (s),
5652 code2, op2a, op2b)))
5654 /* Handle the OR case, where we are reassociating:
5655 (inner1 OR inner2) OR (op2a code2 op2b)
5656 => (t OR inner2)
5657 If the partial result t is a constant, we win. Otherwise
5658 continue on to try reassociating with the other inner test. */
5659 if (is_or)
5661 if (integer_onep (t))
5662 return boolean_true_node;
5663 else if (integer_zerop (t))
5664 return inner2;
5667 /* Handle the AND case, where we are redistributing:
5668 (inner1 AND inner2) OR (op2a code2 op2b)
5669 => (t AND (inner2 OR (op2a code op2b))) */
5670 else if (integer_zerop (t))
5671 return boolean_false_node;
5673 /* Save partial result for later. */
5674 partial = t;
5677 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5678 if (TREE_CODE (inner2) == SSA_NAME
5679 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5680 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5681 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5682 gimple_assign_rhs1 (s),
5683 gimple_assign_rhs2 (s),
5684 code2, op2a, op2b)))
5686 /* Handle the OR case, where we are reassociating:
5687 (inner1 OR inner2) OR (op2a code2 op2b)
5688 => (inner1 OR t)
5689 => (t OR partial) */
5690 if (is_or)
5692 if (integer_zerop (t))
5693 return inner1;
5694 else if (integer_onep (t))
5695 return boolean_true_node;
5696 /* If both are the same, we can apply the identity
5697 (x OR x) == x. */
5698 else if (partial && same_bool_result_p (t, partial))
5699 return t;
5702 /* Handle the AND case, where we are redistributing:
5703 (inner1 AND inner2) OR (op2a code2 op2b)
5704 => (t AND (inner1 OR (op2a code2 op2b)))
5705 => (t AND partial) */
5706 else
5708 if (integer_zerop (t))
5709 return boolean_false_node;
5710 else if (partial)
5712 /* We already got a simplification for the other
5713 operand to the redistributed AND expression. The
5714 interesting case is when at least one is true.
5715 Or, if both are the same, we can apply the identity
5716 (x AND x) == x. */
5717 if (integer_onep (partial))
5718 return t;
5719 else if (integer_onep (t))
5720 return partial;
5721 else if (same_bool_result_p (t, partial))
5722 return t;
5727 return NULL_TREE;
5730 /* Try to simplify the OR of two comparisons defined by
5731 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5732 If this can be done without constructing an intermediate value,
5733 return the resulting tree; otherwise NULL_TREE is returned.
5734 This function is deliberately asymmetric as it recurses on SSA_DEFs
5735 in the first comparison but not the second. */
5737 static tree
5738 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5739 enum tree_code code2, tree op2a, tree op2b)
5741 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5743 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5744 if (operand_equal_p (op1a, op2a, 0)
5745 && operand_equal_p (op1b, op2b, 0))
5747 /* Result will be either NULL_TREE, or a combined comparison. */
5748 tree t = combine_comparisons (UNKNOWN_LOCATION,
5749 TRUTH_ORIF_EXPR, code1, code2,
5750 truth_type, op1a, op1b);
5751 if (t)
5752 return t;
5755 /* Likewise the swapped case of the above. */
5756 if (operand_equal_p (op1a, op2b, 0)
5757 && operand_equal_p (op1b, op2a, 0))
5759 /* Result will be either NULL_TREE, or a combined comparison. */
5760 tree t = combine_comparisons (UNKNOWN_LOCATION,
5761 TRUTH_ORIF_EXPR, code1,
5762 swap_tree_comparison (code2),
5763 truth_type, op1a, op1b);
5764 if (t)
5765 return t;
5768 /* If both comparisons are of the same value against constants, we might
5769 be able to merge them. */
5770 if (operand_equal_p (op1a, op2a, 0)
5771 && TREE_CODE (op1b) == INTEGER_CST
5772 && TREE_CODE (op2b) == INTEGER_CST)
5774 int cmp = tree_int_cst_compare (op1b, op2b);
5776 /* If we have (op1a != op1b), we should either be able to
5777 return that or TRUE, depending on whether the constant op1b
5778 also satisfies the other comparison against op2b. */
5779 if (code1 == NE_EXPR)
5781 bool done = true;
5782 bool val;
5783 switch (code2)
5785 case EQ_EXPR: val = (cmp == 0); break;
5786 case NE_EXPR: val = (cmp != 0); break;
5787 case LT_EXPR: val = (cmp < 0); break;
5788 case GT_EXPR: val = (cmp > 0); break;
5789 case LE_EXPR: val = (cmp <= 0); break;
5790 case GE_EXPR: val = (cmp >= 0); break;
5791 default: done = false;
5793 if (done)
5795 if (val)
5796 return boolean_true_node;
5797 else
5798 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5801 /* Likewise if the second comparison is a != comparison. */
5802 else if (code2 == NE_EXPR)
5804 bool done = true;
5805 bool val;
5806 switch (code1)
5808 case EQ_EXPR: val = (cmp == 0); break;
5809 case NE_EXPR: val = (cmp != 0); break;
5810 case LT_EXPR: val = (cmp > 0); break;
5811 case GT_EXPR: val = (cmp < 0); break;
5812 case LE_EXPR: val = (cmp >= 0); break;
5813 case GE_EXPR: val = (cmp <= 0); break;
5814 default: done = false;
5816 if (done)
5818 if (val)
5819 return boolean_true_node;
5820 else
5821 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5825 /* See if an equality test is redundant with the other comparison. */
5826 else if (code1 == EQ_EXPR)
5828 bool val;
5829 switch (code2)
5831 case EQ_EXPR: val = (cmp == 0); break;
5832 case NE_EXPR: val = (cmp != 0); break;
5833 case LT_EXPR: val = (cmp < 0); break;
5834 case GT_EXPR: val = (cmp > 0); break;
5835 case LE_EXPR: val = (cmp <= 0); break;
5836 case GE_EXPR: val = (cmp >= 0); break;
5837 default:
5838 val = false;
5840 if (val)
5841 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5843 else if (code2 == EQ_EXPR)
5845 bool val;
5846 switch (code1)
5848 case EQ_EXPR: val = (cmp == 0); break;
5849 case NE_EXPR: val = (cmp != 0); break;
5850 case LT_EXPR: val = (cmp > 0); break;
5851 case GT_EXPR: val = (cmp < 0); break;
5852 case LE_EXPR: val = (cmp >= 0); break;
5853 case GE_EXPR: val = (cmp <= 0); break;
5854 default:
5855 val = false;
5857 if (val)
5858 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5861 /* Chose the less restrictive of two < or <= comparisons. */
5862 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5863 && (code2 == LT_EXPR || code2 == LE_EXPR))
5865 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5866 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5867 else
5868 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5871 /* Likewise chose the less restrictive of two > or >= comparisons. */
5872 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5873 && (code2 == GT_EXPR || code2 == GE_EXPR))
5875 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5876 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5877 else
5878 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5881 /* Check for singleton ranges. */
5882 else if (cmp == 0
5883 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5884 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5885 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5887 /* Check for less/greater pairs that don't restrict the range at all. */
5888 else if (cmp >= 0
5889 && (code1 == LT_EXPR || code1 == LE_EXPR)
5890 && (code2 == GT_EXPR || code2 == GE_EXPR))
5891 return boolean_true_node;
5892 else if (cmp <= 0
5893 && (code1 == GT_EXPR || code1 == GE_EXPR)
5894 && (code2 == LT_EXPR || code2 == LE_EXPR))
5895 return boolean_true_node;
5898 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5899 NAME's definition is a truth value. See if there are any simplifications
5900 that can be done against the NAME's definition. */
5901 if (TREE_CODE (op1a) == SSA_NAME
5902 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5903 && (integer_zerop (op1b) || integer_onep (op1b)))
5905 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5906 || (code1 == NE_EXPR && integer_onep (op1b)));
5907 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5908 switch (gimple_code (stmt))
5910 case GIMPLE_ASSIGN:
5911 /* Try to simplify by copy-propagating the definition. */
5912 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
5914 case GIMPLE_PHI:
5915 /* If every argument to the PHI produces the same result when
5916 ORed with the second comparison, we win.
5917 Do not do this unless the type is bool since we need a bool
5918 result here anyway. */
5919 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5921 tree result = NULL_TREE;
5922 unsigned i;
5923 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5925 tree arg = gimple_phi_arg_def (stmt, i);
5927 /* If this PHI has itself as an argument, ignore it.
5928 If all the other args produce the same result,
5929 we're still OK. */
5930 if (arg == gimple_phi_result (stmt))
5931 continue;
5932 else if (TREE_CODE (arg) == INTEGER_CST)
5934 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
5936 if (!result)
5937 result = boolean_true_node;
5938 else if (!integer_onep (result))
5939 return NULL_TREE;
5941 else if (!result)
5942 result = fold_build2 (code2, boolean_type_node,
5943 op2a, op2b);
5944 else if (!same_bool_comparison_p (result,
5945 code2, op2a, op2b))
5946 return NULL_TREE;
5948 else if (TREE_CODE (arg) == SSA_NAME
5949 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5951 tree temp;
5952 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5953 /* In simple cases we can look through PHI nodes,
5954 but we have to be careful with loops.
5955 See PR49073. */
5956 if (! dom_info_available_p (CDI_DOMINATORS)
5957 || gimple_bb (def_stmt) == gimple_bb (stmt)
5958 || dominated_by_p (CDI_DOMINATORS,
5959 gimple_bb (def_stmt),
5960 gimple_bb (stmt)))
5961 return NULL_TREE;
5962 temp = or_var_with_comparison (arg, invert, code2,
5963 op2a, op2b);
5964 if (!temp)
5965 return NULL_TREE;
5966 else if (!result)
5967 result = temp;
5968 else if (!same_bool_result_p (result, temp))
5969 return NULL_TREE;
5971 else
5972 return NULL_TREE;
5974 return result;
5977 default:
5978 break;
5981 return NULL_TREE;
5984 /* Try to simplify the OR of two comparisons, specified by
5985 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5986 If this can be simplified to a single expression (without requiring
5987 introducing more SSA variables to hold intermediate values),
5988 return the resulting tree. Otherwise return NULL_TREE.
5989 If the result expression is non-null, it has boolean type. */
5991 tree
5992 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
5993 enum tree_code code2, tree op2a, tree op2b)
5995 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5996 if (t)
5997 return t;
5998 else
5999 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6003 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6005 Either NULL_TREE, a simplified but non-constant or a constant
6006 is returned.
6008 ??? This should go into a gimple-fold-inline.h file to be eventually
6009 privatized with the single valueize function used in the various TUs
6010 to avoid the indirect function call overhead. */
6012 tree
6013 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6014 tree (*gvalueize) (tree))
6016 code_helper rcode;
6017 tree ops[3] = {};
6018 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6019 edges if there are intermediate VARYING defs. For this reason
6020 do not follow SSA edges here even though SCCVN can technically
6021 just deal fine with that. */
6022 if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
6024 tree res = NULL_TREE;
6025 if (gimple_simplified_result_is_gimple_val (rcode, ops))
6026 res = ops[0];
6027 else if (mprts_hook)
6028 res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
6029 if (res)
6031 if (dump_file && dump_flags & TDF_DETAILS)
6033 fprintf (dump_file, "Match-and-simplified ");
6034 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6035 fprintf (dump_file, " to ");
6036 print_generic_expr (dump_file, res);
6037 fprintf (dump_file, "\n");
6039 return res;
6043 location_t loc = gimple_location (stmt);
6044 switch (gimple_code (stmt))
6046 case GIMPLE_ASSIGN:
6048 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6050 switch (get_gimple_rhs_class (subcode))
6052 case GIMPLE_SINGLE_RHS:
6054 tree rhs = gimple_assign_rhs1 (stmt);
6055 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6057 if (TREE_CODE (rhs) == SSA_NAME)
6059 /* If the RHS is an SSA_NAME, return its known constant value,
6060 if any. */
6061 return (*valueize) (rhs);
6063 /* Handle propagating invariant addresses into address
6064 operations. */
6065 else if (TREE_CODE (rhs) == ADDR_EXPR
6066 && !is_gimple_min_invariant (rhs))
6068 HOST_WIDE_INT offset = 0;
6069 tree base;
6070 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6071 &offset,
6072 valueize);
6073 if (base
6074 && (CONSTANT_CLASS_P (base)
6075 || decl_address_invariant_p (base)))
6076 return build_invariant_address (TREE_TYPE (rhs),
6077 base, offset);
6079 else if (TREE_CODE (rhs) == CONSTRUCTOR
6080 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6081 && (CONSTRUCTOR_NELTS (rhs)
6082 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6084 unsigned i, nelts;
6085 tree val;
6087 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
6088 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6089 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6091 val = (*valueize) (val);
6092 if (TREE_CODE (val) == INTEGER_CST
6093 || TREE_CODE (val) == REAL_CST
6094 || TREE_CODE (val) == FIXED_CST)
6095 vec.quick_push (val);
6096 else
6097 return NULL_TREE;
6100 return vec.build ();
6102 if (subcode == OBJ_TYPE_REF)
6104 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6105 /* If callee is constant, we can fold away the wrapper. */
6106 if (is_gimple_min_invariant (val))
6107 return val;
6110 if (kind == tcc_reference)
6112 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6113 || TREE_CODE (rhs) == REALPART_EXPR
6114 || TREE_CODE (rhs) == IMAGPART_EXPR)
6115 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6117 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6118 return fold_unary_loc (EXPR_LOCATION (rhs),
6119 TREE_CODE (rhs),
6120 TREE_TYPE (rhs), val);
6122 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6123 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6125 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6126 return fold_ternary_loc (EXPR_LOCATION (rhs),
6127 TREE_CODE (rhs),
6128 TREE_TYPE (rhs), val,
6129 TREE_OPERAND (rhs, 1),
6130 TREE_OPERAND (rhs, 2));
6132 else if (TREE_CODE (rhs) == MEM_REF
6133 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6135 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6136 if (TREE_CODE (val) == ADDR_EXPR
6137 && is_gimple_min_invariant (val))
6139 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6140 unshare_expr (val),
6141 TREE_OPERAND (rhs, 1));
6142 if (tem)
6143 rhs = tem;
6146 return fold_const_aggregate_ref_1 (rhs, valueize);
6148 else if (kind == tcc_declaration)
6149 return get_symbol_constant_value (rhs);
6150 return rhs;
6153 case GIMPLE_UNARY_RHS:
6154 return NULL_TREE;
6156 case GIMPLE_BINARY_RHS:
6157 /* Translate &x + CST into an invariant form suitable for
6158 further propagation. */
6159 if (subcode == POINTER_PLUS_EXPR)
6161 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6162 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6163 if (TREE_CODE (op0) == ADDR_EXPR
6164 && TREE_CODE (op1) == INTEGER_CST)
6166 tree off = fold_convert (ptr_type_node, op1);
6167 return build_fold_addr_expr_loc
6168 (loc,
6169 fold_build2 (MEM_REF,
6170 TREE_TYPE (TREE_TYPE (op0)),
6171 unshare_expr (op0), off));
6174 /* Canonicalize bool != 0 and bool == 0 appearing after
6175 valueization. While gimple_simplify handles this
6176 it can get confused by the ~X == 1 -> X == 0 transform
6177 which we cant reduce to a SSA name or a constant
6178 (and we have no way to tell gimple_simplify to not
6179 consider those transforms in the first place). */
6180 else if (subcode == EQ_EXPR
6181 || subcode == NE_EXPR)
6183 tree lhs = gimple_assign_lhs (stmt);
6184 tree op0 = gimple_assign_rhs1 (stmt);
6185 if (useless_type_conversion_p (TREE_TYPE (lhs),
6186 TREE_TYPE (op0)))
6188 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6189 op0 = (*valueize) (op0);
6190 if (TREE_CODE (op0) == INTEGER_CST)
6191 std::swap (op0, op1);
6192 if (TREE_CODE (op1) == INTEGER_CST
6193 && ((subcode == NE_EXPR && integer_zerop (op1))
6194 || (subcode == EQ_EXPR && integer_onep (op1))))
6195 return op0;
6198 return NULL_TREE;
6200 case GIMPLE_TERNARY_RHS:
6202 /* Handle ternary operators that can appear in GIMPLE form. */
6203 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6204 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6205 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6206 return fold_ternary_loc (loc, subcode,
6207 gimple_expr_type (stmt), op0, op1, op2);
6210 default:
6211 gcc_unreachable ();
6215 case GIMPLE_CALL:
6217 tree fn;
6218 gcall *call_stmt = as_a <gcall *> (stmt);
6220 if (gimple_call_internal_p (stmt))
6222 enum tree_code subcode = ERROR_MARK;
6223 switch (gimple_call_internal_fn (stmt))
6225 case IFN_UBSAN_CHECK_ADD:
6226 subcode = PLUS_EXPR;
6227 break;
6228 case IFN_UBSAN_CHECK_SUB:
6229 subcode = MINUS_EXPR;
6230 break;
6231 case IFN_UBSAN_CHECK_MUL:
6232 subcode = MULT_EXPR;
6233 break;
6234 case IFN_BUILTIN_EXPECT:
6236 tree arg0 = gimple_call_arg (stmt, 0);
6237 tree op0 = (*valueize) (arg0);
6238 if (TREE_CODE (op0) == INTEGER_CST)
6239 return op0;
6240 return NULL_TREE;
6242 default:
6243 return NULL_TREE;
6245 tree arg0 = gimple_call_arg (stmt, 0);
6246 tree arg1 = gimple_call_arg (stmt, 1);
6247 tree op0 = (*valueize) (arg0);
6248 tree op1 = (*valueize) (arg1);
6250 if (TREE_CODE (op0) != INTEGER_CST
6251 || TREE_CODE (op1) != INTEGER_CST)
6253 switch (subcode)
6255 case MULT_EXPR:
6256 /* x * 0 = 0 * x = 0 without overflow. */
6257 if (integer_zerop (op0) || integer_zerop (op1))
6258 return build_zero_cst (TREE_TYPE (arg0));
6259 break;
6260 case MINUS_EXPR:
6261 /* y - y = 0 without overflow. */
6262 if (operand_equal_p (op0, op1, 0))
6263 return build_zero_cst (TREE_TYPE (arg0));
6264 break;
6265 default:
6266 break;
6269 tree res
6270 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6271 if (res
6272 && TREE_CODE (res) == INTEGER_CST
6273 && !TREE_OVERFLOW (res))
6274 return res;
6275 return NULL_TREE;
6278 fn = (*valueize) (gimple_call_fn (stmt));
6279 if (TREE_CODE (fn) == ADDR_EXPR
6280 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6281 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6282 && gimple_builtin_call_types_compatible_p (stmt,
6283 TREE_OPERAND (fn, 0)))
6285 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6286 tree retval;
6287 unsigned i;
6288 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6289 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6290 retval = fold_builtin_call_array (loc,
6291 gimple_call_return_type (call_stmt),
6292 fn, gimple_call_num_args (stmt), args);
6293 if (retval)
6295 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6296 STRIP_NOPS (retval);
6297 retval = fold_convert (gimple_call_return_type (call_stmt),
6298 retval);
6300 return retval;
6302 return NULL_TREE;
6305 default:
6306 return NULL_TREE;
6310 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6311 Returns NULL_TREE if folding to a constant is not possible, otherwise
6312 returns a constant according to is_gimple_min_invariant. */
6314 tree
6315 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6317 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6318 if (res && is_gimple_min_invariant (res))
6319 return res;
6320 return NULL_TREE;
6324 /* The following set of functions are supposed to fold references using
6325 their constant initializers. */
6327 /* See if we can find constructor defining value of BASE.
6328 When we know the consructor with constant offset (such as
6329 base is array[40] and we do know constructor of array), then
6330 BIT_OFFSET is adjusted accordingly.
6332 As a special case, return error_mark_node when constructor
6333 is not explicitly available, but it is known to be zero
6334 such as 'static const int a;'. */
6335 static tree
6336 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
6337 tree (*valueize)(tree))
6339 HOST_WIDE_INT bit_offset2, size, max_size;
6340 bool reverse;
6342 if (TREE_CODE (base) == MEM_REF)
6344 if (!integer_zerop (TREE_OPERAND (base, 1)))
6346 if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
6347 return NULL_TREE;
6348 *bit_offset += (mem_ref_offset (base).to_short_addr ()
6349 * BITS_PER_UNIT);
6352 if (valueize
6353 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
6354 base = valueize (TREE_OPERAND (base, 0));
6355 if (!base || TREE_CODE (base) != ADDR_EXPR)
6356 return NULL_TREE;
6357 base = TREE_OPERAND (base, 0);
6359 else if (valueize
6360 && TREE_CODE (base) == SSA_NAME)
6361 base = valueize (base);
6363 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6364 DECL_INITIAL. If BASE is a nested reference into another
6365 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6366 the inner reference. */
6367 switch (TREE_CODE (base))
6369 case VAR_DECL:
6370 case CONST_DECL:
6372 tree init = ctor_for_folding (base);
6374 /* Our semantic is exact opposite of ctor_for_folding;
6375 NULL means unknown, while error_mark_node is 0. */
6376 if (init == error_mark_node)
6377 return NULL_TREE;
6378 if (!init)
6379 return error_mark_node;
6380 return init;
6383 case VIEW_CONVERT_EXPR:
6384 return get_base_constructor (TREE_OPERAND (base, 0),
6385 bit_offset, valueize);
6387 case ARRAY_REF:
6388 case COMPONENT_REF:
6389 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
6390 &reverse);
6391 if (max_size == -1 || size != max_size)
6392 return NULL_TREE;
6393 *bit_offset += bit_offset2;
6394 return get_base_constructor (base, bit_offset, valueize);
6396 case CONSTRUCTOR:
6397 return base;
6399 default:
6400 if (CONSTANT_CLASS_P (base))
6401 return base;
6403 return NULL_TREE;
6407 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6408 SIZE to the memory at bit OFFSET. */
6410 static tree
6411 fold_array_ctor_reference (tree type, tree ctor,
6412 unsigned HOST_WIDE_INT offset,
6413 unsigned HOST_WIDE_INT size,
6414 tree from_decl)
6416 offset_int low_bound;
6417 offset_int elt_size;
6418 offset_int access_index;
6419 tree domain_type = NULL_TREE;
6420 HOST_WIDE_INT inner_offset;
6422 /* Compute low bound and elt size. */
6423 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
6424 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
6425 if (domain_type && TYPE_MIN_VALUE (domain_type))
6427 /* Static constructors for variably sized objects makes no sense. */
6428 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
6429 return NULL_TREE;
6430 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
6432 else
6433 low_bound = 0;
6434 /* Static constructors for variably sized objects makes no sense. */
6435 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
6436 return NULL_TREE;
6437 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
6439 /* We can handle only constantly sized accesses that are known to not
6440 be larger than size of array element. */
6441 if (!TYPE_SIZE_UNIT (type)
6442 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
6443 || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
6444 || elt_size == 0)
6445 return NULL_TREE;
6447 /* Compute the array index we look for. */
6448 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
6449 elt_size);
6450 access_index += low_bound;
6452 /* And offset within the access. */
6453 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
6455 /* See if the array field is large enough to span whole access. We do not
6456 care to fold accesses spanning multiple array indexes. */
6457 if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
6458 return NULL_TREE;
6459 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
6460 return fold_ctor_reference (type, val, inner_offset, size, from_decl);
6462 /* When memory is not explicitely mentioned in constructor,
6463 it is 0 (or out of range). */
6464 return build_zero_cst (type);
6467 /* CTOR is CONSTRUCTOR of an aggregate or vector.
6468 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
6470 static tree
6471 fold_nonarray_ctor_reference (tree type, tree ctor,
6472 unsigned HOST_WIDE_INT offset,
6473 unsigned HOST_WIDE_INT size,
6474 tree from_decl)
6476 unsigned HOST_WIDE_INT cnt;
6477 tree cfield, cval;
6479 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
6480 cval)
6482 tree byte_offset = DECL_FIELD_OFFSET (cfield);
6483 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
6484 tree field_size = DECL_SIZE (cfield);
6485 offset_int bitoffset;
6486 offset_int bitoffset_end, access_end;
6488 /* Variable sized objects in static constructors makes no sense,
6489 but field_size can be NULL for flexible array members. */
6490 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
6491 && TREE_CODE (byte_offset) == INTEGER_CST
6492 && (field_size != NULL_TREE
6493 ? TREE_CODE (field_size) == INTEGER_CST
6494 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
6496 /* Compute bit offset of the field. */
6497 bitoffset = (wi::to_offset (field_offset)
6498 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
6499 /* Compute bit offset where the field ends. */
6500 if (field_size != NULL_TREE)
6501 bitoffset_end = bitoffset + wi::to_offset (field_size);
6502 else
6503 bitoffset_end = 0;
6505 access_end = offset_int (offset) + size;
6507 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
6508 [BITOFFSET, BITOFFSET_END)? */
6509 if (wi::cmps (access_end, bitoffset) > 0
6510 && (field_size == NULL_TREE
6511 || wi::lts_p (offset, bitoffset_end)))
6513 offset_int inner_offset = offset_int (offset) - bitoffset;
6514 /* We do have overlap. Now see if field is large enough to
6515 cover the access. Give up for accesses spanning multiple
6516 fields. */
6517 if (wi::cmps (access_end, bitoffset_end) > 0)
6518 return NULL_TREE;
6519 if (offset < bitoffset)
6520 return NULL_TREE;
6521 return fold_ctor_reference (type, cval,
6522 inner_offset.to_uhwi (), size,
6523 from_decl);
6526 /* When memory is not explicitely mentioned in constructor, it is 0. */
6527 return build_zero_cst (type);
6530 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
6531 to the memory at bit OFFSET. */
6533 tree
6534 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
6535 unsigned HOST_WIDE_INT size, tree from_decl)
6537 tree ret;
6539 /* We found the field with exact match. */
6540 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
6541 && !offset)
6542 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6544 /* We are at the end of walk, see if we can view convert the
6545 result. */
6546 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
6547 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6548 && !compare_tree_int (TYPE_SIZE (type), size)
6549 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
6551 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
6552 if (ret)
6554 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
6555 if (ret)
6556 STRIP_USELESS_TYPE_CONVERSION (ret);
6558 return ret;
6560 /* For constants and byte-aligned/sized reads try to go through
6561 native_encode/interpret. */
6562 if (CONSTANT_CLASS_P (ctor)
6563 && BITS_PER_UNIT == 8
6564 && offset % BITS_PER_UNIT == 0
6565 && size % BITS_PER_UNIT == 0
6566 && size <= MAX_BITSIZE_MODE_ANY_MODE)
6568 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
6569 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
6570 offset / BITS_PER_UNIT);
6571 if (len > 0)
6572 return native_interpret_expr (type, buf, len);
6574 if (TREE_CODE (ctor) == CONSTRUCTOR)
6577 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
6578 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
6579 return fold_array_ctor_reference (type, ctor, offset, size,
6580 from_decl);
6581 else
6582 return fold_nonarray_ctor_reference (type, ctor, offset, size,
6583 from_decl);
6586 return NULL_TREE;
6589 /* Return the tree representing the element referenced by T if T is an
6590 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6591 names using VALUEIZE. Return NULL_TREE otherwise. */
6593 tree
6594 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
6596 tree ctor, idx, base;
6597 HOST_WIDE_INT offset, size, max_size;
6598 tree tem;
6599 bool reverse;
6601 if (TREE_THIS_VOLATILE (t))
6602 return NULL_TREE;
6604 if (DECL_P (t))
6605 return get_symbol_constant_value (t);
6607 tem = fold_read_from_constant_string (t);
6608 if (tem)
6609 return tem;
6611 switch (TREE_CODE (t))
6613 case ARRAY_REF:
6614 case ARRAY_RANGE_REF:
6615 /* Constant indexes are handled well by get_base_constructor.
6616 Only special case variable offsets.
6617 FIXME: This code can't handle nested references with variable indexes
6618 (they will be handled only by iteration of ccp). Perhaps we can bring
6619 get_ref_base_and_extent here and make it use a valueize callback. */
6620 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
6621 && valueize
6622 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
6623 && TREE_CODE (idx) == INTEGER_CST)
6625 tree low_bound, unit_size;
6627 /* If the resulting bit-offset is constant, track it. */
6628 if ((low_bound = array_ref_low_bound (t),
6629 TREE_CODE (low_bound) == INTEGER_CST)
6630 && (unit_size = array_ref_element_size (t),
6631 tree_fits_uhwi_p (unit_size)))
6633 offset_int woffset
6634 = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
6635 TYPE_PRECISION (TREE_TYPE (idx)));
6637 if (wi::fits_shwi_p (woffset))
6639 offset = woffset.to_shwi ();
6640 /* TODO: This code seems wrong, multiply then check
6641 to see if it fits. */
6642 offset *= tree_to_uhwi (unit_size);
6643 offset *= BITS_PER_UNIT;
6645 base = TREE_OPERAND (t, 0);
6646 ctor = get_base_constructor (base, &offset, valueize);
6647 /* Empty constructor. Always fold to 0. */
6648 if (ctor == error_mark_node)
6649 return build_zero_cst (TREE_TYPE (t));
6650 /* Out of bound array access. Value is undefined,
6651 but don't fold. */
6652 if (offset < 0)
6653 return NULL_TREE;
6654 /* We can not determine ctor. */
6655 if (!ctor)
6656 return NULL_TREE;
6657 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
6658 tree_to_uhwi (unit_size)
6659 * BITS_PER_UNIT,
6660 base);
6664 /* Fallthru. */
6666 case COMPONENT_REF:
6667 case BIT_FIELD_REF:
6668 case TARGET_MEM_REF:
6669 case MEM_REF:
6670 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
6671 ctor = get_base_constructor (base, &offset, valueize);
6673 /* Empty constructor. Always fold to 0. */
6674 if (ctor == error_mark_node)
6675 return build_zero_cst (TREE_TYPE (t));
6676 /* We do not know precise address. */
6677 if (max_size == -1 || max_size != size)
6678 return NULL_TREE;
6679 /* We can not determine ctor. */
6680 if (!ctor)
6681 return NULL_TREE;
6683 /* Out of bound array access. Value is undefined, but don't fold. */
6684 if (offset < 0)
6685 return NULL_TREE;
6687 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
6688 base);
6690 case REALPART_EXPR:
6691 case IMAGPART_EXPR:
6693 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
6694 if (c && TREE_CODE (c) == COMPLEX_CST)
6695 return fold_build1_loc (EXPR_LOCATION (t),
6696 TREE_CODE (t), TREE_TYPE (t), c);
6697 break;
6700 default:
6701 break;
6704 return NULL_TREE;
6707 tree
6708 fold_const_aggregate_ref (tree t)
6710 return fold_const_aggregate_ref_1 (t, NULL);
6713 /* Lookup virtual method with index TOKEN in a virtual table V
6714 at OFFSET.
6715 Set CAN_REFER if non-NULL to false if method
6716 is not referable or if the virtual table is ill-formed (such as rewriten
6717 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6719 tree
6720 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
6721 tree v,
6722 unsigned HOST_WIDE_INT offset,
6723 bool *can_refer)
6725 tree vtable = v, init, fn;
6726 unsigned HOST_WIDE_INT size;
6727 unsigned HOST_WIDE_INT elt_size, access_index;
6728 tree domain_type;
6730 if (can_refer)
6731 *can_refer = true;
6733 /* First of all double check we have virtual table. */
6734 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
6736 /* Pass down that we lost track of the target. */
6737 if (can_refer)
6738 *can_refer = false;
6739 return NULL_TREE;
6742 init = ctor_for_folding (v);
6744 /* The virtual tables should always be born with constructors
6745 and we always should assume that they are avaialble for
6746 folding. At the moment we do not stream them in all cases,
6747 but it should never happen that ctor seem unreachable. */
6748 gcc_assert (init);
6749 if (init == error_mark_node)
6751 /* Pass down that we lost track of the target. */
6752 if (can_refer)
6753 *can_refer = false;
6754 return NULL_TREE;
6756 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
6757 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
6758 offset *= BITS_PER_UNIT;
6759 offset += token * size;
6761 /* Lookup the value in the constructor that is assumed to be array.
6762 This is equivalent to
6763 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
6764 offset, size, NULL);
6765 but in a constant time. We expect that frontend produced a simple
6766 array without indexed initializers. */
6768 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
6769 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
6770 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
6771 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
6773 access_index = offset / BITS_PER_UNIT / elt_size;
6774 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
6776 /* This code makes an assumption that there are no
6777 indexed fileds produced by C++ FE, so we can directly index the array. */
6778 if (access_index < CONSTRUCTOR_NELTS (init))
6780 fn = CONSTRUCTOR_ELT (init, access_index)->value;
6781 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
6782 STRIP_NOPS (fn);
6784 else
6785 fn = NULL;
6787 /* For type inconsistent program we may end up looking up virtual method
6788 in virtual table that does not contain TOKEN entries. We may overrun
6789 the virtual table and pick up a constant or RTTI info pointer.
6790 In any case the call is undefined. */
6791 if (!fn
6792 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
6793 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
6794 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6795 else
6797 fn = TREE_OPERAND (fn, 0);
6799 /* When cgraph node is missing and function is not public, we cannot
6800 devirtualize. This can happen in WHOPR when the actual method
6801 ends up in other partition, because we found devirtualization
6802 possibility too late. */
6803 if (!can_refer_decl_in_current_unit_p (fn, vtable))
6805 if (can_refer)
6807 *can_refer = false;
6808 return fn;
6810 return NULL_TREE;
6814 /* Make sure we create a cgraph node for functions we'll reference.
6815 They can be non-existent if the reference comes from an entry
6816 of an external vtable for example. */
6817 cgraph_node::get_create (fn);
6819 return fn;
6822 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6823 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6824 KNOWN_BINFO carries the binfo describing the true type of
6825 OBJ_TYPE_REF_OBJECT(REF).
6826 Set CAN_REFER if non-NULL to false if method
6827 is not referable or if the virtual table is ill-formed (such as rewriten
6828 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6830 tree
6831 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6832 bool *can_refer)
6834 unsigned HOST_WIDE_INT offset;
6835 tree v;
6837 v = BINFO_VTABLE (known_binfo);
6838 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6839 if (!v)
6840 return NULL_TREE;
6842 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6844 if (can_refer)
6845 *can_refer = false;
6846 return NULL_TREE;
6848 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
6851 /* Given a pointer value T, return a simplified version of an
6852 indirection through T, or NULL_TREE if no simplification is
6853 possible. Note that the resulting type may be different from
6854 the type pointed to in the sense that it is still compatible
6855 from the langhooks point of view. */
6857 tree
6858 gimple_fold_indirect_ref (tree t)
6860 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
6861 tree sub = t;
6862 tree subtype;
6864 STRIP_NOPS (sub);
6865 subtype = TREE_TYPE (sub);
6866 if (!POINTER_TYPE_P (subtype)
6867 || TYPE_REF_CAN_ALIAS_ALL (ptype))
6868 return NULL_TREE;
6870 if (TREE_CODE (sub) == ADDR_EXPR)
6872 tree op = TREE_OPERAND (sub, 0);
6873 tree optype = TREE_TYPE (op);
6874 /* *&p => p */
6875 if (useless_type_conversion_p (type, optype))
6876 return op;
6878 /* *(foo *)&fooarray => fooarray[0] */
6879 if (TREE_CODE (optype) == ARRAY_TYPE
6880 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
6881 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6883 tree type_domain = TYPE_DOMAIN (optype);
6884 tree min_val = size_zero_node;
6885 if (type_domain && TYPE_MIN_VALUE (type_domain))
6886 min_val = TYPE_MIN_VALUE (type_domain);
6887 if (TREE_CODE (min_val) == INTEGER_CST)
6888 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
6890 /* *(foo *)&complexfoo => __real__ complexfoo */
6891 else if (TREE_CODE (optype) == COMPLEX_TYPE
6892 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6893 return fold_build1 (REALPART_EXPR, type, op);
6894 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
6895 else if (TREE_CODE (optype) == VECTOR_TYPE
6896 && useless_type_conversion_p (type, TREE_TYPE (optype)))
6898 tree part_width = TYPE_SIZE (type);
6899 tree index = bitsize_int (0);
6900 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
6904 /* *(p + CST) -> ... */
6905 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
6906 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
6908 tree addr = TREE_OPERAND (sub, 0);
6909 tree off = TREE_OPERAND (sub, 1);
6910 tree addrtype;
6912 STRIP_NOPS (addr);
6913 addrtype = TREE_TYPE (addr);
6915 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
6916 if (TREE_CODE (addr) == ADDR_EXPR
6917 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
6918 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
6919 && tree_fits_uhwi_p (off))
6921 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
6922 tree part_width = TYPE_SIZE (type);
6923 unsigned HOST_WIDE_INT part_widthi
6924 = tree_to_shwi (part_width) / BITS_PER_UNIT;
6925 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
6926 tree index = bitsize_int (indexi);
6927 if (offset / part_widthi
6928 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
6929 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
6930 part_width, index);
6933 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
6934 if (TREE_CODE (addr) == ADDR_EXPR
6935 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
6936 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
6938 tree size = TYPE_SIZE_UNIT (type);
6939 if (tree_int_cst_equal (size, off))
6940 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
6943 /* *(p + CST) -> MEM_REF <p, CST>. */
6944 if (TREE_CODE (addr) != ADDR_EXPR
6945 || DECL_P (TREE_OPERAND (addr, 0)))
6946 return fold_build2 (MEM_REF, type,
6947 addr,
6948 wide_int_to_tree (ptype, wi::to_wide (off)));
6951 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
6952 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
6953 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
6954 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
6956 tree type_domain;
6957 tree min_val = size_zero_node;
6958 tree osub = sub;
6959 sub = gimple_fold_indirect_ref (sub);
6960 if (! sub)
6961 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
6962 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
6963 if (type_domain && TYPE_MIN_VALUE (type_domain))
6964 min_val = TYPE_MIN_VALUE (type_domain);
6965 if (TREE_CODE (min_val) == INTEGER_CST)
6966 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
6969 return NULL_TREE;
6972 /* Return true if CODE is an operation that when operating on signed
6973 integer types involves undefined behavior on overflow and the
6974 operation can be expressed with unsigned arithmetic. */
6976 bool
6977 arith_code_with_undefined_signed_overflow (tree_code code)
6979 switch (code)
6981 case PLUS_EXPR:
6982 case MINUS_EXPR:
6983 case MULT_EXPR:
6984 case NEGATE_EXPR:
6985 case POINTER_PLUS_EXPR:
6986 return true;
6987 default:
6988 return false;
6992 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6993 operation that can be transformed to unsigned arithmetic by converting
6994 its operand, carrying out the operation in the corresponding unsigned
6995 type and converting the result back to the original type.
6997 Returns a sequence of statements that replace STMT and also contain
6998 a modified form of STMT itself. */
7000 gimple_seq
7001 rewrite_to_defined_overflow (gimple *stmt)
7003 if (dump_file && (dump_flags & TDF_DETAILS))
7005 fprintf (dump_file, "rewriting stmt with undefined signed "
7006 "overflow ");
7007 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
7010 tree lhs = gimple_assign_lhs (stmt);
7011 tree type = unsigned_type_for (TREE_TYPE (lhs));
7012 gimple_seq stmts = NULL;
7013 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
7015 tree op = gimple_op (stmt, i);
7016 op = gimple_convert (&stmts, type, op);
7017 gimple_set_op (stmt, i, op);
7019 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
7020 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
7021 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
7022 gimple_seq_add_stmt (&stmts, stmt);
7023 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
7024 gimple_seq_add_stmt (&stmts, cvt);
7026 return stmts;
7030 /* The valueization hook we use for the gimple_build API simplification.
7031 This makes us match fold_buildN behavior by only combining with
7032 statements in the sequence(s) we are currently building. */
7034 static tree
7035 gimple_build_valueize (tree op)
7037 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7038 return op;
7039 return NULL_TREE;
7042 /* Build the expression CODE OP0 of type TYPE with location LOC,
7043 simplifying it first if possible. Returns the built
7044 expression value and appends statements possibly defining it
7045 to SEQ. */
7047 tree
7048 gimple_build (gimple_seq *seq, location_t loc,
7049 enum tree_code code, tree type, tree op0)
7051 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7052 if (!res)
7054 res = create_tmp_reg_or_ssa_name (type);
7055 gimple *stmt;
7056 if (code == REALPART_EXPR
7057 || code == IMAGPART_EXPR
7058 || code == VIEW_CONVERT_EXPR)
7059 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7060 else
7061 stmt = gimple_build_assign (res, code, op0);
7062 gimple_set_location (stmt, loc);
7063 gimple_seq_add_stmt_without_update (seq, stmt);
7065 return res;
7068 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7069 simplifying it first if possible. Returns the built
7070 expression value and appends statements possibly defining it
7071 to SEQ. */
7073 tree
7074 gimple_build (gimple_seq *seq, location_t loc,
7075 enum tree_code code, tree type, tree op0, tree op1)
7077 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7078 if (!res)
7080 res = create_tmp_reg_or_ssa_name (type);
7081 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7082 gimple_set_location (stmt, loc);
7083 gimple_seq_add_stmt_without_update (seq, stmt);
7085 return res;
7088 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7089 simplifying it first if possible. Returns the built
7090 expression value and appends statements possibly defining it
7091 to SEQ. */
7093 tree
7094 gimple_build (gimple_seq *seq, location_t loc,
7095 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7097 tree res = gimple_simplify (code, type, op0, op1, op2,
7098 seq, gimple_build_valueize);
7099 if (!res)
7101 res = create_tmp_reg_or_ssa_name (type);
7102 gimple *stmt;
7103 if (code == BIT_FIELD_REF)
7104 stmt = gimple_build_assign (res, code,
7105 build3 (code, type, op0, op1, op2));
7106 else
7107 stmt = gimple_build_assign (res, code, op0, op1, op2);
7108 gimple_set_location (stmt, loc);
7109 gimple_seq_add_stmt_without_update (seq, stmt);
7111 return res;
7114 /* Build the call FN (ARG0) with a result of type TYPE
7115 (or no result if TYPE is void) with location LOC,
7116 simplifying it first if possible. Returns the built
7117 expression value (or NULL_TREE if TYPE is void) and appends
7118 statements possibly defining it to SEQ. */
7120 tree
7121 gimple_build (gimple_seq *seq, location_t loc,
7122 enum built_in_function fn, tree type, tree arg0)
7124 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7125 if (!res)
7127 tree decl = builtin_decl_implicit (fn);
7128 gimple *stmt = gimple_build_call (decl, 1, arg0);
7129 if (!VOID_TYPE_P (type))
7131 res = create_tmp_reg_or_ssa_name (type);
7132 gimple_call_set_lhs (stmt, res);
7134 gimple_set_location (stmt, loc);
7135 gimple_seq_add_stmt_without_update (seq, stmt);
7137 return res;
7140 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7141 (or no result if TYPE is void) with location LOC,
7142 simplifying it first if possible. Returns the built
7143 expression value (or NULL_TREE if TYPE is void) and appends
7144 statements possibly defining it to SEQ. */
7146 tree
7147 gimple_build (gimple_seq *seq, location_t loc,
7148 enum built_in_function fn, tree type, tree arg0, tree arg1)
7150 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7151 if (!res)
7153 tree decl = builtin_decl_implicit (fn);
7154 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7155 if (!VOID_TYPE_P (type))
7157 res = create_tmp_reg_or_ssa_name (type);
7158 gimple_call_set_lhs (stmt, res);
7160 gimple_set_location (stmt, loc);
7161 gimple_seq_add_stmt_without_update (seq, stmt);
7163 return res;
7166 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7167 (or no result if TYPE is void) with location LOC,
7168 simplifying it first if possible. Returns the built
7169 expression value (or NULL_TREE if TYPE is void) and appends
7170 statements possibly defining it to SEQ. */
7172 tree
7173 gimple_build (gimple_seq *seq, location_t loc,
7174 enum built_in_function fn, tree type,
7175 tree arg0, tree arg1, tree arg2)
7177 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7178 seq, gimple_build_valueize);
7179 if (!res)
7181 tree decl = builtin_decl_implicit (fn);
7182 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7183 if (!VOID_TYPE_P (type))
7185 res = create_tmp_reg_or_ssa_name (type);
7186 gimple_call_set_lhs (stmt, res);
7188 gimple_set_location (stmt, loc);
7189 gimple_seq_add_stmt_without_update (seq, stmt);
7191 return res;
7194 /* Build the conversion (TYPE) OP with a result of type TYPE
7195 with location LOC if such conversion is neccesary in GIMPLE,
7196 simplifying it first.
7197 Returns the built expression value and appends
7198 statements possibly defining it to SEQ. */
7200 tree
7201 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7203 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7204 return op;
7205 return gimple_build (seq, loc, NOP_EXPR, type, op);
7208 /* Build the conversion (ptrofftype) OP with a result of a type
7209 compatible with ptrofftype with location LOC if such conversion
7210 is neccesary in GIMPLE, simplifying it first.
7211 Returns the built expression value and appends
7212 statements possibly defining it to SEQ. */
7214 tree
7215 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7217 if (ptrofftype_p (TREE_TYPE (op)))
7218 return op;
7219 return gimple_convert (seq, loc, sizetype, op);
7222 /* Build a vector of type TYPE in which each element has the value OP.
7223 Return a gimple value for the result, appending any new statements
7224 to SEQ. */
7226 tree
7227 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7228 tree op)
7230 tree res, vec = build_vector_from_val (type, op);
7231 if (is_gimple_val (vec))
7232 return vec;
7233 if (gimple_in_ssa_p (cfun))
7234 res = make_ssa_name (type);
7235 else
7236 res = create_tmp_reg (type);
7237 gimple *stmt = gimple_build_assign (res, vec);
7238 gimple_set_location (stmt, loc);
7239 gimple_seq_add_stmt_without_update (seq, stmt);
7240 return res;
7243 /* Build a vector from BUILDER, handling the case in which some elements
7244 are non-constant. Return a gimple value for the result, appending any
7245 new instructions to SEQ.
7247 BUILDER must not have a stepped encoding on entry. This is because
7248 the function is not geared up to handle the arithmetic that would
7249 be needed in the variable case, and any code building a vector that
7250 is known to be constant should use BUILDER->build () directly. */
7252 tree
7253 gimple_build_vector (gimple_seq *seq, location_t loc,
7254 tree_vector_builder *builder)
7256 gcc_assert (builder->nelts_per_pattern () <= 2);
7257 unsigned int encoded_nelts = builder->encoded_nelts ();
7258 for (unsigned int i = 0; i < encoded_nelts; ++i)
7259 if (!TREE_CONSTANT ((*builder)[i]))
7261 tree type = builder->type ();
7262 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
7263 vec<constructor_elt, va_gc> *v;
7264 vec_alloc (v, nelts);
7265 for (i = 0; i < nelts; ++i)
7266 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7268 tree res;
7269 if (gimple_in_ssa_p (cfun))
7270 res = make_ssa_name (type);
7271 else
7272 res = create_tmp_reg (type);
7273 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7274 gimple_set_location (stmt, loc);
7275 gimple_seq_add_stmt_without_update (seq, stmt);
7276 return res;
7278 return builder->build ();
7281 /* Return true if the result of assignment STMT is known to be non-negative.
7282 If the return value is based on the assumption that signed overflow is
7283 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7284 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7286 static bool
7287 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7288 int depth)
7290 enum tree_code code = gimple_assign_rhs_code (stmt);
7291 switch (get_gimple_rhs_class (code))
7293 case GIMPLE_UNARY_RHS:
7294 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7295 gimple_expr_type (stmt),
7296 gimple_assign_rhs1 (stmt),
7297 strict_overflow_p, depth);
7298 case GIMPLE_BINARY_RHS:
7299 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7300 gimple_expr_type (stmt),
7301 gimple_assign_rhs1 (stmt),
7302 gimple_assign_rhs2 (stmt),
7303 strict_overflow_p, depth);
7304 case GIMPLE_TERNARY_RHS:
7305 return false;
7306 case GIMPLE_SINGLE_RHS:
7307 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7308 strict_overflow_p, depth);
7309 case GIMPLE_INVALID_RHS:
7310 break;
7312 gcc_unreachable ();
7315 /* Return true if return value of call STMT is known to be non-negative.
7316 If the return value is based on the assumption that signed overflow is
7317 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7318 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7320 static bool
7321 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7322 int depth)
7324 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7325 gimple_call_arg (stmt, 0) : NULL_TREE;
7326 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7327 gimple_call_arg (stmt, 1) : NULL_TREE;
7329 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7330 gimple_call_combined_fn (stmt),
7331 arg0,
7332 arg1,
7333 strict_overflow_p, depth);
7336 /* Return true if return value of call STMT is known to be non-negative.
7337 If the return value is based on the assumption that signed overflow is
7338 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7339 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7341 static bool
7342 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7343 int depth)
7345 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7347 tree arg = gimple_phi_arg_def (stmt, i);
7348 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7349 return false;
7351 return true;
7354 /* Return true if STMT is known to compute a non-negative value.
7355 If the return value is based on the assumption that signed overflow is
7356 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7357 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7359 bool
7360 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7361 int depth)
7363 switch (gimple_code (stmt))
7365 case GIMPLE_ASSIGN:
7366 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7367 depth);
7368 case GIMPLE_CALL:
7369 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7370 depth);
7371 case GIMPLE_PHI:
7372 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7373 depth);
7374 default:
7375 return false;
7379 /* Return true if the floating-point value computed by assignment STMT
7380 is known to have an integer value. We also allow +Inf, -Inf and NaN
7381 to be considered integer values. Return false for signaling NaN.
7383 DEPTH is the current nesting depth of the query. */
7385 static bool
7386 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7388 enum tree_code code = gimple_assign_rhs_code (stmt);
7389 switch (get_gimple_rhs_class (code))
7391 case GIMPLE_UNARY_RHS:
7392 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7393 gimple_assign_rhs1 (stmt), depth);
7394 case GIMPLE_BINARY_RHS:
7395 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7396 gimple_assign_rhs1 (stmt),
7397 gimple_assign_rhs2 (stmt), depth);
7398 case GIMPLE_TERNARY_RHS:
7399 return false;
7400 case GIMPLE_SINGLE_RHS:
7401 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7402 case GIMPLE_INVALID_RHS:
7403 break;
7405 gcc_unreachable ();
7408 /* Return true if the floating-point value computed by call STMT is known
7409 to have an integer value. We also allow +Inf, -Inf and NaN to be
7410 considered integer values. Return false for signaling NaN.
7412 DEPTH is the current nesting depth of the query. */
7414 static bool
7415 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7417 tree arg0 = (gimple_call_num_args (stmt) > 0
7418 ? gimple_call_arg (stmt, 0)
7419 : NULL_TREE);
7420 tree arg1 = (gimple_call_num_args (stmt) > 1
7421 ? gimple_call_arg (stmt, 1)
7422 : NULL_TREE);
7423 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7424 arg0, arg1, depth);
7427 /* Return true if the floating-point result of phi STMT is known to have
7428 an integer value. We also allow +Inf, -Inf and NaN to be considered
7429 integer values. Return false for signaling NaN.
7431 DEPTH is the current nesting depth of the query. */
7433 static bool
7434 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7436 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7438 tree arg = gimple_phi_arg_def (stmt, i);
7439 if (!integer_valued_real_single_p (arg, depth + 1))
7440 return false;
7442 return true;
7445 /* Return true if the floating-point value computed by STMT is known
7446 to have an integer value. We also allow +Inf, -Inf and NaN to be
7447 considered integer values. Return false for signaling NaN.
7449 DEPTH is the current nesting depth of the query. */
7451 bool
7452 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7454 switch (gimple_code (stmt))
7456 case GIMPLE_ASSIGN:
7457 return gimple_assign_integer_valued_real_p (stmt, depth);
7458 case GIMPLE_CALL:
7459 return gimple_call_integer_valued_real_p (stmt, depth);
7460 case GIMPLE_PHI:
7461 return gimple_phi_integer_valued_real_p (stmt, depth);
7462 default:
7463 return false;