* es.po: Update.
[official-gcc.git] / gcc / gimple-fold.c
blob386169235553cb1bf915e517dc02ebdc7b042964
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 location_t loc = gimple_location_safe (stmt);
351 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
352 "resolving virtual function address "
353 "reference to function %s\n",
354 targets.length () == 1
355 ? targets[0]->name ()
356 : "NULL");
358 if (targets.length () == 1)
360 val = fold_convert (TREE_TYPE (val),
361 build_fold_addr_expr_loc
362 (loc, targets[0]->decl));
363 STRIP_USELESS_TYPE_CONVERSION (val);
365 else
366 /* We can not use __builtin_unreachable here because it
367 can not have address taken. */
368 val = build_int_cst (TREE_TYPE (val), 0);
369 return val;
374 else if (TREE_CODE (rhs) == ADDR_EXPR)
376 tree ref = TREE_OPERAND (rhs, 0);
377 tree tem = maybe_fold_reference (ref, true);
378 if (tem
379 && TREE_CODE (tem) == MEM_REF
380 && integer_zerop (TREE_OPERAND (tem, 1)))
381 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
382 else if (tem)
383 result = fold_convert (TREE_TYPE (rhs),
384 build_fold_addr_expr_loc (loc, tem));
385 else if (TREE_CODE (ref) == MEM_REF
386 && integer_zerop (TREE_OPERAND (ref, 1)))
387 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
389 if (result)
391 /* Strip away useless type conversions. Both the
392 NON_LVALUE_EXPR that may have been added by fold, and
393 "useless" type conversions that might now be apparent
394 due to propagation. */
395 STRIP_USELESS_TYPE_CONVERSION (result);
397 if (result != rhs && valid_gimple_rhs_p (result))
398 return result;
402 else if (TREE_CODE (rhs) == CONSTRUCTOR
403 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
405 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
406 unsigned i;
407 tree val;
409 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
410 if (! CONSTANT_CLASS_P (val))
411 return NULL_TREE;
413 return build_vector_from_ctor (TREE_TYPE (rhs),
414 CONSTRUCTOR_ELTS (rhs));
417 else if (DECL_P (rhs))
418 return get_symbol_constant_value (rhs);
420 break;
422 case GIMPLE_UNARY_RHS:
423 break;
425 case GIMPLE_BINARY_RHS:
426 break;
428 case GIMPLE_TERNARY_RHS:
429 result = fold_ternary_loc (loc, subcode,
430 TREE_TYPE (gimple_assign_lhs (stmt)),
431 gimple_assign_rhs1 (stmt),
432 gimple_assign_rhs2 (stmt),
433 gimple_assign_rhs3 (stmt));
435 if (result)
437 STRIP_USELESS_TYPE_CONVERSION (result);
438 if (valid_gimple_rhs_p (result))
439 return result;
441 break;
443 case GIMPLE_INVALID_RHS:
444 gcc_unreachable ();
447 return NULL_TREE;
451 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
452 adjusting the replacement stmts location and virtual operands.
453 If the statement has a lhs the last stmt in the sequence is expected
454 to assign to that lhs. */
456 static void
457 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
459 gimple *stmt = gsi_stmt (*si_p);
461 if (gimple_has_location (stmt))
462 annotate_all_with_location (stmts, gimple_location (stmt));
464 /* First iterate over the replacement statements backward, assigning
465 virtual operands to their defining statements. */
466 gimple *laststore = NULL;
467 for (gimple_stmt_iterator i = gsi_last (stmts);
468 !gsi_end_p (i); gsi_prev (&i))
470 gimple *new_stmt = gsi_stmt (i);
471 if ((gimple_assign_single_p (new_stmt)
472 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
473 || (is_gimple_call (new_stmt)
474 && (gimple_call_flags (new_stmt)
475 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
477 tree vdef;
478 if (!laststore)
479 vdef = gimple_vdef (stmt);
480 else
481 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
482 gimple_set_vdef (new_stmt, vdef);
483 if (vdef && TREE_CODE (vdef) == SSA_NAME)
484 SSA_NAME_DEF_STMT (vdef) = new_stmt;
485 laststore = new_stmt;
489 /* Second iterate over the statements forward, assigning virtual
490 operands to their uses. */
491 tree reaching_vuse = gimple_vuse (stmt);
492 for (gimple_stmt_iterator i = gsi_start (stmts);
493 !gsi_end_p (i); gsi_next (&i))
495 gimple *new_stmt = gsi_stmt (i);
496 /* If the new statement possibly has a VUSE, update it with exact SSA
497 name we know will reach this one. */
498 if (gimple_has_mem_ops (new_stmt))
499 gimple_set_vuse (new_stmt, reaching_vuse);
500 gimple_set_modified (new_stmt, true);
501 if (gimple_vdef (new_stmt))
502 reaching_vuse = gimple_vdef (new_stmt);
505 /* If the new sequence does not do a store release the virtual
506 definition of the original statement. */
507 if (reaching_vuse
508 && reaching_vuse == gimple_vuse (stmt))
510 tree vdef = gimple_vdef (stmt);
511 if (vdef
512 && TREE_CODE (vdef) == SSA_NAME)
514 unlink_stmt_vdef (stmt);
515 release_ssa_name (vdef);
519 /* Finally replace the original statement with the sequence. */
520 gsi_replace_with_seq (si_p, stmts, false);
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
526 is replaced. If the call is expected to produces a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
536 tree lhs;
537 gimple *stmt, *new_stmt;
538 gimple_stmt_iterator i;
539 gimple_seq stmts = NULL;
541 stmt = gsi_stmt (*si_p);
543 gcc_assert (is_gimple_call (stmt));
545 push_gimplify_context (gimple_in_ssa_p (cfun));
547 lhs = gimple_call_lhs (stmt);
548 if (lhs == NULL_TREE)
550 gimplify_and_add (expr, &stmts);
551 /* We can end up with folding a memcpy of an empty class assignment
552 which gets optimized away by C++ gimplification. */
553 if (gimple_seq_empty_p (stmts))
555 pop_gimplify_context (NULL);
556 if (gimple_in_ssa_p (cfun))
558 unlink_stmt_vdef (stmt);
559 release_defs (stmt);
561 gsi_replace (si_p, gimple_build_nop (), false);
562 return;
565 else
567 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
568 new_stmt = gimple_build_assign (lhs, tmp);
569 i = gsi_last (stmts);
570 gsi_insert_after_without_update (&i, new_stmt,
571 GSI_CONTINUE_LINKING);
574 pop_gimplify_context (NULL);
576 gsi_replace_with_seq_vops (si_p, stmts);
580 /* Replace the call at *GSI with the gimple value VAL. */
582 void
583 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
585 gimple *stmt = gsi_stmt (*gsi);
586 tree lhs = gimple_call_lhs (stmt);
587 gimple *repl;
588 if (lhs)
590 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
591 val = fold_convert (TREE_TYPE (lhs), val);
592 repl = gimple_build_assign (lhs, val);
594 else
595 repl = gimple_build_nop ();
596 tree vdef = gimple_vdef (stmt);
597 if (vdef && TREE_CODE (vdef) == SSA_NAME)
599 unlink_stmt_vdef (stmt);
600 release_ssa_name (vdef);
602 gsi_replace (gsi, repl, false);
605 /* Replace the call at *GSI with the new call REPL and fold that
606 again. */
608 static void
609 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
611 gimple *stmt = gsi_stmt (*gsi);
612 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
613 gimple_set_location (repl, gimple_location (stmt));
614 if (gimple_vdef (stmt)
615 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
617 gimple_set_vdef (repl, gimple_vdef (stmt));
618 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
620 if (gimple_vuse (stmt))
621 gimple_set_vuse (repl, gimple_vuse (stmt));
622 gsi_replace (gsi, repl, false);
623 fold_stmt (gsi);
626 /* Return true if VAR is a VAR_DECL or a component thereof. */
628 static bool
629 var_decl_component_p (tree var)
631 tree inner = var;
632 while (handled_component_p (inner))
633 inner = TREE_OPERAND (inner, 0);
634 return SSA_VAR_P (inner);
637 /* If the SIZE argument representing the size of an object is in a range
638 of values of which exactly one is valid (and that is zero), return
639 true, otherwise false. */
641 static bool
642 size_must_be_zero_p (tree size)
644 if (integer_zerop (size))
645 return true;
647 if (TREE_CODE (size) != SSA_NAME)
648 return false;
650 wide_int min, max;
651 enum value_range_type rtype = get_range_info (size, &min, &max);
652 if (rtype != VR_ANTI_RANGE)
653 return false;
655 tree type = TREE_TYPE (size);
656 int prec = TYPE_PRECISION (type);
658 wide_int wone = wi::one (prec);
660 /* Compute the value of SSIZE_MAX, the largest positive value that
661 can be stored in ssize_t, the signed counterpart of size_t. */
662 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
664 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
667 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
668 diagnose (otherwise undefined) overlapping copies without preventing
669 folding. When folded, GCC guarantees that overlapping memcpy has
670 the same semantics as memmove. Call to the library memcpy need not
671 provide the same guarantee. Return false if no simplification can
672 be made. */
674 static bool
675 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
676 tree dest, tree src, int endp)
678 gimple *stmt = gsi_stmt (*gsi);
679 tree lhs = gimple_call_lhs (stmt);
680 tree len = gimple_call_arg (stmt, 2);
681 tree destvar, srcvar;
682 location_t loc = gimple_location (stmt);
684 tree func = gimple_call_fndecl (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
686 bool check_overlap = (DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE
687 && DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE_CHK
688 && !nowarn);
690 /* If the LEN parameter is a constant zero or in range where
691 the only valid value is zero, return DEST. */
692 if (size_must_be_zero_p (len))
694 gimple *repl;
695 if (gimple_call_lhs (stmt))
696 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
697 else
698 repl = gimple_build_nop ();
699 tree vdef = gimple_vdef (stmt);
700 if (vdef && TREE_CODE (vdef) == SSA_NAME)
702 unlink_stmt_vdef (stmt);
703 release_ssa_name (vdef);
705 gsi_replace (gsi, repl, false);
706 return true;
709 /* If SRC and DEST are the same (and not volatile), return
710 DEST{,+LEN,+LEN-1}. */
711 if (operand_equal_p (src, dest, 0))
713 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
714 It's safe and may even be emitted by GCC itself (see bug
715 32667). However, diagnose it in explicit calls to the memcpy
716 function. */
717 if (check_overlap && *IDENTIFIER_POINTER (DECL_NAME (func)) != '_')
718 warning_at (loc, OPT_Wrestrict,
719 "%qD source argument is the same as destination",
720 func);
722 unlink_stmt_vdef (stmt);
723 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
724 release_ssa_name (gimple_vdef (stmt));
725 if (!lhs)
727 gsi_replace (gsi, gimple_build_nop (), false);
728 return true;
730 goto done;
732 else
734 tree srctype, desttype;
735 unsigned int src_align, dest_align;
736 tree off0;
738 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
739 pointers as wide integer) and also may result in huge function
740 size because of inlined bounds copy. Thus don't inline for
741 functions we want to instrument. */
742 if (flag_check_pointer_bounds
743 && chkp_instrumentable_p (cfun->decl)
744 /* Even if data may contain pointers we can inline if copy
745 less than a pointer size. */
746 && (!tree_fits_uhwi_p (len)
747 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
748 return false;
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
752 ptr_mode, true), 0);
754 /* If we can perform the copy efficiently with first doing all loads
755 and then all stores inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align = get_pointer_alignment (src);
759 dest_align = get_pointer_alignment (dest);
760 if (tree_fits_uhwi_p (len)
761 && compare_tree_int (len, MOVE_MAX) <= 0
762 /* ??? Don't transform copies from strings with known length this
763 confuses the tree-ssa-strlen.c. This doesn't handle
764 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
765 reason. */
766 && !c_strlen (src, 2))
768 unsigned ilen = tree_to_uhwi (len);
769 if (pow2p_hwi (ilen))
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
773 if (!nowarn
774 && check_bounds_or_overlap (as_a <gcall *>(stmt),
775 dest, src, len, len))
776 gimple_set_no_warning (stmt, true);
778 scalar_int_mode mode;
779 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
780 if (type
781 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
782 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align >= GET_MODE_ALIGNMENT (mode)
786 || !targetm.slow_unaligned_access (mode, dest_align)
787 || (optab_handler (movmisalign_optab, mode)
788 != CODE_FOR_nothing)))
790 tree srctype = type;
791 tree desttype = type;
792 if (src_align < GET_MODE_ALIGNMENT (mode))
793 srctype = build_aligned_type (type, src_align);
794 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
795 tree tem = fold_const_aggregate_ref (srcmem);
796 if (tem)
797 srcmem = tem;
798 else if (src_align < GET_MODE_ALIGNMENT (mode)
799 && targetm.slow_unaligned_access (mode, src_align)
800 && (optab_handler (movmisalign_optab, mode)
801 == CODE_FOR_nothing))
802 srcmem = NULL_TREE;
803 if (srcmem)
805 gimple *new_stmt;
806 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
808 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
809 srcmem
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
811 new_stmt);
812 gimple_assign_set_lhs (new_stmt, srcmem);
813 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
814 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
816 if (dest_align < GET_MODE_ALIGNMENT (mode))
817 desttype = build_aligned_type (type, dest_align);
818 new_stmt
819 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
820 dest, off0),
821 srcmem);
822 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
823 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
824 if (gimple_vdef (new_stmt)
825 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
827 if (!lhs)
829 gsi_replace (gsi, new_stmt, false);
830 return true;
832 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
833 goto done;
839 if (endp == 3)
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
843 really mandatory?
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align || !src_align)
847 return false;
848 if (readonly_data_expr (src)
849 || (tree_fits_uhwi_p (len)
850 && (MIN (src_align, dest_align) / BITS_PER_UNIT
851 >= tree_to_uhwi (len))))
853 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
854 if (!fn)
855 return false;
856 gimple_call_set_fndecl (stmt, fn);
857 gimple_call_set_arg (stmt, 0, dest);
858 gimple_call_set_arg (stmt, 1, src);
859 fold_stmt (gsi);
860 return true;
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src) == ADDR_EXPR
865 && TREE_CODE (dest) == ADDR_EXPR)
867 tree src_base, dest_base, fn;
868 poly_int64 src_offset = 0, dest_offset = 0;
869 poly_uint64 maxsize;
871 srcvar = TREE_OPERAND (src, 0);
872 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
873 if (src_base == NULL)
874 src_base = srcvar;
875 destvar = TREE_OPERAND (dest, 0);
876 dest_base = get_addr_base_and_unit_offset (destvar,
877 &dest_offset);
878 if (dest_base == NULL)
879 dest_base = destvar;
880 if (!poly_int_tree_p (len, &maxsize))
881 maxsize = -1;
882 if (SSA_VAR_P (src_base)
883 && SSA_VAR_P (dest_base))
885 if (operand_equal_p (src_base, dest_base, 0)
886 && ranges_maybe_overlap_p (src_offset, maxsize,
887 dest_offset, maxsize))
888 return false;
890 else if (TREE_CODE (src_base) == MEM_REF
891 && TREE_CODE (dest_base) == MEM_REF)
893 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
894 TREE_OPERAND (dest_base, 0), 0))
895 return false;
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base) + src_offset;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base) + dest_offset;
900 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
901 full_dest_offset, maxsize))
902 return false;
904 else
905 return false;
907 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
908 if (!fn)
909 return false;
910 gimple_call_set_fndecl (stmt, fn);
911 gimple_call_set_arg (stmt, 0, dest);
912 gimple_call_set_arg (stmt, 1, src);
913 fold_stmt (gsi);
914 return true;
917 /* If the destination and source do not alias optimize into
918 memcpy as well. */
919 if ((is_gimple_min_invariant (dest)
920 || TREE_CODE (dest) == SSA_NAME)
921 && (is_gimple_min_invariant (src)
922 || TREE_CODE (src) == SSA_NAME))
924 ao_ref destr, srcr;
925 ao_ref_init_from_ptr_and_size (&destr, dest, len);
926 ao_ref_init_from_ptr_and_size (&srcr, src, len);
927 if (!refs_may_alias_p_1 (&destr, &srcr, false))
929 tree fn;
930 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
931 if (!fn)
932 return false;
933 gimple_call_set_fndecl (stmt, fn);
934 gimple_call_set_arg (stmt, 0, dest);
935 gimple_call_set_arg (stmt, 1, src);
936 fold_stmt (gsi);
937 return true;
941 return false;
944 if (!tree_fits_shwi_p (len))
945 return false;
946 if (!POINTER_TYPE_P (TREE_TYPE (src))
947 || !POINTER_TYPE_P (TREE_TYPE (dest)))
948 return false;
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
955 srctype = TREE_TYPE (TREE_TYPE (src));
956 if (TREE_CODE (srctype) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
958 srctype = TREE_TYPE (srctype);
959 desttype = TREE_TYPE (TREE_TYPE (dest));
960 if (TREE_CODE (desttype) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
962 desttype = TREE_TYPE (desttype);
963 if (TREE_ADDRESSABLE (srctype)
964 || TREE_ADDRESSABLE (desttype))
965 return false;
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype))
970 || TREE_CODE (desttype) == BOOLEAN_TYPE
971 || TREE_CODE (desttype) == ENUMERAL_TYPE)
972 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype))
974 || TREE_CODE (srctype) == BOOLEAN_TYPE
975 || TREE_CODE (srctype) == ENUMERAL_TYPE)
976 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
977 if (!srctype)
978 srctype = desttype;
979 if (!desttype)
980 desttype = srctype;
981 if (!srctype)
982 return false;
984 src_align = get_pointer_alignment (src);
985 dest_align = get_pointer_alignment (dest);
986 if (dest_align < TYPE_ALIGN (desttype)
987 || src_align < TYPE_ALIGN (srctype))
988 return false;
990 destvar = NULL_TREE;
991 if (TREE_CODE (dest) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
994 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
996 srcvar = NULL_TREE;
997 if (TREE_CODE (src) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1001 if (!destvar
1002 || src_align >= TYPE_ALIGN (desttype))
1003 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1004 src, off0);
1005 else if (!STRICT_ALIGNMENT)
1007 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1008 src_align);
1009 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1013 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1014 return false;
1016 if (srcvar == NULL_TREE)
1018 if (src_align >= TYPE_ALIGN (desttype))
1019 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1020 else
1022 if (STRICT_ALIGNMENT)
1023 return false;
1024 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1025 src_align);
1026 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1029 else if (destvar == NULL_TREE)
1031 if (dest_align >= TYPE_ALIGN (srctype))
1032 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1033 else
1035 if (STRICT_ALIGNMENT)
1036 return false;
1037 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1038 dest_align);
1039 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1045 if (!nowarn)
1046 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1048 gimple *new_stmt;
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1051 tree tem = fold_const_aggregate_ref (srcvar);
1052 if (tem)
1053 srcvar = tem;
1054 if (! is_gimple_min_invariant (srcvar))
1056 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1057 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1058 new_stmt);
1059 gimple_assign_set_lhs (new_stmt, srcvar);
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1063 new_stmt = gimple_build_assign (destvar, srcvar);
1064 goto set_vop_and_replace;
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype = build_array_type_nelts (unsigned_char_type_node,
1071 tree_to_uhwi (len));
1072 srctype = desttype;
1073 if (src_align > TYPE_ALIGN (srctype))
1074 srctype = build_aligned_type (srctype, src_align);
1075 if (dest_align > TYPE_ALIGN (desttype))
1076 desttype = build_aligned_type (desttype, dest_align);
1077 new_stmt
1078 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1079 fold_build2 (MEM_REF, srctype, src, off0));
1080 set_vop_and_replace:
1081 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1082 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1083 if (gimple_vdef (new_stmt)
1084 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1086 if (!lhs)
1088 gsi_replace (gsi, new_stmt, false);
1089 return true;
1091 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1094 done:
1095 gimple_seq stmts = NULL;
1096 if (endp == 0 || endp == 3)
1097 len = NULL_TREE;
1098 else if (endp == 2)
1099 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1100 ssize_int (1));
1101 if (endp == 2 || endp == 1)
1103 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1104 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1105 TREE_TYPE (dest), dest, len);
1108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1109 gimple *repl = gimple_build_assign (lhs, dest);
1110 gsi_replace (gsi, repl, false);
1111 return true;
1114 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1117 static bool
1118 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1120 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1122 if (!fn)
1123 return false;
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1127 gimple *stmt = gsi_stmt (*gsi);
1128 tree a = gimple_call_arg (stmt, 0);
1129 tree b = gimple_call_arg (stmt, 1);
1130 tree len = gimple_call_arg (stmt, 2);
1132 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1133 replace_call_with_call_and_fold (gsi, repl);
1135 return true;
1138 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1141 static bool
1142 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1144 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1146 if (!fn)
1147 return false;
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1153 gimple *stmt = gsi_stmt (*gsi);
1154 tree src = gimple_call_arg (stmt, 0);
1155 tree dest = gimple_call_arg (stmt, 1);
1156 tree len = gimple_call_arg (stmt, 2);
1158 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1159 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1160 replace_call_with_call_and_fold (gsi, repl);
1162 return true;
1165 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1168 static bool
1169 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1171 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1173 if (!fn)
1174 return false;
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1178 gimple *stmt = gsi_stmt (*gsi);
1179 tree dest = gimple_call_arg (stmt, 0);
1180 tree len = gimple_call_arg (stmt, 1);
1182 gimple_seq seq = NULL;
1183 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1184 gimple_seq_add_stmt_without_update (&seq, repl);
1185 gsi_replace_with_seq_vops (gsi, seq);
1186 fold_stmt (gsi);
1188 return true;
1191 /* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1194 static bool
1195 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1197 gimple *stmt = gsi_stmt (*gsi);
1198 tree etype;
1199 unsigned HOST_WIDE_INT length, cval;
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len))
1204 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1205 return true;
1208 if (! tree_fits_uhwi_p (len))
1209 return false;
1211 if (TREE_CODE (c) != INTEGER_CST)
1212 return false;
1214 tree dest = gimple_call_arg (stmt, 0);
1215 tree var = dest;
1216 if (TREE_CODE (var) != ADDR_EXPR)
1217 return false;
1219 var = TREE_OPERAND (var, 0);
1220 if (TREE_THIS_VOLATILE (var))
1221 return false;
1223 etype = TREE_TYPE (var);
1224 if (TREE_CODE (etype) == ARRAY_TYPE)
1225 etype = TREE_TYPE (etype);
1227 if (!INTEGRAL_TYPE_P (etype)
1228 && !POINTER_TYPE_P (etype))
1229 return NULL_TREE;
1231 if (! var_decl_component_p (var))
1232 return NULL_TREE;
1234 length = tree_to_uhwi (len);
1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1236 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1237 return NULL_TREE;
1239 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1240 return NULL_TREE;
1242 if (integer_zerop (c))
1243 cval = 0;
1244 else
1246 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1247 return NULL_TREE;
1249 cval = TREE_INT_CST_LOW (c);
1250 cval &= 0xff;
1251 cval |= cval << 8;
1252 cval |= cval << 16;
1253 cval |= (cval << 31) << 1;
1256 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1257 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1258 gimple_set_vuse (store, gimple_vuse (stmt));
1259 tree vdef = gimple_vdef (stmt);
1260 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1262 gimple_set_vdef (store, gimple_vdef (stmt));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1265 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1266 if (gimple_call_lhs (stmt))
1268 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1269 gsi_replace (gsi, asgn, false);
1271 else
1273 gimple_stmt_iterator gsi2 = *gsi;
1274 gsi_prev (gsi);
1275 gsi_remove (&gsi2, true);
1278 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &(*ptr)[0]: recurse on the SSA pointer itself.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0),
					 length, visited, type, fuzzy, flexp);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      /* Fail only when the constant index provably exceeds the
		 array's declared upper bound.  */
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; only non-negative
	     integer constants are acceptable.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  /* Exact length unknown; fall back to array-bound estimates.  */
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* Longest possible string is the array size minus the
		 terminating nul.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		      == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg))
	    {
	      /* A (pointer to an) array variable: bound the length by
		 the array size minus one.  */
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Lower *MINLEN when a smaller candidate was found (only in the
	 range modes, TYPE > 0), or set it for the first time.  */
      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* Range mode: keep the larger of the two maxima.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0: all candidates must agree exactly.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* For a conditional, both arms must yield usable lengths.  */
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	    && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	      {
		/* In fuzzy mode an undeterminable arg merely pushes
		   the maximum to "unbounded" (all-ones).  */
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
1540 /* Determine the minimum and maximum value or string length that ARG
1541 refers to and store each in the first two elements of MINMAXLEN.
1542 For expressions that point to strings of unknown lengths that are
1543 character arrays, use the upper bound of the array as the maximum
1544 length. For example, given an expression like 'x ? array : "xyz"'
1545 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1546 to 3 and MINMAXLEN[1] to 7, the longest string that could be
1547 stored in array.
1548 Return true if the range of the string lengths has been obtained
1549 from the upper bound of an array at the end of a struct. Such
1550 an array may hold a string that's longer than its upper bound
1551 due to it being used as a poor-man's flexible array member. */
1553 bool
1554 get_range_strlen (tree arg, tree minmaxlen[2])
1556 bitmap visited = NULL;
1558 minmaxlen[0] = NULL_TREE;
1559 minmaxlen[1] = NULL_TREE;
1561 bool flexarray = false;
1562 get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);
1564 if (visited)
1565 BITMAP_FREE (visited);
1567 return flexarray;
1570 tree
1571 get_maxval_strlen (tree arg, int type)
1573 bitmap visited = NULL;
1574 tree len[2] = { NULL_TREE, NULL_TREE };
1576 bool dummy;
1577 if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
1578 len[1] = NULL_TREE;
1579 if (visited)
1580 BITMAP_FREE (visited);
1582 return len[1];
1586 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1587 If LEN is not NULL, it represents the length of the string to be
1588 copied. Return NULL_TREE if no simplification can be made. */
1590 static bool
1591 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1592 tree dest, tree src)
1594 gimple *stmt = gsi_stmt (*gsi);
1595 location_t loc = gimple_location (stmt);
1596 tree fn;
1598 /* If SRC and DEST are the same (and not volatile), return DEST. */
1599 if (operand_equal_p (src, dest, 0))
1601 tree func = gimple_call_fndecl (stmt);
1603 warning_at (loc, OPT_Wrestrict,
1604 "%qD source argument is the same as destination",
1605 func);
1607 replace_call_with_value (gsi, dest);
1608 return true;
1611 if (optimize_function_for_size_p (cfun))
1612 return false;
1614 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1615 if (!fn)
1616 return false;
1618 tree len = get_maxval_strlen (src, 0);
1619 if (!len)
1620 return false;
1622 len = fold_convert_loc (loc, size_type_node, len);
1623 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1624 len = force_gimple_operand_gsi (gsi, len, true,
1625 NULL_TREE, true, GSI_SAME_STMT);
1626 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1627 replace_call_with_call_and_fold (gsi, repl);
1628 return true;
/* Fold a call to the strncpy built-in with arguments DEST, SRC, and
   LEN at *GSI, diagnosing likely truncation along the way.  Return
   true if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when DEST is declared with attribute nonstring, i.e. the
     caller does not expect a nul-terminated result.  */
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation unless DEST is a nonstring decl.  */
  if (!nonstring)
    {
      if (tree_int_cst_lt (len, slen))
	{
	  /* LEN < strlen (SRC): the copy drops trailing characters
	     as well as the nul.  */
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated copying %E byte "
			    "from a string of length %E")
		       : G_("%G%qD output truncated copying %E bytes "
			    "from a string of length %E")),
		      call, fndecl, len, slen);
	}
      else if (tree_int_cst_equal (len, slen))
	{
	  /* LEN == strlen (SRC): all characters fit but the nul
	     terminator does not.  */
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated before terminating nul "
			    "copying %E byte from a string of the same "
			    "length")
		       : G_("%G%qD output truncated before terminating nul "
			    "copying %E bytes from a string of the same "
			    "length")),
		      call, fndecl, len);
	}
    }

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1736 /* Fold function call to builtin strchr or strrchr.
1737 If both arguments are constant, evaluate and fold the result,
1738 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1739 In general strlen is significantly faster than strchr
1740 due to being a simpler operation. */
1741 static bool
1742 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
1744 gimple *stmt = gsi_stmt (*gsi);
1745 tree str = gimple_call_arg (stmt, 0);
1746 tree c = gimple_call_arg (stmt, 1);
1747 location_t loc = gimple_location (stmt);
1748 const char *p;
1749 char ch;
1751 if (!gimple_call_lhs (stmt))
1752 return false;
1754 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
1756 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
1758 if (p1 == NULL)
1760 replace_call_with_value (gsi, integer_zero_node);
1761 return true;
1764 tree len = build_int_cst (size_type_node, p1 - p);
1765 gimple_seq stmts = NULL;
1766 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1767 POINTER_PLUS_EXPR, str, len);
1768 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1769 gsi_replace_with_seq_vops (gsi, stmts);
1770 return true;
1773 if (!integer_zerop (c))
1774 return false;
1776 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1777 if (is_strrchr && optimize_function_for_size_p (cfun))
1779 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1781 if (strchr_fn)
1783 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
1784 replace_call_with_call_and_fold (gsi, repl);
1785 return true;
1788 return false;
1791 tree len;
1792 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1794 if (!strlen_fn)
1795 return false;
1797 /* Create newstr = strlen (str). */
1798 gimple_seq stmts = NULL;
1799 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
1800 gimple_set_location (new_stmt, loc);
1801 len = create_tmp_reg_or_ssa_name (size_type_node);
1802 gimple_call_set_lhs (new_stmt, len);
1803 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1805 /* Create (str p+ strlen (str)). */
1806 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1807 POINTER_PLUS_EXPR, str, len);
1808 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1809 gsi_replace_with_seq_vops (gsi, stmts);
1810 /* gsi now points at the assignment to the lhs, get a
1811 stmt iterator to the strlen.
1812 ??? We can't use gsi_for_stmt as that doesn't work when the
1813 CFG isn't built yet. */
1814 gimple_stmt_iterator gsi2 = *gsi;
1815 gsi_prev (&gsi2);
1816 fold_stmt (&gsi2);
1817 return true;
1820 /* Fold function call to builtin strstr.
1821 If both arguments are constant, evaluate and fold the result,
1822 additionally fold strstr (x, "") into x and strstr (x, "c")
1823 into strchr (x, 'c'). */
1824 static bool
1825 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1827 gimple *stmt = gsi_stmt (*gsi);
1828 tree haystack = gimple_call_arg (stmt, 0);
1829 tree needle = gimple_call_arg (stmt, 1);
1830 const char *p, *q;
1832 if (!gimple_call_lhs (stmt))
1833 return false;
1835 q = c_getstr (needle);
1836 if (q == NULL)
1837 return false;
1839 if ((p = c_getstr (haystack)))
1841 const char *r = strstr (p, q);
1843 if (r == NULL)
1845 replace_call_with_value (gsi, integer_zero_node);
1846 return true;
1849 tree len = build_int_cst (size_type_node, r - p);
1850 gimple_seq stmts = NULL;
1851 gimple *new_stmt
1852 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1853 haystack, len);
1854 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1855 gsi_replace_with_seq_vops (gsi, stmts);
1856 return true;
1859 /* For strstr (x, "") return x. */
1860 if (q[0] == '\0')
1862 replace_call_with_value (gsi, haystack);
1863 return true;
1866 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1867 if (q[1] == '\0')
1869 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1870 if (strchr_fn)
1872 tree c = build_int_cst (integer_type_node, q[0]);
1873 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1874 replace_call_with_call_and_fold (gsi, repl);
1875 return true;
1879 return false;
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   If SRC is the empty string the call is replaced by DST outright.
   Otherwise, when optimizing for speed and the length of SRC is
   known, the call is split into strlen (dst) followed by a memcpy of
   the source (including its terminating nul) to dst + strlen (dst).

   Return true if the statement at *GSI was replaced, false if no
   simplification was possible.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns DST; preserve any use of the call's result.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ??? We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1974 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1975 are the arguments to the call. */
1977 static bool
1978 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1980 gimple *stmt = gsi_stmt (*gsi);
1981 tree dest = gimple_call_arg (stmt, 0);
1982 tree src = gimple_call_arg (stmt, 1);
1983 tree size = gimple_call_arg (stmt, 2);
1984 tree fn;
1985 const char *p;
1988 p = c_getstr (src);
1989 /* If the SRC parameter is "", return DEST. */
1990 if (p && *p == '\0')
1992 replace_call_with_value (gsi, dest);
1993 return true;
1996 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1997 return false;
1999 /* If __builtin_strcat_chk is used, assume strcat is available. */
2000 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2001 if (!fn)
2002 return false;
2004 gimple *repl = gimple_build_call (fn, 2, dest, src);
2005 replace_call_with_call_and_fold (gsi, repl);
2006 return true;
/* Simplify a call to the strncat builtin at *GSI, diagnosing bounds
   that equal or exceed the destination size or that equal the source
   length.  Return true if the call was replaced (by DST or by a
   strcat call), false otherwise.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The transforms below need both a constant bound and a constant
     source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  /* Respect a previously issued diagnostic on this statement.  */
  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat: since LEN >= strlen (SRC), the
     bound never truncates and strncat behaves exactly like strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2096 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2097 LEN, and SIZE.  GSI points at the call.  Return true if the call was
     simplified (folded to DEST, to a __strcat_chk call, or to a plain
     strncat call), false otherwise.  */
2099 static bool
2100 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2102 gimple *stmt = gsi_stmt (*gsi);
2103 tree dest = gimple_call_arg (stmt, 0);
2104 tree src = gimple_call_arg (stmt, 1);
2105 tree len = gimple_call_arg (stmt, 2);
2106 tree size = gimple_call_arg (stmt, 3);
2107 tree fn;
2108 const char *p;
2110 p = c_getstr (src);
2111 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2112 if ((p && *p == '\0')
2113 || integer_zerop (len))
2115 replace_call_with_value (gsi, dest);
2116 return true;
/* Punt unless the destination object size is a known constant.  */
2119 if (! tree_fits_uhwi_p (size))
2120 return false;
/* An all-ones SIZE means "object size unknown"; in that case only the
   generic strncat transformation at the end is attempted.  */
2122 if (! integer_all_onesp (size))
2124 tree src_len = c_strlen (src, 1);
2125 if (src_len
2126 && tree_fits_uhwi_p (src_len)
2127 && tree_fits_uhwi_p (len)
2128 && ! tree_int_cst_lt (len, src_len))
2130 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2131 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2132 if (!fn)
2133 return false;
2135 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2136 replace_call_with_call_and_fold (gsi, repl);
2137 return true;
2139 return false;
2142 /* If __builtin_strncat_chk is used, assume strncat is available. */
2143 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2144 if (!fn)
2145 return false;
2147 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2148 replace_call_with_call_and_fold (gsi, repl);
2149 return true;
2152 /* Build and append gimple statements to STMTS that would load a first
2153 character of a memory location identified by STR. LOC is location
2154 of the statement.  Return the register/SSA temporary that holds the
     loaded byte; its type is const unsigned char.  */
2156 static tree
2157 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2159 tree var;
/* Load through a pointer to const unsigned char so the value read is the
   raw byte at *STR.  */
2161 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2162 tree cst_uchar_ptr_node
2163 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2164 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2166 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2167 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2168 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2170 gimple_assign_set_lhs (stmt, var);
2171 gimple_seq_add_stmt_without_update (stmts, stmt);
2173 return var;
2176 /* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
2177 iterator.  The particular builtin is taken from the call's fndecl.
     Return true if the call was simplified, false otherwise.  */
2179 static bool
2180 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2182 gimple *stmt = gsi_stmt (*gsi);
2183 tree callee = gimple_call_fndecl (stmt);
2184 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2186 tree type = integer_type_node;
2187 tree str1 = gimple_call_arg (stmt, 0);
2188 tree str2 = gimple_call_arg (stmt, 1);
2189 tree lhs = gimple_call_lhs (stmt);
/* Bound for the strn* variants; -1 means "no/unknown bound".  */
2190 HOST_WIDE_INT length = -1;
2192 /* Handle strncmp and strncasecmp functions. */
2193 if (gimple_call_num_args (stmt) == 3)
2195 tree len = gimple_call_arg (stmt, 2);
2196 if (tree_fits_uhwi_p (len))
2197 length = tree_to_uhwi (len);
2200 /* If the LEN parameter is zero, return zero. */
2201 if (length == 0)
2203 replace_call_with_value (gsi, integer_zero_node);
2204 return true;
2207 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2208 if (operand_equal_p (str1, str2, 0))
2210 replace_call_with_value (gsi, integer_zero_node);
2211 return true;
2214 const char *p1 = c_getstr (str1);
2215 const char *p2 = c_getstr (str2);
2217 /* For known strings, return an immediate value. */
2218 if (p1 && p2)
2220 int r = 0;
2221 bool known_result = false;
2223 switch (fcode)
2225 case BUILT_IN_STRCMP:
2227 r = strcmp (p1, p2);
2228 known_result = true;
2229 break;
2231 case BUILT_IN_STRNCMP:
2233 if (length == -1)
2234 break;
2235 r = strncmp (p1, p2, length);
2236 known_result = true;
2237 break;
2239 /* Only handleable situation is where the strings are equal (result 0),
2240 which is already handled by operand_equal_p case. */
2241 case BUILT_IN_STRCASECMP:
2242 break;
2243 case BUILT_IN_STRNCASECMP:
2245 if (length == -1)
2246 break;
2247 r = strncmp (p1, p2, length);
/* strncmp is only trusted when it finds equality; a nonzero result
   could differ from the case-insensitive comparison.  */
2248 if (r == 0)
2249 known_result = true;
2250 break;
2252 default:
2253 gcc_unreachable ();
2256 if (known_result)
2258 replace_call_with_value (gsi, build_cmp_result (type, r));
2259 return true;
/* True when the compare is known to examine at least one character:
   either the bound is >= 1, or there is no bound at all.  */
2263 bool nonzero_length = length >= 1
2264 || fcode == BUILT_IN_STRCMP
2265 || fcode == BUILT_IN_STRCASECMP;
2267 location_t loc = gimple_location (stmt);
2269 /* If the second arg is "", return *(const unsigned char*)arg1. */
2270 if (p2 && *p2 == '\0' && nonzero_length)
2272 gimple_seq stmts = NULL;
2273 tree var = gimple_load_first_char (loc, str1, &stmts);
2274 if (lhs)
2276 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2277 gimple_seq_add_stmt_without_update (&stmts, stmt);
2280 gsi_replace_with_seq_vops (gsi, stmts);
2281 return true;
2284 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2285 if (p1 && *p1 == '\0' && nonzero_length)
2287 gimple_seq stmts = NULL;
2288 tree var = gimple_load_first_char (loc, str2, &stmts);
2290 if (lhs)
/* Widen the byte to int before negating.  */
2292 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2293 stmt = gimple_build_assign (c, NOP_EXPR, var);
2294 gimple_seq_add_stmt_without_update (&stmts, stmt);
2296 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2297 gimple_seq_add_stmt_without_update (&stmts, stmt);
2300 gsi_replace_with_seq_vops (gsi, stmts);
2301 return true;
2304 /* If len parameter is one, return an expression corresponding to
2305 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2306 if (fcode == BUILT_IN_STRNCMP && length == 1)
2308 gimple_seq stmts = NULL;
2309 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2310 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2312 if (lhs)
2314 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2315 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2316 gimple_seq_add_stmt_without_update (&stmts, convert1);
2318 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2319 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2320 gimple_seq_add_stmt_without_update (&stmts, convert2);
2322 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2323 gimple_seq_add_stmt_without_update (&stmts, stmt);
2326 gsi_replace_with_seq_vops (gsi, stmts);
2327 return true;
2330 /* If length is larger than the length of one constant string,
2331 replace strncmp with corresponding strcmp */
2332 if (fcode == BUILT_IN_STRNCMP
2333 && length > 0
2334 && ((p2 && (size_t) length > strlen (p2))
2335 || (p1 && (size_t) length > strlen (p1))))
2337 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2338 if (!fn)
2339 return false;
2340 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2341 replace_call_with_call_and_fold (gsi, repl);
2342 return true;
2345 return false;
2348 /* Fold a call to the memchr builtin pointed to by the GSI iterator.
     Return true if the call was simplified (replaced by a null pointer
     constant or by ARG1 + constant offset), false otherwise.  */
2350 static bool
2351 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2353 gimple *stmt = gsi_stmt (*gsi);
2354 tree lhs = gimple_call_lhs (stmt);
2355 tree arg1 = gimple_call_arg (stmt, 0);
2356 tree arg2 = gimple_call_arg (stmt, 1);
2357 tree len = gimple_call_arg (stmt, 2);
2359 /* If the LEN parameter is zero, return zero. */
2360 if (integer_zerop (len))
2362 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2363 return true;
/* Need a constant searched-for character (representable as a target
   char) and a constant length to fold further.  */
2366 char c;
2367 if (TREE_CODE (arg2) != INTEGER_CST
2368 || !tree_fits_uhwi_p (len)
2369 || !target_char_cst_p (arg2, &c))
2370 return false;
2372 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2373 unsigned HOST_WIDE_INT string_length;
2374 const char *p1 = c_getstr (arg1, &string_length);
2376 if (p1)
2378 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2379 if (r == NULL)
/* Folding to NULL is only valid when the entire searched range lies
   within the known string contents; otherwise we cannot tell what the
   bytes past the string would match.  */
2381 if (length <= string_length)
2383 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2384 return true;
2387 else
2389 unsigned HOST_WIDE_INT offset = r - p1;
2390 gimple_seq stmts = NULL;
2391 if (lhs != NULL_TREE)
2393 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2394 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2395 arg1, offset_cst);
2396 gimple_seq_add_stmt_without_update (&stmts, stmt);
/* Result unused: replace the call with a no-op.  */
2398 else
2399 gimple_seq_add_stmt_without_update (&stmts,
2400 gimple_build_nop ());
2402 gsi_replace_with_seq_vops (gsi, stmts);
2403 return true;
2407 return false;
2410 /* Fold a call to the fputs builtin.  ARG0 is the string argument and
2411 ARG1 the stream argument of the call.  UNLOCKED is true if this is
2412 actually a call to fputs_unlocked.  Return true if the call was
2413 simplified (deleted, or turned into fputc/fwrite), false if no
2414 simplification was possible.  */
2417 static bool
2418 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2419 tree arg0, tree arg1,
2420 bool unlocked)
2422 gimple *stmt = gsi_stmt (*gsi);
2424 /* If we're using an unlocked function, assume the other unlocked
2425 functions exist explicitly. */
2426 tree const fn_fputc = (unlocked
2427 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2428 : builtin_decl_implicit (BUILT_IN_FPUTC));
2429 tree const fn_fwrite = (unlocked
2430 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2431 : builtin_decl_implicit (BUILT_IN_FWRITE));
2433 /* If the return value is used, don't do the transformation. */
2434 if (gimple_call_lhs (stmt))
2435 return false;
2437 /* Get the length of the string passed to fputs. If the length
2438 can't be determined, punt. */
2439 tree len = get_maxval_strlen (arg0, 0);
2440 if (!len
2441 || TREE_CODE (len) != INTEGER_CST)
2442 return false;
2444 switch (compare_tree_int (len, 1))
2446 case -1: /* length is 0, delete the call entirely . */
2447 replace_call_with_value (gsi, integer_zero_node);
2448 return true;
2450 case 0: /* length is 1, call fputc. */
2452 const char *p = c_getstr (arg0);
2453 if (p != NULL)
2455 if (!fn_fputc)
2456 return false;
2458 gimple *repl = gimple_build_call (fn_fputc, 2,
2459 build_int_cst
2460 (integer_type_node, p[0]), arg1);
2461 replace_call_with_call_and_fold (gsi, repl);
2462 return true;
2465 /* FALLTHROUGH */
2466 case 1: /* length is greater than 1, call fwrite. */
2468 /* If optimizing for size keep fputs. */
2469 if (optimize_function_for_size_p (cfun))
2470 return false;
2471 /* New argument list transforming fputs(string, stream) to
2472 fwrite(string, 1, len, stream). */
2473 if (!fn_fwrite)
2474 return false;
2476 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2477 size_one_node, len, arg1);
2478 replace_call_with_call_and_fold (gsi, repl);
2479 return true;
2481 default:
2482 gcc_unreachable ();
2484 return false;
2487 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2488 DEST, SRC, LEN, and SIZE are the arguments to the call.
2489 FCODE is the BUILT_IN_* code of the builtin.  Whether the return
2490 value may be ignored is derived from the call having no lhs.  Return
2491 true if the call was simplified, false otherwise.  */
2493 static bool
2494 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2495 tree dest, tree src, tree len, tree size,
2496 enum built_in_function fcode)
2498 gimple *stmt = gsi_stmt (*gsi);
2499 location_t loc = gimple_location (stmt);
2500 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2501 tree fn;
2503 /* If SRC and DEST are the same (and not volatile), return DEST
2504 (resp. DEST+LEN for __mempcpy_chk). */
2505 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
/* memmove permits overlap, so only warn for the other builtins.  */
2507 if (fcode != BUILT_IN_MEMMOVE && fcode != BUILT_IN_MEMMOVE_CHK)
2509 tree func = gimple_call_fndecl (stmt);
2511 warning_at (loc, OPT_Wrestrict,
2512 "%qD source argument is the same as destination",
2513 func);
2516 if (fcode != BUILT_IN_MEMPCPY_CHK)
2518 replace_call_with_value (gsi, dest);
2519 return true;
2521 else
2523 gimple_seq stmts = NULL;
2524 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2525 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2526 TREE_TYPE (dest), dest, len);
2527 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2528 replace_call_with_value (gsi, temp);
2529 return true;
2533 if (! tree_fits_uhwi_p (size))
2534 return false;
2536 tree maxlen = get_maxval_strlen (len, 2);
2537 if (! integer_all_onesp (size))
2539 if (! tree_fits_uhwi_p (len))
2541 /* If LEN is not constant, try MAXLEN too.
2542 For MAXLEN only allow optimizing into non-_ocs function
2543 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2544 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2546 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2548 /* (void) __mempcpy_chk () can be optimized into
2549 (void) __memcpy_chk (). */
2550 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2551 if (!fn)
2552 return false;
2554 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2555 replace_call_with_call_and_fold (gsi, repl);
2556 return true;
2558 return false;
2561 else
2562 maxlen = len;
/* Never fold to the unchecked builtin when the copy might overflow.  */
2564 if (tree_int_cst_lt (size, maxlen))
2565 return false;
2568 fn = NULL_TREE;
2569 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2570 mem{cpy,pcpy,move,set} is available. */
2571 switch (fcode)
2573 case BUILT_IN_MEMCPY_CHK:
2574 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2575 break;
2576 case BUILT_IN_MEMPCPY_CHK:
2577 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2578 break;
2579 case BUILT_IN_MEMMOVE_CHK:
2580 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2581 break;
2582 case BUILT_IN_MEMSET_CHK:
2583 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2584 break;
2585 default:
2586 break;
2589 if (!fn)
2590 return false;
2592 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2593 replace_call_with_call_and_fold (gsi, repl);
2594 return true;
2597 /* Fold a call to the __st[rp]cpy_chk builtin.
2598 DEST, SRC, and SIZE are the arguments to the call.
2599 FCODE is the BUILT_IN_* code of the builtin.  Whether the return
2600 value may be ignored is derived from the call having no lhs.  Return
2601 true if the call was simplified, false otherwise.  */
2603 static bool
2604 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2605 tree dest,
2606 tree src, tree size,
2607 enum built_in_function fcode)
2609 gimple *stmt = gsi_stmt (*gsi);
2610 location_t loc = gimple_location (stmt);
2611 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2612 tree len, fn;
2614 /* If SRC and DEST are the same (and not volatile), return DEST. */
2615 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2617 tree func = gimple_call_fndecl (stmt);
2619 warning_at (loc, OPT_Wrestrict,
2620 "%qD source argument is the same as destination",
2621 func);
2623 replace_call_with_value (gsi, dest);
2624 return true;
2627 if (! tree_fits_uhwi_p (size))
2628 return false;
2630 tree maxlen = get_maxval_strlen (src, 1);
2631 if (! integer_all_onesp (size))
2633 len = c_strlen (src, 1);
2634 if (! len || ! tree_fits_uhwi_p (len))
2636 /* If LEN is not constant, try MAXLEN too.
2637 For MAXLEN only allow optimizing into non-_ocs function
2638 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2639 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2641 if (fcode == BUILT_IN_STPCPY_CHK)
2643 if (! ignore)
2644 return false;
2646 /* If return value of __stpcpy_chk is ignored,
2647 optimize into __strcpy_chk. */
2648 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2649 if (!fn)
2650 return false;
2652 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2653 replace_call_with_call_and_fold (gsi, repl);
2654 return true;
2657 if (! len || TREE_SIDE_EFFECTS (len))
2658 return false;
2660 /* If c_strlen returned something, but not a constant,
2661 transform __strcpy_chk into __memcpy_chk. */
2662 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2663 if (!fn)
2664 return false;
2666 gimple_seq stmts = NULL;
2667 len = gimple_convert (&stmts, loc, size_type_node, len);
/* Copy LEN + 1 bytes to include the terminating NUL.  */
2668 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2669 build_int_cst (size_type_node, 1));
2670 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2671 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2672 replace_call_with_call_and_fold (gsi, repl);
2673 return true;
2676 else
2677 maxlen = len;
/* Only fold to the unchecked builtin when the string (plus NUL) is
   known to fit: require MAXLEN < SIZE.  */
2679 if (! tree_int_cst_lt (maxlen, size))
2680 return false;
2683 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2684 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2685 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2686 if (!fn)
2687 return false;
2689 gimple *repl = gimple_build_call (fn, 2, dest, src);
2690 replace_call_with_call_and_fold (gsi, repl);
2691 return true;
2694 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
2695 are the arguments to the call.  FCODE is the BUILT_IN_* code of the
2696 builtin.  Whether the return value may be ignored is derived from the
2697 call having no lhs.  Return true if the call was simplified.  */
2699 static bool
2700 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2701 tree dest, tree src,
2702 tree len, tree size,
2703 enum built_in_function fcode)
2705 gimple *stmt = gsi_stmt (*gsi);
2706 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2707 tree fn;
2709 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2711 /* If return value of __stpncpy_chk is ignored,
2712 optimize into __strncpy_chk. */
2713 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2714 if (fn)
2716 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2717 replace_call_with_call_and_fold (gsi, repl);
2718 return true;
2722 if (! tree_fits_uhwi_p (size))
2723 return false;
2725 tree maxlen = get_maxval_strlen (len, 2);
2727 if (! integer_all_onesp (size))
2728 if (! tree_fits_uhwi_p (len))
2730 /* If LEN is not constant, try MAXLEN too.
2731 For MAXLEN only allow optimizing into non-_ocs function
2732 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2733 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2734 return false;
2736 else
2737 maxlen = len;
/* Never fold to the unchecked builtin when the copy might overflow.  */
2739 if (tree_int_cst_lt (size, maxlen))
2740 return false;
2743 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2744 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2745 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2746 if (!fn)
2747 return false;
2749 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2750 replace_call_with_call_and_fold (gsi, repl);
2751 return true;
2754 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2755 Return true if the call was simplified, false otherwise.  */
2757 static bool
2758 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2760 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2761 location_t loc = gimple_location (stmt);
2762 tree dest = gimple_call_arg (stmt, 0);
2763 tree src = gimple_call_arg (stmt, 1);
2764 tree fn, len, lenp1;
2766 /* If the result is unused, replace stpcpy with strcpy. */
2767 if (gimple_call_lhs (stmt) == NULL_TREE)
2769 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2770 if (!fn)
2771 return false;
2772 gimple_call_set_fndecl (stmt, fn);
2773 fold_stmt (gsi);
2774 return true;
2777 len = c_strlen (src, 1);
2778 if (!len
2779 || TREE_CODE (len) != INTEGER_CST)
2780 return false;
2782 if (optimize_function_for_size_p (cfun)
2783 /* If length is zero it's small enough. */
2784 && !integer_zerop (len))
2785 return false;
2787 /* If the source has a known length replace stpcpy with memcpy. */
2788 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2789 if (!fn)
2790 return false;
2792 gimple_seq stmts = NULL;
2793 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
/* memcpy LEN + 1 bytes so the terminating NUL is copied too.  */
2794 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2795 tem, build_int_cst (size_type_node, 1));
2796 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2797 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
/* Move the virtual operands from the original call onto the memcpy and
   keep the SSA def-stmt link consistent.  */
2798 gimple_set_vuse (repl, gimple_vuse (stmt));
2799 gimple_set_vdef (repl, gimple_vdef (stmt));
2800 if (gimple_vdef (repl)
2801 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2802 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2803 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2804 /* Replace the result with dest + len. */
2805 stmts = NULL;
2806 tem = gimple_convert (&stmts, loc, sizetype, len);
2807 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2808 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2809 POINTER_PLUS_EXPR, dest, tem);
2810 gsi_replace (gsi, ret, false);
2811 /* Finally fold the memcpy call. */
2812 gimple_stmt_iterator gsi2 = *gsi;
2813 gsi_prev (&gsi2);
2814 fold_stmt (&gsi2);
2815 return true;
2818 /* Fold a call to {,v}snprintf_chk pointed to by the GSI iterator.
2819 FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
2820 Return true if the call was converted to the corresponding
2821 {,v}snprintf call, false if a normal call should be emitted
2822 instead.  */
2824 static bool
2825 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2826 enum built_in_function fcode)
2828 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2829 tree dest, size, len, fn, fmt, flag;
2830 const char *fmt_str;
2832 /* Verify the required arguments in the original call. */
2833 if (gimple_call_num_args (stmt) < 5)
2834 return false;
2836 dest = gimple_call_arg (stmt, 0);
2837 len = gimple_call_arg (stmt, 1);
2838 flag = gimple_call_arg (stmt, 2);
2839 size = gimple_call_arg (stmt, 3);
2840 fmt = gimple_call_arg (stmt, 4);
2842 if (! tree_fits_uhwi_p (size))
2843 return false;
2845 if (! integer_all_onesp (size))
2847 tree maxlen = get_maxval_strlen (len, 2);
2848 if (! tree_fits_uhwi_p (len))
2850 /* If LEN is not constant, try MAXLEN too.
2851 For MAXLEN only allow optimizing into non-_ocs function
2852 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2853 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2854 return false;
2856 else
2857 maxlen = len;
2859 if (tree_int_cst_lt (size, maxlen))
2860 return false;
2863 if (!init_target_chars ())
2864 return false;
2866 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2867 or if format doesn't contain % chars or is "%s". */
2868 if (! integer_zerop (flag))
2870 fmt_str = c_getstr (fmt);
2871 if (fmt_str == NULL)
2872 return false;
2873 if (strchr (fmt_str, target_percent) != NULL
2874 && strcmp (fmt_str, target_percent_s))
2875 return false;
2878 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2879 available. */
2880 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2881 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2882 if (!fn)
2883 return false;
2885 /* Replace the called function and the first 5 argument by 3 retaining
2886 trailing varargs. */
2887 gimple_call_set_fndecl (stmt, fn);
2888 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2889 gimple_call_set_arg (stmt, 0, dest);
2890 gimple_call_set_arg (stmt, 1, len);
2891 gimple_call_set_arg (stmt, 2, fmt);
/* Shift the variadic arguments down over the dropped FLAG and SIZE.  */
2892 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2893 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2894 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2895 fold_stmt (gsi);
2896 return true;
2899 /* Fold a call to __{,v}sprintf_chk pointed to by the GSI iterator.
2900 FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.
2901 Return true if the call was converted to the corresponding
2902 {,v}sprintf call, false if a normal call should be emitted instead.  */
2904 static bool
2905 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2906 enum built_in_function fcode)
2908 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2909 tree dest, size, len, fn, fmt, flag;
2910 const char *fmt_str;
2911 unsigned nargs = gimple_call_num_args (stmt);
2913 /* Verify the required arguments in the original call. */
2914 if (nargs < 4)
2915 return false;
2916 dest = gimple_call_arg (stmt, 0);
2917 flag = gimple_call_arg (stmt, 1);
2918 size = gimple_call_arg (stmt, 2);
2919 fmt = gimple_call_arg (stmt, 3);
2921 if (! tree_fits_uhwi_p (size))
2922 return false;
2924 len = NULL_TREE;
2926 if (!init_target_chars ())
2927 return false;
2929 /* Check whether the format is a literal string constant. */
2930 fmt_str = c_getstr (fmt);
2931 if (fmt_str != NULL)
2933 /* If the format doesn't contain % args or %%, we know the size. */
2934 if (strchr (fmt_str, target_percent) == 0)
2936 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2937 len = build_int_cstu (size_type_node, strlen (fmt_str));
2939 /* If the format is "%s" and first ... argument is a string literal,
2940 we know the size too. */
2941 else if (fcode == BUILT_IN_SPRINTF_CHK
2942 && strcmp (fmt_str, target_percent_s) == 0)
2944 tree arg;
2946 if (nargs == 5)
2948 arg = gimple_call_arg (stmt, 4);
2949 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2951 len = c_strlen (arg, 1);
2952 if (! len || ! tree_fits_uhwi_p (len))
2953 len = NULL_TREE;
/* With a known object size, only fold when the output is proven to
   fit (LEN < SIZE leaves room for the terminating NUL).  */
2959 if (! integer_all_onesp (size))
2961 if (! len || ! tree_int_cst_lt (len, size))
2962 return false;
2965 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2966 or if format doesn't contain % chars or is "%s". */
2967 if (! integer_zerop (flag))
2969 if (fmt_str == NULL)
2970 return false;
2971 if (strchr (fmt_str, target_percent) != NULL
2972 && strcmp (fmt_str, target_percent_s))
2973 return false;
2976 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2977 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2978 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2979 if (!fn)
2980 return false;
2982 /* Replace the called function and the first 4 argument by 2 retaining
2983 trailing varargs. */
2984 gimple_call_set_fndecl (stmt, fn);
2985 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2986 gimple_call_set_arg (stmt, 0, dest);
2987 gimple_call_set_arg (stmt, 1, fmt);
/* Shift the variadic arguments down over the dropped FLAG and SIZE.  */
2988 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2989 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2990 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2991 fold_stmt (gsi);
2992 return true;
2995 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2996 ORIG may be null if this is a 2-argument call. We don't attempt to
2997 simplify calls with more than 3 arguments.
2999 Return true if simplification was possible, otherwise false. */
3001 bool
3002 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3004 gimple *stmt = gsi_stmt (*gsi);
3005 tree dest = gimple_call_arg (stmt, 0);
3006 tree fmt = gimple_call_arg (stmt, 1);
3007 tree orig = NULL_TREE;
3008 const char *fmt_str = NULL;
3010 /* Verify the required arguments in the original call. We deal with two
3011 types of sprintf() calls: 'sprintf (str, fmt)' and
3012 'sprintf (dest, "%s", orig)'. */
3013 if (gimple_call_num_args (stmt) > 3)
3014 return false;
3016 if (gimple_call_num_args (stmt) == 3)
3017 orig = gimple_call_arg (stmt, 2);
3019 /* Check whether the format is a literal string constant. */
3020 fmt_str = c_getstr (fmt);
3021 if (fmt_str == NULL)
3022 return false;
3024 if (!init_target_chars ())
3025 return false;
3027 /* If the format doesn't contain % args or %%, use strcpy. */
3028 if (strchr (fmt_str, target_percent) == NULL)
3030 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3032 if (!fn)
3033 return false;
3035 /* Don't optimize sprintf (buf, "abc", ptr++). */
3036 if (orig)
3037 return false;
3039 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3040 'format' is known to contain no % formats. */
3041 gimple_seq stmts = NULL;
3042 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3043 gimple_seq_add_stmt_without_update (&stmts, repl);
3044 if (gimple_call_lhs (stmt))
/* sprintf returns the number of characters written; with a literal
   format and no % directives that is strlen (fmt_str).  */
3046 repl = gimple_build_assign (gimple_call_lhs (stmt),
3047 build_int_cst (integer_type_node,
3048 strlen (fmt_str)));
3049 gimple_seq_add_stmt_without_update (&stmts, repl);
3050 gsi_replace_with_seq_vops (gsi, stmts);
3051 /* gsi now points at the assignment to the lhs, get a
3052 stmt iterator to the memcpy call.
3053 ??? We can't use gsi_for_stmt as that doesn't work when the
3054 CFG isn't built yet. */
3055 gimple_stmt_iterator gsi2 = *gsi;
3056 gsi_prev (&gsi2);
3057 fold_stmt (&gsi2);
3059 else
3061 gsi_replace_with_seq_vops (gsi, stmts);
3062 fold_stmt (gsi);
3064 return true;
3067 /* If the format is "%s", use strcpy; if the result is used, it also
     requires the length of ORIG to be known.  */
3068 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3070 tree fn;
3071 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3073 if (!fn)
3074 return false;
3076 /* Don't crash on sprintf (str1, "%s"). */
3077 if (!orig)
3078 return false;
3080 tree orig_len = NULL_TREE;
3081 if (gimple_call_lhs (stmt))
3083 orig_len = get_maxval_strlen (orig, 0);
3084 if (!orig_len)
3085 return false;
3088 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3089 gimple_seq stmts = NULL;
3090 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3091 gimple_seq_add_stmt_without_update (&stmts, repl);
3092 if (gimple_call_lhs (stmt))
3094 if (!useless_type_conversion_p (integer_type_node,
3095 TREE_TYPE (orig_len)))
3096 orig_len = fold_convert (integer_type_node, orig_len);
3097 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3098 gimple_seq_add_stmt_without_update (&stmts, repl);
3099 gsi_replace_with_seq_vops (gsi, stmts);
3100 /* gsi now points at the assignment to the lhs, get a
3101 stmt iterator to the memcpy call.
3102 ??? We can't use gsi_for_stmt as that doesn't work when the
3103 CFG isn't built yet. */
3104 gimple_stmt_iterator gsi2 = *gsi;
3105 gsi_prev (&gsi2);
3106 fold_stmt (&gsi2);
3108 else
3110 gsi_replace_with_seq_vops (gsi, stmts);
3111 fold_stmt (gsi);
3113 return true;
3115 return false;
3118 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3119 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3120 attempt to simplify calls with more than 4 arguments.
3122 Return true if simplification was possible, otherwise false. */
3124 bool
3125 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3127 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3128 tree dest = gimple_call_arg (stmt, 0);
3129 tree destsize = gimple_call_arg (stmt, 1);
3130 tree fmt = gimple_call_arg (stmt, 2);
3131 tree orig = NULL_TREE;
3132 const char *fmt_str = NULL;
3134 if (gimple_call_num_args (stmt) > 4)
3135 return false;
3137 if (gimple_call_num_args (stmt) == 4)
3138 orig = gimple_call_arg (stmt, 3);
/* The destination size must be a known constant to prove no
   truncation happens.  */
3140 if (!tree_fits_uhwi_p (destsize))
3141 return false;
3142 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3144 /* Check whether the format is a literal string constant. */
3145 fmt_str = c_getstr (fmt);
3146 if (fmt_str == NULL)
3147 return false;
3149 if (!init_target_chars ())
3150 return false;
3152 /* If the format doesn't contain % args or %%, use strcpy. */
3153 if (strchr (fmt_str, target_percent) == NULL)
3155 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3156 if (!fn)
3157 return false;
3159 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3160 if (orig)
3161 return false;
3163 /* We could expand this as
3164 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3165 or to
3166 memcpy (str, fmt_with_nul_at_cstm1, cst);
3167 but in the former case that might increase code size
3168 and in the latter case grow .rodata section too much.
3169 So punt for now. */
3170 size_t len = strlen (fmt_str);
3171 if (len >= destlen)
3172 return false;
3174 gimple_seq stmts = NULL;
3175 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3176 gimple_seq_add_stmt_without_update (&stmts, repl);
3177 if (gimple_call_lhs (stmt))
/* snprintf returns the number of characters that would have been
   written; here that is the literal format's length.  */
3179 repl = gimple_build_assign (gimple_call_lhs (stmt),
3180 build_int_cst (integer_type_node, len));
3181 gimple_seq_add_stmt_without_update (&stmts, repl);
3182 gsi_replace_with_seq_vops (gsi, stmts);
3183 /* gsi now points at the assignment to the lhs, get a
3184 stmt iterator to the memcpy call.
3185 ??? We can't use gsi_for_stmt as that doesn't work when the
3186 CFG isn't built yet. */
3187 gimple_stmt_iterator gsi2 = *gsi;
3188 gsi_prev (&gsi2);
3189 fold_stmt (&gsi2);
3191 else
3193 gsi_replace_with_seq_vops (gsi, stmts);
3194 fold_stmt (gsi);
3196 return true;
3199 /* If the format is "%s", use strcpy; the length of ORIG must be a
     known constant smaller than the destination size.  */
3200 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3202 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3203 if (!fn)
3204 return false;
3206 /* Don't crash on snprintf (str1, cst, "%s"). */
3207 if (!orig)
3208 return false;
3210 tree orig_len = get_maxval_strlen (orig, 0);
3211 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3212 return false;
3214 /* We could expand this as
3215 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3216 or to
3217 memcpy (str1, str2_with_nul_at_cstm1, cst);
3218 but in the former case that might increase code size
3219 and in the latter case grow .rodata section too much.
3220 So punt for now. */
3221 if (compare_tree_int (orig_len, destlen) >= 0)
3222 return false;
3224 /* Convert snprintf (str1, cst, "%s", str2) into
3225 strcpy (str1, str2) if strlen (str2) < cst. */
3226 gimple_seq stmts = NULL;
3227 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3228 gimple_seq_add_stmt_without_update (&stmts, repl);
3229 if (gimple_call_lhs (stmt))
3231 if (!useless_type_conversion_p (integer_type_node,
3232 TREE_TYPE (orig_len)))
3233 orig_len = fold_convert (integer_type_node, orig_len);
3234 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3235 gimple_seq_add_stmt_without_update (&stmts, repl);
3236 gsi_replace_with_seq_vops (gsi, stmts);
3237 /* gsi now points at the assignment to the lhs, get a
3238 stmt iterator to the memcpy call.
3239 ??? We can't use gsi_for_stmt as that doesn't work when the
3240 CFG isn't built yet. */
3241 gimple_stmt_iterator gsi2 = *gsi;
3242 gsi_prev (&gsi2);
3243 fold_stmt (&gsi2);
3245 else
3247 gsi_replace_with_seq_vops (gsi, stmts);
3248 fold_stmt (gsi);
3250 return true;
3252 return false;
3255 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3256 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3257 more than 3 arguments, and ARG may be null in the 2-argument case.
3259 Return NULL_TREE if no simplification was possible, otherwise return the
3260 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3261 code of the function to be simplified. */
3263 static bool
3264 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3265 tree fp, tree fmt, tree arg,
3266 enum built_in_function fcode)
3268 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3269 tree fn_fputc, fn_fputs;
3270 const char *fmt_str = NULL;
3272 /* If the return value is used, don't do the transformation. */
3273 if (gimple_call_lhs (stmt) != NULL_TREE)
3274 return false;
3276 /* Check whether the format is a literal string constant. */
3277 fmt_str = c_getstr (fmt);
3278 if (fmt_str == NULL)
3279 return false;
3281 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3283 /* If we're using an unlocked function, assume the other
3284 unlocked functions exist explicitly. */
3285 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3286 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3288 else
3290 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3291 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3294 if (!init_target_chars ())
3295 return false;
3297 /* If the format doesn't contain % args or %%, use strcpy. */
3298 if (strchr (fmt_str, target_percent) == NULL)
3300 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3301 && arg)
3302 return false;
3304 /* If the format specifier was "", fprintf does nothing. */
3305 if (fmt_str[0] == '\0')
3307 replace_call_with_value (gsi, NULL_TREE);
3308 return true;
3311 /* When "string" doesn't contain %, replace all cases of
3312 fprintf (fp, string) with fputs (string, fp). The fputs
3313 builtin will take care of special cases like length == 1. */
3314 if (fn_fputs)
3316 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3317 replace_call_with_call_and_fold (gsi, repl);
3318 return true;
3322 /* The other optimizations can be done only on the non-va_list variants. */
3323 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3324 return false;
3326 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3327 else if (strcmp (fmt_str, target_percent_s) == 0)
3329 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3330 return false;
3331 if (fn_fputs)
3333 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3334 replace_call_with_call_and_fold (gsi, repl);
3335 return true;
3339 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3340 else if (strcmp (fmt_str, target_percent_c) == 0)
3342 if (!arg
3343 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3344 return false;
3345 if (fn_fputc)
3347 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3348 replace_call_with_call_and_fold (gsi, repl);
3349 return true;
3353 return false;
3356 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3357 FMT and ARG are the arguments to the call; we don't fold cases with
3358 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3360 Return NULL_TREE if no simplification was possible, otherwise return the
3361 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3362 code of the function to be simplified. */
3364 static bool
3365 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3366 tree arg, enum built_in_function fcode)
3368 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3369 tree fn_putchar, fn_puts, newarg;
3370 const char *fmt_str = NULL;
3372 /* If the return value is used, don't do the transformation. */
3373 if (gimple_call_lhs (stmt) != NULL_TREE)
3374 return false;
3376 /* Check whether the format is a literal string constant. */
3377 fmt_str = c_getstr (fmt);
3378 if (fmt_str == NULL)
3379 return false;
3381 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3383 /* If we're using an unlocked function, assume the other
3384 unlocked functions exist explicitly. */
3385 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3386 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3388 else
3390 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3391 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3394 if (!init_target_chars ())
3395 return false;
3397 if (strcmp (fmt_str, target_percent_s) == 0
3398 || strchr (fmt_str, target_percent) == NULL)
3400 const char *str;
3402 if (strcmp (fmt_str, target_percent_s) == 0)
3404 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3405 return false;
3407 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3408 return false;
3410 str = c_getstr (arg);
3411 if (str == NULL)
3412 return false;
3414 else
3416 /* The format specifier doesn't contain any '%' characters. */
3417 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3418 && arg)
3419 return false;
3420 str = fmt_str;
3423 /* If the string was "", printf does nothing. */
3424 if (str[0] == '\0')
3426 replace_call_with_value (gsi, NULL_TREE);
3427 return true;
3430 /* If the string has length of 1, call putchar. */
3431 if (str[1] == '\0')
3433 /* Given printf("c"), (where c is any one character,)
3434 convert "c"[0] to an int and pass that to the replacement
3435 function. */
3436 newarg = build_int_cst (integer_type_node, str[0]);
3437 if (fn_putchar)
3439 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3440 replace_call_with_call_and_fold (gsi, repl);
3441 return true;
3444 else
3446 /* If the string was "string\n", call puts("string"). */
3447 size_t len = strlen (str);
3448 if ((unsigned char)str[len - 1] == target_newline
3449 && (size_t) (int) len == len
3450 && (int) len > 0)
3452 char *newstr;
3453 tree offset_node, string_cst;
3455 /* Create a NUL-terminated string that's one char shorter
3456 than the original, stripping off the trailing '\n'. */
3457 newarg = build_string_literal (len, str);
3458 string_cst = string_constant (newarg, &offset_node);
3459 gcc_checking_assert (string_cst
3460 && (TREE_STRING_LENGTH (string_cst)
3461 == (int) len)
3462 && integer_zerop (offset_node)
3463 && (unsigned char)
3464 TREE_STRING_POINTER (string_cst)[len - 1]
3465 == target_newline);
3466 /* build_string_literal creates a new STRING_CST,
3467 modify it in place to avoid double copying. */
3468 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3469 newstr[len - 1] = '\0';
3470 if (fn_puts)
3472 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3473 replace_call_with_call_and_fold (gsi, repl);
3474 return true;
3477 else
3478 /* We'd like to arrange to call fputs(string,stdout) here,
3479 but we need stdout and don't have a way to get it yet. */
3480 return false;
3484 /* The other optimizations can be done only on the non-va_list variants. */
3485 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3486 return false;
3488 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3489 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3491 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3492 return false;
3493 if (fn_puts)
3495 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3496 replace_call_with_call_and_fold (gsi, repl);
3497 return true;
3501 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3502 else if (strcmp (fmt_str, target_percent_c) == 0)
3504 if (!arg || ! useless_type_conversion_p (integer_type_node,
3505 TREE_TYPE (arg)))
3506 return false;
3507 if (fn_putchar)
3509 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3510 replace_call_with_call_and_fold (gsi, repl);
3511 return true;
3515 return false;
3520 /* Fold a call to __builtin_strlen with known length LEN. */
3522 static bool
3523 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3525 gimple *stmt = gsi_stmt (*gsi);
3527 wide_int minlen;
3528 wide_int maxlen;
3530 tree lenrange[2];
3531 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange)
3532 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3533 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3535 /* The range of lengths refers to either a single constant
3536 string or to the longest and shortest constant string
3537 referenced by the argument of the strlen() call, or to
3538 the strings that can possibly be stored in the arrays
3539 the argument refers to. */
3540 minlen = wi::to_wide (lenrange[0]);
3541 maxlen = wi::to_wide (lenrange[1]);
3543 else
3545 unsigned prec = TYPE_PRECISION (sizetype);
3547 minlen = wi::shwi (0, prec);
3548 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3551 if (minlen == maxlen)
3553 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3554 true, GSI_SAME_STMT);
3555 replace_call_with_value (gsi, lenrange[0]);
3556 return true;
3559 tree lhs = gimple_call_lhs (stmt);
3560 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3561 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3563 return false;
/* Fold a call to __builtin_acc_on_device (ARG0) at *GSI into the
   boolean expression (ARG0 == val_host) | (ARG0 == val_dev), where the
   two constants depend on whether this is the host or the accelerator
   compiler.  Returns true if the call was replaced.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  /* In the accelerator compiler the answer flips: match the device
     identity rather than the host.  */
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  /* host_eq = (arg0 == val_host)  */
  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  /* dev_eq = (arg0 == val_dev)  */
  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  /* result = host_eq | dev_eq  */
  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
3608 /* Fold realloc (0, n) -> malloc (n). */
3610 static bool
3611 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3613 gimple *stmt = gsi_stmt (*gsi);
3614 tree arg = gimple_call_arg (stmt, 0);
3615 tree size = gimple_call_arg (stmt, 1);
3617 if (operand_equal_p (arg, null_pointer_node, 0))
3619 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3620 if (fn_malloc)
3622 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3623 replace_call_with_call_and_fold (gsi, repl);
3624 return true;
3627 return false;
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  /* Dispatch to the folder for each builtin we know how to handle.
     The printf/fprintf family additionally checks the argument count
     here, since those folders assume a fixed argument layout.  */
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    /* The last argument of gimple_fold_builtin_memory_op encodes the
       operation: 0 = memcpy, 1 = mempcpy, 3 = memmove.  */
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    /* index/rindex are the BSD spellings of strchr/strrchr; the bool
       argument selects searching from the end.  */
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    /* The _chk variants carry an extra flag argument at index 1, so the
       format string is at index 2.  */
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  Returns the constant
   replacement, or NULL_TREE if the dimension is dynamic.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  /* Which launch dimension (gang/worker/vector) the call queries.  */
  int axis = oacc_get_ifn_dim_arg (call);
  /* Compile-time size of that dimension, or 0 if dynamic.  */
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  /* Only consider well-formed builtin calls with virtual operands, when
     inlining of atomics is allowed and not inhibited by sanitizers.  */
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  /* The "expected" argument must be the address of a plain variable.  */
  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  /* The variable must be a local register-type auto var whose precision
     fills its mode, so the REALPART/VIEW_CONVERT rewrite is lossless.  */
  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  /* The "weak" argument must be a literal 0 or 1.  */
  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  /* ITYPE is the integral type of the builtin's "desired" parameter
     (the third parameter in the prototype).  */
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  /* The target must support a compare-and-swap in this mode.  */
  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;
   This passes E by value instead of by address, so that E's variable
   need not be addressable (see optimize_atomic_compare_exchange_p).  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  /* ITYPE is the integral type of the value being exchanged; the
     internal fn returns a _Complex of it (value, success flag).  */
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  /* EXPECTED is the variable whose address was the second argument.  */
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  /* Load the expected value into an SSA name.  */
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      /* Reinterpret the bits as ITYPE for the internal call.  */
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  /* Encode the weak flag in bit 8 and the byte size in the low bits.  */
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  /* Transfer the virtual operands from the original call.  */
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      /* If the original call could throw, the uses of the result must
	 be inserted on the fallthru edge, not after the call.  */
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      /* r = (_Bool) IMAGPART_EXPR <t>;  */
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  /* e = REALPART_EXPR <t>;  */
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      /* Convert back to the declared type of EXPECTED.  */
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  /* Leave *GSI pointing at the load of EXPECTED inserted first.  */
  *gsi = gsiret;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  /* Compute in double-width wide ints so the operation itself can
     never overflow; only the final fit-in-TYPE check matters.  */
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  /* A negative infinite-precision result can never be represented in
     an unsigned type.  */
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  /* Otherwise it overflows iff it needs more bits than TYPE has.  */
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
4022 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4023 The statement may be replaced by another statement, e.g., if the call
4024 simplifies to a constant value. Return true if any changes were made.
4025 It is assumed that the operands have been previously folded. */
4027 static bool
4028 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4030 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4031 tree callee;
4032 bool changed = false;
4033 unsigned i;
4035 /* Fold *& in call arguments. */
4036 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4037 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4039 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4040 if (tmp)
4042 gimple_call_set_arg (stmt, i, tmp);
4043 changed = true;
4047 /* Check for virtual calls that became direct calls. */
4048 callee = gimple_call_fn (stmt);
4049 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4051 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4053 if (dump_file && virtual_method_call_p (callee)
4054 && !possible_polymorphic_call_target_p
4055 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4056 (OBJ_TYPE_REF_EXPR (callee)))))
4058 fprintf (dump_file,
4059 "Type inheritance inconsistent devirtualization of ");
4060 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4061 fprintf (dump_file, " to ");
4062 print_generic_expr (dump_file, callee, TDF_SLIM);
4063 fprintf (dump_file, "\n");
4066 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4067 changed = true;
4069 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4071 bool final;
4072 vec <cgraph_node *>targets
4073 = possible_polymorphic_call_targets (callee, stmt, &final);
4074 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4076 tree lhs = gimple_call_lhs (stmt);
4077 if (dump_enabled_p ())
4079 location_t loc = gimple_location_safe (stmt);
4080 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4081 "folding virtual function call to %s\n",
4082 targets.length () == 1
4083 ? targets[0]->name ()
4084 : "__builtin_unreachable");
4086 if (targets.length () == 1)
4088 tree fndecl = targets[0]->decl;
4089 gimple_call_set_fndecl (stmt, fndecl);
4090 changed = true;
4091 /* If changing the call to __cxa_pure_virtual
4092 or similar noreturn function, adjust gimple_call_fntype
4093 too. */
4094 if (gimple_call_noreturn_p (stmt)
4095 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4096 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4097 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4098 == void_type_node))
4099 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4100 /* If the call becomes noreturn, remove the lhs. */
4101 if (lhs
4102 && gimple_call_noreturn_p (stmt)
4103 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4104 || should_remove_lhs_p (lhs)))
4106 if (TREE_CODE (lhs) == SSA_NAME)
4108 tree var = create_tmp_var (TREE_TYPE (lhs));
4109 tree def = get_or_create_ssa_default_def (cfun, var);
4110 gimple *new_stmt = gimple_build_assign (lhs, def);
4111 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4113 gimple_call_set_lhs (stmt, NULL_TREE);
4115 maybe_remove_unused_call_args (cfun, stmt);
4117 else
4119 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4120 gimple *new_stmt = gimple_build_call (fndecl, 0);
4121 gimple_set_location (new_stmt, gimple_location (stmt));
4122 /* If the call had a SSA name as lhs morph that into
4123 an uninitialized value. */
4124 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4126 tree var = create_tmp_var (TREE_TYPE (lhs));
4127 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4128 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4129 set_ssa_default_def (cfun, var, lhs);
4131 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4132 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4133 gsi_replace (gsi, new_stmt, false);
4134 return true;
4140 /* Check for indirect calls that became direct calls, and then
4141 no longer require a static chain. */
4142 if (gimple_call_chain (stmt))
4144 tree fn = gimple_call_fndecl (stmt);
4145 if (fn && !DECL_STATIC_CHAIN (fn))
4147 gimple_call_set_chain (stmt, NULL);
4148 changed = true;
4150 else
4152 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4153 if (tmp)
4155 gimple_call_set_chain (stmt, tmp);
4156 changed = true;
4161 if (inplace)
4162 return changed;
4164 /* Check for builtins that CCP can handle using information not
4165 available in the generic fold routines. */
4166 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4168 if (gimple_fold_builtin (gsi))
4169 changed = true;
4171 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4173 changed |= targetm.gimple_fold_builtin (gsi);
4175 else if (gimple_call_internal_p (stmt))
4177 enum tree_code subcode = ERROR_MARK;
4178 tree result = NULL_TREE;
4179 bool cplx_result = false;
4180 tree overflow = NULL_TREE;
4181 switch (gimple_call_internal_fn (stmt))
4183 case IFN_BUILTIN_EXPECT:
4184 result = fold_builtin_expect (gimple_location (stmt),
4185 gimple_call_arg (stmt, 0),
4186 gimple_call_arg (stmt, 1),
4187 gimple_call_arg (stmt, 2));
4188 break;
4189 case IFN_UBSAN_OBJECT_SIZE:
4191 tree offset = gimple_call_arg (stmt, 1);
4192 tree objsize = gimple_call_arg (stmt, 2);
4193 if (integer_all_onesp (objsize)
4194 || (TREE_CODE (offset) == INTEGER_CST
4195 && TREE_CODE (objsize) == INTEGER_CST
4196 && tree_int_cst_le (offset, objsize)))
4198 replace_call_with_value (gsi, NULL_TREE);
4199 return true;
4202 break;
4203 case IFN_UBSAN_PTR:
4204 if (integer_zerop (gimple_call_arg (stmt, 1)))
4206 replace_call_with_value (gsi, NULL_TREE);
4207 return true;
4209 break;
4210 case IFN_UBSAN_BOUNDS:
4212 tree index = gimple_call_arg (stmt, 1);
4213 tree bound = gimple_call_arg (stmt, 2);
4214 if (TREE_CODE (index) == INTEGER_CST
4215 && TREE_CODE (bound) == INTEGER_CST)
4217 index = fold_convert (TREE_TYPE (bound), index);
4218 if (TREE_CODE (index) == INTEGER_CST
4219 && tree_int_cst_le (index, bound))
4221 replace_call_with_value (gsi, NULL_TREE);
4222 return true;
4226 break;
4227 case IFN_GOACC_DIM_SIZE:
4228 case IFN_GOACC_DIM_POS:
4229 result = fold_internal_goacc_dim (stmt);
4230 break;
4231 case IFN_UBSAN_CHECK_ADD:
4232 subcode = PLUS_EXPR;
4233 break;
4234 case IFN_UBSAN_CHECK_SUB:
4235 subcode = MINUS_EXPR;
4236 break;
4237 case IFN_UBSAN_CHECK_MUL:
4238 subcode = MULT_EXPR;
4239 break;
4240 case IFN_ADD_OVERFLOW:
4241 subcode = PLUS_EXPR;
4242 cplx_result = true;
4243 break;
4244 case IFN_SUB_OVERFLOW:
4245 subcode = MINUS_EXPR;
4246 cplx_result = true;
4247 break;
4248 case IFN_MUL_OVERFLOW:
4249 subcode = MULT_EXPR;
4250 cplx_result = true;
4251 break;
4252 default:
4253 break;
4255 if (subcode != ERROR_MARK)
4257 tree arg0 = gimple_call_arg (stmt, 0);
4258 tree arg1 = gimple_call_arg (stmt, 1);
4259 tree type = TREE_TYPE (arg0);
4260 if (cplx_result)
4262 tree lhs = gimple_call_lhs (stmt);
4263 if (lhs == NULL_TREE)
4264 type = NULL_TREE;
4265 else
4266 type = TREE_TYPE (TREE_TYPE (lhs));
4268 if (type == NULL_TREE)
4270 /* x = y + 0; x = y - 0; x = y * 0; */
4271 else if (integer_zerop (arg1))
4272 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4273 /* x = 0 + y; x = 0 * y; */
4274 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4275 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4276 /* x = y - y; */
4277 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4278 result = integer_zero_node;
4279 /* x = y * 1; x = 1 * y; */
4280 else if (subcode == MULT_EXPR && integer_onep (arg1))
4281 result = arg0;
4282 else if (subcode == MULT_EXPR && integer_onep (arg0))
4283 result = arg1;
4284 else if (TREE_CODE (arg0) == INTEGER_CST
4285 && TREE_CODE (arg1) == INTEGER_CST)
4287 if (cplx_result)
4288 result = int_const_binop (subcode, fold_convert (type, arg0),
4289 fold_convert (type, arg1));
4290 else
4291 result = int_const_binop (subcode, arg0, arg1);
4292 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4294 if (cplx_result)
4295 overflow = build_one_cst (type);
4296 else
4297 result = NULL_TREE;
4300 if (result)
4302 if (result == integer_zero_node)
4303 result = build_zero_cst (type);
4304 else if (cplx_result && TREE_TYPE (result) != type)
4306 if (TREE_CODE (result) == INTEGER_CST)
4308 if (arith_overflowed_p (PLUS_EXPR, type, result,
4309 integer_zero_node))
4310 overflow = build_one_cst (type);
4312 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4313 && TYPE_UNSIGNED (type))
4314 || (TYPE_PRECISION (type)
4315 < (TYPE_PRECISION (TREE_TYPE (result))
4316 + (TYPE_UNSIGNED (TREE_TYPE (result))
4317 && !TYPE_UNSIGNED (type)))))
4318 result = NULL_TREE;
4319 if (result)
4320 result = fold_convert (type, result);
4325 if (result)
4327 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4328 result = drop_tree_overflow (result);
4329 if (cplx_result)
4331 if (overflow == NULL_TREE)
4332 overflow = build_zero_cst (TREE_TYPE (result));
4333 tree ctype = build_complex_type (TREE_TYPE (result));
4334 if (TREE_CODE (result) == INTEGER_CST
4335 && TREE_CODE (overflow) == INTEGER_CST)
4336 result = build_complex (ctype, result, overflow);
4337 else
4338 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4339 ctype, result, overflow);
4341 if (!update_call_from_tree (gsi, result))
4342 gimplify_and_update_call_from_tree (gsi, result);
4343 changed = true;
4347 return changed;
4351 /* Return true whether NAME has a use on STMT. */
4353 static bool
4354 has_use_on_stmt (tree name, gimple *stmt)
4356 imm_use_iterator iter;
4357 use_operand_p use_p;
4358 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4359 if (USE_STMT (use_p) == stmt)
4360 return true;
4361 return false;
4364 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4365 gimple_simplify.
4367 Replaces *GSI with the simplification result in RCODE and OPS
4368 and the associated statements in *SEQ. Does the replacement
4369 according to INPLACE and returns true if the operation succeeded. */
4371 static bool
4372 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4373 code_helper rcode, tree *ops,
4374 gimple_seq *seq, bool inplace)
4376 gimple *stmt = gsi_stmt (*gsi);
4378 /* Play safe and do not allow abnormals to be mentioned in
4379 newly created statements. See also maybe_push_res_to_seq.
4380 As an exception allow such uses if there was a use of the
4381 same SSA name on the old stmt. */
4382 if ((TREE_CODE (ops[0]) == SSA_NAME
4383 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
4384 && !has_use_on_stmt (ops[0], stmt))
4385 || (ops[1]
4386 && TREE_CODE (ops[1]) == SSA_NAME
4387 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
4388 && !has_use_on_stmt (ops[1], stmt))
4389 || (ops[2]
4390 && TREE_CODE (ops[2]) == SSA_NAME
4391 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
4392 && !has_use_on_stmt (ops[2], stmt))
   /* ops[0] may itself be a COMPARISON_CLASS_P tree (e.g. for a
      GIMPLE_COND replacement); apply the same abnormal-PHI check to
      its two operands.  */
4393 || (COMPARISON_CLASS_P (ops[0])
4394 && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
4395 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
4396 && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
4397 || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
4398 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
4399 && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
4400 return false;
4402 /* Don't insert new statements when INPLACE is true, even if we could
4403 reuse STMT for the final statement. */
4404 if (inplace && !gimple_seq_empty_p (*seq))
4405 return false;
   /* Case 1: STMT is a GIMPLE_COND.  Rewrite its condition from the
      simplification result.  */
4407 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4409 gcc_assert (rcode.is_tree_code ());
4410 if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
4411 /* GIMPLE_CONDs condition may not throw. */
4412 && (!flag_exceptions
4413 || !cfun->can_throw_non_call_exceptions
4414 || !operation_could_trap_p (rcode,
4415 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4416 false, NULL_TREE)))
4417 gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
   /* Result simplified to a plain SSA name: compare it against zero.  */
4418 else if (rcode == SSA_NAME)
4419 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4420 build_zero_cst (TREE_TYPE (ops[0])));
   /* Result simplified to a constant: make the condition statically
      true or false.  */
4421 else if (rcode == INTEGER_CST)
4423 if (integer_zerop (ops[0]))
4424 gimple_cond_make_false (cond_stmt);
4425 else
4426 gimple_cond_make_true (cond_stmt);
   /* Otherwise materialize the result into SEQ and compare it against
      zero; only allowed when not folding in place.  */
4428 else if (!inplace)
4430 tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
4431 ops, seq);
4432 if (!res)
4433 return false;
4434 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4435 build_zero_cst (TREE_TYPE (res)));
4437 else
4438 return false;
4439 if (dump_file && (dump_flags & TDF_DETAILS))
4441 fprintf (dump_file, "gimple_simplified to ");
4442 if (!gimple_seq_empty_p (*seq))
4443 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4444 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4445 0, TDF_SLIM);
4447 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4448 return true;
   /* Case 2: STMT is an assignment and the result is a tree code:
      replace the RHS, building a GENERIC op if needed.  */
4450 else if (is_gimple_assign (stmt)
4451 && rcode.is_tree_code ())
4453 if (!inplace
4454 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
4456 maybe_build_generic_op (rcode,
4457 TREE_TYPE (gimple_assign_lhs (stmt)), ops);
4458 gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
4459 if (dump_file && (dump_flags & TDF_DETAILS))
4461 fprintf (dump_file, "gimple_simplified to ");
4462 if (!gimple_seq_empty_p (*seq))
4463 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4464 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4465 0, TDF_SLIM);
4467 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4468 return true;
   /* Case 3: STMT is a call to the same combined function RCODE
      names: only the arguments changed, update them in place.  */
4471 else if (rcode.is_fn_code ()
4472 && gimple_call_combined_fn (stmt) == rcode)
4474 unsigned i;
4475 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4477 gcc_assert (ops[i] != NULL_TREE);
4478 gimple_call_set_arg (stmt, i, ops[i]);
   /* Sanity check: any unused op slot must be empty.  */
4480 if (i < 3)
4481 gcc_assert (ops[i] == NULL_TREE);
4482 if (dump_file && (dump_flags & TDF_DETAILS))
4484 fprintf (dump_file, "gimple_simplified to ");
4485 if (!gimple_seq_empty_p (*seq))
4486 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4487 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4489 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4490 return true;
   /* Case 4: anything else — materialize the result into SEQ and
      replace the whole statement (keeping virtual operands).  */
4492 else if (!inplace)
4494 if (gimple_has_lhs (stmt))
4496 tree lhs = gimple_get_lhs (stmt);
4497 if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
4498 ops, seq, lhs))
4499 return false;
4500 if (dump_file && (dump_flags & TDF_DETAILS))
4502 fprintf (dump_file, "gimple_simplified to ");
4503 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4505 gsi_replace_with_seq_vops (gsi, *seq);
4506 return true;
4508 else
4509 gcc_unreachable ();
4512 return false;
4515 /* Canonicalize MEM_REFs invariant address operand after propagation. */
   /* Works on the tree pointed to by T in place; returns true iff *T
      (or a subexpression reached through it) was rewritten.  */
4517 static bool
4518 maybe_canonicalize_mem_ref_addr (tree *t)
4520 bool res = false;
   /* Look through a top-level ADDR_EXPR at its operand.  */
4522 if (TREE_CODE (*t) == ADDR_EXPR)
4523 t = &TREE_OPERAND (*t, 0);
4525 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4526 generic vector extension. The actual vector referenced is
4527 view-converted to an array type for this purpose. If the index
4528 is constant the canonical representation in the middle-end is a
4529 BIT_FIELD_REF so re-write the former to the latter here. */
4530 if (TREE_CODE (*t) == ARRAY_REF
4531 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4532 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4533 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4535 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4536 if (VECTOR_TYPE_P (vtype))
4538 tree low = array_ref_low_bound (*t);
4539 if (TREE_CODE (low) == INTEGER_CST)
4541 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
   /* Compute the bit offset of the accessed element and make
      sure the access fits within the vector before rewriting
      to a BIT_FIELD_REF.  */
4543 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4544 wi::to_widest (low));
4545 idx = wi::mul (idx, wi::to_widest
4546 (TYPE_SIZE (TREE_TYPE (*t))));
4547 widest_int ext
4548 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4549 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4551 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4552 TREE_TYPE (*t),
4553 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4554 TYPE_SIZE (TREE_TYPE (*t)),
4555 wide_int_to_tree (bitsizetype, idx));
4556 res = true;
   /* Strip any handled components to get at the base reference.  */
4563 while (handled_component_p (*t))
4564 t = &TREE_OPERAND (*t, 0);
4566 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4567 of invariant addresses into a SSA name MEM_REF address. */
4568 if (TREE_CODE (*t) == MEM_REF
4569 || TREE_CODE (*t) == TARGET_MEM_REF)
4571 tree addr = TREE_OPERAND (*t, 0);
4572 if (TREE_CODE (addr) == ADDR_EXPR
4573 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4574 || handled_component_p (TREE_OPERAND (addr, 0))))
4576 tree base;
4577 poly_int64 coffset;
4578 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4579 &coffset);
   /* The address was invariant, so a base must be computable.  */
4580 if (!base)
4581 gcc_unreachable ();
   /* Fold the component offset into the MEM_REF's constant
      offset operand.  */
4583 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4584 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4585 TREE_OPERAND (*t, 1),
4586 size_int (coffset));
4587 res = true;
4589 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4590 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4593 /* Canonicalize back MEM_REFs to plain reference trees if the object
4594 accessed is a decl that has the same access semantics as the MEM_REF. */
4595 if (TREE_CODE (*t) == MEM_REF
4596 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4597 && integer_zerop (TREE_OPERAND (*t, 1))
4598 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4600 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4601 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4602 if (/* Same volatile qualification. */
4603 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4604 /* Same TBAA behavior with -fstrict-aliasing. */
4605 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4606 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4607 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4608 /* Same alignment. */
4609 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4610 /* We have to look out here to not drop a required conversion
4611 from the rhs to the lhs if *t appears on the lhs or vice-versa
4612 if it appears on the rhs. Thus require strict type
4613 compatibility. */
4614 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4616 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4617 res = true;
4621 /* Canonicalize TARGET_MEM_REF in particular with respect to
4622 the indexes becoming constant. */
4623 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4625 tree tem = maybe_fold_tmr (*t);
4626 if (tem)
4628 *t = tem;
4629 res = true;
4633 return res;
4636 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4637 distinguishes both cases. */
   /* VALUEIZE is the callback used by gimple_simplify to look through
      SSA definitions; returns true iff *GSI was changed.  When INPLACE
      is true the statement is never replaced by a new one.  */
4639 static bool
4640 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4642 bool changed = false;
4643 gimple *stmt = gsi_stmt (*gsi);
4644 bool nowarning = gimple_no_warning_p (stmt);
4645 unsigned i;
   /* Defer overflow warnings; they are re-enabled at the end and only
      emitted if the statement actually changed.  */
4646 fold_defer_overflow_warnings ();
4648 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4649 after propagation.
4650 ??? This shouldn't be done in generic folding but in the
4651 propagation helpers which also know whether an address was
4652 propagated.
4653 Also canonicalize operand order. */
4654 switch (gimple_code (stmt))
4656 case GIMPLE_ASSIGN:
4657 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4659 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4660 if ((REFERENCE_CLASS_P (*rhs)
4661 || TREE_CODE (*rhs) == ADDR_EXPR)
4662 && maybe_canonicalize_mem_ref_addr (rhs))
4663 changed = true;
4664 tree *lhs = gimple_assign_lhs_ptr (stmt);
4665 if (REFERENCE_CLASS_P (*lhs)
4666 && maybe_canonicalize_mem_ref_addr (lhs))
4667 changed = true;
4669 else
4671 /* Canonicalize operand order. */
4672 enum tree_code code = gimple_assign_rhs_code (stmt);
4673 if (TREE_CODE_CLASS (code) == tcc_comparison
4674 || commutative_tree_code (code)
4675 || commutative_ternary_tree_code (code))
4677 tree rhs1 = gimple_assign_rhs1 (stmt);
4678 tree rhs2 = gimple_assign_rhs2 (stmt);
4679 if (tree_swap_operands_p (rhs1, rhs2))
4681 gimple_assign_set_rhs1 (stmt, rhs2);
4682 gimple_assign_set_rhs2 (stmt, rhs1);
   /* Swapping comparison operands requires mirroring the
      comparison code itself.  */
4683 if (TREE_CODE_CLASS (code) == tcc_comparison)
4684 gimple_assign_set_rhs_code (stmt,
4685 swap_tree_comparison (code));
4686 changed = true;
4690 break;
4691 case GIMPLE_CALL:
   /* Canonicalize each argument and the call LHS.  */
4693 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4695 tree *arg = gimple_call_arg_ptr (stmt, i);
4696 if (REFERENCE_CLASS_P (*arg)
4697 && maybe_canonicalize_mem_ref_addr (arg))
4698 changed = true;
4700 tree *lhs = gimple_call_lhs_ptr (stmt);
4701 if (*lhs
4702 && REFERENCE_CLASS_P (*lhs)
4703 && maybe_canonicalize_mem_ref_addr (lhs))
4704 changed = true;
4705 break;
4707 case GIMPLE_ASM:
   /* Canonicalize asm output and input operands.  */
4709 gasm *asm_stmt = as_a <gasm *> (stmt);
4710 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4712 tree link = gimple_asm_output_op (asm_stmt, i);
4713 tree op = TREE_VALUE (link);
4714 if (REFERENCE_CLASS_P (op)
4715 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4716 changed = true;
4718 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4720 tree link = gimple_asm_input_op (asm_stmt, i);
4721 tree op = TREE_VALUE (link);
4722 if ((REFERENCE_CLASS_P (op)
4723 || TREE_CODE (op) == ADDR_EXPR)
4724 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4725 changed = true;
4728 break;
4729 case GIMPLE_DEBUG:
   /* Canonicalize the bound value of a debug bind.  */
4730 if (gimple_debug_bind_p (stmt))
4732 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4733 if (*val
4734 && (REFERENCE_CLASS_P (*val)
4735 || TREE_CODE (*val) == ADDR_EXPR)
4736 && maybe_canonicalize_mem_ref_addr (val))
4737 changed = true;
4739 break;
4740 case GIMPLE_COND:
4742 /* Canonicalize operand order. */
4743 tree lhs = gimple_cond_lhs (stmt);
4744 tree rhs = gimple_cond_rhs (stmt);
4745 if (tree_swap_operands_p (lhs, rhs))
4747 gcond *gc = as_a <gcond *> (stmt);
4748 gimple_cond_set_lhs (gc, rhs);
4749 gimple_cond_set_rhs (gc, lhs);
4750 gimple_cond_set_code (gc,
4751 swap_tree_comparison (gimple_cond_code (gc)));
4752 changed = true;
4755 default:;
4758 /* Dispatch to pattern-based folding. */
   /* Inplace folding restricts pattern folding to assigns and conds,
      which can be rewritten without emitting new statements.  */
4759 if (!inplace
4760 || is_gimple_assign (stmt)
4761 || gimple_code (stmt) == GIMPLE_COND)
4763 gimple_seq seq = NULL;
4764 code_helper rcode;
4765 tree ops[3] = {};
4766 if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
4767 valueize, valueize))
4769 if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
4770 changed = true;
4771 else
4772 gimple_seq_discard (seq);
   /* The statement may have been replaced above; re-fetch it.  */
4776 stmt = gsi_stmt (*gsi);
4778 /* Fold the main computation performed by the statement. */
4779 switch (gimple_code (stmt))
4781 case GIMPLE_ASSIGN:
4783 /* Try to canonicalize for boolean-typed X the comparisons
4784 X == 0, X == 1, X != 0, and X != 1. */
4785 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4786 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4788 tree lhs = gimple_assign_lhs (stmt);
4789 tree op1 = gimple_assign_rhs1 (stmt);
4790 tree op2 = gimple_assign_rhs2 (stmt);
4791 tree type = TREE_TYPE (op1);
4793 /* Check whether the comparison operands are of the same boolean
4794 type as the result type is.
4795 Check that second operand is an integer-constant with value
4796 one or zero. */
4797 if (TREE_CODE (op2) == INTEGER_CST
4798 && (integer_zerop (op2) || integer_onep (op2))
4799 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4801 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4802 bool is_logical_not = false;
4804 /* X == 0 and X != 1 is a logical-not.of X
4805 X == 1 and X != 0 is X */
4806 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4807 || (cmp_code == NE_EXPR && integer_onep (op2)))
4808 is_logical_not = true;
4810 if (is_logical_not == false)
4811 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4812 /* Only for one-bit precision typed X the transformation
4813 !X -> ~X is valied. */
4814 else if (TYPE_PRECISION (type) == 1)
4815 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4816 /* Otherwise we use !X -> X ^ 1. */
4817 else
4818 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4819 build_int_cst (type, 1));
4820 changed = true;
4821 break;
   /* Generic assignment folding through fold_gimple_assign.  */
4825 unsigned old_num_ops = gimple_num_ops (stmt);
4826 tree lhs = gimple_assign_lhs (stmt);
4827 tree new_rhs = fold_gimple_assign (gsi);
4828 if (new_rhs
4829 && !useless_type_conversion_p (TREE_TYPE (lhs),
4830 TREE_TYPE (new_rhs)))
4831 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
   /* Inplace folding only accepts a new RHS that needs fewer
      operand slots than the old one.  */
4832 if (new_rhs
4833 && (!inplace
4834 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4836 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4837 changed = true;
4839 break;
4842 case GIMPLE_CALL:
4843 changed |= gimple_fold_call (gsi, inplace);
4844 break;
4846 case GIMPLE_ASM:
4847 /* Fold *& in asm operands. */
4849 gasm *asm_stmt = as_a <gasm *> (stmt);
4850 size_t noutputs;
4851 const char **oconstraints;
4852 const char *constraint;
4853 bool allows_mem, allows_reg;
4855 noutputs = gimple_asm_noutputs (asm_stmt);
4856 oconstraints = XALLOCAVEC (const char *, noutputs);
4858 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4860 tree link = gimple_asm_output_op (asm_stmt, i);
4861 tree op = TREE_VALUE (link);
4862 oconstraints[i]
4863 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4864 if (REFERENCE_CLASS_P (op)
4865 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4867 TREE_VALUE (link) = op;
4868 changed = true;
4871 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4873 tree link = gimple_asm_input_op (asm_stmt, i);
4874 tree op = TREE_VALUE (link);
   /* Only fold the input to a memory form if the constraint
      requires memory (no register alternative).  */
4875 constraint
4876 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4877 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4878 oconstraints, &allows_mem, &allows_reg);
4879 if (REFERENCE_CLASS_P (op)
4880 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4881 != NULL_TREE)
4883 TREE_VALUE (link) = op;
4884 changed = true;
4888 break;
4890 case GIMPLE_DEBUG:
4891 if (gimple_debug_bind_p (stmt))
4893 tree val = gimple_debug_bind_get_value (stmt);
4894 if (val
4895 && REFERENCE_CLASS_P (val))
4897 tree tem = maybe_fold_reference (val, false);
4898 if (tem)
4900 gimple_debug_bind_set_value (stmt, tem);
4901 changed = true;
4904 else if (val
4905 && TREE_CODE (val) == ADDR_EXPR)
   /* Fold the referenced object inside &ref, rebuilding the
      address afterwards.  */
4907 tree ref = TREE_OPERAND (val, 0);
4908 tree tem = maybe_fold_reference (ref, false);
4909 if (tem)
4911 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4912 gimple_debug_bind_set_value (stmt, tem);
4913 changed = true;
4917 break;
4919 case GIMPLE_RETURN:
4921 greturn *ret_stmt = as_a<greturn *> (stmt);
4922 tree ret = gimple_return_retval(ret_stmt);
   /* Propagate a known value into the return value if allowed.  */
4924 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4926 tree val = valueize (ret);
4927 if (val && val != ret
4928 && may_propagate_copy (ret, val))
4930 gimple_return_set_retval (ret_stmt, val);
4931 changed = true;
4935 break;
4937 default:;
4940 stmt = gsi_stmt (*gsi);
4942 /* Fold *& on the lhs. */
4943 if (gimple_has_lhs (stmt))
4945 tree lhs = gimple_get_lhs (stmt);
4946 if (lhs && REFERENCE_CLASS_P (lhs))
4948 tree new_lhs = maybe_fold_reference (lhs, true);
4949 if (new_lhs)
4951 gimple_set_lhs (stmt, new_lhs);
4952 changed = true;
   /* Emit any deferred overflow warnings only if something changed
      and the statement was not marked no-warning.  */
4957 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4958 return changed;
4961 /* Valueziation callback that ends up not following SSA edges. */
4963 tree
4964 no_follow_ssa_edges (tree)
4966 return NULL_TREE;
4969 /* Valueization callback that ends up following single-use SSA edges only. */
4971 tree
4972 follow_single_use_edges (tree val)
4974 if (TREE_CODE (val) == SSA_NAME
4975 && !has_single_use (val))
4976 return NULL_TREE;
4977 return val;
4980 /* Fold the statement pointed to by GSI. In some cases, this function may
4981 replace the whole statement with a new one. Returns true iff folding
4982 makes any changes.
4983 The statement pointed to by GSI should be in valid gimple form but may
4984 be in unfolded state as resulting from for example constant propagation
4985 which can produce *&x = 0. */
4987 bool
4988 fold_stmt (gimple_stmt_iterator *gsi)
4990 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4993 bool
4994 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4996 return fold_stmt_1 (gsi, false, valueize);
4999 /* Perform the minimal folding on statement *GSI. Only operations like
5000 *&x created by constant propagation are handled. The statement cannot
5001 be replaced with a new one. Return true if the statement was
5002 changed, false otherwise.
5003 The statement *GSI should be in valid gimple form but may
5004 be in unfolded state as resulting from for example constant propagation
5005 which can produce *&x = 0. */
5007 bool
5008 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5010 gimple *stmt = gsi_stmt (*gsi);
5011 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5012 gcc_assert (gsi_stmt (*gsi) == stmt);
5013 return changed;
5016 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5017 if EXPR is null or we don't know how.
5018 If non-null, the result always has boolean type. */
5020 static tree
5021 canonicalize_bool (tree expr, bool invert)
5023 if (!expr)
5024 return NULL_TREE;
   /* Inverted case: fold the logical negation of EXPR.  */
5025 else if (invert)
5027 if (integer_nonzerop (expr))
5028 return boolean_false_node;
5029 else if (integer_zerop (expr))
5030 return boolean_true_node;
   /* !name canonicalizes to name == 0.  */
5031 else if (TREE_CODE (expr) == SSA_NAME)
5032 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5033 build_int_cst (TREE_TYPE (expr), 0));
   /* Invert a comparison by inverting its code (honoring traps,
      hence the 'false' honor_nans argument).  */
5034 else if (COMPARISON_CLASS_P (expr))
5035 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5036 boolean_type_node,
5037 TREE_OPERAND (expr, 0),
5038 TREE_OPERAND (expr, 1));
5039 else
5040 return NULL_TREE;
   /* Non-inverted case: just coerce EXPR to boolean type.  */
5042 else
5044 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5045 return expr;
5046 if (integer_nonzerop (expr))
5047 return boolean_true_node;
5048 else if (integer_zerop (expr))
5049 return boolean_false_node;
   /* name canonicalizes to name != 0.  */
5050 else if (TREE_CODE (expr) == SSA_NAME)
5051 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5052 build_int_cst (TREE_TYPE (expr), 0));
5053 else if (COMPARISON_CLASS_P (expr))
5054 return fold_build2 (TREE_CODE (expr),
5055 boolean_type_node,
5056 TREE_OPERAND (expr, 0),
5057 TREE_OPERAND (expr, 1));
5058 else
5059 return NULL_TREE;
5063 /* Check to see if a boolean expression EXPR is logically equivalent to the
5064 comparison (OP1 CODE OP2). Check for various identities involving
5065 SSA_NAMEs. */
5067 static bool
5068 same_bool_comparison_p (const_tree expr, enum tree_code code,
5069 const_tree op1, const_tree op2)
5071 gimple *s;
5073 /* The obvious case. */
5074 if (TREE_CODE (expr) == code
5075 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5076 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5077 return true;
5079 /* Check for comparing (name, name != 0) and the case where expr
5080 is an SSA_NAME with a definition matching the comparison. */
5081 if (TREE_CODE (expr) == SSA_NAME
5082 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
   /* A boolean name equals (name != 0) and (name == nonzero).  */
5084 if (operand_equal_p (expr, op1, 0))
5085 return ((code == NE_EXPR && integer_zerop (op2))
5086 || (code == EQ_EXPR && integer_nonzerop (op2)));
   /* Otherwise look at EXPR's defining statement.  */
5087 s = SSA_NAME_DEF_STMT (expr);
5088 if (is_gimple_assign (s)
5089 && gimple_assign_rhs_code (s) == code
5090 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5091 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5092 return true;
5095 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5096 of name is a comparison, recurse. */
5097 if (TREE_CODE (op1) == SSA_NAME
5098 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5100 s = SSA_NAME_DEF_STMT (op1);
5101 if (is_gimple_assign (s)
5102 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5104 enum tree_code c = gimple_assign_rhs_code (s);
   /* (name != 0) / (name == nonzero): same truth value as the
      defining comparison itself.  */
5105 if ((c == NE_EXPR && integer_zerop (op2))
5106 || (c == EQ_EXPR && integer_nonzerop (op2)))
5107 return same_bool_comparison_p (expr, c,
5108 gimple_assign_rhs1 (s),
5109 gimple_assign_rhs2 (s));
   /* (name == 0) / (name != nonzero): inverted truth value.  */
5110 if ((c == EQ_EXPR && integer_zerop (op2))
5111 || (c == NE_EXPR && integer_nonzerop (op2)))
5112 return same_bool_comparison_p (expr,
5113 invert_tree_comparison (c, false),
5114 gimple_assign_rhs1 (s),
5115 gimple_assign_rhs2 (s));
5118 return false;
5121 /* Check to see if two boolean expressions OP1 and OP2 are logically
5122 equivalent. */
5124 static bool
5125 same_bool_result_p (const_tree op1, const_tree op2)
5127 /* Simple cases first. */
5128 if (operand_equal_p (op1, op2, 0))
5129 return true;
5131 /* Check the cases where at least one of the operands is a comparison.
5132 These are a bit smarter than operand_equal_p in that they apply some
5133 identifies on SSA_NAMEs. */
5134 if (COMPARISON_CLASS_P (op2)
5135 && same_bool_comparison_p (op1, TREE_CODE (op2),
5136 TREE_OPERAND (op2, 0),
5137 TREE_OPERAND (op2, 1)))
5138 return true;
5139 if (COMPARISON_CLASS_P (op1)
5140 && same_bool_comparison_p (op2, TREE_CODE (op1),
5141 TREE_OPERAND (op1, 0),
5142 TREE_OPERAND (op1, 1)))
5143 return true;
5145 /* Default case. */
5146 return false;
5149 /* Forward declarations for some mutually recursive functions. */
5151 static tree
5152 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5153 enum tree_code code2, tree op2a, tree op2b);
5154 static tree
5155 and_var_with_comparison (tree var, bool invert,
5156 enum tree_code code2, tree op2a, tree op2b);
5157 static tree
5158 and_var_with_comparison_1 (gimple *stmt,
5159 enum tree_code code2, tree op2a, tree op2b);
5160 static tree
5161 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5162 enum tree_code code2, tree op2a, tree op2b);
5163 static tree
5164 or_var_with_comparison (tree var, bool invert,
5165 enum tree_code code2, tree op2a, tree op2b);
5166 static tree
5167 or_var_with_comparison_1 (gimple *stmt,
5168 enum tree_code code2, tree op2a, tree op2b);
5170 /* Helper function for and_comparisons_1: try to simplify the AND of the
5171 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5172 If INVERT is true, invert the value of the VAR before doing the AND.
5173 Return NULL_EXPR if we can't simplify this to a single expression. */
5175 static tree
5176 and_var_with_comparison (tree var, bool invert,
5177 enum tree_code code2, tree op2a, tree op2b)
5179 tree t;
5180 gimple *stmt = SSA_NAME_DEF_STMT (var);
5182 /* We can only deal with variables whose definitions are assignments. */
5183 if (!is_gimple_assign (stmt))
5184 return NULL_TREE;
5186 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5187 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5188 Then we only have to consider the simpler non-inverted cases. */
5189 if (invert)
5190 t = or_var_with_comparison_1 (stmt,
5191 invert_tree_comparison (code2, false),
5192 op2a, op2b);
5193 else
5194 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
   /* Re-apply the outer negation (when INVERT) and coerce the result
      to boolean type; NULL_TREE propagates unchanged.  */
5195 return canonicalize_bool (t, invert);
5198 /* Try to simplify the AND of the ssa variable defined by the assignment
5199 STMT with the comparison specified by (OP2A CODE2 OP2B).
5200 Return NULL_EXPR if we can't simplify this to a single expression. */
5202 static tree
5203 and_var_with_comparison_1 (gimple *stmt,
5204 enum tree_code code2, tree op2a, tree op2b)
5206 tree var = gimple_assign_lhs (stmt);
5207 tree true_test_var = NULL_TREE;
5208 tree false_test_var = NULL_TREE;
5209 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5211 /* Check for identities like (var AND (var == 0)) => false. */
5212 if (TREE_CODE (op2a) == SSA_NAME
5213 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
   /* (op2a != 0) / (op2a == nonzero) tests op2a for truth ...  */
5215 if ((code2 == NE_EXPR && integer_zerop (op2b))
5216 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5218 true_test_var = op2a;
5219 if (var == true_test_var)
5220 return var;
   /* ... while (op2a == 0) / (op2a != nonzero) tests it for
      falsehood; var AND !var is false.  */
5222 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5223 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5225 false_test_var = op2a;
5226 if (var == false_test_var)
5227 return boolean_false_node;
5231 /* If the definition is a comparison, recurse on it. */
5232 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5234 tree t = and_comparisons_1 (innercode,
5235 gimple_assign_rhs1 (stmt),
5236 gimple_assign_rhs2 (stmt),
5237 code2,
5238 op2a,
5239 op2b);
5240 if (t)
5241 return t;
5244 /* If the definition is an AND or OR expression, we may be able to
5245 simplify by reassociating. */
5246 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5247 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5249 tree inner1 = gimple_assign_rhs1 (stmt);
5250 tree inner2 = gimple_assign_rhs2 (stmt);
5251 gimple *s;
5252 tree t;
5253 tree partial = NULL_TREE;
5254 bool is_and = (innercode == BIT_AND_EXPR);
5256 /* Check for boolean identities that don't require recursive examination
5257 of inner1/inner2:
5258 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5259 inner1 AND (inner1 OR inner2) => inner1
5260 !inner1 AND (inner1 AND inner2) => false
5261 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5262 Likewise for similar cases involving inner2. */
5263 if (inner1 == true_test_var)
5264 return (is_and ? var : inner1);
5265 else if (inner2 == true_test_var)
5266 return (is_and ? var : inner2);
5267 else if (inner1 == false_test_var)
5268 return (is_and
5269 ? boolean_false_node
5270 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5271 else if (inner2 == false_test_var)
5272 return (is_and
5273 ? boolean_false_node
5274 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5276 /* Next, redistribute/reassociate the AND across the inner tests.
5277 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5278 if (TREE_CODE (inner1) == SSA_NAME
5279 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5280 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5281 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5282 gimple_assign_rhs1 (s),
5283 gimple_assign_rhs2 (s),
5284 code2, op2a, op2b)))
5286 /* Handle the AND case, where we are reassociating:
5287 (inner1 AND inner2) AND (op2a code2 op2b)
5288 => (t AND inner2)
5289 If the partial result t is a constant, we win. Otherwise
5290 continue on to try reassociating with the other inner test. */
5291 if (is_and)
5293 if (integer_onep (t))
5294 return inner2;
5295 else if (integer_zerop (t))
5296 return boolean_false_node;
5299 /* Handle the OR case, where we are redistributing:
5300 (inner1 OR inner2) AND (op2a code2 op2b)
5301 => (t OR (inner2 AND (op2a code2 op2b))) */
5302 else if (integer_onep (t))
5303 return boolean_true_node;
5305 /* Save partial result for later. */
5306 partial = t;
5309 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5310 if (TREE_CODE (inner2) == SSA_NAME
5311 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5312 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5313 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5314 gimple_assign_rhs1 (s),
5315 gimple_assign_rhs2 (s),
5316 code2, op2a, op2b)))
5318 /* Handle the AND case, where we are reassociating:
5319 (inner1 AND inner2) AND (op2a code2 op2b)
5320 => (inner1 AND t) */
5321 if (is_and)
5323 if (integer_onep (t))
5324 return inner1;
5325 else if (integer_zerop (t))
5326 return boolean_false_node;
5327 /* If both are the same, we can apply the identity
5328 (x AND x) == x. */
5329 else if (partial && same_bool_result_p (t, partial))
5330 return t;
5333 /* Handle the OR case. where we are redistributing:
5334 (inner1 OR inner2) AND (op2a code2 op2b)
5335 => (t OR (inner1 AND (op2a code2 op2b)))
5336 => (t OR partial) */
5337 else
5339 if (integer_onep (t))
5340 return boolean_true_node;
5341 else if (partial)
5343 /* We already got a simplification for the other
5344 operand to the redistributed OR expression. The
5345 interesting case is when at least one is false.
5346 Or, if both are the same, we can apply the identity
5347 (x OR x) == x. */
5348 if (integer_zerop (partial))
5349 return t;
5350 else if (integer_zerop (t))
5351 return partial;
5352 else if (same_bool_result_p (t, partial))
5353 return t;
   /* No single-expression simplification found.  */
5358 return NULL_TREE;
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges: (x <= c AND x >= c) ==> x == c.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges; the conjunction is always false.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          /* Fall through to the default (non-bool PHI is not handled).  */

        default:
          break;
        }
    }
  return NULL_TREE;
}
5615 /* Try to simplify the AND of two comparisons, specified by
5616 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5617 If this can be simplified to a single expression (without requiring
5618 introducing more SSA variables to hold intermediate values),
5619 return the resulting tree. Otherwise return NULL_TREE.
5620 If the result expression is non-null, it has boolean type. */
5622 tree
5623 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5624 enum tree_code code2, tree op2a, tree op2b)
5626 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5627 if (t)
5628 return t;
5629 else
5630 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5633 /* Helper function for or_comparisons_1: try to simplify the OR of the
5634 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5635 If INVERT is true, invert the value of VAR before doing the OR.
5636 Return NULL_EXPR if we can't simplify this to a single expression. */
5638 static tree
5639 or_var_with_comparison (tree var, bool invert,
5640 enum tree_code code2, tree op2a, tree op2b)
5642 tree t;
5643 gimple *stmt = SSA_NAME_DEF_STMT (var);
5645 /* We can only deal with variables whose definitions are assignments. */
5646 if (!is_gimple_assign (stmt))
5647 return NULL_TREE;
5649 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5650 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5651 Then we only have to consider the simpler non-inverted cases. */
5652 if (invert)
5653 t = and_var_with_comparison_1 (stmt,
5654 invert_tree_comparison (code2, false),
5655 op2a, op2b);
5656 else
5657 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5658 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2,
                                 op2a,
                                 op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges: (x < c OR x > c) ==> x != c.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          /* Fall through to the default (non-bool PHI is not handled).  */

        default:
          break;
        }
    }
  return NULL_TREE;
}
6079 /* Try to simplify the OR of two comparisons, specified by
6080 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6081 If this can be simplified to a single expression (without requiring
6082 introducing more SSA variables to hold intermediate values),
6083 return the resulting tree. Otherwise return NULL_TREE.
6084 If the result expression is non-null, it has boolean type. */
6086 tree
6087 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6088 enum tree_code code2, tree op2a, tree op2b)
6090 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6091 if (t)
6092 return t;
6093 else
6094 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
                                tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
        res = ops[0];
      else if (mprts_hook)
        res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
        {
          if (dump_file && dump_flags & TDF_DETAILS)
            {
              fprintf (dump_file, "Match-and-simplified ");
              print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, "\n");
            }
          return res;
        }
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  poly_int64 offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && known_eq (CONSTRUCTOR_NELTS (rhs),
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i, nelts;
                  tree val;

                  nelts = CONSTRUCTOR_NELTS (rhs);
                  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec.quick_push (val);
                      else
                        return NULL_TREE;
                    }

                  return vec.build ();
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we cant reduce to a SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        gcall *call_stmt = as_a <gcall *> (stmt);

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              case IFN_BUILTIN_EXPECT:
                  {
                    tree arg0 = gimple_call_arg (stmt, 0);
                    tree op0 = (*valueize) (arg0);
                    if (TREE_CODE (op0) == INTEGER_CST)
                      return op0;
                    return NULL_TREE;
                  }
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
              }
            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            retval = fold_builtin_call_array (loc,
                                              gimple_call_return_type (call_stmt),
                                              fn, gimple_call_num_args (stmt), args);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (call_stmt),
                                       retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6405 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6406 Returns NULL_TREE if folding to a constant is not possible, otherwise
6407 returns a constant according to is_gimple_min_invariant. */
6409 tree
6410 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6412 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6413 if (res && is_gimple_min_invariant (res))
6414 return res;
6415 return NULL_TREE;
6419 /* The following set of functions are supposed to fold references using
6420 their constant initializers. */
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* A non-zero MEM_REF offset must fit a signed HWI to be tracked;
	 fold it into *BIT_OFFSET, converting bytes to bits.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).force_shwi ()
			  * BITS_PER_UNIT);
	}

      /* Look through an SSA-name address to its known value, if any.  */
      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not change the underlying storage;
	 recurse on its operand.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only handle accesses with a known, exact extent.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  FROM_DECL is the declaration the
   constructor comes from, used for canonicalization of the result.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL is the declaration the constructor comes from.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk every initialized field and look for one overlapping the
     requested bit range.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory; fold a reference of type TYPE and
   size POLY_SIZE to the memory at bit POLY_OFFSET.  FROM_DECL is the
   declaration the constructor comes from.  Returns NULL_TREE when the
   reference cannot be folded to a constant.  */

tree
fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
		     poly_uint64 poly_size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the shape of the aggregate being read from.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold volatile accesses away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6807 tree
6808 fold_const_aggregate_ref (tree t)
6810 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the
     array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable VAR_DECL
     and a byte offset into it.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Give up on non-pointers and ref-all pointers (which may alias
     anything, so the rewrites below would be unsafe).  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access lies within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The offset must be exactly one element, i.e. the imag part.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the inner indirection first.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7072 /* Return true if CODE is an operation that when operating on signed
7073 integer types involves undefined behavior on overflow and the
7074 operation can be expressed with unsigned arithmetic. */
7076 bool
7077 arith_code_with_undefined_signed_overflow (tree_code code)
7079 switch (code)
7081 case PLUS_EXPR:
7082 case MINUS_EXPR:
7083 case MULT_EXPR:
7084 case NEGATE_EXPR:
7085 case POINTER_PLUS_EXPR:
7086 return true;
7087 default:
7088 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand to the unsigned type, collecting the
     conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Retarget STMT to a fresh unsigned SSA name.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on the unsigned integer type;
     plain PLUS_EXPR computes the same bits.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS and type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7130 /* The valueization hook we use for the gimple_build API simplification.
7131 This makes us match fold_buildN behavior by only combining with
7132 statements in the sequence(s) we are currently building. */
7134 static tree
7135 gimple_build_valueize (tree op)
7137 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7138 return op;
7139 return NULL_TREE;
7142 /* Build the expression CODE OP0 of type TYPE with location LOC,
7143 simplifying it first if possible. Returns the built
7144 expression value and appends statements possibly defining it
7145 to SEQ. */
7147 tree
7148 gimple_build (gimple_seq *seq, location_t loc,
7149 enum tree_code code, tree type, tree op0)
7151 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7152 if (!res)
7154 res = create_tmp_reg_or_ssa_name (type);
7155 gimple *stmt;
7156 if (code == REALPART_EXPR
7157 || code == IMAGPART_EXPR
7158 || code == VIEW_CONVERT_EXPR)
7159 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7160 else
7161 stmt = gimple_build_assign (res, code, op0);
7162 gimple_set_location (stmt, loc);
7163 gimple_seq_add_stmt_without_update (seq, stmt);
7165 return res;
7168 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7169 simplifying it first if possible. Returns the built
7170 expression value and appends statements possibly defining it
7171 to SEQ. */
7173 tree
7174 gimple_build (gimple_seq *seq, location_t loc,
7175 enum tree_code code, tree type, tree op0, tree op1)
7177 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7178 if (!res)
7180 res = create_tmp_reg_or_ssa_name (type);
7181 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7182 gimple_set_location (stmt, loc);
7183 gimple_seq_add_stmt_without_update (seq, stmt);
7185 return res;
7188 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7189 simplifying it first if possible. Returns the built
7190 expression value and appends statements possibly defining it
7191 to SEQ. */
7193 tree
7194 gimple_build (gimple_seq *seq, location_t loc,
7195 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7197 tree res = gimple_simplify (code, type, op0, op1, op2,
7198 seq, gimple_build_valueize);
7199 if (!res)
7201 res = create_tmp_reg_or_ssa_name (type);
7202 gimple *stmt;
7203 if (code == BIT_FIELD_REF)
7204 stmt = gimple_build_assign (res, code,
7205 build3 (code, type, op0, op1, op2));
7206 else
7207 stmt = gimple_build_assign (res, code, op0, op1, op2);
7208 gimple_set_location (stmt, loc);
7209 gimple_seq_add_stmt_without_update (seq, stmt);
7211 return res;
7214 /* Build the call FN (ARG0) with a result of type TYPE
7215 (or no result if TYPE is void) with location LOC,
7216 simplifying it first if possible. Returns the built
7217 expression value (or NULL_TREE if TYPE is void) and appends
7218 statements possibly defining it to SEQ. */
7220 tree
7221 gimple_build (gimple_seq *seq, location_t loc,
7222 enum built_in_function fn, tree type, tree arg0)
7224 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7225 if (!res)
7227 tree decl = builtin_decl_implicit (fn);
7228 gimple *stmt = gimple_build_call (decl, 1, arg0);
7229 if (!VOID_TYPE_P (type))
7231 res = create_tmp_reg_or_ssa_name (type);
7232 gimple_call_set_lhs (stmt, res);
7234 gimple_set_location (stmt, loc);
7235 gimple_seq_add_stmt_without_update (seq, stmt);
7237 return res;
7240 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7241 (or no result if TYPE is void) with location LOC,
7242 simplifying it first if possible. Returns the built
7243 expression value (or NULL_TREE if TYPE is void) and appends
7244 statements possibly defining it to SEQ. */
7246 tree
7247 gimple_build (gimple_seq *seq, location_t loc,
7248 enum built_in_function fn, tree type, tree arg0, tree arg1)
7250 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7251 if (!res)
7253 tree decl = builtin_decl_implicit (fn);
7254 gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
7255 if (!VOID_TYPE_P (type))
7257 res = create_tmp_reg_or_ssa_name (type);
7258 gimple_call_set_lhs (stmt, res);
7260 gimple_set_location (stmt, loc);
7261 gimple_seq_add_stmt_without_update (seq, stmt);
7263 return res;
7266 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7267 (or no result if TYPE is void) with location LOC,
7268 simplifying it first if possible. Returns the built
7269 expression value (or NULL_TREE if TYPE is void) and appends
7270 statements possibly defining it to SEQ. */
7272 tree
7273 gimple_build (gimple_seq *seq, location_t loc,
7274 enum built_in_function fn, tree type,
7275 tree arg0, tree arg1, tree arg2)
7277 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7278 seq, gimple_build_valueize);
7279 if (!res)
7281 tree decl = builtin_decl_implicit (fn);
7282 gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7283 if (!VOID_TYPE_P (type))
7285 res = create_tmp_reg_or_ssa_name (type);
7286 gimple_call_set_lhs (stmt, res);
7288 gimple_set_location (stmt, loc);
7289 gimple_seq_add_stmt_without_update (seq, stmt);
7291 return res;
7294 /* Build the conversion (TYPE) OP with a result of type TYPE
7295 with location LOC if such conversion is neccesary in GIMPLE,
7296 simplifying it first.
7297 Returns the built expression value and appends
7298 statements possibly defining it to SEQ. */
7300 tree
7301 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7303 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7304 return op;
7305 return gimple_build (seq, loc, NOP_EXPR, type, op);
7308 /* Build the conversion (ptrofftype) OP with a result of a type
7309 compatible with ptrofftype with location LOC if such conversion
7310 is neccesary in GIMPLE, simplifying it first.
7311 Returns the built expression value and appends
7312 statements possibly defining it to SEQ. */
7314 tree
7315 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7317 if (ptrofftype_p (TREE_TYPE (op)))
7318 return op;
7319 return gimple_convert (seq, loc, sizetype, op);
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  /* Variable-length vectors of a non-constant element cannot be
     represented as a VECTOR_CST; use VEC_DUPLICATE_EXPR instead.  */
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  /* The built vector is not a valid gimple value (e.g. a CONSTRUCTOR);
     assign it to a fresh temporary.  */
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan the encoded elements; if any is non-constant we must emit a
     CONSTRUCTOR assignment instead of a VECTOR_CST.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	/* NB: reuses the outer loop variable I to fill all NELTS
	   elements, then returns — the outer loop does not resume.  */
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant; build a VECTOR_CST directly.  */
  return builder->build ();
}
7385 /* Return true if the result of assignment STMT is known to be non-negative.
7386 If the return value is based on the assumption that signed overflow is
7387 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7388 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7390 static bool
7391 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7392 int depth)
7394 enum tree_code code = gimple_assign_rhs_code (stmt);
7395 switch (get_gimple_rhs_class (code))
7397 case GIMPLE_UNARY_RHS:
7398 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7399 gimple_expr_type (stmt),
7400 gimple_assign_rhs1 (stmt),
7401 strict_overflow_p, depth);
7402 case GIMPLE_BINARY_RHS:
7403 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7404 gimple_expr_type (stmt),
7405 gimple_assign_rhs1 (stmt),
7406 gimple_assign_rhs2 (stmt),
7407 strict_overflow_p, depth);
7408 case GIMPLE_TERNARY_RHS:
7409 return false;
7410 case GIMPLE_SINGLE_RHS:
7411 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7412 strict_overflow_p, depth);
7413 case GIMPLE_INVALID_RHS:
7414 break;
7416 gcc_unreachable ();
7419 /* Return true if return value of call STMT is known to be non-negative.
7420 If the return value is based on the assumption that signed overflow is
7421 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7422 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7424 static bool
7425 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7426 int depth)
7428 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7429 gimple_call_arg (stmt, 0) : NULL_TREE;
7430 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7431 gimple_call_arg (stmt, 1) : NULL_TREE;
7433 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7434 gimple_call_combined_fn (stmt),
7435 arg0,
7436 arg1,
7437 strict_overflow_p, depth);
7440 /* Return true if return value of call STMT is known to be non-negative.
7441 If the return value is based on the assumption that signed overflow is
7442 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7443 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7445 static bool
7446 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7447 int depth)
7449 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7451 tree arg = gimple_phi_arg_def (stmt, i);
7452 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7453 return false;
7455 return true;
7458 /* Return true if STMT is known to compute a non-negative value.
7459 If the return value is based on the assumption that signed overflow is
7460 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7461 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7463 bool
7464 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7465 int depth)
7467 switch (gimple_code (stmt))
7469 case GIMPLE_ASSIGN:
7470 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7471 depth);
7472 case GIMPLE_CALL:
7473 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7474 depth);
7475 case GIMPLE_PHI:
7476 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7477 depth);
7478 default:
7479 return false;
7483 /* Return true if the floating-point value computed by assignment STMT
7484 is known to have an integer value. We also allow +Inf, -Inf and NaN
7485 to be considered integer values. Return false for signaling NaN.
7487 DEPTH is the current nesting depth of the query. */
7489 static bool
7490 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7492 enum tree_code code = gimple_assign_rhs_code (stmt);
7493 switch (get_gimple_rhs_class (code))
7495 case GIMPLE_UNARY_RHS:
7496 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7497 gimple_assign_rhs1 (stmt), depth);
7498 case GIMPLE_BINARY_RHS:
7499 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7500 gimple_assign_rhs1 (stmt),
7501 gimple_assign_rhs2 (stmt), depth);
7502 case GIMPLE_TERNARY_RHS:
7503 return false;
7504 case GIMPLE_SINGLE_RHS:
7505 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7506 case GIMPLE_INVALID_RHS:
7507 break;
7509 gcc_unreachable ();
7512 /* Return true if the floating-point value computed by call STMT is known
7513 to have an integer value. We also allow +Inf, -Inf and NaN to be
7514 considered integer values. Return false for signaling NaN.
7516 DEPTH is the current nesting depth of the query. */
7518 static bool
7519 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7521 tree arg0 = (gimple_call_num_args (stmt) > 0
7522 ? gimple_call_arg (stmt, 0)
7523 : NULL_TREE);
7524 tree arg1 = (gimple_call_num_args (stmt) > 1
7525 ? gimple_call_arg (stmt, 1)
7526 : NULL_TREE);
7527 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7528 arg0, arg1, depth);
7531 /* Return true if the floating-point result of phi STMT is known to have
7532 an integer value. We also allow +Inf, -Inf and NaN to be considered
7533 integer values. Return false for signaling NaN.
7535 DEPTH is the current nesting depth of the query. */
7537 static bool
7538 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7540 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7542 tree arg = gimple_phi_arg_def (stmt, i);
7543 if (!integer_valued_real_single_p (arg, depth + 1))
7544 return false;
7546 return true;
7549 /* Return true if the floating-point value computed by STMT is known
7550 to have an integer value. We also allow +Inf, -Inf and NaN to be
7551 considered integer values. Return false for signaling NaN.
7553 DEPTH is the current nesting depth of the query. */
7555 bool
7556 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7558 switch (gimple_code (stmt))
7560 case GIMPLE_ASSIGN:
7561 return gimple_assign_integer_valued_real_p (stmt, depth);
7562 case GIMPLE_CALL:
7563 return gimple_call_integer_valued_real_p (stmt, depth);
7564 case GIMPLE_PHI:
7565 return gimple_phi_integer_valued_real_p (stmt, depth);
7566 default:
7567 return false;