[NDS32] Implement fp-as-gp optimization.
[official-gcc.git] / gcc / gimple-fold.c
blobf6d758a6828cc22467be72d852092dd68566c746
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "ipa-chkp.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
70 /* Return true when DECL can be referenced from current unit.
71 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
72 We can get declarations that are not possible to reference for various
73 reasons:
75 1) When analyzing C++ virtual tables.
76 C++ virtual tables do have known constructors even
77 when they are keyed to other compilation unit.
78 Those tables can contain pointers to methods and vars
79 in other units. Those methods have both STATIC and EXTERNAL
80 set.
81 2) In WHOPR mode devirtualization might lead to reference
82 to method that was partitioned elsehwere.
83 In this case we have static VAR_DECL or FUNCTION_DECL
84 that has no corresponding callgraph/varpool node
85 declaring the body.
86 3) COMDAT functions referred by external vtables that
87 we devirtualize only during final compilation stage.
88 At this time we already decided that we will not output
89 the function body and thus we can't reference the symbol
90 directly. */
92 static bool
93 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
95 varpool_node *vnode;
96 struct cgraph_node *node;
97 symtab_node *snode;
99 if (DECL_ABSTRACT_P (decl))
100 return false;
102 /* We are concerned only about static/external vars and functions. */
103 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
104 || !VAR_OR_FUNCTION_DECL_P (decl))
105 return true;
107 /* Static objects can be referred only if they was not optimized out yet. */
108 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
110 /* Before we start optimizing unreachable code we can be sure all
111 static objects are defined. */
112 if (symtab->function_flags_ready)
113 return true;
114 snode = symtab_node::get (decl);
115 if (!snode || !snode->definition)
116 return false;
117 node = dyn_cast <cgraph_node *> (snode);
118 return !node || !node->global.inlined_to;
121 /* We will later output the initializer, so we can refer to it.
122 So we are concerned only when DECL comes from initializer of
123 external var or var that has been optimized out. */
124 if (!from_decl
125 || !VAR_P (from_decl)
126 || (!DECL_EXTERNAL (from_decl)
127 && (vnode = varpool_node::get (from_decl)) != NULL
128 && vnode->definition)
129 || (flag_ltrans
130 && (vnode = varpool_node::get (from_decl)) != NULL
131 && vnode->in_other_partition))
132 return true;
133 /* We are folding reference from external vtable. The vtable may reffer
134 to a symbol keyed to other compilation unit. The other compilation
135 unit may be in separate DSO and the symbol may be hidden. */
136 if (DECL_VISIBILITY_SPECIFIED (decl)
137 && DECL_EXTERNAL (decl)
138 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
139 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
140 return false;
141 /* When function is public, we always can introduce new reference.
142 Exception are the COMDAT functions where introducing a direct
143 reference imply need to include function body in the curren tunit. */
144 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
145 return true;
146 /* We have COMDAT. We are going to check if we still have definition
147 or if the definition is going to be output in other partition.
148 Bypass this when gimplifying; all needed functions will be produced.
150 As observed in PR20991 for already optimized out comdat virtual functions
151 it may be tempting to not necessarily give up because the copy will be
152 output elsewhere when corresponding vtable is output.
153 This is however not possible - ABI specify that COMDATs are output in
154 units where they are used and when the other unit was compiled with LTO
155 it is possible that vtable was kept public while the function itself
156 was privatized. */
157 if (!symtab->function_flags_ready)
158 return true;
160 snode = symtab_node::get (decl);
161 if (!snode
162 || ((!snode->definition || DECL_EXTERNAL (decl))
163 && (!snode->in_other_partition
164 || (!snode->forced_by_abi && !snode->force_output))))
165 return false;
166 node = dyn_cast <cgraph_node *> (snode);
167 return !node || !node->global.inlined_to;
170 /* Create a temporary for TYPE for a statement STMT. If the current function
171 is in SSA form, a SSA name is created. Otherwise a temporary register
172 is made. */
174 tree
175 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
177 if (gimple_in_ssa_p (cfun))
178 return make_ssa_name (type, stmt);
179 else
180 return create_tmp_reg (type);
183 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
184 acceptable form for is_gimple_min_invariant.
185 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
187 tree
188 canonicalize_constructor_val (tree cval, tree from_decl)
190 tree orig_cval = cval;
191 STRIP_NOPS (cval);
192 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
193 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
195 tree ptr = TREE_OPERAND (cval, 0);
196 if (is_gimple_min_invariant (ptr))
197 cval = build1_loc (EXPR_LOCATION (cval),
198 ADDR_EXPR, TREE_TYPE (ptr),
199 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
200 ptr,
201 fold_convert (ptr_type_node,
202 TREE_OPERAND (cval, 1))));
204 if (TREE_CODE (cval) == ADDR_EXPR)
206 tree base = NULL_TREE;
207 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
209 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
210 if (base)
211 TREE_OPERAND (cval, 0) = base;
213 else
214 base = get_base_address (TREE_OPERAND (cval, 0));
215 if (!base)
216 return NULL_TREE;
218 if (VAR_OR_FUNCTION_DECL_P (base)
219 && !can_refer_decl_in_current_unit_p (base, from_decl))
220 return NULL_TREE;
221 if (TREE_TYPE (base) == error_mark_node)
222 return NULL_TREE;
223 if (VAR_P (base))
224 TREE_ADDRESSABLE (base) = 1;
225 else if (TREE_CODE (base) == FUNCTION_DECL)
227 /* Make sure we create a cgraph node for functions we'll reference.
228 They can be non-existent if the reference comes from an entry
229 of an external vtable for example. */
230 cgraph_node::get_create (base);
232 /* Fixup types in global initializers. */
233 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
234 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
236 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
237 cval = fold_convert (TREE_TYPE (orig_cval), cval);
238 return cval;
240 if (TREE_OVERFLOW_P (cval))
241 return drop_tree_overflow (cval);
242 return orig_cval;
245 /* If SYM is a constant variable with known value, return the value.
246 NULL_TREE is returned otherwise. */
248 tree
249 get_symbol_constant_value (tree sym)
251 tree val = ctor_for_folding (sym);
252 if (val != error_mark_node)
254 if (val)
256 val = canonicalize_constructor_val (unshare_expr (val), sym);
257 if (val && is_gimple_min_invariant (val))
258 return val;
259 else
260 return NULL_TREE;
262 /* Variables declared 'const' without an initializer
263 have zero as the initializer if they may not be
264 overridden at link or run time. */
265 if (!val
266 && is_gimple_reg_type (TREE_TYPE (sym)))
267 return build_zero_cst (TREE_TYPE (sym));
270 return NULL_TREE;
275 /* Subroutine of fold_stmt. We perform several simplifications of the
276 memory reference tree EXPR and make sure to re-gimplify them properly
277 after propagation of constant addresses. IS_LHS is true if the
278 reference is supposed to be an lvalue. */
280 static tree
281 maybe_fold_reference (tree expr, bool is_lhs)
283 tree result;
285 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
286 || TREE_CODE (expr) == REALPART_EXPR
287 || TREE_CODE (expr) == IMAGPART_EXPR)
288 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
289 return fold_unary_loc (EXPR_LOCATION (expr),
290 TREE_CODE (expr),
291 TREE_TYPE (expr),
292 TREE_OPERAND (expr, 0));
293 else if (TREE_CODE (expr) == BIT_FIELD_REF
294 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
295 return fold_ternary_loc (EXPR_LOCATION (expr),
296 TREE_CODE (expr),
297 TREE_TYPE (expr),
298 TREE_OPERAND (expr, 0),
299 TREE_OPERAND (expr, 1),
300 TREE_OPERAND (expr, 2));
302 if (!is_lhs
303 && (result = fold_const_aggregate_ref (expr))
304 && is_gimple_min_invariant (result))
305 return result;
307 return NULL_TREE;
311 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
312 replacement rhs for the statement or NULL_TREE if no simplification
313 could be made. It is assumed that the operands have been previously
314 folded. */
316 static tree
317 fold_gimple_assign (gimple_stmt_iterator *si)
319 gimple *stmt = gsi_stmt (*si);
320 enum tree_code subcode = gimple_assign_rhs_code (stmt);
321 location_t loc = gimple_location (stmt);
323 tree result = NULL_TREE;
325 switch (get_gimple_rhs_class (subcode))
327 case GIMPLE_SINGLE_RHS:
329 tree rhs = gimple_assign_rhs1 (stmt);
331 if (TREE_CLOBBER_P (rhs))
332 return NULL_TREE;
334 if (REFERENCE_CLASS_P (rhs))
335 return maybe_fold_reference (rhs, false);
337 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
339 tree val = OBJ_TYPE_REF_EXPR (rhs);
340 if (is_gimple_min_invariant (val))
341 return val;
342 else if (flag_devirtualize && virtual_method_call_p (rhs))
344 bool final;
345 vec <cgraph_node *>targets
346 = possible_polymorphic_call_targets (rhs, stmt, &final);
347 if (final && targets.length () <= 1 && dbg_cnt (devirt))
349 if (dump_enabled_p ())
351 location_t loc = gimple_location_safe (stmt);
352 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
353 "resolving virtual function address "
354 "reference to function %s\n",
355 targets.length () == 1
356 ? targets[0]->name ()
357 : "NULL");
359 if (targets.length () == 1)
361 val = fold_convert (TREE_TYPE (val),
362 build_fold_addr_expr_loc
363 (loc, targets[0]->decl));
364 STRIP_USELESS_TYPE_CONVERSION (val);
366 else
367 /* We can not use __builtin_unreachable here because it
368 can not have address taken. */
369 val = build_int_cst (TREE_TYPE (val), 0);
370 return val;
375 else if (TREE_CODE (rhs) == ADDR_EXPR)
377 tree ref = TREE_OPERAND (rhs, 0);
378 tree tem = maybe_fold_reference (ref, true);
379 if (tem
380 && TREE_CODE (tem) == MEM_REF
381 && integer_zerop (TREE_OPERAND (tem, 1)))
382 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
383 else if (tem)
384 result = fold_convert (TREE_TYPE (rhs),
385 build_fold_addr_expr_loc (loc, tem));
386 else if (TREE_CODE (ref) == MEM_REF
387 && integer_zerop (TREE_OPERAND (ref, 1)))
388 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
390 if (result)
392 /* Strip away useless type conversions. Both the
393 NON_LVALUE_EXPR that may have been added by fold, and
394 "useless" type conversions that might now be apparent
395 due to propagation. */
396 STRIP_USELESS_TYPE_CONVERSION (result);
398 if (result != rhs && valid_gimple_rhs_p (result))
399 return result;
403 else if (TREE_CODE (rhs) == CONSTRUCTOR
404 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
406 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
407 unsigned i;
408 tree val;
410 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
411 if (! CONSTANT_CLASS_P (val))
412 return NULL_TREE;
414 return build_vector_from_ctor (TREE_TYPE (rhs),
415 CONSTRUCTOR_ELTS (rhs));
418 else if (DECL_P (rhs))
419 return get_symbol_constant_value (rhs);
421 break;
423 case GIMPLE_UNARY_RHS:
424 break;
426 case GIMPLE_BINARY_RHS:
427 break;
429 case GIMPLE_TERNARY_RHS:
430 result = fold_ternary_loc (loc, subcode,
431 TREE_TYPE (gimple_assign_lhs (stmt)),
432 gimple_assign_rhs1 (stmt),
433 gimple_assign_rhs2 (stmt),
434 gimple_assign_rhs3 (stmt));
436 if (result)
438 STRIP_USELESS_TYPE_CONVERSION (result);
439 if (valid_gimple_rhs_p (result))
440 return result;
442 break;
444 case GIMPLE_INVALID_RHS:
445 gcc_unreachable ();
448 return NULL_TREE;
452 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
453 adjusting the replacement stmts location and virtual operands.
454 If the statement has a lhs the last stmt in the sequence is expected
455 to assign to that lhs. */
457 static void
458 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
460 gimple *stmt = gsi_stmt (*si_p);
462 if (gimple_has_location (stmt))
463 annotate_all_with_location (stmts, gimple_location (stmt));
465 /* First iterate over the replacement statements backward, assigning
466 virtual operands to their defining statements. */
467 gimple *laststore = NULL;
468 for (gimple_stmt_iterator i = gsi_last (stmts);
469 !gsi_end_p (i); gsi_prev (&i))
471 gimple *new_stmt = gsi_stmt (i);
472 if ((gimple_assign_single_p (new_stmt)
473 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
474 || (is_gimple_call (new_stmt)
475 && (gimple_call_flags (new_stmt)
476 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
478 tree vdef;
479 if (!laststore)
480 vdef = gimple_vdef (stmt);
481 else
482 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
483 gimple_set_vdef (new_stmt, vdef);
484 if (vdef && TREE_CODE (vdef) == SSA_NAME)
485 SSA_NAME_DEF_STMT (vdef) = new_stmt;
486 laststore = new_stmt;
490 /* Second iterate over the statements forward, assigning virtual
491 operands to their uses. */
492 tree reaching_vuse = gimple_vuse (stmt);
493 for (gimple_stmt_iterator i = gsi_start (stmts);
494 !gsi_end_p (i); gsi_next (&i))
496 gimple *new_stmt = gsi_stmt (i);
497 /* If the new statement possibly has a VUSE, update it with exact SSA
498 name we know will reach this one. */
499 if (gimple_has_mem_ops (new_stmt))
500 gimple_set_vuse (new_stmt, reaching_vuse);
501 gimple_set_modified (new_stmt, true);
502 if (gimple_vdef (new_stmt))
503 reaching_vuse = gimple_vdef (new_stmt);
506 /* If the new sequence does not do a store release the virtual
507 definition of the original statement. */
508 if (reaching_vuse
509 && reaching_vuse == gimple_vuse (stmt))
511 tree vdef = gimple_vdef (stmt);
512 if (vdef
513 && TREE_CODE (vdef) == SSA_NAME)
515 unlink_stmt_vdef (stmt);
516 release_ssa_name (vdef);
520 /* Finally replace the original statement with the sequence. */
521 gsi_replace_with_seq (si_p, stmts, false);
524 /* Convert EXPR into a GIMPLE value suitable for substitution on the
525 RHS of an assignment. Insert the necessary statements before
526 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
527 is replaced. If the call is expected to produces a result, then it
528 is replaced by an assignment of the new RHS to the result variable.
529 If the result is to be ignored, then the call is replaced by a
530 GIMPLE_NOP. A proper VDEF chain is retained by making the first
531 VUSE and the last VDEF of the whole sequence be the same as the replaced
532 statement and using new SSA names for stores in between. */
534 void
535 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
537 tree lhs;
538 gimple *stmt, *new_stmt;
539 gimple_stmt_iterator i;
540 gimple_seq stmts = NULL;
542 stmt = gsi_stmt (*si_p);
544 gcc_assert (is_gimple_call (stmt));
546 push_gimplify_context (gimple_in_ssa_p (cfun));
548 lhs = gimple_call_lhs (stmt);
549 if (lhs == NULL_TREE)
551 gimplify_and_add (expr, &stmts);
552 /* We can end up with folding a memcpy of an empty class assignment
553 which gets optimized away by C++ gimplification. */
554 if (gimple_seq_empty_p (stmts))
556 pop_gimplify_context (NULL);
557 if (gimple_in_ssa_p (cfun))
559 unlink_stmt_vdef (stmt);
560 release_defs (stmt);
562 gsi_replace (si_p, gimple_build_nop (), false);
563 return;
566 else
568 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
569 new_stmt = gimple_build_assign (lhs, tmp);
570 i = gsi_last (stmts);
571 gsi_insert_after_without_update (&i, new_stmt,
572 GSI_CONTINUE_LINKING);
575 pop_gimplify_context (NULL);
577 gsi_replace_with_seq_vops (si_p, stmts);
581 /* Replace the call at *GSI with the gimple value VAL. */
583 void
584 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
586 gimple *stmt = gsi_stmt (*gsi);
587 tree lhs = gimple_call_lhs (stmt);
588 gimple *repl;
589 if (lhs)
591 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
592 val = fold_convert (TREE_TYPE (lhs), val);
593 repl = gimple_build_assign (lhs, val);
595 else
596 repl = gimple_build_nop ();
597 tree vdef = gimple_vdef (stmt);
598 if (vdef && TREE_CODE (vdef) == SSA_NAME)
600 unlink_stmt_vdef (stmt);
601 release_ssa_name (vdef);
603 gsi_replace (gsi, repl, false);
606 /* Replace the call at *GSI with the new call REPL and fold that
607 again. */
609 static void
610 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
612 gimple *stmt = gsi_stmt (*gsi);
613 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
614 gimple_set_location (repl, gimple_location (stmt));
615 if (gimple_vdef (stmt)
616 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
618 gimple_set_vdef (repl, gimple_vdef (stmt));
619 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
621 if (gimple_vuse (stmt))
622 gimple_set_vuse (repl, gimple_vuse (stmt));
623 gsi_replace (gsi, repl, false);
624 fold_stmt (gsi);
627 /* Return true if VAR is a VAR_DECL or a component thereof. */
629 static bool
630 var_decl_component_p (tree var)
632 tree inner = var;
633 while (handled_component_p (inner))
634 inner = TREE_OPERAND (inner, 0);
635 return (DECL_P (inner)
636 || (TREE_CODE (inner) == MEM_REF
637 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
640 /* If the SIZE argument representing the size of an object is in a range
641 of values of which exactly one is valid (and that is zero), return
642 true, otherwise false. */
644 static bool
645 size_must_be_zero_p (tree size)
647 if (integer_zerop (size))
648 return true;
650 if (TREE_CODE (size) != SSA_NAME)
651 return false;
653 wide_int min, max;
654 enum value_range_type rtype = get_range_info (size, &min, &max);
655 if (rtype != VR_ANTI_RANGE)
656 return false;
658 tree type = TREE_TYPE (size);
659 int prec = TYPE_PRECISION (type);
661 wide_int wone = wi::one (prec);
663 /* Compute the value of SSIZE_MAX, the largest positive value that
664 can be stored in ssize_t, the signed counterpart of size_t. */
665 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
667 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
670 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
671 diagnose (otherwise undefined) overlapping copies without preventing
672 folding. When folded, GCC guarantees that overlapping memcpy has
673 the same semantics as memmove. Call to the library memcpy need not
674 provide the same guarantee. Return false if no simplification can
675 be made. */
677 static bool
678 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
679 tree dest, tree src, int endp)
681 gimple *stmt = gsi_stmt (*gsi);
682 tree lhs = gimple_call_lhs (stmt);
683 tree len = gimple_call_arg (stmt, 2);
684 tree destvar, srcvar;
685 location_t loc = gimple_location (stmt);
687 bool nowarn = gimple_no_warning_p (stmt);
689 /* If the LEN parameter is a constant zero or in range where
690 the only valid value is zero, return DEST. */
691 if (size_must_be_zero_p (len))
693 gimple *repl;
694 if (gimple_call_lhs (stmt))
695 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
696 else
697 repl = gimple_build_nop ();
698 tree vdef = gimple_vdef (stmt);
699 if (vdef && TREE_CODE (vdef) == SSA_NAME)
701 unlink_stmt_vdef (stmt);
702 release_ssa_name (vdef);
704 gsi_replace (gsi, repl, false);
705 return true;
708 /* If SRC and DEST are the same (and not volatile), return
709 DEST{,+LEN,+LEN-1}. */
710 if (operand_equal_p (src, dest, 0))
712 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
713 It's safe and may even be emitted by GCC itself (see bug
714 32667). */
715 unlink_stmt_vdef (stmt);
716 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
717 release_ssa_name (gimple_vdef (stmt));
718 if (!lhs)
720 gsi_replace (gsi, gimple_build_nop (), false);
721 return true;
723 goto done;
725 else
727 tree srctype, desttype;
728 unsigned int src_align, dest_align;
729 tree off0;
731 /* Inlining of memcpy/memmove may cause bounds lost (if we copy
732 pointers as wide integer) and also may result in huge function
733 size because of inlined bounds copy. Thus don't inline for
734 functions we want to instrument. */
735 if (flag_check_pointer_bounds
736 && chkp_instrumentable_p (cfun->decl)
737 /* Even if data may contain pointers we can inline if copy
738 less than a pointer size. */
739 && (!tree_fits_uhwi_p (len)
740 || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
741 return false;
743 /* Build accesses at offset zero with a ref-all character type. */
744 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
745 ptr_mode, true), 0);
747 /* If we can perform the copy efficiently with first doing all loads
748 and then all stores inline it that way. Currently efficiently
749 means that we can load all the memory into a single integer
750 register which is what MOVE_MAX gives us. */
751 src_align = get_pointer_alignment (src);
752 dest_align = get_pointer_alignment (dest);
753 if (tree_fits_uhwi_p (len)
754 && compare_tree_int (len, MOVE_MAX) <= 0
755 /* ??? Don't transform copies from strings with known length this
756 confuses the tree-ssa-strlen.c. This doesn't handle
757 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
758 reason. */
759 && !c_strlen (src, 2))
761 unsigned ilen = tree_to_uhwi (len);
762 if (pow2p_hwi (ilen))
764 /* Detect invalid bounds and overlapping copies and issue
765 either -Warray-bounds or -Wrestrict. */
766 if (!nowarn
767 && check_bounds_or_overlap (as_a <gcall *>(stmt),
768 dest, src, len, len))
769 gimple_set_no_warning (stmt, true);
771 scalar_int_mode mode;
772 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
773 if (type
774 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
775 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
776 /* If the destination pointer is not aligned we must be able
777 to emit an unaligned store. */
778 && (dest_align >= GET_MODE_ALIGNMENT (mode)
779 || !targetm.slow_unaligned_access (mode, dest_align)
780 || (optab_handler (movmisalign_optab, mode)
781 != CODE_FOR_nothing)))
783 tree srctype = type;
784 tree desttype = type;
785 if (src_align < GET_MODE_ALIGNMENT (mode))
786 srctype = build_aligned_type (type, src_align);
787 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
788 tree tem = fold_const_aggregate_ref (srcmem);
789 if (tem)
790 srcmem = tem;
791 else if (src_align < GET_MODE_ALIGNMENT (mode)
792 && targetm.slow_unaligned_access (mode, src_align)
793 && (optab_handler (movmisalign_optab, mode)
794 == CODE_FOR_nothing))
795 srcmem = NULL_TREE;
796 if (srcmem)
798 gimple *new_stmt;
799 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
801 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
802 srcmem
803 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
804 new_stmt);
805 gimple_assign_set_lhs (new_stmt, srcmem);
806 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
807 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
809 if (dest_align < GET_MODE_ALIGNMENT (mode))
810 desttype = build_aligned_type (type, dest_align);
811 new_stmt
812 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
813 dest, off0),
814 srcmem);
815 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
816 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
817 if (gimple_vdef (new_stmt)
818 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
819 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
820 if (!lhs)
822 gsi_replace (gsi, new_stmt, false);
823 return true;
825 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
826 goto done;
832 if (endp == 3)
834 /* Both DEST and SRC must be pointer types.
835 ??? This is what old code did. Is the testing for pointer types
836 really mandatory?
838 If either SRC is readonly or length is 1, we can use memcpy. */
839 if (!dest_align || !src_align)
840 return false;
841 if (readonly_data_expr (src)
842 || (tree_fits_uhwi_p (len)
843 && (MIN (src_align, dest_align) / BITS_PER_UNIT
844 >= tree_to_uhwi (len))))
846 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
847 if (!fn)
848 return false;
849 gimple_call_set_fndecl (stmt, fn);
850 gimple_call_set_arg (stmt, 0, dest);
851 gimple_call_set_arg (stmt, 1, src);
852 fold_stmt (gsi);
853 return true;
856 /* If *src and *dest can't overlap, optimize into memcpy as well. */
857 if (TREE_CODE (src) == ADDR_EXPR
858 && TREE_CODE (dest) == ADDR_EXPR)
860 tree src_base, dest_base, fn;
861 poly_int64 src_offset = 0, dest_offset = 0;
862 poly_uint64 maxsize;
864 srcvar = TREE_OPERAND (src, 0);
865 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
866 if (src_base == NULL)
867 src_base = srcvar;
868 destvar = TREE_OPERAND (dest, 0);
869 dest_base = get_addr_base_and_unit_offset (destvar,
870 &dest_offset);
871 if (dest_base == NULL)
872 dest_base = destvar;
873 if (!poly_int_tree_p (len, &maxsize))
874 maxsize = -1;
875 if (SSA_VAR_P (src_base)
876 && SSA_VAR_P (dest_base))
878 if (operand_equal_p (src_base, dest_base, 0)
879 && ranges_maybe_overlap_p (src_offset, maxsize,
880 dest_offset, maxsize))
881 return false;
883 else if (TREE_CODE (src_base) == MEM_REF
884 && TREE_CODE (dest_base) == MEM_REF)
886 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
887 TREE_OPERAND (dest_base, 0), 0))
888 return false;
889 poly_offset_int full_src_offset
890 = mem_ref_offset (src_base) + src_offset;
891 poly_offset_int full_dest_offset
892 = mem_ref_offset (dest_base) + dest_offset;
893 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
894 full_dest_offset, maxsize))
895 return false;
897 else
898 return false;
900 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
901 if (!fn)
902 return false;
903 gimple_call_set_fndecl (stmt, fn);
904 gimple_call_set_arg (stmt, 0, dest);
905 gimple_call_set_arg (stmt, 1, src);
906 fold_stmt (gsi);
907 return true;
910 /* If the destination and source do not alias optimize into
911 memcpy as well. */
912 if ((is_gimple_min_invariant (dest)
913 || TREE_CODE (dest) == SSA_NAME)
914 && (is_gimple_min_invariant (src)
915 || TREE_CODE (src) == SSA_NAME))
917 ao_ref destr, srcr;
918 ao_ref_init_from_ptr_and_size (&destr, dest, len);
919 ao_ref_init_from_ptr_and_size (&srcr, src, len);
920 if (!refs_may_alias_p_1 (&destr, &srcr, false))
922 tree fn;
923 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
924 if (!fn)
925 return false;
926 gimple_call_set_fndecl (stmt, fn);
927 gimple_call_set_arg (stmt, 0, dest);
928 gimple_call_set_arg (stmt, 1, src);
929 fold_stmt (gsi);
930 return true;
934 return false;
937 if (!tree_fits_shwi_p (len))
938 return false;
939 if (!POINTER_TYPE_P (TREE_TYPE (src))
940 || !POINTER_TYPE_P (TREE_TYPE (dest)))
941 return false;
942 /* In the following try to find a type that is most natural to be
943 used for the memcpy source and destination and that allows
944 the most optimization when memcpy is turned into a plain assignment
945 using that type. In theory we could always use a char[len] type
946 but that only gains us that the destination and source possibly
947 no longer will have their address taken. */
948 srctype = TREE_TYPE (TREE_TYPE (src));
949 if (TREE_CODE (srctype) == ARRAY_TYPE
950 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
951 srctype = TREE_TYPE (srctype);
952 desttype = TREE_TYPE (TREE_TYPE (dest));
953 if (TREE_CODE (desttype) == ARRAY_TYPE
954 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
955 desttype = TREE_TYPE (desttype);
956 if (TREE_ADDRESSABLE (srctype)
957 || TREE_ADDRESSABLE (desttype))
958 return false;
960 /* Make sure we are not copying using a floating-point mode or
961 a type whose size possibly does not match its precision. */
962 if (FLOAT_MODE_P (TYPE_MODE (desttype))
963 || TREE_CODE (desttype) == BOOLEAN_TYPE
964 || TREE_CODE (desttype) == ENUMERAL_TYPE)
965 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
966 if (FLOAT_MODE_P (TYPE_MODE (srctype))
967 || TREE_CODE (srctype) == BOOLEAN_TYPE
968 || TREE_CODE (srctype) == ENUMERAL_TYPE)
969 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
970 if (!srctype)
971 srctype = desttype;
972 if (!desttype)
973 desttype = srctype;
974 if (!srctype)
975 return false;
977 src_align = get_pointer_alignment (src);
978 dest_align = get_pointer_alignment (dest);
979 if (dest_align < TYPE_ALIGN (desttype)
980 || src_align < TYPE_ALIGN (srctype))
981 return false;
983 destvar = NULL_TREE;
984 if (TREE_CODE (dest) == ADDR_EXPR
985 && var_decl_component_p (TREE_OPERAND (dest, 0))
986 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
987 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
989 srcvar = NULL_TREE;
990 if (TREE_CODE (src) == ADDR_EXPR
991 && var_decl_component_p (TREE_OPERAND (src, 0))
992 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
994 if (!destvar
995 || src_align >= TYPE_ALIGN (desttype))
996 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
997 src, off0);
998 else if (!STRICT_ALIGNMENT)
1000 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1001 src_align);
1002 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1006 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1007 return false;
1009 if (srcvar == NULL_TREE)
1011 if (src_align >= TYPE_ALIGN (desttype))
1012 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1013 else
1015 if (STRICT_ALIGNMENT)
1016 return false;
1017 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1018 src_align);
1019 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1022 else if (destvar == NULL_TREE)
1024 if (dest_align >= TYPE_ALIGN (srctype))
1025 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1026 else
1028 if (STRICT_ALIGNMENT)
1029 return false;
1030 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1031 dest_align);
1032 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1036 /* Detect invalid bounds and overlapping copies and issue either
1037 -Warray-bounds or -Wrestrict. */
1038 if (!nowarn)
1039 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1041 gimple *new_stmt;
1042 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1044 tree tem = fold_const_aggregate_ref (srcvar);
1045 if (tem)
1046 srcvar = tem;
1047 if (! is_gimple_min_invariant (srcvar))
1049 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1050 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1051 new_stmt);
1052 gimple_assign_set_lhs (new_stmt, srcvar);
1053 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1054 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1056 new_stmt = gimple_build_assign (destvar, srcvar);
1057 goto set_vop_and_replace;
1060 /* We get an aggregate copy. Use an unsigned char[] type to
1061 perform the copying to preserve padding and to avoid any issues
1062 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1063 desttype = build_array_type_nelts (unsigned_char_type_node,
1064 tree_to_uhwi (len));
1065 srctype = desttype;
1066 if (src_align > TYPE_ALIGN (srctype))
1067 srctype = build_aligned_type (srctype, src_align);
1068 if (dest_align > TYPE_ALIGN (desttype))
1069 desttype = build_aligned_type (desttype, dest_align);
1070 new_stmt
1071 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1072 fold_build2 (MEM_REF, srctype, src, off0));
1073 set_vop_and_replace:
1074 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1075 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1076 if (gimple_vdef (new_stmt)
1077 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1078 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1079 if (!lhs)
1081 gsi_replace (gsi, new_stmt, false);
1082 return true;
1084 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1087 done:
1088 gimple_seq stmts = NULL;
1089 if (endp == 0 || endp == 3)
1090 len = NULL_TREE;
1091 else if (endp == 2)
1092 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1093 ssize_int (1));
1094 if (endp == 2 || endp == 1)
1096 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1097 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1098 TREE_TYPE (dest), dest, len);
1101 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1102 gimple *repl = gimple_build_assign (lhs, dest);
1103 gsi_replace (gsi, repl, false);
1104 return true;
1107 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1108 to built-in memcmp (a, b, len). */
1110 static bool
1111 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1113 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1115 if (!fn)
1116 return false;
1118 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1120 gimple *stmt = gsi_stmt (*gsi);
1121 tree a = gimple_call_arg (stmt, 0);
1122 tree b = gimple_call_arg (stmt, 1);
1123 tree len = gimple_call_arg (stmt, 2);
1125 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1126 replace_call_with_call_and_fold (gsi, repl);
1128 return true;
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  Return true on success.  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  Note the first two arguments
     swap places.  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  /* NOTE(review): the fntype is set on the original statement, which is
     about to be replaced by REPL — presumably so downstream folding sees
     a memmove-compatible prototype; confirm against
     replace_call_with_call_and_fold.  */
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1158 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1159 to built-in memset (dest, 0, len). */
1161 static bool
1162 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1164 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1166 if (!fn)
1167 return false;
1169 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1171 gimple *stmt = gsi_stmt (*gsi);
1172 tree dest = gimple_call_arg (stmt, 0);
1173 tree len = gimple_call_arg (stmt, 1);
1175 gimple_seq seq = NULL;
1176 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1177 gimple_seq_add_stmt_without_update (&seq, repl);
1178 gsi_replace_with_seq_vops (gsi, seq);
1179 fold_stmt (gsi);
1181 return true;
1184 /* Fold function call to builtin memset or bzero at *GSI setting the
1185 memory of size LEN to VAL. Return whether a simplification was made. */
1187 static bool
1188 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1190 gimple *stmt = gsi_stmt (*gsi);
1191 tree etype;
1192 unsigned HOST_WIDE_INT length, cval;
1194 /* If the LEN parameter is zero, return DEST. */
1195 if (integer_zerop (len))
1197 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1198 return true;
1201 if (! tree_fits_uhwi_p (len))
1202 return false;
1204 if (TREE_CODE (c) != INTEGER_CST)
1205 return false;
1207 tree dest = gimple_call_arg (stmt, 0);
1208 tree var = dest;
1209 if (TREE_CODE (var) != ADDR_EXPR)
1210 return false;
1212 var = TREE_OPERAND (var, 0);
1213 if (TREE_THIS_VOLATILE (var))
1214 return false;
1216 etype = TREE_TYPE (var);
1217 if (TREE_CODE (etype) == ARRAY_TYPE)
1218 etype = TREE_TYPE (etype);
1220 if (!INTEGRAL_TYPE_P (etype)
1221 && !POINTER_TYPE_P (etype))
1222 return NULL_TREE;
1224 if (! var_decl_component_p (var))
1225 return NULL_TREE;
1227 length = tree_to_uhwi (len);
1228 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1229 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1230 return NULL_TREE;
1232 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1233 return NULL_TREE;
1235 if (integer_zerop (c))
1236 cval = 0;
1237 else
1239 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1240 return NULL_TREE;
1242 cval = TREE_INT_CST_LOW (c);
1243 cval &= 0xff;
1244 cval |= cval << 8;
1245 cval |= cval << 16;
1246 cval |= (cval << 31) << 1;
1249 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1250 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1251 gimple_set_vuse (store, gimple_vuse (stmt));
1252 tree vdef = gimple_vdef (stmt);
1253 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1255 gimple_set_vdef (store, gimple_vdef (stmt));
1256 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1258 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1259 if (gimple_call_lhs (stmt))
1261 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1262 gsi_replace (gsi, asgn, false);
1264 else
1266 gimple_stmt_iterator gsi2 = *gsi;
1267 gsi_prev (gsi);
1268 gsi_remove (&gsi2, true);
1271 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it will handle
   PHIs and COND_EXPRs optimistically, if we can determine string length
   minimum and maximum, it will use the minimum from the ones where it
   can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  int fuzzy, bool *flexp)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &base[0]: recurse on the SSA name behind *base.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0),
					 length, visited, type, fuzzy, flexp);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      /* Fail only when the index is provably past the array
		 bound; otherwise fall through to the code below.  */
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; require a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  /* No exact length: fall back to bounds derived from array
	     types (fuzzy mode only).  */
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* The longest string the array can hold is one less than
		 its size, to leave room for the terminating nul.  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	      if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
		  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
		  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
		*flexp = true;
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		       == ARRAY_TYPE))
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg))
	    {
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Track the smallest candidate length in *MINLEN (for TYPE > 0
	 only when both are constants).  */
      if (!*minlen
	  || (type > 0
	      && TREE_CODE (*minlen) == INTEGER_CST
	      && TREE_CODE (val) == INTEGER_CST
	      && tree_int_cst_lt (val, *minlen)))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* TYPE == 0 requires every candidate to agree exactly.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
				   flexp))
	      {
		/* FUZZY == 2: be optimistic and record "unbounded"
		   (all-ones) rather than failing outright.  */
		if (fuzzy == 2)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
	 length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	    {
	      if (fuzzy == 2)
		*maxlen = build_all_ones_cst (size_type_node);
	      else
		return false;
	    }
	}
      return true;

    default:
      return false;
    }
}
1545 /* Determine the minimum and maximum value or string length that ARG
1546 refers to and store each in the first two elements of MINMAXLEN.
1547 For expressions that point to strings of unknown lengths that are
1548 character arrays, use the upper bound of the array as the maximum
1549 length. For example, given an expression like 'x ? array : "xyz"'
1550 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1551 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1552 stored in array.
1553 Return true if the range of the string lengths has been obtained
1554 from the upper bound of an array at the end of a struct. Such
1555 an array may hold a string that's longer than its upper bound
1556 due to it being used as a poor-man's flexible array member.
1558 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1559 and false if PHIs and COND_EXPRs are to be handled optimistically,
1560 if we can determine string length minimum and maximum; it will use
1561 the minimum from the ones where it can be determined.
1562 STRICT false should be only used for warning code. */
1564 bool
1565 get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
1567 bitmap visited = NULL;
1569 minmaxlen[0] = NULL_TREE;
1570 minmaxlen[1] = NULL_TREE;
1572 bool flexarray = false;
1573 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1574 &flexarray))
1576 minmaxlen[0] = NULL_TREE;
1577 minmaxlen[1] = NULL_TREE;
1580 if (visited)
1581 BITMAP_FREE (visited);
1583 return flexarray;
1586 tree
1587 get_maxval_strlen (tree arg, int type)
1589 bitmap visited = NULL;
1590 tree len[2] = { NULL_TREE, NULL_TREE };
1592 bool dummy;
1593 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
1594 len[1] = NULL_TREE;
1595 if (visited)
1596 BITMAP_FREE (visited);
1598 return len[1];
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Replace the call with DEST when the operands are identical, or with
   an equivalent memcpy when the source length is known.  Return true
   if the call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strlen+memcpy expansion below only pays off for speed.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy LEN + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and
   LEN.  Replace the call with DEST when LEN is zero (possibly warning
   about truncation), or with an equivalent memcpy when the source
   length is a known constant not exceeding LEN.  Return true if the
   call was simplified, false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* The folds below build an assignment to the call's lhs, so there is
     nothing to do when the result is unused.  */
  if (!gimple_call_lhs (stmt))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      /* Both arguments constant: evaluate at compile time using the
	 host's strchr/strrchr.  */
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: result is STR plus the constant offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining folds only apply when searching for the nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  /* The folds below build an assignment to the call's lhs, so there is
     nothing to do when the result is unused.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* All of the folds require a constant needle.  */
  q = c_getstr (needle);
  if (q == NULL)
    return false;

  if ((p = c_getstr (haystack)))
    {
      /* Both strings constant: evaluate with the host's strstr.  */
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  /* Not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: result is HAYSTACK plus the constant match offset.  */
      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible.  Otherwise replace
   the call — with DST directly when SRC is the empty string, or, when
   the source length is known and the block is optimized for speed,
   with an equivalent strlen (dst) + memcpy sequence — and return
   true.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen+memcpy expansion below trades size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy LEN + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1968 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1969 are the arguments to the call. */
1971 static bool
1972 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1974 gimple *stmt = gsi_stmt (*gsi);
1975 tree dest = gimple_call_arg (stmt, 0);
1976 tree src = gimple_call_arg (stmt, 1);
1977 tree size = gimple_call_arg (stmt, 2);
1978 tree fn;
1979 const char *p;
1982 p = c_getstr (src);
1983 /* If the SRC parameter is "", return DEST. */
1984 if (p && *p == '\0')
1986 replace_call_with_value (gsi, dest);
1987 return true;
1990 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1991 return false;
1993 /* If __builtin_strcat_chk is used, assume strcat is available. */
1994 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1995 if (!fn)
1996 return false;
1998 gimple *repl = gimple_build_call (fn, 2, dest, src);
1999 replace_call_with_call_and_fold (gsi, repl);
2000 return true;
/* Simplify a call to the strncat builtin.  Replace the call with its
   first argument when nothing would be copied, or with a plain strcat
   when the bound is known to be large enough; diagnose suspicious
   bounds along the way.  Return true if the call was simplified.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strcat transformation below needs both a constant bound and a
     constant source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress further warnings for this statement once one has
	     been issued.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid certain truncation the specified bound should also
	 not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  Return true if the call was simplified.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* A SIZE other than all-ones means the object size is known, so the
     check cannot simply be dropped.  */
  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2146 /* Build and append gimple statements to STMTS that would load a first
2147 character of a memory location identified by STR. LOC is location
2148 of the statement. */
2150 static tree
2151 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2153 tree var;
2155 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2156 tree cst_uchar_ptr_node
2157 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2158 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2160 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2161 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2162 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2164 gimple_assign_set_lhs (stmt, var);
2165 gimple_seq_add_stmt_without_update (stmts, stmt);
2167 return var;
2170 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2171 FCODE is the name of the builtin. */
2173 static bool
2174 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2176 gimple *stmt = gsi_stmt (*gsi);
2177 tree callee = gimple_call_fndecl (stmt);
2178 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2180 tree type = integer_type_node;
2181 tree str1 = gimple_call_arg (stmt, 0);
2182 tree str2 = gimple_call_arg (stmt, 1);
2183 tree lhs = gimple_call_lhs (stmt);
2184 HOST_WIDE_INT length = -1;
2186 /* Handle strncmp and strncasecmp functions. */
2187 if (gimple_call_num_args (stmt) == 3)
2189 tree len = gimple_call_arg (stmt, 2);
2190 if (tree_fits_uhwi_p (len))
2191 length = tree_to_uhwi (len);
2194 /* If the LEN parameter is zero, return zero. */
2195 if (length == 0)
2197 replace_call_with_value (gsi, integer_zero_node);
2198 return true;
2201 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2202 if (operand_equal_p (str1, str2, 0))
2204 replace_call_with_value (gsi, integer_zero_node);
2205 return true;
2208 const char *p1 = c_getstr (str1);
2209 const char *p2 = c_getstr (str2);
2211 /* For known strings, return an immediate value. */
2212 if (p1 && p2)
2214 int r = 0;
2215 bool known_result = false;
2217 switch (fcode)
2219 case BUILT_IN_STRCMP:
2220 case BUILT_IN_STRCMP_EQ:
2222 r = strcmp (p1, p2);
2223 known_result = true;
2224 break;
2226 case BUILT_IN_STRNCMP:
2227 case BUILT_IN_STRNCMP_EQ:
2229 if (length == -1)
2230 break;
2231 r = strncmp (p1, p2, length);
2232 known_result = true;
2233 break;
2235 /* Only handleable situation is where the string are equal (result 0),
2236 which is already handled by operand_equal_p case. */
2237 case BUILT_IN_STRCASECMP:
2238 break;
2239 case BUILT_IN_STRNCASECMP:
2241 if (length == -1)
2242 break;
2243 r = strncmp (p1, p2, length);
2244 if (r == 0)
2245 known_result = true;
2246 break;
2248 default:
2249 gcc_unreachable ();
2252 if (known_result)
2254 replace_call_with_value (gsi, build_cmp_result (type, r));
2255 return true;
2259 bool nonzero_length = length >= 1
2260 || fcode == BUILT_IN_STRCMP
2261 || fcode == BUILT_IN_STRCMP_EQ
2262 || fcode == BUILT_IN_STRCASECMP;
2264 location_t loc = gimple_location (stmt);
2266 /* If the second arg is "", return *(const unsigned char*)arg1. */
2267 if (p2 && *p2 == '\0' && nonzero_length)
2269 gimple_seq stmts = NULL;
2270 tree var = gimple_load_first_char (loc, str1, &stmts);
2271 if (lhs)
2273 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2274 gimple_seq_add_stmt_without_update (&stmts, stmt);
2277 gsi_replace_with_seq_vops (gsi, stmts);
2278 return true;
2281 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2282 if (p1 && *p1 == '\0' && nonzero_length)
2284 gimple_seq stmts = NULL;
2285 tree var = gimple_load_first_char (loc, str2, &stmts);
2287 if (lhs)
2289 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2290 stmt = gimple_build_assign (c, NOP_EXPR, var);
2291 gimple_seq_add_stmt_without_update (&stmts, stmt);
2293 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2294 gimple_seq_add_stmt_without_update (&stmts, stmt);
2297 gsi_replace_with_seq_vops (gsi, stmts);
2298 return true;
2301 /* If len parameter is one, return an expression corresponding to
2302 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2303 if (fcode == BUILT_IN_STRNCMP && length == 1)
2305 gimple_seq stmts = NULL;
2306 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2307 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2309 if (lhs)
2311 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2312 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2313 gimple_seq_add_stmt_without_update (&stmts, convert1);
2315 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2316 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2317 gimple_seq_add_stmt_without_update (&stmts, convert2);
2319 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2320 gimple_seq_add_stmt_without_update (&stmts, stmt);
2323 gsi_replace_with_seq_vops (gsi, stmts);
2324 return true;
2327 /* If length is larger than the length of one constant string,
2328 replace strncmp with corresponding strcmp */
2329 if (fcode == BUILT_IN_STRNCMP
2330 && length > 0
2331 && ((p2 && (size_t) length > strlen (p2))
2332 || (p1 && (size_t) length > strlen (p1))))
2334 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2335 if (!fn)
2336 return false;
2337 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2338 replace_call_with_call_and_fold (gsi, repl);
2339 return true;
2342 return false;
2345 /* Fold a call to the memchr pointed by GSI iterator. */
2347 static bool
2348 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2350 gimple *stmt = gsi_stmt (*gsi);
2351 tree lhs = gimple_call_lhs (stmt);
2352 tree arg1 = gimple_call_arg (stmt, 0);
2353 tree arg2 = gimple_call_arg (stmt, 1);
2354 tree len = gimple_call_arg (stmt, 2);
2356 /* If the LEN parameter is zero, return zero. */
2357 if (integer_zerop (len))
2359 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2360 return true;
2363 char c;
2364 if (TREE_CODE (arg2) != INTEGER_CST
2365 || !tree_fits_uhwi_p (len)
2366 || !target_char_cst_p (arg2, &c))
2367 return false;
2369 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2370 unsigned HOST_WIDE_INT string_length;
2371 const char *p1 = c_getstr (arg1, &string_length);
2373 if (p1)
2375 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2376 if (r == NULL)
2378 if (length <= string_length)
2380 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2381 return true;
2384 else
2386 unsigned HOST_WIDE_INT offset = r - p1;
2387 gimple_seq stmts = NULL;
2388 if (lhs != NULL_TREE)
2390 tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2391 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2392 arg1, offset_cst);
2393 gimple_seq_add_stmt_without_update (&stmts, stmt);
2395 else
2396 gimple_seq_add_stmt_without_update (&stmts,
2397 gimple_build_nop ());
2399 gsi_replace_with_seq_vops (gsi, stmts);
2400 return true;
2404 return false;
2407 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2408 to the call. IGNORE is true if the value returned
2409 by the builtin will be ignored. UNLOCKED is true is true if this
2410 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2411 the known length of the string. Return NULL_TREE if no simplification
2412 was possible. */
2414 static bool
2415 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2416 tree arg0, tree arg1,
2417 bool unlocked)
2419 gimple *stmt = gsi_stmt (*gsi);
2421 /* If we're using an unlocked function, assume the other unlocked
2422 functions exist explicitly. */
2423 tree const fn_fputc = (unlocked
2424 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2425 : builtin_decl_implicit (BUILT_IN_FPUTC));
2426 tree const fn_fwrite = (unlocked
2427 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2428 : builtin_decl_implicit (BUILT_IN_FWRITE));
2430 /* If the return value is used, don't do the transformation. */
2431 if (gimple_call_lhs (stmt))
2432 return false;
2434 /* Get the length of the string passed to fputs. If the length
2435 can't be determined, punt. */
2436 tree len = get_maxval_strlen (arg0, 0);
2437 if (!len
2438 || TREE_CODE (len) != INTEGER_CST)
2439 return false;
2441 switch (compare_tree_int (len, 1))
2443 case -1: /* length is 0, delete the call entirely . */
2444 replace_call_with_value (gsi, integer_zero_node);
2445 return true;
2447 case 0: /* length is 1, call fputc. */
2449 const char *p = c_getstr (arg0);
2450 if (p != NULL)
2452 if (!fn_fputc)
2453 return false;
2455 gimple *repl = gimple_build_call (fn_fputc, 2,
2456 build_int_cst
2457 (integer_type_node, p[0]), arg1);
2458 replace_call_with_call_and_fold (gsi, repl);
2459 return true;
2462 /* FALLTHROUGH */
2463 case 1: /* length is greater than 1, call fwrite. */
2465 /* If optimizing for size keep fputs. */
2466 if (optimize_function_for_size_p (cfun))
2467 return false;
2468 /* New argument list transforming fputs(string, stream) to
2469 fwrite(string, 1, len, stream). */
2470 if (!fn_fwrite)
2471 return false;
2473 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2474 size_one_node, len, arg1);
2475 replace_call_with_call_and_fold (gsi, repl);
2476 return true;
2478 default:
2479 gcc_unreachable ();
2481 return false;
2484 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2485 DEST, SRC, LEN, and SIZE are the arguments to the call.
2486 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2487 code of the builtin. If MAXLEN is not NULL, it is maximum length
2488 passed as third argument. */
2490 static bool
2491 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2492 tree dest, tree src, tree len, tree size,
2493 enum built_in_function fcode)
2495 gimple *stmt = gsi_stmt (*gsi);
2496 location_t loc = gimple_location (stmt);
2497 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2498 tree fn;
2500 /* If SRC and DEST are the same (and not volatile), return DEST
2501 (resp. DEST+LEN for __mempcpy_chk). */
2502 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2504 if (fcode != BUILT_IN_MEMPCPY_CHK)
2506 replace_call_with_value (gsi, dest);
2507 return true;
2509 else
2511 gimple_seq stmts = NULL;
2512 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2513 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2514 TREE_TYPE (dest), dest, len);
2515 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2516 replace_call_with_value (gsi, temp);
2517 return true;
2521 if (! tree_fits_uhwi_p (size))
2522 return false;
2524 tree maxlen = get_maxval_strlen (len, 2);
2525 if (! integer_all_onesp (size))
2527 if (! tree_fits_uhwi_p (len))
2529 /* If LEN is not constant, try MAXLEN too.
2530 For MAXLEN only allow optimizing into non-_ocs function
2531 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2532 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2534 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2536 /* (void) __mempcpy_chk () can be optimized into
2537 (void) __memcpy_chk (). */
2538 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2539 if (!fn)
2540 return false;
2542 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2543 replace_call_with_call_and_fold (gsi, repl);
2544 return true;
2546 return false;
2549 else
2550 maxlen = len;
2552 if (tree_int_cst_lt (size, maxlen))
2553 return false;
2556 fn = NULL_TREE;
2557 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2558 mem{cpy,pcpy,move,set} is available. */
2559 switch (fcode)
2561 case BUILT_IN_MEMCPY_CHK:
2562 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2563 break;
2564 case BUILT_IN_MEMPCPY_CHK:
2565 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2566 break;
2567 case BUILT_IN_MEMMOVE_CHK:
2568 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2569 break;
2570 case BUILT_IN_MEMSET_CHK:
2571 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2572 break;
2573 default:
2574 break;
2577 if (!fn)
2578 return false;
2580 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2581 replace_call_with_call_and_fold (gsi, repl);
2582 return true;
2585 /* Fold a call to the __st[rp]cpy_chk builtin.
2586 DEST, SRC, and SIZE are the arguments to the call.
2587 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2588 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2589 strings passed as second argument. */
2591 static bool
2592 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2593 tree dest,
2594 tree src, tree size,
2595 enum built_in_function fcode)
2597 gimple *stmt = gsi_stmt (*gsi);
2598 location_t loc = gimple_location (stmt);
2599 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2600 tree len, fn;
2602 /* If SRC and DEST are the same (and not volatile), return DEST. */
2603 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2605 /* Issue -Wrestrict unless the pointers are null (those do
2606 not point to objects and so do not indicate an overlap;
2607 such calls could be the result of sanitization and jump
2608 threading). */
2609 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2611 tree func = gimple_call_fndecl (stmt);
2613 warning_at (loc, OPT_Wrestrict,
2614 "%qD source argument is the same as destination",
2615 func);
2618 replace_call_with_value (gsi, dest);
2619 return true;
2622 if (! tree_fits_uhwi_p (size))
2623 return false;
2625 tree maxlen = get_maxval_strlen (src, 1);
2626 if (! integer_all_onesp (size))
2628 len = c_strlen (src, 1);
2629 if (! len || ! tree_fits_uhwi_p (len))
2631 /* If LEN is not constant, try MAXLEN too.
2632 For MAXLEN only allow optimizing into non-_ocs function
2633 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2634 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2636 if (fcode == BUILT_IN_STPCPY_CHK)
2638 if (! ignore)
2639 return false;
2641 /* If return value of __stpcpy_chk is ignored,
2642 optimize into __strcpy_chk. */
2643 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2644 if (!fn)
2645 return false;
2647 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2648 replace_call_with_call_and_fold (gsi, repl);
2649 return true;
2652 if (! len || TREE_SIDE_EFFECTS (len))
2653 return false;
2655 /* If c_strlen returned something, but not a constant,
2656 transform __strcpy_chk into __memcpy_chk. */
2657 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2658 if (!fn)
2659 return false;
2661 gimple_seq stmts = NULL;
2662 len = gimple_convert (&stmts, loc, size_type_node, len);
2663 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2664 build_int_cst (size_type_node, 1));
2665 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2666 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2667 replace_call_with_call_and_fold (gsi, repl);
2668 return true;
2671 else
2672 maxlen = len;
2674 if (! tree_int_cst_lt (maxlen, size))
2675 return false;
2678 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2679 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2680 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2681 if (!fn)
2682 return false;
2684 gimple *repl = gimple_build_call (fn, 2, dest, src);
2685 replace_call_with_call_and_fold (gsi, repl);
2686 return true;
2689 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2690 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2691 length passed as third argument. IGNORE is true if return value can be
2692 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2694 static bool
2695 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2696 tree dest, tree src,
2697 tree len, tree size,
2698 enum built_in_function fcode)
2700 gimple *stmt = gsi_stmt (*gsi);
2701 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2702 tree fn;
2704 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2706 /* If return value of __stpncpy_chk is ignored,
2707 optimize into __strncpy_chk. */
2708 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2709 if (fn)
2711 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2712 replace_call_with_call_and_fold (gsi, repl);
2713 return true;
2717 if (! tree_fits_uhwi_p (size))
2718 return false;
2720 tree maxlen = get_maxval_strlen (len, 2);
2721 if (! integer_all_onesp (size))
2723 if (! tree_fits_uhwi_p (len))
2725 /* If LEN is not constant, try MAXLEN too.
2726 For MAXLEN only allow optimizing into non-_ocs function
2727 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2728 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2729 return false;
2731 else
2732 maxlen = len;
2734 if (tree_int_cst_lt (size, maxlen))
2735 return false;
2738 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2739 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2740 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2741 if (!fn)
2742 return false;
2744 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2745 replace_call_with_call_and_fold (gsi, repl);
2746 return true;
2749 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2750 Return NULL_TREE if no simplification can be made. */
2752 static bool
2753 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2755 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2756 location_t loc = gimple_location (stmt);
2757 tree dest = gimple_call_arg (stmt, 0);
2758 tree src = gimple_call_arg (stmt, 1);
2759 tree fn, len, lenp1;
2761 /* If the result is unused, replace stpcpy with strcpy. */
2762 if (gimple_call_lhs (stmt) == NULL_TREE)
2764 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2765 if (!fn)
2766 return false;
2767 gimple_call_set_fndecl (stmt, fn);
2768 fold_stmt (gsi);
2769 return true;
2772 len = c_strlen (src, 1);
2773 if (!len
2774 || TREE_CODE (len) != INTEGER_CST)
2775 return false;
2777 if (optimize_function_for_size_p (cfun)
2778 /* If length is zero it's small enough. */
2779 && !integer_zerop (len))
2780 return false;
2782 /* If the source has a known length replace stpcpy with memcpy. */
2783 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2784 if (!fn)
2785 return false;
2787 gimple_seq stmts = NULL;
2788 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2789 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2790 tem, build_int_cst (size_type_node, 1));
2791 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2792 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2793 gimple_set_vuse (repl, gimple_vuse (stmt));
2794 gimple_set_vdef (repl, gimple_vdef (stmt));
2795 if (gimple_vdef (repl)
2796 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2797 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2798 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2799 /* Replace the result with dest + len. */
2800 stmts = NULL;
2801 tem = gimple_convert (&stmts, loc, sizetype, len);
2802 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2803 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2804 POINTER_PLUS_EXPR, dest, tem);
2805 gsi_replace (gsi, ret, false);
2806 /* Finally fold the memcpy call. */
2807 gimple_stmt_iterator gsi2 = *gsi;
2808 gsi_prev (&gsi2);
2809 fold_stmt (&gsi2);
2810 return true;
2813 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2814 NULL_TREE if a normal call should be emitted rather than expanding
2815 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2816 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2817 passed as second argument. */
2819 static bool
2820 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2821 enum built_in_function fcode)
2823 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2824 tree dest, size, len, fn, fmt, flag;
2825 const char *fmt_str;
2827 /* Verify the required arguments in the original call. */
2828 if (gimple_call_num_args (stmt) < 5)
2829 return false;
2831 dest = gimple_call_arg (stmt, 0);
2832 len = gimple_call_arg (stmt, 1);
2833 flag = gimple_call_arg (stmt, 2);
2834 size = gimple_call_arg (stmt, 3);
2835 fmt = gimple_call_arg (stmt, 4);
2837 if (! tree_fits_uhwi_p (size))
2838 return false;
2840 if (! integer_all_onesp (size))
2842 tree maxlen = get_maxval_strlen (len, 2);
2843 if (! tree_fits_uhwi_p (len))
2845 /* If LEN is not constant, try MAXLEN too.
2846 For MAXLEN only allow optimizing into non-_ocs function
2847 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2848 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2849 return false;
2851 else
2852 maxlen = len;
2854 if (tree_int_cst_lt (size, maxlen))
2855 return false;
2858 if (!init_target_chars ())
2859 return false;
2861 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2862 or if format doesn't contain % chars or is "%s". */
2863 if (! integer_zerop (flag))
2865 fmt_str = c_getstr (fmt);
2866 if (fmt_str == NULL)
2867 return false;
2868 if (strchr (fmt_str, target_percent) != NULL
2869 && strcmp (fmt_str, target_percent_s))
2870 return false;
2873 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2874 available. */
2875 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2876 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2877 if (!fn)
2878 return false;
2880 /* Replace the called function and the first 5 argument by 3 retaining
2881 trailing varargs. */
2882 gimple_call_set_fndecl (stmt, fn);
2883 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2884 gimple_call_set_arg (stmt, 0, dest);
2885 gimple_call_set_arg (stmt, 1, len);
2886 gimple_call_set_arg (stmt, 2, fmt);
2887 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2888 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2889 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2890 fold_stmt (gsi);
2891 return true;
2894 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2895 Return NULL_TREE if a normal call should be emitted rather than
2896 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2897 or BUILT_IN_VSPRINTF_CHK. */
2899 static bool
2900 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2901 enum built_in_function fcode)
2903 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2904 tree dest, size, len, fn, fmt, flag;
2905 const char *fmt_str;
2906 unsigned nargs = gimple_call_num_args (stmt);
2908 /* Verify the required arguments in the original call. */
2909 if (nargs < 4)
2910 return false;
2911 dest = gimple_call_arg (stmt, 0);
2912 flag = gimple_call_arg (stmt, 1);
2913 size = gimple_call_arg (stmt, 2);
2914 fmt = gimple_call_arg (stmt, 3);
2916 if (! tree_fits_uhwi_p (size))
2917 return false;
2919 len = NULL_TREE;
2921 if (!init_target_chars ())
2922 return false;
2924 /* Check whether the format is a literal string constant. */
2925 fmt_str = c_getstr (fmt);
2926 if (fmt_str != NULL)
2928 /* If the format doesn't contain % args or %%, we know the size. */
2929 if (strchr (fmt_str, target_percent) == 0)
2931 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2932 len = build_int_cstu (size_type_node, strlen (fmt_str));
2934 /* If the format is "%s" and first ... argument is a string literal,
2935 we know the size too. */
2936 else if (fcode == BUILT_IN_SPRINTF_CHK
2937 && strcmp (fmt_str, target_percent_s) == 0)
2939 tree arg;
2941 if (nargs == 5)
2943 arg = gimple_call_arg (stmt, 4);
2944 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2946 len = c_strlen (arg, 1);
2947 if (! len || ! tree_fits_uhwi_p (len))
2948 len = NULL_TREE;
2954 if (! integer_all_onesp (size))
2956 if (! len || ! tree_int_cst_lt (len, size))
2957 return false;
2960 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2961 or if format doesn't contain % chars or is "%s". */
2962 if (! integer_zerop (flag))
2964 if (fmt_str == NULL)
2965 return false;
2966 if (strchr (fmt_str, target_percent) != NULL
2967 && strcmp (fmt_str, target_percent_s))
2968 return false;
2971 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2972 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2973 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2974 if (!fn)
2975 return false;
2977 /* Replace the called function and the first 4 argument by 2 retaining
2978 trailing varargs. */
2979 gimple_call_set_fndecl (stmt, fn);
2980 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2981 gimple_call_set_arg (stmt, 0, dest);
2982 gimple_call_set_arg (stmt, 1, fmt);
2983 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2984 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2985 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2986 fold_stmt (gsi);
2987 return true;
2990 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2991 ORIG may be null if this is a 2-argument call. We don't attempt to
2992 simplify calls with more than 3 arguments.
2994 Return true if simplification was possible, otherwise false. */
2996 bool
2997 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
2999 gimple *stmt = gsi_stmt (*gsi);
3000 tree dest = gimple_call_arg (stmt, 0);
3001 tree fmt = gimple_call_arg (stmt, 1);
3002 tree orig = NULL_TREE;
3003 const char *fmt_str = NULL;
3005 /* Verify the required arguments in the original call. We deal with two
3006 types of sprintf() calls: 'sprintf (str, fmt)' and
3007 'sprintf (dest, "%s", orig)'. */
3008 if (gimple_call_num_args (stmt) > 3)
3009 return false;
3011 if (gimple_call_num_args (stmt) == 3)
3012 orig = gimple_call_arg (stmt, 2);
3014 /* Check whether the format is a literal string constant. */
3015 fmt_str = c_getstr (fmt);
3016 if (fmt_str == NULL)
3017 return false;
3019 if (!init_target_chars ())
3020 return false;
3022 /* If the format doesn't contain % args or %%, use strcpy. */
3023 if (strchr (fmt_str, target_percent) == NULL)
3025 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3027 if (!fn)
3028 return false;
3030 /* Don't optimize sprintf (buf, "abc", ptr++). */
3031 if (orig)
3032 return false;
3034 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3035 'format' is known to contain no % formats. */
3036 gimple_seq stmts = NULL;
3037 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3038 gimple_seq_add_stmt_without_update (&stmts, repl);
3039 if (gimple_call_lhs (stmt))
3041 repl = gimple_build_assign (gimple_call_lhs (stmt),
3042 build_int_cst (integer_type_node,
3043 strlen (fmt_str)));
3044 gimple_seq_add_stmt_without_update (&stmts, repl);
3045 gsi_replace_with_seq_vops (gsi, stmts);
3046 /* gsi now points at the assignment to the lhs, get a
3047 stmt iterator to the memcpy call.
3048 ??? We can't use gsi_for_stmt as that doesn't work when the
3049 CFG isn't built yet. */
3050 gimple_stmt_iterator gsi2 = *gsi;
3051 gsi_prev (&gsi2);
3052 fold_stmt (&gsi2);
3054 else
3056 gsi_replace_with_seq_vops (gsi, stmts);
3057 fold_stmt (gsi);
3059 return true;
3062 /* If the format is "%s", use strcpy if the result isn't used. */
3063 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3065 tree fn;
3066 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3068 if (!fn)
3069 return false;
3071 /* Don't crash on sprintf (str1, "%s"). */
3072 if (!orig)
3073 return false;
3075 tree orig_len = NULL_TREE;
3076 if (gimple_call_lhs (stmt))
3078 orig_len = get_maxval_strlen (orig, 0);
3079 if (!orig_len)
3080 return false;
3083 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3084 gimple_seq stmts = NULL;
3085 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3086 gimple_seq_add_stmt_without_update (&stmts, repl);
3087 if (gimple_call_lhs (stmt))
3089 if (!useless_type_conversion_p (integer_type_node,
3090 TREE_TYPE (orig_len)))
3091 orig_len = fold_convert (integer_type_node, orig_len);
3092 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3093 gimple_seq_add_stmt_without_update (&stmts, repl);
3094 gsi_replace_with_seq_vops (gsi, stmts);
3095 /* gsi now points at the assignment to the lhs, get a
3096 stmt iterator to the memcpy call.
3097 ??? We can't use gsi_for_stmt as that doesn't work when the
3098 CFG isn't built yet. */
3099 gimple_stmt_iterator gsi2 = *gsi;
3100 gsi_prev (&gsi2);
3101 fold_stmt (&gsi2);
3103 else
3105 gsi_replace_with_seq_vops (gsi, stmts);
3106 fold_stmt (gsi);
3108 return true;
3110 return false;
3113 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3114 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3115 attempt to simplify calls with more than 4 arguments.
3117 Return true if simplification was possible, otherwise false. */
3119 bool
3120 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3122 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3123 tree dest = gimple_call_arg (stmt, 0);
3124 tree destsize = gimple_call_arg (stmt, 1);
3125 tree fmt = gimple_call_arg (stmt, 2);
3126 tree orig = NULL_TREE;
3127 const char *fmt_str = NULL;
3129 if (gimple_call_num_args (stmt) > 4)
3130 return false;
3132 if (gimple_call_num_args (stmt) == 4)
3133 orig = gimple_call_arg (stmt, 3);
3135 if (!tree_fits_uhwi_p (destsize))
3136 return false;
3137 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3139 /* Check whether the format is a literal string constant. */
3140 fmt_str = c_getstr (fmt);
3141 if (fmt_str == NULL)
3142 return false;
3144 if (!init_target_chars ())
3145 return false;
3147 /* If the format doesn't contain % args or %%, use strcpy. */
3148 if (strchr (fmt_str, target_percent) == NULL)
3150 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3151 if (!fn)
3152 return false;
3154 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3155 if (orig)
3156 return false;
3158 /* We could expand this as
3159 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3160 or to
3161 memcpy (str, fmt_with_nul_at_cstm1, cst);
3162 but in the former case that might increase code size
3163 and in the latter case grow .rodata section too much.
3164 So punt for now. */
3165 size_t len = strlen (fmt_str);
3166 if (len >= destlen)
3167 return false;
3169 gimple_seq stmts = NULL;
3170 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3171 gimple_seq_add_stmt_without_update (&stmts, repl);
3172 if (gimple_call_lhs (stmt))
3174 repl = gimple_build_assign (gimple_call_lhs (stmt),
3175 build_int_cst (integer_type_node, len));
3176 gimple_seq_add_stmt_without_update (&stmts, repl);
3177 gsi_replace_with_seq_vops (gsi, stmts);
3178 /* gsi now points at the assignment to the lhs, get a
3179 stmt iterator to the memcpy call.
3180 ??? We can't use gsi_for_stmt as that doesn't work when the
3181 CFG isn't built yet. */
3182 gimple_stmt_iterator gsi2 = *gsi;
3183 gsi_prev (&gsi2);
3184 fold_stmt (&gsi2);
3186 else
3188 gsi_replace_with_seq_vops (gsi, stmts);
3189 fold_stmt (gsi);
3191 return true;
3194 /* If the format is "%s", use strcpy if the result isn't used. */
3195 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3197 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3198 if (!fn)
3199 return false;
3201 /* Don't crash on snprintf (str1, cst, "%s"). */
3202 if (!orig)
3203 return false;
3205 tree orig_len = get_maxval_strlen (orig, 0);
3206 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3207 return false;
3209 /* We could expand this as
3210 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3211 or to
3212 memcpy (str1, str2_with_nul_at_cstm1, cst);
3213 but in the former case that might increase code size
3214 and in the latter case grow .rodata section too much.
3215 So punt for now. */
3216 if (compare_tree_int (orig_len, destlen) >= 0)
3217 return false;
3219 /* Convert snprintf (str1, cst, "%s", str2) into
3220 strcpy (str1, str2) if strlen (str2) < cst. */
3221 gimple_seq stmts = NULL;
3222 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3223 gimple_seq_add_stmt_without_update (&stmts, repl);
3224 if (gimple_call_lhs (stmt))
3226 if (!useless_type_conversion_p (integer_type_node,
3227 TREE_TYPE (orig_len)))
3228 orig_len = fold_convert (integer_type_node, orig_len);
3229 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3230 gimple_seq_add_stmt_without_update (&stmts, repl);
3231 gsi_replace_with_seq_vops (gsi, stmts);
3232 /* gsi now points at the assignment to the lhs, get a
3233 stmt iterator to the memcpy call.
3234 ??? We can't use gsi_for_stmt as that doesn't work when the
3235 CFG isn't built yet. */
3236 gimple_stmt_iterator gsi2 = *gsi;
3237 gsi_prev (&gsi2);
3238 fold_stmt (&gsi2);
3240 else
3242 gsi_replace_with_seq_vops (gsi, stmts);
3243 fold_stmt (gsi);
3245 return true;
3247 return false;
3250 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3251 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3252 more than 3 arguments, and ARG may be null in the 2-argument case.
3254 Return false if no simplification was possible, otherwise return true
3255 after replacing the call with a simpler equivalent. FCODE is the BUILT_IN_*
3256 code of the function to be simplified. */
3258 static bool
3259 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3260 tree fp, tree fmt, tree arg,
3261 enum built_in_function fcode)
3263 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3264 tree fn_fputc, fn_fputs;
3265 const char *fmt_str = NULL;
3267 /* If the return value is used, don't do the transformation. */
3268 if (gimple_call_lhs (stmt) != NULL_TREE)
3269 return false;
3271 /* Check whether the format is a literal string constant. */
3272 fmt_str = c_getstr (fmt);
3273 if (fmt_str == NULL)
3274 return false;
3276 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3278 /* If we're using an unlocked function, assume the other
3279 unlocked functions exist explicitly. */
3280 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3281 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3283 else
3285 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3286 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3289 if (!init_target_chars ())
3290 return false;
3292 /* If the format doesn't contain % args or %%, use fputs. */
3293 if (strchr (fmt_str, target_percent) == NULL)
3295 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3296 && arg)
3297 return false;
3299 /* If the format specifier was "", fprintf does nothing. */
3300 if (fmt_str[0] == '\0')
/* No LHS (checked above), so the call is simply removed.  */
3302 replace_call_with_value (gsi, NULL_TREE);
3303 return true;
3306 /* When "string" doesn't contain %, replace all cases of
3307 fprintf (fp, string) with fputs (string, fp). The fputs
3308 builtin will take care of special cases like length == 1. */
3309 if (fn_fputs)
3311 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3312 replace_call_with_call_and_fold (gsi, repl);
3313 return true;
3317 /* The other optimizations can be done only on the non-va_list variants. */
3318 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3319 return false;
3321 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3322 else if (strcmp (fmt_str, target_percent_s) == 0)
3324 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3325 return false;
3326 if (fn_fputs)
3328 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3329 replace_call_with_call_and_fold (gsi, repl);
3330 return true;
3334 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3335 else if (strcmp (fmt_str, target_percent_c) == 0)
3337 if (!arg
3338 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3339 return false;
3340 if (fn_fputc)
3342 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3343 replace_call_with_call_and_fold (gsi, repl);
3344 return true;
3348 return false;
3351 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3352 FMT and ARG are the arguments to the call; we don't fold cases with
3353 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3355 Return false if no simplification was possible, otherwise return true
3356 after replacing the call with a simpler equivalent. FCODE is the BUILT_IN_*
3357 code of the function to be simplified. */
3359 static bool
3360 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3361 tree arg, enum built_in_function fcode)
3363 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3364 tree fn_putchar, fn_puts, newarg;
3365 const char *fmt_str = NULL;
3367 /* If the return value is used, don't do the transformation. */
3368 if (gimple_call_lhs (stmt) != NULL_TREE)
3369 return false;
3371 /* Check whether the format is a literal string constant. */
3372 fmt_str = c_getstr (fmt);
3373 if (fmt_str == NULL)
3374 return false;
3376 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3378 /* If we're using an unlocked function, assume the other
3379 unlocked functions exist explicitly. */
3380 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3381 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3383 else
3385 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3386 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3389 if (!init_target_chars ())
3390 return false;
/* Handle printf ("%s", str) and printf (string-without-%) alike: in both
cases STR below is the string that would actually be printed.  */
3392 if (strcmp (fmt_str, target_percent_s) == 0
3393 || strchr (fmt_str, target_percent) == NULL)
3395 const char *str;
3397 if (strcmp (fmt_str, target_percent_s) == 0)
3399 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3400 return false;
3402 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3403 return false;
3405 str = c_getstr (arg);
3406 if (str == NULL)
3407 return false;
3409 else
3411 /* The format specifier doesn't contain any '%' characters. */
3412 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3413 && arg)
3414 return false;
3415 str = fmt_str;
3418 /* If the string was "", printf does nothing. */
3419 if (str[0] == '\0')
3421 replace_call_with_value (gsi, NULL_TREE);
3422 return true;
3425 /* If the string has length of 1, call putchar. */
3426 if (str[1] == '\0')
3428 /* Given printf("c"), (where c is any one character,)
3429 convert "c"[0] to an int and pass that to the replacement
3430 function. */
3431 newarg = build_int_cst (integer_type_node, str[0]);
3432 if (fn_putchar)
3434 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3435 replace_call_with_call_and_fold (gsi, repl);
3436 return true;
3439 else
3441 /* If the string was "string\n", call puts("string"). */
3442 size_t len = strlen (str);
3443 if ((unsigned char)str[len - 1] == target_newline
3444 && (size_t) (int) len == len
3445 && (int) len > 0)
3447 char *newstr;
3448 tree offset_node, string_cst;
3450 /* Create a NUL-terminated string that's one char shorter
3451 than the original, stripping off the trailing '\n'. */
3452 newarg = build_string_literal (len, str);
3453 string_cst = string_constant (newarg, &offset_node);
3454 gcc_checking_assert (string_cst
3455 && (TREE_STRING_LENGTH (string_cst)
3456 == (int) len)
3457 && integer_zerop (offset_node)
3458 && (unsigned char)
3459 TREE_STRING_POINTER (string_cst)[len - 1]
3460 == target_newline);
3461 /* build_string_literal creates a new STRING_CST,
3462 modify it in place to avoid double copying. */
3463 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3464 newstr[len - 1] = '\0';
3465 if (fn_puts)
3467 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3468 replace_call_with_call_and_fold (gsi, repl);
3469 return true;
3472 else
3473 /* We'd like to arrange to call fputs(string,stdout) here,
3474 but we need stdout and don't have a way to get it yet. */
3475 return false;
3479 /* The other optimizations can be done only on the non-va_list variants. */
3480 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3481 return false;
3483 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3484 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3486 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3487 return false;
3488 if (fn_puts)
3490 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3491 replace_call_with_call_and_fold (gsi, repl);
3492 return true;
3496 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3497 else if (strcmp (fmt_str, target_percent_c) == 0)
3499 if (!arg || ! useless_type_conversion_p (integer_type_node,
3500 TREE_TYPE (arg)))
3501 return false;
3502 if (fn_putchar)
3504 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3505 replace_call_with_call_and_fold (gsi, repl);
3506 return true;
3510 return false;
3515 /* Fold a call to __builtin_strlen. If the length of the argument is a
known constant, replace the call with that constant; otherwise record
the computed [min, max] length range on the call's SSA_NAME result. */
3517 static bool
3518 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3520 gimple *stmt = gsi_stmt (*gsi);
3522 wide_int minlen;
3523 wide_int maxlen;
3525 tree lenrange[2];
/* NOTE(review): a false return from get_range_strlen here is treated as
"the range in LENRANGE is usable" — confirm against its definition. */
3526 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
3527 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3528 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3530 /* The range of lengths refers to either a single constant
3531 string or to the longest and shortest constant string
3532 referenced by the argument of the strlen() call, or to
3533 the strings that can possibly be stored in the arrays
3534 the argument refers to. */
3535 minlen = wi::to_wide (lenrange[0]);
3536 maxlen = wi::to_wide (lenrange[1]);
3538 else
/* No usable range: fall back to [0, max_object_size () - 2] —
presumably -2 leaves room for the terminating nul; TODO confirm. */
3540 unsigned prec = TYPE_PRECISION (sizetype);
3542 minlen = wi::shwi (0, prec);
3543 maxlen = wi::to_wide (max_object_size (), prec) - 2;
/* A singleton range means the length is a compile-time constant;
replace the call with it. Only the branch above can produce this,
since the fallback sets minlen == 0 != maxlen. */
3546 if (minlen == maxlen)
3548 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3549 true, GSI_SAME_STMT);
3550 replace_call_with_value (gsi, lenrange[0]);
3551 return true;
/* Couldn't fold; at least remember the range for later passes. */
3554 tree lhs = gimple_call_lhs (stmt);
3555 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3556 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3558 return false;
3561 /* Fold a call to __builtin_acc_on_device. Replaces the call with the
boolean expression (ARG0 == val_host) | (ARG0 == val_dev), built as three
explicit GIMPLE assignments inserted before the call. Always succeeds
once folding is attempted (returns true), except before expansion. */
3563 static bool
3564 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3566 /* Defer folding until we know which compiler we're in. */
3567 if (symtab->state != EXPANSION)
3568 return false;
/* Host compiler: the device is the host; there is no accelerator.
Accelerator compiler: overridden below via ACCEL_COMPILER. */
3570 unsigned val_host = GOMP_DEVICE_HOST;
3571 unsigned val_dev = GOMP_DEVICE_NONE;
3573 #ifdef ACCEL_COMPILER
3574 val_host = GOMP_DEVICE_NOT_HOST;
3575 val_dev = ACCEL_COMPILER_acc_device;
3576 #endif
3578 location_t loc = gimple_location (gsi_stmt (*gsi));
/* host_eq = (arg0 == val_host) */
3580 tree host_eq = make_ssa_name (boolean_type_node);
3581 gimple *host_ass = gimple_build_assign
3582 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3583 gimple_set_location (host_ass, loc);
3584 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
/* dev_eq = (arg0 == val_dev) */
3586 tree dev_eq = make_ssa_name (boolean_type_node);
3587 gimple *dev_ass = gimple_build_assign
3588 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3589 gimple_set_location (dev_ass, loc);
3590 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
/* result = host_eq | dev_eq */
3592 tree result = make_ssa_name (boolean_type_node);
3593 gimple *result_ass = gimple_build_assign
3594 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3595 gimple_set_location (result_ass, loc);
3596 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3598 replace_call_with_value (gsi, result);
3600 return true;
3603 /* Fold realloc (0, n) -> malloc (n). */
3605 static bool
3606 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3608 gimple *stmt = gsi_stmt (*gsi);
3609 tree arg = gimple_call_arg (stmt, 0);
3610 tree size = gimple_call_arg (stmt, 1);
3612 if (operand_equal_p (arg, null_pointer_node, 0))
3614 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3615 if (fn_malloc)
3617 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3618 replace_call_with_call_and_fold (gsi, repl);
3619 return true;
3622 return false;
3625 /* Fold the non-target builtin at *GSI and return whether any simplification
3626 was made. */
3628 static bool
3629 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3631 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3632 tree callee = gimple_call_fndecl (stmt);
3634 /* Give up for always_inline inline builtins until they are
3635 inlined. */
3636 if (avoid_folding_inline_builtin (callee))
3637 return false;
/* Dispatch on the builtin function code to the per-builtin folders. */
3639 unsigned n = gimple_call_num_args (stmt);
3640 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3641 switch (fcode)
3643 case BUILT_IN_BCMP:
3644 return gimple_fold_builtin_bcmp (gsi);
3645 case BUILT_IN_BCOPY:
3646 return gimple_fold_builtin_bcopy (gsi);
3647 case BUILT_IN_BZERO:
3648 return gimple_fold_builtin_bzero (gsi);
3650 case BUILT_IN_MEMSET:
3651 return gimple_fold_builtin_memset (gsi,
3652 gimple_call_arg (stmt, 1),
3653 gimple_call_arg (stmt, 2));
/* The last argument distinguishes memcpy (0) / mempcpy (1) / memmove (3). */
3654 case BUILT_IN_MEMCPY:
3655 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3656 gimple_call_arg (stmt, 1), 0);
3657 case BUILT_IN_MEMPCPY:
3658 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3659 gimple_call_arg (stmt, 1), 1);
3660 case BUILT_IN_MEMMOVE:
3661 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3662 gimple_call_arg (stmt, 1), 3);
3663 case BUILT_IN_SPRINTF_CHK:
3664 case BUILT_IN_VSPRINTF_CHK:
3665 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3666 case BUILT_IN_STRCAT_CHK:
3667 return gimple_fold_builtin_strcat_chk (gsi);
3668 case BUILT_IN_STRNCAT_CHK:
3669 return gimple_fold_builtin_strncat_chk (gsi);
3670 case BUILT_IN_STRLEN:
3671 return gimple_fold_builtin_strlen (gsi);
3672 case BUILT_IN_STRCPY:
3673 return gimple_fold_builtin_strcpy (gsi,
3674 gimple_call_arg (stmt, 0),
3675 gimple_call_arg (stmt, 1));
3676 case BUILT_IN_STRNCPY:
3677 return gimple_fold_builtin_strncpy (gsi,
3678 gimple_call_arg (stmt, 0),
3679 gimple_call_arg (stmt, 1),
3680 gimple_call_arg (stmt, 2));
3681 case BUILT_IN_STRCAT:
3682 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3683 gimple_call_arg (stmt, 1));
3684 case BUILT_IN_STRNCAT:
3685 return gimple_fold_builtin_strncat (gsi);
/* index/rindex are the BSD spellings of strchr/strrchr; the bool selects
forward (false) or reverse (true) search. */
3686 case BUILT_IN_INDEX:
3687 case BUILT_IN_STRCHR:
3688 return gimple_fold_builtin_strchr (gsi, false);
3689 case BUILT_IN_RINDEX:
3690 case BUILT_IN_STRRCHR:
3691 return gimple_fold_builtin_strchr (gsi, true);
3692 case BUILT_IN_STRSTR:
3693 return gimple_fold_builtin_strstr (gsi);
3694 case BUILT_IN_STRCMP:
3695 case BUILT_IN_STRCMP_EQ:
3696 case BUILT_IN_STRCASECMP:
3697 case BUILT_IN_STRNCMP:
3698 case BUILT_IN_STRNCMP_EQ:
3699 case BUILT_IN_STRNCASECMP:
3700 return gimple_fold_builtin_string_compare (gsi);
3701 case BUILT_IN_MEMCHR:
3702 return gimple_fold_builtin_memchr (gsi);
3703 case BUILT_IN_FPUTS:
3704 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3705 gimple_call_arg (stmt, 1), false);
3706 case BUILT_IN_FPUTS_UNLOCKED:
3707 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3708 gimple_call_arg (stmt, 1), true);
3709 case BUILT_IN_MEMCPY_CHK:
3710 case BUILT_IN_MEMPCPY_CHK:
3711 case BUILT_IN_MEMMOVE_CHK:
3712 case BUILT_IN_MEMSET_CHK:
3713 return gimple_fold_builtin_memory_chk (gsi,
3714 gimple_call_arg (stmt, 0),
3715 gimple_call_arg (stmt, 1),
3716 gimple_call_arg (stmt, 2),
3717 gimple_call_arg (stmt, 3),
3718 fcode);
3719 case BUILT_IN_STPCPY:
3720 return gimple_fold_builtin_stpcpy (gsi);
3721 case BUILT_IN_STRCPY_CHK:
3722 case BUILT_IN_STPCPY_CHK:
3723 return gimple_fold_builtin_stxcpy_chk (gsi,
3724 gimple_call_arg (stmt, 0),
3725 gimple_call_arg (stmt, 1),
3726 gimple_call_arg (stmt, 2),
3727 fcode);
3728 case BUILT_IN_STRNCPY_CHK:
3729 case BUILT_IN_STPNCPY_CHK:
3730 return gimple_fold_builtin_stxncpy_chk (gsi,
3731 gimple_call_arg (stmt, 0),
3732 gimple_call_arg (stmt, 1),
3733 gimple_call_arg (stmt, 2),
3734 gimple_call_arg (stmt, 3),
3735 fcode);
3736 case BUILT_IN_SNPRINTF_CHK:
3737 case BUILT_IN_VSNPRINTF_CHK:
3738 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3740 case BUILT_IN_FPRINTF:
3741 case BUILT_IN_FPRINTF_UNLOCKED:
3742 case BUILT_IN_VFPRINTF:
3743 if (n == 2 || n == 3)
3744 return gimple_fold_builtin_fprintf (gsi,
3745 gimple_call_arg (stmt, 0),
3746 gimple_call_arg (stmt, 1),
3747 n == 3
3748 ? gimple_call_arg (stmt, 2)
3749 : NULL_TREE,
3750 fcode);
3751 break;
/* In the _CHK variants the format string is shifted one slot to the
right (an extra flag argument precedes it), hence arg 2 here. */
3752 case BUILT_IN_FPRINTF_CHK:
3753 case BUILT_IN_VFPRINTF_CHK:
3754 if (n == 3 || n == 4)
3755 return gimple_fold_builtin_fprintf (gsi,
3756 gimple_call_arg (stmt, 0),
3757 gimple_call_arg (stmt, 2),
3758 n == 4
3759 ? gimple_call_arg (stmt, 3)
3760 : NULL_TREE,
3761 fcode);
3762 break;
3763 case BUILT_IN_PRINTF:
3764 case BUILT_IN_PRINTF_UNLOCKED:
3765 case BUILT_IN_VPRINTF:
3766 if (n == 1 || n == 2)
3767 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3768 n == 2
3769 ? gimple_call_arg (stmt, 1)
3770 : NULL_TREE, fcode);
3771 break;
3772 case BUILT_IN_PRINTF_CHK:
3773 case BUILT_IN_VPRINTF_CHK:
3774 if (n == 2 || n == 3)
3775 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3776 n == 3
3777 ? gimple_call_arg (stmt, 2)
3778 : NULL_TREE, fcode);
3779 break;
3780 case BUILT_IN_ACC_ON_DEVICE:
3781 return gimple_fold_builtin_acc_on_device (gsi,
3782 gimple_call_arg (stmt, 0));
3783 case BUILT_IN_REALLOC:
3784 return gimple_fold_builtin_realloc (gsi);
3786 default:;
3789 /* Try the generic builtin folder. */
3790 bool ignore = (gimple_call_lhs (stmt) == NULL);
3791 tree result = fold_call_stmt (stmt, ignore);
3792 if (result)
3794 if (ignore)
3795 STRIP_NOPS (result);
3796 else
3797 result = fold_convert (gimple_call_return_type (stmt), result);
/* Prefer the cheap in-place update; fall back to gimplifying the
folded tree into new statements when that is not possible. */
3798 if (!update_call_from_tree (gsi, result))
3799 gimplify_and_update_call_from_tree (gsi, result);
3800 return true;
3803 return false;
3806 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3807 function calls to constants, where possible. */
3809 static tree
3810 fold_internal_goacc_dim (const gimple *call)
3812 int axis = oacc_get_ifn_dim_arg (call);
3813 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3814 tree result = NULL_TREE;
3815 tree type = TREE_TYPE (gimple_call_lhs (call));
3817 switch (gimple_call_internal_fn (call))
3819 case IFN_GOACC_DIM_POS:
3820 /* If the size is 1, we know the answer. */
3821 if (size == 1)
3822 result = build_int_cst (type, 0);
3823 break;
3824 case IFN_GOACC_DIM_SIZE:
3825 /* If the size is not dynamic, we know the answer. */
3826 if (size)
3827 result = build_int_cst (type, size);
3828 break;
3829 default:
3830 break;
3833 return result;
3836 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3837 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3838 &var where var is only addressable because of such calls. */
3840 bool
3841 optimize_atomic_compare_exchange_p (gimple *stmt)
/* Basic shape checks: exactly 6 arguments, atomics inlining enabled,
not sanitized (TSan/ASan instrument the library call instead), a
recognized builtin, and virtual operands present. */
3843 if (gimple_call_num_args (stmt) != 6
3844 || !flag_inline_atomics
3845 || !optimize
3846 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3847 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3848 || !gimple_vdef (stmt)
3849 || !gimple_vuse (stmt))
3850 return false;
3852 tree fndecl = gimple_call_fndecl (stmt);
3853 switch (DECL_FUNCTION_CODE (fndecl))
3855 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3856 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3857 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3858 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3859 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3860 break;
3861 default:
3862 return false;
/* The second argument must literally be &var of a local variable. */
3865 tree expected = gimple_call_arg (stmt, 1);
3866 if (TREE_CODE (expected) != ADDR_EXPR
3867 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3868 return false;
3870 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3871 if (!is_gimple_reg_type (etype)
3872 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3873 || TREE_THIS_VOLATILE (etype)
3874 || VECTOR_TYPE_P (etype)
3875 || TREE_CODE (etype) == COMPLEX_TYPE
3876 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3877 might not preserve all the bits. See PR71716. */
3878 || SCALAR_FLOAT_TYPE_P (etype)
3879 || maybe_ne (TYPE_PRECISION (etype),
3880 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3881 return false;
/* The weak argument must be a compile-time 0 or 1 so it can be folded
into the internal call's flag operand. */
3883 tree weak = gimple_call_arg (stmt, 3);
3884 if (!integer_zerop (weak) && !integer_onep (weak))
3885 return false;
/* The third parameter type of the builtin's prototype is the underlying
uintN_t; use its mode to query the target for CAS support. */
3887 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3888 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3889 machine_mode mode = TYPE_MODE (itype);
3891 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3892 == CODE_FOR_nothing
3893 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3894 return false;
3896 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3897 return false;
3899 return true;
3902 /* Fold
3903 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3904 into
3905 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3906 i = IMAGPART_EXPR <t>;
3907 r = (_Bool) i;
3908 e = REALPART_EXPR <t>;
The point is to pass E by value instead of by address, so E need not be
addressable. Precondition: optimize_atomic_compare_exchange_p (STMT),
which guarantees arg 1 is &var and arg 3 (weak) is a constant 0/1. */
3910 void
3911 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3913 gimple *stmt = gsi_stmt (*gsi);
3914 tree fndecl = gimple_call_fndecl (stmt);
3915 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
/* ITYPE is the underlying uintN_t (third parameter of the prototype);
CTYPE is the _Complex uintN_t carrying both results. */
3916 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3917 tree ctype = build_complex_type (itype);
3918 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3919 bool throws = false;
3920 edge e = NULL;
/* Load the expected value into an SSA name before the call. */
3921 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3922 expected);
3923 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3924 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3925 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3927 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3928 build1 (VIEW_CONVERT_EXPR, itype,
3929 gimple_assign_lhs (g)));
3930 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* Encode weak * 256 + N (byte size) in the flag operand, matching the
"w * 256 + N" form in the comment above. */
3932 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3933 + int_size_in_bytes (itype);
3934 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3935 gimple_call_arg (stmt, 0),
3936 gimple_assign_lhs (g),
3937 gimple_call_arg (stmt, 2),
3938 build_int_cst (integer_type_node, flag),
3939 gimple_call_arg (stmt, 4),
3940 gimple_call_arg (stmt, 5));
3941 tree lhs = make_ssa_name (ctype);
3942 gimple_call_set_lhs (g, lhs);
/* Transfer the virtual operands from the old call to the new one. */
3943 gimple_set_vdef (g, gimple_vdef (stmt));
3944 gimple_set_vuse (g, gimple_vuse (stmt));
3945 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3946 tree oldlhs = gimple_call_lhs (stmt);
/* If the call may throw internally, the follow-up extraction statements
must go on the fall-through edge, not after the throwing call. */
3947 if (stmt_can_throw_internal (stmt))
3949 throws = true;
3950 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3952 gimple_call_set_nothrow (as_a <gcall *> (g),
3953 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3954 gimple_call_set_lhs (stmt, NULL_TREE);
3955 gsi_replace (gsi, g, true);
3956 if (oldlhs)
/* r = (_Bool) IMAGPART_EXPR <t>; the success flag. */
3958 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3959 build1 (IMAGPART_EXPR, itype, lhs));
3960 if (throws)
3962 gsi_insert_on_edge_immediate (e, g);
3963 *gsi = gsi_for_stmt (g);
3965 else
3966 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3967 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3968 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* e = REALPART_EXPR <t>; the value actually seen in memory. */
3970 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3971 build1 (REALPART_EXPR, itype, lhs));
3972 if (throws && oldlhs == NULL_TREE)
3974 gsi_insert_on_edge_immediate (e, g);
3975 *gsi = gsi_for_stmt (g);
3977 else
3978 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3979 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3981 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3982 VIEW_CONVERT_EXPR,
3983 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3984 gimple_assign_lhs (g)));
3985 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3987 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3988 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Leave *GSI at the initial load of EXPECTED inserted above. */
3989 *gsi = gsiret;
3992 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3993 doesn't fit into TYPE. The test for overflow should be regardless of
3994 -fwrapv, and even for unsigned types. */
3996 bool
3997 arith_overflowed_p (enum tree_code code, const_tree type,
3998 const_tree arg0, const_tree arg1)
4000 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
4001 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
4002 widest2_int_cst;
4003 widest2_int warg0 = widest2_int_cst (arg0);
4004 widest2_int warg1 = widest2_int_cst (arg1);
4005 widest2_int wres;
4006 switch (code)
4008 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4009 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4010 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4011 default: gcc_unreachable ();
4013 signop sign = TYPE_SIGN (type);
4014 if (sign == UNSIGNED && wi::neg_p (wres))
4015 return true;
4016 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4019 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4020 The statement may be replaced by another statement, e.g., if the call
4021 simplifies to a constant value. Return true if any changes were made.
4022 It is assumed that the operands have been previously folded. */
4024 static bool
4025 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4027 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4028 tree callee;
4029 bool changed = false;
4030 unsigned i;
4032 /* Fold *& in call arguments. */
4033 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4034 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4036 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4037 if (tmp)
4039 gimple_call_set_arg (stmt, i, tmp);
4040 changed = true;
4044 /* Check for virtual calls that became direct calls. */
4045 callee = gimple_call_fn (stmt);
4046 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4048 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4050 if (dump_file && virtual_method_call_p (callee)
4051 && !possible_polymorphic_call_target_p
4052 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4053 (OBJ_TYPE_REF_EXPR (callee)))))
4055 fprintf (dump_file,
4056 "Type inheritance inconsistent devirtualization of ");
4057 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4058 fprintf (dump_file, " to ");
4059 print_generic_expr (dump_file, callee, TDF_SLIM);
4060 fprintf (dump_file, "\n");
4063 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4064 changed = true;
4066 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4068 bool final;
4069 vec <cgraph_node *>targets
4070 = possible_polymorphic_call_targets (callee, stmt, &final);
4071 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4073 tree lhs = gimple_call_lhs (stmt);
4074 if (dump_enabled_p ())
4076 location_t loc = gimple_location_safe (stmt);
4077 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
4078 "folding virtual function call to %s\n",
4079 targets.length () == 1
4080 ? targets[0]->name ()
4081 : "__builtin_unreachable");
4083 if (targets.length () == 1)
4085 tree fndecl = targets[0]->decl;
4086 gimple_call_set_fndecl (stmt, fndecl);
4087 changed = true;
4088 /* If changing the call to __cxa_pure_virtual
4089 or similar noreturn function, adjust gimple_call_fntype
4090 too. */
4091 if (gimple_call_noreturn_p (stmt)
4092 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4093 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4094 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4095 == void_type_node))
4096 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4097 /* If the call becomes noreturn, remove the lhs. */
4098 if (lhs
4099 && gimple_call_noreturn_p (stmt)
4100 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4101 || should_remove_lhs_p (lhs)))
4103 if (TREE_CODE (lhs) == SSA_NAME)
4105 tree var = create_tmp_var (TREE_TYPE (lhs));
4106 tree def = get_or_create_ssa_default_def (cfun, var);
4107 gimple *new_stmt = gimple_build_assign (lhs, def);
4108 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4110 gimple_call_set_lhs (stmt, NULL_TREE);
4112 maybe_remove_unused_call_args (cfun, stmt);
4114 else
4116 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4117 gimple *new_stmt = gimple_build_call (fndecl, 0);
4118 gimple_set_location (new_stmt, gimple_location (stmt));
4119 /* If the call had a SSA name as lhs morph that into
4120 an uninitialized value. */
4121 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4123 tree var = create_tmp_var (TREE_TYPE (lhs));
4124 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4125 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4126 set_ssa_default_def (cfun, var, lhs);
4128 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4129 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4130 gsi_replace (gsi, new_stmt, false);
4131 return true;
4137 /* Check for indirect calls that became direct calls, and then
4138 no longer require a static chain. */
4139 if (gimple_call_chain (stmt))
4141 tree fn = gimple_call_fndecl (stmt);
4142 if (fn && !DECL_STATIC_CHAIN (fn))
4144 gimple_call_set_chain (stmt, NULL);
4145 changed = true;
4147 else
4149 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4150 if (tmp)
4152 gimple_call_set_chain (stmt, tmp);
4153 changed = true;
4158 if (inplace)
4159 return changed;
4161 /* Check for builtins that CCP can handle using information not
4162 available in the generic fold routines. */
4163 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4165 if (gimple_fold_builtin (gsi))
4166 changed = true;
4168 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4170 changed |= targetm.gimple_fold_builtin (gsi);
4172 else if (gimple_call_internal_p (stmt))
4174 enum tree_code subcode = ERROR_MARK;
4175 tree result = NULL_TREE;
4176 bool cplx_result = false;
4177 tree overflow = NULL_TREE;
4178 switch (gimple_call_internal_fn (stmt))
4180 case IFN_BUILTIN_EXPECT:
4181 result = fold_builtin_expect (gimple_location (stmt),
4182 gimple_call_arg (stmt, 0),
4183 gimple_call_arg (stmt, 1),
4184 gimple_call_arg (stmt, 2));
4185 break;
4186 case IFN_UBSAN_OBJECT_SIZE:
4188 tree offset = gimple_call_arg (stmt, 1);
4189 tree objsize = gimple_call_arg (stmt, 2);
4190 if (integer_all_onesp (objsize)
4191 || (TREE_CODE (offset) == INTEGER_CST
4192 && TREE_CODE (objsize) == INTEGER_CST
4193 && tree_int_cst_le (offset, objsize)))
4195 replace_call_with_value (gsi, NULL_TREE);
4196 return true;
4199 break;
4200 case IFN_UBSAN_PTR:
4201 if (integer_zerop (gimple_call_arg (stmt, 1)))
4203 replace_call_with_value (gsi, NULL_TREE);
4204 return true;
4206 break;
4207 case IFN_UBSAN_BOUNDS:
4209 tree index = gimple_call_arg (stmt, 1);
4210 tree bound = gimple_call_arg (stmt, 2);
4211 if (TREE_CODE (index) == INTEGER_CST
4212 && TREE_CODE (bound) == INTEGER_CST)
4214 index = fold_convert (TREE_TYPE (bound), index);
4215 if (TREE_CODE (index) == INTEGER_CST
4216 && tree_int_cst_le (index, bound))
4218 replace_call_with_value (gsi, NULL_TREE);
4219 return true;
4223 break;
4224 case IFN_GOACC_DIM_SIZE:
4225 case IFN_GOACC_DIM_POS:
4226 result = fold_internal_goacc_dim (stmt);
4227 break;
4228 case IFN_UBSAN_CHECK_ADD:
4229 subcode = PLUS_EXPR;
4230 break;
4231 case IFN_UBSAN_CHECK_SUB:
4232 subcode = MINUS_EXPR;
4233 break;
4234 case IFN_UBSAN_CHECK_MUL:
4235 subcode = MULT_EXPR;
4236 break;
4237 case IFN_ADD_OVERFLOW:
4238 subcode = PLUS_EXPR;
4239 cplx_result = true;
4240 break;
4241 case IFN_SUB_OVERFLOW:
4242 subcode = MINUS_EXPR;
4243 cplx_result = true;
4244 break;
4245 case IFN_MUL_OVERFLOW:
4246 subcode = MULT_EXPR;
4247 cplx_result = true;
4248 break;
4249 default:
4250 break;
4252 if (subcode != ERROR_MARK)
4254 tree arg0 = gimple_call_arg (stmt, 0);
4255 tree arg1 = gimple_call_arg (stmt, 1);
4256 tree type = TREE_TYPE (arg0);
4257 if (cplx_result)
4259 tree lhs = gimple_call_lhs (stmt);
4260 if (lhs == NULL_TREE)
4261 type = NULL_TREE;
4262 else
4263 type = TREE_TYPE (TREE_TYPE (lhs));
4265 if (type == NULL_TREE)
4267 /* x = y + 0; x = y - 0; x = y * 0; */
4268 else if (integer_zerop (arg1))
4269 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4270 /* x = 0 + y; x = 0 * y; */
4271 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4272 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4273 /* x = y - y; */
4274 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4275 result = integer_zero_node;
4276 /* x = y * 1; x = 1 * y; */
4277 else if (subcode == MULT_EXPR && integer_onep (arg1))
4278 result = arg0;
4279 else if (subcode == MULT_EXPR && integer_onep (arg0))
4280 result = arg1;
4281 else if (TREE_CODE (arg0) == INTEGER_CST
4282 && TREE_CODE (arg1) == INTEGER_CST)
4284 if (cplx_result)
4285 result = int_const_binop (subcode, fold_convert (type, arg0),
4286 fold_convert (type, arg1));
4287 else
4288 result = int_const_binop (subcode, arg0, arg1);
4289 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4291 if (cplx_result)
4292 overflow = build_one_cst (type);
4293 else
4294 result = NULL_TREE;
4297 if (result)
4299 if (result == integer_zero_node)
4300 result = build_zero_cst (type);
4301 else if (cplx_result && TREE_TYPE (result) != type)
4303 if (TREE_CODE (result) == INTEGER_CST)
4305 if (arith_overflowed_p (PLUS_EXPR, type, result,
4306 integer_zero_node))
4307 overflow = build_one_cst (type);
4309 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4310 && TYPE_UNSIGNED (type))
4311 || (TYPE_PRECISION (type)
4312 < (TYPE_PRECISION (TREE_TYPE (result))
4313 + (TYPE_UNSIGNED (TREE_TYPE (result))
4314 && !TYPE_UNSIGNED (type)))))
4315 result = NULL_TREE;
4316 if (result)
4317 result = fold_convert (type, result);
4322 if (result)
4324 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4325 result = drop_tree_overflow (result);
4326 if (cplx_result)
4328 if (overflow == NULL_TREE)
4329 overflow = build_zero_cst (TREE_TYPE (result));
4330 tree ctype = build_complex_type (TREE_TYPE (result));
4331 if (TREE_CODE (result) == INTEGER_CST
4332 && TREE_CODE (overflow) == INTEGER_CST)
4333 result = build_complex (ctype, result, overflow);
4334 else
4335 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4336 ctype, result, overflow);
4338 if (!update_call_from_tree (gsi, result))
4339 gimplify_and_update_call_from_tree (gsi, result);
4340 changed = true;
4344 return changed;
4348 /* Return true whether NAME has a use on STMT. */
4350 static bool
4351 has_use_on_stmt (tree name, gimple *stmt)
4353 imm_use_iterator iter;
4354 use_operand_p use_p;
4355 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4356 if (USE_STMT (use_p) == stmt)
4357 return true;
4358 return false;
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  /* Same check for SSA names nested inside a comparison operand
     (ops[0] may be a whole COMPARISON_CLASS_P tree).  */
  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      /* For a GIMPLE_COND the result must end up as a condition; a plain
	 comparison can be installed directly, other shapes are reduced
	 to "res != 0" or a constant true/false edge.  */
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      /* For an assignment, replace the RHS in place when allowed; with
	 INPLACE only if the new RHS does not need more operand slots
	 than the existing statement provides.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      /* Same call, simplified arguments: update the args in place.  */
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      /* Fallback: materialize the result into SEQ and replace the whole
	 statement, preserving virtual operands.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the reference tree to canonicalize in place; returns
   true if *T was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR so the rewrites below apply to
     the referenced object.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  /* Compute the bit offset of element (index - low) and
		     verify the access stays within the vector.  */
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					 (TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  /* Strip handled components to reach the base reference.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used to look up SSA names during
   pattern-based simplification.  Returns true if *GSI changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  /* Swapping comparison operands requires swapping the
		     comparison code as well.  */
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	/* Canonicalize reference arguments and the LHS of the call.  */
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize addresses in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      /* Falls through to the empty default.  */
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* Re-fetch: the simplification above may have replaced the stmt.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* With INPLACE only install the new RHS when it needs fewer
	   operand slots than the statement currently has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a<greturn *> (stmt);
	tree ret = gimple_return_retval(ret_stmt);

	/* Propagate the known value into the return statement.  */
	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
4947 /* Valueziation callback that ends up not following SSA edges. */
4949 tree
4950 no_follow_ssa_edges (tree)
4952 return NULL_TREE;
4955 /* Valueization callback that ends up following single-use SSA edges only. */
4957 tree
4958 follow_single_use_edges (tree val)
4960 if (TREE_CODE (val) == SSA_NAME
4961 && !has_single_use (val))
4962 return NULL_TREE;
4963 return val;
4966 /* Valueization callback that follows all SSA edges. */
4968 tree
4969 follow_all_ssa_edges (tree val)
4971 return val;
4974 /* Fold the statement pointed to by GSI. In some cases, this function may
4975 replace the whole statement with a new one. Returns true iff folding
4976 makes any changes.
4977 The statement pointed to by GSI should be in valid gimple form but may
4978 be in unfolded state as resulting from for example constant propagation
4979 which can produce *&x = 0. */
4981 bool
4982 fold_stmt (gimple_stmt_iterator *gsi)
4984 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4987 bool
4988 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4990 return fold_stmt_1 (gsi, false, valueize);
4993 /* Perform the minimal folding on statement *GSI. Only operations like
4994 *&x created by constant propagation are handled. The statement cannot
4995 be replaced with a new one. Return true if the statement was
4996 changed, false otherwise.
4997 The statement *GSI should be in valid gimple form but may
4998 be in unfolded state as resulting from for example constant propagation
4999 which can produce *&x = 0. */
5001 bool
5002 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5004 gimple *stmt = gsi_stmt (*gsi);
5005 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5006 gcc_assert (gsi_stmt (*gsi) == stmt);
5007 return changed;
5010 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5011 if EXPR is null or we don't know how.
5012 If non-null, the result always has boolean type. */
5014 static tree
5015 canonicalize_bool (tree expr, bool invert)
5017 if (!expr)
5018 return NULL_TREE;
5019 else if (invert)
5021 if (integer_nonzerop (expr))
5022 return boolean_false_node;
5023 else if (integer_zerop (expr))
5024 return boolean_true_node;
5025 else if (TREE_CODE (expr) == SSA_NAME)
5026 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5027 build_int_cst (TREE_TYPE (expr), 0));
5028 else if (COMPARISON_CLASS_P (expr))
5029 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5030 boolean_type_node,
5031 TREE_OPERAND (expr, 0),
5032 TREE_OPERAND (expr, 1));
5033 else
5034 return NULL_TREE;
5036 else
5038 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5039 return expr;
5040 if (integer_nonzerop (expr))
5041 return boolean_true_node;
5042 else if (integer_zerop (expr))
5043 return boolean_false_node;
5044 else if (TREE_CODE (expr) == SSA_NAME)
5045 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5046 build_int_cst (TREE_TYPE (expr), 0));
5047 else if (COMPARISON_CLASS_P (expr))
5048 return fold_build2 (TREE_CODE (expr),
5049 boolean_type_node,
5050 TREE_OPERAND (expr, 0),
5051 TREE_OPERAND (expr, 1));
5052 else
5053 return NULL_TREE;
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      /* name vs (name != 0) / (name == 1).  */
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      /* Or the defining statement of EXPR is literally the same
	 comparison.  */
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == nonzero): same polarity, recurse
	     with the inner comparison as-is.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != nonzero): inverted polarity, recurse
	     with the inverted inner comparison.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
5115 /* Check to see if two boolean expressions OP1 and OP2 are logically
5116 equivalent. */
5118 static bool
5119 same_bool_result_p (const_tree op1, const_tree op2)
5121 /* Simple cases first. */
5122 if (operand_equal_p (op1, op2, 0))
5123 return true;
5125 /* Check the cases where at least one of the operands is a comparison.
5126 These are a bit smarter than operand_equal_p in that they apply some
5127 identifies on SSA_NAMEs. */
5128 if (COMPARISON_CLASS_P (op2)
5129 && same_bool_comparison_p (op1, TREE_CODE (op2),
5130 TREE_OPERAND (op2, 0),
5131 TREE_OPERAND (op2, 1)))
5132 return true;
5133 if (COMPARISON_CLASS_P (op1)
5134 && same_bool_comparison_p (op2, TREE_CODE (op1),
5135 TREE_OPERAND (op1, 0),
5136 TREE_OPERAND (op1, 1)))
5137 return true;
5139 /* Default case. */
5140 return false;
5143 /* Forward declarations for some mutually recursive functions. */
5145 static tree
5146 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5147 enum tree_code code2, tree op2a, tree op2b);
5148 static tree
5149 and_var_with_comparison (tree var, bool invert,
5150 enum tree_code code2, tree op2a, tree op2b);
5151 static tree
5152 and_var_with_comparison_1 (gimple *stmt,
5153 enum tree_code code2, tree op2a, tree op2b);
5154 static tree
5155 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5156 enum tree_code code2, tree op2a, tree op2b);
5157 static tree
5158 or_var_with_comparison (tree var, bool invert,
5159 enum tree_code code2, tree op2a, tree op2b);
5160 static tree
5161 or_var_with_comparison_1 (gimple *stmt,
5162 enum tree_code code2, tree op2a, tree op2b);
5164 /* Helper function for and_comparisons_1: try to simplify the AND of the
5165 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5166 If INVERT is true, invert the value of the VAR before doing the AND.
5167 Return NULL_EXPR if we can't simplify this to a single expression. */
5169 static tree
5170 and_var_with_comparison (tree var, bool invert,
5171 enum tree_code code2, tree op2a, tree op2b)
5173 tree t;
5174 gimple *stmt = SSA_NAME_DEF_STMT (var);
5176 /* We can only deal with variables whose definitions are assignments. */
5177 if (!is_gimple_assign (stmt))
5178 return NULL_TREE;
5180 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5181 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5182 Then we only have to consider the simpler non-inverted cases. */
5183 if (invert)
5184 t = or_var_with_comparison_1 (stmt,
5185 invert_tree_comparison (code2, false),
5186 op2a, op2b);
5187 else
5188 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5189 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == nonzero) tests op2a for true.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) or (op2a != nonzero) tests op2a for false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
5355 /* Try to simplify the AND of two comparisons defined by
5356 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5357 If this can be done without constructing an intermediate value,
5358 return the resulting tree; otherwise NULL_TREE is returned.
5359 This function is deliberately asymmetric as it recurses on SSA_DEFs
5360 in the first comparison but not the second. */
5362 static tree
5363 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5364 enum tree_code code2, tree op2a, tree op2b)
5366 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5368 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5369 if (operand_equal_p (op1a, op2a, 0)
5370 && operand_equal_p (op1b, op2b, 0))
5372 /* Result will be either NULL_TREE, or a combined comparison. */
5373 tree t = combine_comparisons (UNKNOWN_LOCATION,
5374 TRUTH_ANDIF_EXPR, code1, code2,
5375 truth_type, op1a, op1b);
5376 if (t)
5377 return t;
5380 /* Likewise the swapped case of the above. */
5381 if (operand_equal_p (op1a, op2b, 0)
5382 && operand_equal_p (op1b, op2a, 0))
5384 /* Result will be either NULL_TREE, or a combined comparison. */
5385 tree t = combine_comparisons (UNKNOWN_LOCATION,
5386 TRUTH_ANDIF_EXPR, code1,
5387 swap_tree_comparison (code2),
5388 truth_type, op1a, op1b);
5389 if (t)
5390 return t;
5393 /* If both comparisons are of the same value against constants, we might
5394 be able to merge them. */
5395 if (operand_equal_p (op1a, op2a, 0)
5396 && TREE_CODE (op1b) == INTEGER_CST
5397 && TREE_CODE (op2b) == INTEGER_CST)
5399 int cmp = tree_int_cst_compare (op1b, op2b);
5401 /* If we have (op1a == op1b), we should either be able to
5402 return that or FALSE, depending on whether the constant op1b
5403 also satisfies the other comparison against op2b. */
5404 if (code1 == EQ_EXPR)
5406 bool done = true;
5407 bool val;
5408 switch (code2)
5410 case EQ_EXPR: val = (cmp == 0); break;
5411 case NE_EXPR: val = (cmp != 0); break;
5412 case LT_EXPR: val = (cmp < 0); break;
5413 case GT_EXPR: val = (cmp > 0); break;
5414 case LE_EXPR: val = (cmp <= 0); break;
5415 case GE_EXPR: val = (cmp >= 0); break;
5416 default: done = false;
5418 if (done)
5420 if (val)
5421 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5422 else
5423 return boolean_false_node;
5426 /* Likewise if the second comparison is an == comparison. */
5427 else if (code2 == EQ_EXPR)
5429 bool done = true;
5430 bool val;
5431 switch (code1)
5433 case EQ_EXPR: val = (cmp == 0); break;
5434 case NE_EXPR: val = (cmp != 0); break;
5435 case LT_EXPR: val = (cmp > 0); break;
5436 case GT_EXPR: val = (cmp < 0); break;
5437 case LE_EXPR: val = (cmp >= 0); break;
5438 case GE_EXPR: val = (cmp <= 0); break;
5439 default: done = false;
5441 if (done)
5443 if (val)
5444 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5445 else
5446 return boolean_false_node;
5450 /* Same business with inequality tests. */
5451 else if (code1 == NE_EXPR)
5453 bool val;
5454 switch (code2)
5456 case EQ_EXPR: val = (cmp != 0); break;
5457 case NE_EXPR: val = (cmp == 0); break;
5458 case LT_EXPR: val = (cmp >= 0); break;
5459 case GT_EXPR: val = (cmp <= 0); break;
5460 case LE_EXPR: val = (cmp > 0); break;
5461 case GE_EXPR: val = (cmp < 0); break;
5462 default:
5463 val = false;
5465 if (val)
5466 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5468 else if (code2 == NE_EXPR)
5470 bool val;
5471 switch (code1)
5473 case EQ_EXPR: val = (cmp == 0); break;
5474 case NE_EXPR: val = (cmp != 0); break;
5475 case LT_EXPR: val = (cmp <= 0); break;
5476 case GT_EXPR: val = (cmp >= 0); break;
5477 case LE_EXPR: val = (cmp < 0); break;
5478 case GE_EXPR: val = (cmp > 0); break;
5479 default:
5480 val = false;
5482 if (val)
5483 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5486 /* Chose the more restrictive of two < or <= comparisons. */
5487 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5488 && (code2 == LT_EXPR || code2 == LE_EXPR))
5490 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5491 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5492 else
5493 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5496 /* Likewise chose the more restrictive of two > or >= comparisons. */
5497 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5498 && (code2 == GT_EXPR || code2 == GE_EXPR))
5500 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5501 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5502 else
5503 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5506 /* Check for singleton ranges. */
5507 else if (cmp == 0
5508 && ((code1 == LE_EXPR && code2 == GE_EXPR)
5509 || (code1 == GE_EXPR && code2 == LE_EXPR)))
5510 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
5512 /* Check for disjoint ranges. */
5513 else if (cmp <= 0
5514 && (code1 == LT_EXPR || code1 == LE_EXPR)
5515 && (code2 == GT_EXPR || code2 == GE_EXPR))
5516 return boolean_false_node;
5517 else if (cmp >= 0
5518 && (code1 == GT_EXPR || code1 == GE_EXPR)
5519 && (code2 == LT_EXPR || code2 == LE_EXPR))
5520 return boolean_false_node;
5523 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5524 NAME's definition is a truth value. See if there are any simplifications
5525 that can be done against the NAME's definition. */
5526 if (TREE_CODE (op1a) == SSA_NAME
5527 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5528 && (integer_zerop (op1b) || integer_onep (op1b)))
5530 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5531 || (code1 == NE_EXPR && integer_onep (op1b)));
5532 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5533 switch (gimple_code (stmt))
5535 case GIMPLE_ASSIGN:
5536 /* Try to simplify by copy-propagating the definition. */
5537 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
5539 case GIMPLE_PHI:
5540 /* If every argument to the PHI produces the same result when
5541 ANDed with the second comparison, we win.
5542 Do not do this unless the type is bool since we need a bool
5543 result here anyway. */
5544 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
5546 tree result = NULL_TREE;
5547 unsigned i;
5548 for (i = 0; i < gimple_phi_num_args (stmt); i++)
5550 tree arg = gimple_phi_arg_def (stmt, i);
5552 /* If this PHI has itself as an argument, ignore it.
5553 If all the other args produce the same result,
5554 we're still OK. */
5555 if (arg == gimple_phi_result (stmt))
5556 continue;
5557 else if (TREE_CODE (arg) == INTEGER_CST)
5559 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
5561 if (!result)
5562 result = boolean_false_node;
5563 else if (!integer_zerop (result))
5564 return NULL_TREE;
5566 else if (!result)
5567 result = fold_build2 (code2, boolean_type_node,
5568 op2a, op2b);
5569 else if (!same_bool_comparison_p (result,
5570 code2, op2a, op2b))
5571 return NULL_TREE;
5573 else if (TREE_CODE (arg) == SSA_NAME
5574 && !SSA_NAME_IS_DEFAULT_DEF (arg))
5576 tree temp;
5577 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
5578 /* In simple cases we can look through PHI nodes,
5579 but we have to be careful with loops.
5580 See PR49073. */
5581 if (! dom_info_available_p (CDI_DOMINATORS)
5582 || gimple_bb (def_stmt) == gimple_bb (stmt)
5583 || dominated_by_p (CDI_DOMINATORS,
5584 gimple_bb (def_stmt),
5585 gimple_bb (stmt)))
5586 return NULL_TREE;
5587 temp = and_var_with_comparison (arg, invert, code2,
5588 op2a, op2b);
5589 if (!temp)
5590 return NULL_TREE;
5591 else if (!result)
5592 result = temp;
5593 else if (!same_bool_result_p (result, temp))
5594 return NULL_TREE;
5596 else
5597 return NULL_TREE;
5599 return result;
5602 default:
5603 break;
5606 return NULL_TREE;
5609 /* Try to simplify the AND of two comparisons, specified by
5610 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5611 If this can be simplified to a single expression (without requiring
5612 introducing more SSA variables to hold intermediate values),
5613 return the resulting tree. Otherwise return NULL_TREE.
5614 If the result expression is non-null, it has boolean type. */
5616 tree
5617 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5618 enum tree_code code2, tree op2a, tree op2b)
5620 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5621 if (t)
5622 return t;
5623 else
5624 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5627 /* Helper function for or_comparisons_1: try to simplify the OR of the
5628 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5629 If INVERT is true, invert the value of VAR before doing the OR.
5630 Return NULL_EXPR if we can't simplify this to a single expression. */
5632 static tree
5633 or_var_with_comparison (tree var, bool invert,
5634 enum tree_code code2, tree op2a, tree op2b)
5636 tree t;
5637 gimple *stmt = SSA_NAME_DEF_STMT (var);
5639 /* We can only deal with variables whose definitions are assignments. */
5640 if (!is_gimple_assign (stmt))
5641 return NULL_TREE;
5643 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5644 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5645 Then we only have to consider the simpler non-inverted cases. */
5646 if (invert)
5647 t = and_var_with_comparison_1 (stmt,
5648 invert_tree_comparison (code2, false),
5649 op2a, op2b);
5650 else
5651 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5652 return canonicalize_bool (t, invert);
5655 /* Try to simplify the OR of the ssa variable defined by the assignment
5656 STMT with the comparison specified by (OP2A CODE2 OP2B).
5657 Return NULL_EXPR if we can't simplify this to a single expression. */
5659 static tree
5660 or_var_with_comparison_1 (gimple *stmt,
5661 enum tree_code code2, tree op2a, tree op2b)
5663 tree var = gimple_assign_lhs (stmt);
5664 tree true_test_var = NULL_TREE;
5665 tree false_test_var = NULL_TREE;
5666 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5668 /* Check for identities like (var OR (var != 0)) => true . */
5669 if (TREE_CODE (op2a) == SSA_NAME
5670 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5672 if ((code2 == NE_EXPR && integer_zerop (op2b))
5673 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5675 true_test_var = op2a;
5676 if (var == true_test_var)
5677 return var;
5679 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5680 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5682 false_test_var = op2a;
5683 if (var == false_test_var)
5684 return boolean_true_node;
5688 /* If the definition is a comparison, recurse on it. */
5689 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5691 tree t = or_comparisons_1 (innercode,
5692 gimple_assign_rhs1 (stmt),
5693 gimple_assign_rhs2 (stmt),
5694 code2,
5695 op2a,
5696 op2b);
5697 if (t)
5698 return t;
5701 /* If the definition is an AND or OR expression, we may be able to
5702 simplify by reassociating. */
5703 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5704 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5706 tree inner1 = gimple_assign_rhs1 (stmt);
5707 tree inner2 = gimple_assign_rhs2 (stmt);
5708 gimple *s;
5709 tree t;
5710 tree partial = NULL_TREE;
5711 bool is_or = (innercode == BIT_IOR_EXPR);
5713 /* Check for boolean identities that don't require recursive examination
5714 of inner1/inner2:
5715 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5716 inner1 OR (inner1 AND inner2) => inner1
5717 !inner1 OR (inner1 OR inner2) => true
5718 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5720 if (inner1 == true_test_var)
5721 return (is_or ? var : inner1);
5722 else if (inner2 == true_test_var)
5723 return (is_or ? var : inner2);
5724 else if (inner1 == false_test_var)
5725 return (is_or
5726 ? boolean_true_node
5727 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
5728 else if (inner2 == false_test_var)
5729 return (is_or
5730 ? boolean_true_node
5731 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
5733 /* Next, redistribute/reassociate the OR across the inner tests.
5734 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5735 if (TREE_CODE (inner1) == SSA_NAME
5736 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5737 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5738 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5739 gimple_assign_rhs1 (s),
5740 gimple_assign_rhs2 (s),
5741 code2, op2a, op2b)))
5743 /* Handle the OR case, where we are reassociating:
5744 (inner1 OR inner2) OR (op2a code2 op2b)
5745 => (t OR inner2)
5746 If the partial result t is a constant, we win. Otherwise
5747 continue on to try reassociating with the other inner test. */
5748 if (is_or)
5750 if (integer_onep (t))
5751 return boolean_true_node;
5752 else if (integer_zerop (t))
5753 return inner2;
5756 /* Handle the AND case, where we are redistributing:
5757 (inner1 AND inner2) OR (op2a code2 op2b)
5758 => (t AND (inner2 OR (op2a code op2b))) */
5759 else if (integer_zerop (t))
5760 return boolean_false_node;
5762 /* Save partial result for later. */
5763 partial = t;
5766 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5767 if (TREE_CODE (inner2) == SSA_NAME
5768 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5769 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5770 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
5771 gimple_assign_rhs1 (s),
5772 gimple_assign_rhs2 (s),
5773 code2, op2a, op2b)))
5775 /* Handle the OR case, where we are reassociating:
5776 (inner1 OR inner2) OR (op2a code2 op2b)
5777 => (inner1 OR t)
5778 => (t OR partial) */
5779 if (is_or)
5781 if (integer_zerop (t))
5782 return inner1;
5783 else if (integer_onep (t))
5784 return boolean_true_node;
5785 /* If both are the same, we can apply the identity
5786 (x OR x) == x. */
5787 else if (partial && same_bool_result_p (t, partial))
5788 return t;
5791 /* Handle the AND case, where we are redistributing:
5792 (inner1 AND inner2) OR (op2a code2 op2b)
5793 => (t AND (inner1 OR (op2a code2 op2b)))
5794 => (t AND partial) */
5795 else
5797 if (integer_zerop (t))
5798 return boolean_false_node;
5799 else if (partial)
5801 /* We already got a simplification for the other
5802 operand to the redistributed AND expression. The
5803 interesting case is when at least one is true.
5804 Or, if both are the same, we can apply the identity
5805 (x AND x) == x. */
5806 if (integer_onep (partial))
5807 return t;
5808 else if (integer_onep (t))
5809 return partial;
5810 else if (same_bool_result_p (t, partial))
5811 return t;
5816 return NULL_TREE;
5819 /* Try to simplify the OR of two comparisons defined by
5820 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5821 If this can be done without constructing an intermediate value,
5822 return the resulting tree; otherwise NULL_TREE is returned.
5823 This function is deliberately asymmetric as it recurses on SSA_DEFs
5824 in the first comparison but not the second. */
5826 static tree
5827 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5828 enum tree_code code2, tree op2a, tree op2b)
5830 tree truth_type = truth_type_for (TREE_TYPE (op1a));
5832 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5833 if (operand_equal_p (op1a, op2a, 0)
5834 && operand_equal_p (op1b, op2b, 0))
5836 /* Result will be either NULL_TREE, or a combined comparison. */
5837 tree t = combine_comparisons (UNKNOWN_LOCATION,
5838 TRUTH_ORIF_EXPR, code1, code2,
5839 truth_type, op1a, op1b);
5840 if (t)
5841 return t;
5844 /* Likewise the swapped case of the above. */
5845 if (operand_equal_p (op1a, op2b, 0)
5846 && operand_equal_p (op1b, op2a, 0))
5848 /* Result will be either NULL_TREE, or a combined comparison. */
5849 tree t = combine_comparisons (UNKNOWN_LOCATION,
5850 TRUTH_ORIF_EXPR, code1,
5851 swap_tree_comparison (code2),
5852 truth_type, op1a, op1b);
5853 if (t)
5854 return t;
5857 /* If both comparisons are of the same value against constants, we might
5858 be able to merge them. */
5859 if (operand_equal_p (op1a, op2a, 0)
5860 && TREE_CODE (op1b) == INTEGER_CST
5861 && TREE_CODE (op2b) == INTEGER_CST)
5863 int cmp = tree_int_cst_compare (op1b, op2b);
5865 /* If we have (op1a != op1b), we should either be able to
5866 return that or TRUE, depending on whether the constant op1b
5867 also satisfies the other comparison against op2b. */
5868 if (code1 == NE_EXPR)
5870 bool done = true;
5871 bool val;
5872 switch (code2)
5874 case EQ_EXPR: val = (cmp == 0); break;
5875 case NE_EXPR: val = (cmp != 0); break;
5876 case LT_EXPR: val = (cmp < 0); break;
5877 case GT_EXPR: val = (cmp > 0); break;
5878 case LE_EXPR: val = (cmp <= 0); break;
5879 case GE_EXPR: val = (cmp >= 0); break;
5880 default: done = false;
5882 if (done)
5884 if (val)
5885 return boolean_true_node;
5886 else
5887 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5890 /* Likewise if the second comparison is a != comparison. */
5891 else if (code2 == NE_EXPR)
5893 bool done = true;
5894 bool val;
5895 switch (code1)
5897 case EQ_EXPR: val = (cmp == 0); break;
5898 case NE_EXPR: val = (cmp != 0); break;
5899 case LT_EXPR: val = (cmp > 0); break;
5900 case GT_EXPR: val = (cmp < 0); break;
5901 case LE_EXPR: val = (cmp >= 0); break;
5902 case GE_EXPR: val = (cmp <= 0); break;
5903 default: done = false;
5905 if (done)
5907 if (val)
5908 return boolean_true_node;
5909 else
5910 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5914 /* See if an equality test is redundant with the other comparison. */
5915 else if (code1 == EQ_EXPR)
5917 bool val;
5918 switch (code2)
5920 case EQ_EXPR: val = (cmp == 0); break;
5921 case NE_EXPR: val = (cmp != 0); break;
5922 case LT_EXPR: val = (cmp < 0); break;
5923 case GT_EXPR: val = (cmp > 0); break;
5924 case LE_EXPR: val = (cmp <= 0); break;
5925 case GE_EXPR: val = (cmp >= 0); break;
5926 default:
5927 val = false;
5929 if (val)
5930 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5932 else if (code2 == EQ_EXPR)
5934 bool val;
5935 switch (code1)
5937 case EQ_EXPR: val = (cmp == 0); break;
5938 case NE_EXPR: val = (cmp != 0); break;
5939 case LT_EXPR: val = (cmp > 0); break;
5940 case GT_EXPR: val = (cmp < 0); break;
5941 case LE_EXPR: val = (cmp >= 0); break;
5942 case GE_EXPR: val = (cmp <= 0); break;
5943 default:
5944 val = false;
5946 if (val)
5947 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5950 /* Chose the less restrictive of two < or <= comparisons. */
5951 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
5952 && (code2 == LT_EXPR || code2 == LE_EXPR))
5954 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
5955 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5956 else
5957 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5960 /* Likewise chose the less restrictive of two > or >= comparisons. */
5961 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
5962 && (code2 == GT_EXPR || code2 == GE_EXPR))
5964 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
5965 return fold_build2 (code2, boolean_type_node, op2a, op2b);
5966 else
5967 return fold_build2 (code1, boolean_type_node, op1a, op1b);
5970 /* Check for singleton ranges. */
5971 else if (cmp == 0
5972 && ((code1 == LT_EXPR && code2 == GT_EXPR)
5973 || (code1 == GT_EXPR && code2 == LT_EXPR)))
5974 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
5976 /* Check for less/greater pairs that don't restrict the range at all. */
5977 else if (cmp >= 0
5978 && (code1 == LT_EXPR || code1 == LE_EXPR)
5979 && (code2 == GT_EXPR || code2 == GE_EXPR))
5980 return boolean_true_node;
5981 else if (cmp <= 0
5982 && (code1 == GT_EXPR || code1 == GE_EXPR)
5983 && (code2 == LT_EXPR || code2 == LE_EXPR))
5984 return boolean_true_node;
5987 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5988 NAME's definition is a truth value. See if there are any simplifications
5989 that can be done against the NAME's definition. */
5990 if (TREE_CODE (op1a) == SSA_NAME
5991 && (code1 == NE_EXPR || code1 == EQ_EXPR)
5992 && (integer_zerop (op1b) || integer_onep (op1b)))
5994 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
5995 || (code1 == NE_EXPR && integer_onep (op1b)));
5996 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
5997 switch (gimple_code (stmt))
5999 case GIMPLE_ASSIGN:
6000 /* Try to simplify by copy-propagating the definition. */
6001 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
6003 case GIMPLE_PHI:
6004 /* If every argument to the PHI produces the same result when
6005 ORed with the second comparison, we win.
6006 Do not do this unless the type is bool since we need a bool
6007 result here anyway. */
6008 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6010 tree result = NULL_TREE;
6011 unsigned i;
6012 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6014 tree arg = gimple_phi_arg_def (stmt, i);
6016 /* If this PHI has itself as an argument, ignore it.
6017 If all the other args produce the same result,
6018 we're still OK. */
6019 if (arg == gimple_phi_result (stmt))
6020 continue;
6021 else if (TREE_CODE (arg) == INTEGER_CST)
6023 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
6025 if (!result)
6026 result = boolean_true_node;
6027 else if (!integer_onep (result))
6028 return NULL_TREE;
6030 else if (!result)
6031 result = fold_build2 (code2, boolean_type_node,
6032 op2a, op2b);
6033 else if (!same_bool_comparison_p (result,
6034 code2, op2a, op2b))
6035 return NULL_TREE;
6037 else if (TREE_CODE (arg) == SSA_NAME
6038 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6040 tree temp;
6041 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6042 /* In simple cases we can look through PHI nodes,
6043 but we have to be careful with loops.
6044 See PR49073. */
6045 if (! dom_info_available_p (CDI_DOMINATORS)
6046 || gimple_bb (def_stmt) == gimple_bb (stmt)
6047 || dominated_by_p (CDI_DOMINATORS,
6048 gimple_bb (def_stmt),
6049 gimple_bb (stmt)))
6050 return NULL_TREE;
6051 temp = or_var_with_comparison (arg, invert, code2,
6052 op2a, op2b);
6053 if (!temp)
6054 return NULL_TREE;
6055 else if (!result)
6056 result = temp;
6057 else if (!same_bool_result_p (result, temp))
6058 return NULL_TREE;
6060 else
6061 return NULL_TREE;
6063 return result;
6066 default:
6067 break;
6070 return NULL_TREE;
6073 /* Try to simplify the OR of two comparisons, specified by
6074 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6075 If this can be simplified to a single expression (without requiring
6076 introducing more SSA variables to hold intermediate values),
6077 return the resulting tree. Otherwise return NULL_TREE.
6078 If the result expression is non-null, it has boolean type. */
6080 tree
6081 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6082 enum tree_code code2, tree op2a, tree op2b)
6084 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6085 if (t)
6086 return t;
6087 else
6088 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6092 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6094 Either NULL_TREE, a simplified but non-constant or a constant
6095 is returned.
6097 ??? This should go into a gimple-fold-inline.h file to be eventually
6098 privatized with the single valueize function used in the various TUs
6099 to avoid the indirect function call overhead. */
6101 tree
6102 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6103 tree (*gvalueize) (tree))
6105 gimple_match_op res_op;
6106 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6107 edges if there are intermediate VARYING defs. For this reason
6108 do not follow SSA edges here even though SCCVN can technically
6109 just deal fine with that. */
6110 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6112 tree res = NULL_TREE;
6113 if (gimple_simplified_result_is_gimple_val (&res_op))
6114 res = res_op.ops[0];
6115 else if (mprts_hook)
6116 res = mprts_hook (&res_op);
6117 if (res)
6119 if (dump_file && dump_flags & TDF_DETAILS)
6121 fprintf (dump_file, "Match-and-simplified ");
6122 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6123 fprintf (dump_file, " to ");
6124 print_generic_expr (dump_file, res);
6125 fprintf (dump_file, "\n");
6127 return res;
6131 location_t loc = gimple_location (stmt);
6132 switch (gimple_code (stmt))
6134 case GIMPLE_ASSIGN:
6136 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6138 switch (get_gimple_rhs_class (subcode))
6140 case GIMPLE_SINGLE_RHS:
6142 tree rhs = gimple_assign_rhs1 (stmt);
6143 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6145 if (TREE_CODE (rhs) == SSA_NAME)
6147 /* If the RHS is an SSA_NAME, return its known constant value,
6148 if any. */
6149 return (*valueize) (rhs);
6151 /* Handle propagating invariant addresses into address
6152 operations. */
6153 else if (TREE_CODE (rhs) == ADDR_EXPR
6154 && !is_gimple_min_invariant (rhs))
6156 poly_int64 offset = 0;
6157 tree base;
6158 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6159 &offset,
6160 valueize);
6161 if (base
6162 && (CONSTANT_CLASS_P (base)
6163 || decl_address_invariant_p (base)))
6164 return build_invariant_address (TREE_TYPE (rhs),
6165 base, offset);
6167 else if (TREE_CODE (rhs) == CONSTRUCTOR
6168 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6169 && known_eq (CONSTRUCTOR_NELTS (rhs),
6170 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6172 unsigned i, nelts;
6173 tree val;
6175 nelts = CONSTRUCTOR_NELTS (rhs);
6176 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6177 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6179 val = (*valueize) (val);
6180 if (TREE_CODE (val) == INTEGER_CST
6181 || TREE_CODE (val) == REAL_CST
6182 || TREE_CODE (val) == FIXED_CST)
6183 vec.quick_push (val);
6184 else
6185 return NULL_TREE;
6188 return vec.build ();
6190 if (subcode == OBJ_TYPE_REF)
6192 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6193 /* If callee is constant, we can fold away the wrapper. */
6194 if (is_gimple_min_invariant (val))
6195 return val;
6198 if (kind == tcc_reference)
6200 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6201 || TREE_CODE (rhs) == REALPART_EXPR
6202 || TREE_CODE (rhs) == IMAGPART_EXPR)
6203 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6205 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6206 return fold_unary_loc (EXPR_LOCATION (rhs),
6207 TREE_CODE (rhs),
6208 TREE_TYPE (rhs), val);
6210 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6211 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6213 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6214 return fold_ternary_loc (EXPR_LOCATION (rhs),
6215 TREE_CODE (rhs),
6216 TREE_TYPE (rhs), val,
6217 TREE_OPERAND (rhs, 1),
6218 TREE_OPERAND (rhs, 2));
6220 else if (TREE_CODE (rhs) == MEM_REF
6221 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6223 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6224 if (TREE_CODE (val) == ADDR_EXPR
6225 && is_gimple_min_invariant (val))
6227 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6228 unshare_expr (val),
6229 TREE_OPERAND (rhs, 1));
6230 if (tem)
6231 rhs = tem;
6234 return fold_const_aggregate_ref_1 (rhs, valueize);
6236 else if (kind == tcc_declaration)
6237 return get_symbol_constant_value (rhs);
6238 return rhs;
6241 case GIMPLE_UNARY_RHS:
6242 return NULL_TREE;
6244 case GIMPLE_BINARY_RHS:
6245 /* Translate &x + CST into an invariant form suitable for
6246 further propagation. */
6247 if (subcode == POINTER_PLUS_EXPR)
6249 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6250 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6251 if (TREE_CODE (op0) == ADDR_EXPR
6252 && TREE_CODE (op1) == INTEGER_CST)
6254 tree off = fold_convert (ptr_type_node, op1);
6255 return build_fold_addr_expr_loc
6256 (loc,
6257 fold_build2 (MEM_REF,
6258 TREE_TYPE (TREE_TYPE (op0)),
6259 unshare_expr (op0), off));
6262 /* Canonicalize bool != 0 and bool == 0 appearing after
6263 valueization. While gimple_simplify handles this
6264 it can get confused by the ~X == 1 -> X == 0 transform
6265 which we cant reduce to a SSA name or a constant
6266 (and we have no way to tell gimple_simplify to not
6267 consider those transforms in the first place). */
6268 else if (subcode == EQ_EXPR
6269 || subcode == NE_EXPR)
6271 tree lhs = gimple_assign_lhs (stmt);
6272 tree op0 = gimple_assign_rhs1 (stmt);
6273 if (useless_type_conversion_p (TREE_TYPE (lhs),
6274 TREE_TYPE (op0)))
6276 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6277 op0 = (*valueize) (op0);
6278 if (TREE_CODE (op0) == INTEGER_CST)
6279 std::swap (op0, op1);
6280 if (TREE_CODE (op1) == INTEGER_CST
6281 && ((subcode == NE_EXPR && integer_zerop (op1))
6282 || (subcode == EQ_EXPR && integer_onep (op1))))
6283 return op0;
6286 return NULL_TREE;
6288 case GIMPLE_TERNARY_RHS:
6290 /* Handle ternary operators that can appear in GIMPLE form. */
6291 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6292 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6293 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6294 return fold_ternary_loc (loc, subcode,
6295 gimple_expr_type (stmt), op0, op1, op2);
6298 default:
6299 gcc_unreachable ();
6303 case GIMPLE_CALL:
6305 tree fn;
6306 gcall *call_stmt = as_a <gcall *> (stmt);
6308 if (gimple_call_internal_p (stmt))
6310 enum tree_code subcode = ERROR_MARK;
6311 switch (gimple_call_internal_fn (stmt))
6313 case IFN_UBSAN_CHECK_ADD:
6314 subcode = PLUS_EXPR;
6315 break;
6316 case IFN_UBSAN_CHECK_SUB:
6317 subcode = MINUS_EXPR;
6318 break;
6319 case IFN_UBSAN_CHECK_MUL:
6320 subcode = MULT_EXPR;
6321 break;
6322 case IFN_BUILTIN_EXPECT:
6324 tree arg0 = gimple_call_arg (stmt, 0);
6325 tree op0 = (*valueize) (arg0);
6326 if (TREE_CODE (op0) == INTEGER_CST)
6327 return op0;
6328 return NULL_TREE;
6330 default:
6331 return NULL_TREE;
6333 tree arg0 = gimple_call_arg (stmt, 0);
6334 tree arg1 = gimple_call_arg (stmt, 1);
6335 tree op0 = (*valueize) (arg0);
6336 tree op1 = (*valueize) (arg1);
6338 if (TREE_CODE (op0) != INTEGER_CST
6339 || TREE_CODE (op1) != INTEGER_CST)
6341 switch (subcode)
6343 case MULT_EXPR:
6344 /* x * 0 = 0 * x = 0 without overflow. */
6345 if (integer_zerop (op0) || integer_zerop (op1))
6346 return build_zero_cst (TREE_TYPE (arg0));
6347 break;
6348 case MINUS_EXPR:
6349 /* y - y = 0 without overflow. */
6350 if (operand_equal_p (op0, op1, 0))
6351 return build_zero_cst (TREE_TYPE (arg0));
6352 break;
6353 default:
6354 break;
6357 tree res
6358 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6359 if (res
6360 && TREE_CODE (res) == INTEGER_CST
6361 && !TREE_OVERFLOW (res))
6362 return res;
6363 return NULL_TREE;
6366 fn = (*valueize) (gimple_call_fn (stmt));
6367 if (TREE_CODE (fn) == ADDR_EXPR
6368 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
6369 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
6370 && gimple_builtin_call_types_compatible_p (stmt,
6371 TREE_OPERAND (fn, 0)))
6373 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6374 tree retval;
6375 unsigned i;
6376 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6377 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6378 retval = fold_builtin_call_array (loc,
6379 gimple_call_return_type (call_stmt),
6380 fn, gimple_call_num_args (stmt), args);
6381 if (retval)
6383 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6384 STRIP_NOPS (retval);
6385 retval = fold_convert (gimple_call_return_type (call_stmt),
6386 retval);
6388 return retval;
6390 return NULL_TREE;
6393 default:
6394 return NULL_TREE;
6398 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6399 Returns NULL_TREE if folding to a constant is not possible, otherwise
6400 returns a constant according to is_gimple_min_invariant. */
6402 tree
6403 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6405 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6406 if (res && is_gimple_min_invariant (res))
6407 return res;
6408 return NULL_TREE;
6412 /* The following set of functions are supposed to fold references using
6413 their constant initializers. */
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF offset into *BIT_OFFSET; give up if the sum
	 does not fit a signed HOST_WIDE_INT.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      /* Strip the ADDR_EXPR; the decl it wraps is handled below.  */
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not change the underlying storage;
	 look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Resolve the inner reference and accumulate its constant bit
	 offset; give up on variable-sized accesses.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  Returns NULL_TREE when the reference
   cannot be folded.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   Returns NULL_TREE when the access spans multiple fields or starts
   before the overlapping field.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  For flexible array
	 members (NULL field_size) the end is left at 0 and the overlap
	 test below only checks the start bound.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  /* Recurse into the field's value with the offset rebased
	     to the start of the field.  */
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and
   size POLY_SIZE to the memory at bit POLY_OFFSET.  Returns NULL_TREE
   when the reference cannot be folded to a constant.  */

tree
fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
		     poly_uint64 poly_size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Dispatch on the shape of the constructor's type; vectors are
	 handled by the array path.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Never fold references to volatile memory.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      /* Compute the zero-based element index, sign-extended in
		 the precision of the index type.  */
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the complex operand first, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6796 tree
6797 fold_const_aggregate_ref (tree t)
6799 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET to bits and add the slot selected by TOKEN.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6911 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6912 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6913 KNOWN_BINFO carries the binfo describing the true type of
6914 OBJ_TYPE_REF_OBJECT(REF).
6915 Set CAN_REFER if non-NULL to false if method
6916 is not referable or if the virtual table is ill-formed (such as rewriten
6917 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6919 tree
6920 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6921 bool *can_refer)
6923 unsigned HOST_WIDE_INT offset;
6924 tree v;
6926 v = BINFO_VTABLE (known_binfo);
6927 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6928 if (!v)
6929 return NULL_TREE;
6931 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6933 if (can_refer)
6934 *can_refer = false;
6935 return NULL_TREE;
6937 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the extracted element lies inside the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  /* The offset must equal the size of the element type, i.e.
	     point exactly at the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recursively simplify the indirection of the array pointer.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7061 /* Return true if CODE is an operation that when operating on signed
7062 integer types involves undefined behavior on overflow and the
7063 operation can be expressed with unsigned arithmetic. */
7065 bool
7066 arith_code_with_undefined_signed_overflow (tree_code code)
7068 switch (code)
7070 case PLUS_EXPR:
7071 case MINUS_EXPR:
7072 case MULT_EXPR:
7073 case NEGATE_EXPR:
7074 case POINTER_PLUS_EXPR:
7075 return true;
7076 default:
7077 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert every operand to the unsigned counterpart type, collecting
     any needed conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  /* Give STMT a fresh unsigned LHS; the original LHS is assigned by the
     conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR is not valid on unsigned integers; use PLUS_EXPR.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7119 /* The valueization hook we use for the gimple_build API simplification.
7120 This makes us match fold_buildN behavior by only combining with
7121 statements in the sequence(s) we are currently building. */
7123 static tree
7124 gimple_build_valueize (tree op)
7126 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7127 return op;
7128 return NULL_TREE;
7131 /* Build the expression CODE OP0 of type TYPE with location LOC,
7132 simplifying it first if possible. Returns the built
7133 expression value and appends statements possibly defining it
7134 to SEQ. */
7136 tree
7137 gimple_build (gimple_seq *seq, location_t loc,
7138 enum tree_code code, tree type, tree op0)
7140 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7141 if (!res)
7143 res = create_tmp_reg_or_ssa_name (type);
7144 gimple *stmt;
7145 if (code == REALPART_EXPR
7146 || code == IMAGPART_EXPR
7147 || code == VIEW_CONVERT_EXPR)
7148 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7149 else
7150 stmt = gimple_build_assign (res, code, op0);
7151 gimple_set_location (stmt, loc);
7152 gimple_seq_add_stmt_without_update (seq, stmt);
7154 return res;
7157 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7158 simplifying it first if possible. Returns the built
7159 expression value and appends statements possibly defining it
7160 to SEQ. */
7162 tree
7163 gimple_build (gimple_seq *seq, location_t loc,
7164 enum tree_code code, tree type, tree op0, tree op1)
7166 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7167 if (!res)
7169 res = create_tmp_reg_or_ssa_name (type);
7170 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7171 gimple_set_location (stmt, loc);
7172 gimple_seq_add_stmt_without_update (seq, stmt);
7174 return res;
7177 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7178 simplifying it first if possible. Returns the built
7179 expression value and appends statements possibly defining it
7180 to SEQ. */
7182 tree
7183 gimple_build (gimple_seq *seq, location_t loc,
7184 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7186 tree res = gimple_simplify (code, type, op0, op1, op2,
7187 seq, gimple_build_valueize);
7188 if (!res)
7190 res = create_tmp_reg_or_ssa_name (type);
7191 gimple *stmt;
7192 if (code == BIT_FIELD_REF)
7193 stmt = gimple_build_assign (res, code,
7194 build3 (code, type, op0, op1, op2));
7195 else
7196 stmt = gimple_build_assign (res, code, op0, op1, op2);
7197 gimple_set_location (stmt, loc);
7198 gimple_seq_add_stmt_without_update (seq, stmt);
7200 return res;
7203 /* Build the call FN (ARG0) with a result of type TYPE
7204 (or no result if TYPE is void) with location LOC,
7205 simplifying it first if possible. Returns the built
7206 expression value (or NULL_TREE if TYPE is void) and appends
7207 statements possibly defining it to SEQ. */
7209 tree
7210 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7211 tree type, tree arg0)
7213 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7214 if (!res)
7216 gcall *stmt;
7217 if (internal_fn_p (fn))
7218 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7219 else
7221 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7222 stmt = gimple_build_call (decl, 1, arg0);
7224 if (!VOID_TYPE_P (type))
7226 res = create_tmp_reg_or_ssa_name (type);
7227 gimple_call_set_lhs (stmt, res);
7229 gimple_set_location (stmt, loc);
7230 gimple_seq_add_stmt_without_update (seq, stmt);
7232 return res;
7235 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7236 (or no result if TYPE is void) with location LOC,
7237 simplifying it first if possible. Returns the built
7238 expression value (or NULL_TREE if TYPE is void) and appends
7239 statements possibly defining it to SEQ. */
7241 tree
7242 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7243 tree type, tree arg0, tree arg1)
7245 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7246 if (!res)
7248 gcall *stmt;
7249 if (internal_fn_p (fn))
7250 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7251 else
7253 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7254 stmt = gimple_build_call (decl, 2, arg0, arg1);
7256 if (!VOID_TYPE_P (type))
7258 res = create_tmp_reg_or_ssa_name (type);
7259 gimple_call_set_lhs (stmt, res);
7261 gimple_set_location (stmt, loc);
7262 gimple_seq_add_stmt_without_update (seq, stmt);
7264 return res;
7267 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7268 (or no result if TYPE is void) with location LOC,
7269 simplifying it first if possible. Returns the built
7270 expression value (or NULL_TREE if TYPE is void) and appends
7271 statements possibly defining it to SEQ. */
7273 tree
7274 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7275 tree type, tree arg0, tree arg1, tree arg2)
7277 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7278 seq, gimple_build_valueize);
7279 if (!res)
7281 gcall *stmt;
7282 if (internal_fn_p (fn))
7283 stmt = gimple_build_call_internal (as_internal_fn (fn),
7284 3, arg0, arg1, arg2);
7285 else
7287 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7288 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7290 if (!VOID_TYPE_P (type))
7292 res = create_tmp_reg_or_ssa_name (type);
7293 gimple_call_set_lhs (stmt, res);
7295 gimple_set_location (stmt, loc);
7296 gimple_seq_add_stmt_without_update (seq, stmt);
7298 return res;
7301 /* Build the conversion (TYPE) OP with a result of type TYPE
7302 with location LOC if such conversion is neccesary in GIMPLE,
7303 simplifying it first.
7304 Returns the built expression value and appends
7305 statements possibly defining it to SEQ. */
7307 tree
7308 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7310 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7311 return op;
7312 return gimple_build (seq, loc, NOP_EXPR, type, op);
7315 /* Build the conversion (ptrofftype) OP with a result of a type
7316 compatible with ptrofftype with location LOC if such conversion
7317 is neccesary in GIMPLE, simplifying it first.
7318 Returns the built expression value and appends
7319 statements possibly defining it to SEQ. */
7321 tree
7322 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7324 if (ptrofftype_p (TREE_TYPE (op)))
7325 return op;
7326 return gimple_convert (seq, loc, sizetype, op);
7329 /* Build a vector of type TYPE in which each element has the value OP.
7330 Return a gimple value for the result, appending any new statements
7331 to SEQ. */
7333 tree
7334 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7335 tree op)
7337 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7338 && !CONSTANT_CLASS_P (op))
7339 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7341 tree res, vec = build_vector_from_val (type, op);
7342 if (is_gimple_val (vec))
7343 return vec;
7344 if (gimple_in_ssa_p (cfun))
7345 res = make_ssa_name (type);
7346 else
7347 res = create_tmp_reg (type);
7348 gimple *stmt = gimple_build_assign (res, vec);
7349 gimple_set_location (stmt, loc);
7350 gimple_seq_add_stmt_without_update (seq, stmt);
7351 return res;
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  /* Scan the encoded elements; as soon as one is non-constant, emit a
     CONSTRUCTOR assignment covering all elements and return from
     inside the loop.  */
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  /* All encoded elements are constant; build the VECTOR_CST directly.  */
  return builder->build ();
}
7392 /* Return true if the result of assignment STMT is known to be non-negative.
7393 If the return value is based on the assumption that signed overflow is
7394 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7395 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7397 static bool
7398 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7399 int depth)
7401 enum tree_code code = gimple_assign_rhs_code (stmt);
7402 switch (get_gimple_rhs_class (code))
7404 case GIMPLE_UNARY_RHS:
7405 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7406 gimple_expr_type (stmt),
7407 gimple_assign_rhs1 (stmt),
7408 strict_overflow_p, depth);
7409 case GIMPLE_BINARY_RHS:
7410 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7411 gimple_expr_type (stmt),
7412 gimple_assign_rhs1 (stmt),
7413 gimple_assign_rhs2 (stmt),
7414 strict_overflow_p, depth);
7415 case GIMPLE_TERNARY_RHS:
7416 return false;
7417 case GIMPLE_SINGLE_RHS:
7418 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7419 strict_overflow_p, depth);
7420 case GIMPLE_INVALID_RHS:
7421 break;
7423 gcc_unreachable ();
7426 /* Return true if return value of call STMT is known to be non-negative.
7427 If the return value is based on the assumption that signed overflow is
7428 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7429 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7431 static bool
7432 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7433 int depth)
7435 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7436 gimple_call_arg (stmt, 0) : NULL_TREE;
7437 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7438 gimple_call_arg (stmt, 1) : NULL_TREE;
7440 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7441 gimple_call_combined_fn (stmt),
7442 arg0,
7443 arg1,
7444 strict_overflow_p, depth);
7447 /* Return true if return value of call STMT is known to be non-negative.
7448 If the return value is based on the assumption that signed overflow is
7449 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7450 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7452 static bool
7453 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7454 int depth)
7456 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7458 tree arg = gimple_phi_arg_def (stmt, i);
7459 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7460 return false;
7462 return true;
7465 /* Return true if STMT is known to compute a non-negative value.
7466 If the return value is based on the assumption that signed overflow is
7467 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7468 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7470 bool
7471 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7472 int depth)
7474 switch (gimple_code (stmt))
7476 case GIMPLE_ASSIGN:
7477 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7478 depth);
7479 case GIMPLE_CALL:
7480 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7481 depth);
7482 case GIMPLE_PHI:
7483 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7484 depth);
7485 default:
7486 return false;
7490 /* Return true if the floating-point value computed by assignment STMT
7491 is known to have an integer value. We also allow +Inf, -Inf and NaN
7492 to be considered integer values. Return false for signaling NaN.
7494 DEPTH is the current nesting depth of the query. */
7496 static bool
7497 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7499 enum tree_code code = gimple_assign_rhs_code (stmt);
7500 switch (get_gimple_rhs_class (code))
7502 case GIMPLE_UNARY_RHS:
7503 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7504 gimple_assign_rhs1 (stmt), depth);
7505 case GIMPLE_BINARY_RHS:
7506 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7507 gimple_assign_rhs1 (stmt),
7508 gimple_assign_rhs2 (stmt), depth);
7509 case GIMPLE_TERNARY_RHS:
7510 return false;
7511 case GIMPLE_SINGLE_RHS:
7512 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7513 case GIMPLE_INVALID_RHS:
7514 break;
7516 gcc_unreachable ();
7519 /* Return true if the floating-point value computed by call STMT is known
7520 to have an integer value. We also allow +Inf, -Inf and NaN to be
7521 considered integer values. Return false for signaling NaN.
7523 DEPTH is the current nesting depth of the query. */
7525 static bool
7526 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7528 tree arg0 = (gimple_call_num_args (stmt) > 0
7529 ? gimple_call_arg (stmt, 0)
7530 : NULL_TREE);
7531 tree arg1 = (gimple_call_num_args (stmt) > 1
7532 ? gimple_call_arg (stmt, 1)
7533 : NULL_TREE);
7534 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7535 arg0, arg1, depth);
7538 /* Return true if the floating-point result of phi STMT is known to have
7539 an integer value. We also allow +Inf, -Inf and NaN to be considered
7540 integer values. Return false for signaling NaN.
7542 DEPTH is the current nesting depth of the query. */
7544 static bool
7545 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7547 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7549 tree arg = gimple_phi_arg_def (stmt, i);
7550 if (!integer_valued_real_single_p (arg, depth + 1))
7551 return false;
7553 return true;
7556 /* Return true if the floating-point value computed by STMT is known
7557 to have an integer value. We also allow +Inf, -Inf and NaN to be
7558 considered integer values. Return false for signaling NaN.
7560 DEPTH is the current nesting depth of the query. */
7562 bool
7563 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7565 switch (gimple_code (stmt))
7567 case GIMPLE_ASSIGN:
7568 return gimple_assign_integer_valued_real_p (stmt, depth);
7569 case GIMPLE_CALL:
7570 return gimple_call_integer_valued_real_p (stmt, depth);
7571 case GIMPLE_PHI:
7572 return gimple_phi_integer_valued_real_p (stmt, depth);
7573 default:
7574 return false;