PR middle-end/77357 - strlen of constant strings not folded
[official-gcc.git] / gcc / gimple-fold.c
bloba6b42834d3291b0fea10f4ca7a2a58ff8336d3bb
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
351 "resolving virtual function address "
352 "reference to function %s\n",
353 targets.length () == 1
354 ? targets[0]->name ()
355 : "NULL");
357 if (targets.length () == 1)
359 val = fold_convert (TREE_TYPE (val),
360 build_fold_addr_expr_loc
361 (loc, targets[0]->decl));
362 STRIP_USELESS_TYPE_CONVERSION (val);
364 else
365 /* We can not use __builtin_unreachable here because it
366 can not have address taken. */
367 val = build_int_cst (TREE_TYPE (val), 0);
368 return val;
373 else if (TREE_CODE (rhs) == ADDR_EXPR)
375 tree ref = TREE_OPERAND (rhs, 0);
376 tree tem = maybe_fold_reference (ref, true);
377 if (tem
378 && TREE_CODE (tem) == MEM_REF
379 && integer_zerop (TREE_OPERAND (tem, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
381 else if (tem)
382 result = fold_convert (TREE_TYPE (rhs),
383 build_fold_addr_expr_loc (loc, tem));
384 else if (TREE_CODE (ref) == MEM_REF
385 && integer_zerop (TREE_OPERAND (ref, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
388 if (result)
390 /* Strip away useless type conversions. Both the
391 NON_LVALUE_EXPR that may have been added by fold, and
392 "useless" type conversions that might now be apparent
393 due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
401 else if (TREE_CODE (rhs) == CONSTRUCTOR
402 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (! CONSTANT_CLASS_P (val))
410 return NULL_TREE;
412 return build_vector_from_ctor (TREE_TYPE (rhs),
413 CONSTRUCTOR_ELTS (rhs));
416 else if (DECL_P (rhs))
417 return get_symbol_constant_value (rhs);
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 break;
427 case GIMPLE_TERNARY_RHS:
428 result = fold_ternary_loc (loc, subcode,
429 TREE_TYPE (gimple_assign_lhs (stmt)),
430 gimple_assign_rhs1 (stmt),
431 gimple_assign_rhs2 (stmt),
432 gimple_assign_rhs3 (stmt));
434 if (result)
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
440 break;
442 case GIMPLE_INVALID_RHS:
443 gcc_unreachable ();
446 return NULL_TREE;
450 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
451 adjusting the replacement stmts location and virtual operands.
452 If the statement has a lhs the last stmt in the sequence is expected
453 to assign to that lhs. */
455 static void
456 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
458 gimple *stmt = gsi_stmt (*si_p);
460 if (gimple_has_location (stmt))
461 annotate_all_with_location (stmts, gimple_location (stmt));
463 /* First iterate over the replacement statements backward, assigning
464 virtual operands to their defining statements. */
465 gimple *laststore = NULL;
466 for (gimple_stmt_iterator i = gsi_last (stmts);
467 !gsi_end_p (i); gsi_prev (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 if ((gimple_assign_single_p (new_stmt)
471 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
472 || (is_gimple_call (new_stmt)
473 && (gimple_call_flags (new_stmt)
474 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
476 tree vdef;
477 if (!laststore)
478 vdef = gimple_vdef (stmt);
479 else
480 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
481 gimple_set_vdef (new_stmt, vdef);
482 if (vdef && TREE_CODE (vdef) == SSA_NAME)
483 SSA_NAME_DEF_STMT (vdef) = new_stmt;
484 laststore = new_stmt;
488 /* Second iterate over the statements forward, assigning virtual
489 operands to their uses. */
490 tree reaching_vuse = gimple_vuse (stmt);
491 for (gimple_stmt_iterator i = gsi_start (stmts);
492 !gsi_end_p (i); gsi_next (&i))
494 gimple *new_stmt = gsi_stmt (i);
495 /* If the new statement possibly has a VUSE, update it with exact SSA
496 name we know will reach this one. */
497 if (gimple_has_mem_ops (new_stmt))
498 gimple_set_vuse (new_stmt, reaching_vuse);
499 gimple_set_modified (new_stmt, true);
500 if (gimple_vdef (new_stmt))
501 reaching_vuse = gimple_vdef (new_stmt);
504 /* If the new sequence does not do a store release the virtual
505 definition of the original statement. */
506 if (reaching_vuse
507 && reaching_vuse == gimple_vuse (stmt))
509 tree vdef = gimple_vdef (stmt);
510 if (vdef
511 && TREE_CODE (vdef) == SSA_NAME)
513 unlink_stmt_vdef (stmt);
514 release_ssa_name (vdef);
518 /* Finally replace the original statement with the sequence. */
519 gsi_replace_with_seq (si_p, stmts, false);
522 /* Convert EXPR into a GIMPLE value suitable for substitution on the
523 RHS of an assignment. Insert the necessary statements before
524 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
525 is replaced. If the call is expected to produces a result, then it
526 is replaced by an assignment of the new RHS to the result variable.
527 If the result is to be ignored, then the call is replaced by a
528 GIMPLE_NOP. A proper VDEF chain is retained by making the first
529 VUSE and the last VDEF of the whole sequence be the same as the replaced
530 statement and using new SSA names for stores in between. */
532 void
533 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 tree lhs;
536 gimple *stmt, *new_stmt;
537 gimple_stmt_iterator i;
538 gimple_seq stmts = NULL;
540 stmt = gsi_stmt (*si_p);
542 gcc_assert (is_gimple_call (stmt));
544 push_gimplify_context (gimple_in_ssa_p (cfun));
546 lhs = gimple_call_lhs (stmt);
547 if (lhs == NULL_TREE)
549 gimplify_and_add (expr, &stmts);
550 /* We can end up with folding a memcpy of an empty class assignment
551 which gets optimized away by C++ gimplification. */
552 if (gimple_seq_empty_p (stmts))
554 pop_gimplify_context (NULL);
555 if (gimple_in_ssa_p (cfun))
557 unlink_stmt_vdef (stmt);
558 release_defs (stmt);
560 gsi_replace (si_p, gimple_build_nop (), false);
561 return;
564 else
566 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
567 new_stmt = gimple_build_assign (lhs, tmp);
568 i = gsi_last (stmts);
569 gsi_insert_after_without_update (&i, new_stmt,
570 GSI_CONTINUE_LINKING);
573 pop_gimplify_context (NULL);
575 gsi_replace_with_seq_vops (si_p, stmts);
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
638 /* If the SIZE argument representing the size of an object is in a range
639 of values of which exactly one is valid (and that is zero), return
640 true, otherwise false. */
642 static bool
643 size_must_be_zero_p (tree size)
645 if (integer_zerop (size))
646 return true;
648 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
649 return false;
651 wide_int min, max;
652 enum value_range_type rtype = get_range_info (size, &min, &max);
653 if (rtype != VR_ANTI_RANGE)
654 return false;
656 tree type = TREE_TYPE (size);
657 int prec = TYPE_PRECISION (type);
659 wide_int wone = wi::one (prec);
661 /* Compute the value of SSIZE_MAX, the largest positive value that
662 can be stored in ssize_t, the signed counterpart of size_t. */
663 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
665 return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
668 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
669 diagnose (otherwise undefined) overlapping copies without preventing
670 folding. When folded, GCC guarantees that overlapping memcpy has
671 the same semantics as memmove. Call to the library memcpy need not
672 provide the same guarantee. Return false if no simplification can
673 be made. */
675 static bool
676 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
677 tree dest, tree src, int endp)
679 gimple *stmt = gsi_stmt (*gsi);
680 tree lhs = gimple_call_lhs (stmt);
681 tree len = gimple_call_arg (stmt, 2);
682 tree destvar, srcvar;
683 location_t loc = gimple_location (stmt);
685 bool nowarn = gimple_no_warning_p (stmt);
687 /* If the LEN parameter is a constant zero or in range where
688 the only valid value is zero, return DEST. */
689 if (size_must_be_zero_p (len))
691 gimple *repl;
692 if (gimple_call_lhs (stmt))
693 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
694 else
695 repl = gimple_build_nop ();
696 tree vdef = gimple_vdef (stmt);
697 if (vdef && TREE_CODE (vdef) == SSA_NAME)
699 unlink_stmt_vdef (stmt);
700 release_ssa_name (vdef);
702 gsi_replace (gsi, repl, false);
703 return true;
706 /* If SRC and DEST are the same (and not volatile), return
707 DEST{,+LEN,+LEN-1}. */
708 if (operand_equal_p (src, dest, 0))
710 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
711 It's safe and may even be emitted by GCC itself (see bug
712 32667). */
713 unlink_stmt_vdef (stmt);
714 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
715 release_ssa_name (gimple_vdef (stmt));
716 if (!lhs)
718 gsi_replace (gsi, gimple_build_nop (), false);
719 return true;
721 goto done;
723 else
725 tree srctype, desttype;
726 unsigned int src_align, dest_align;
727 tree off0;
729 /* Build accesses at offset zero with a ref-all character type. */
730 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
731 ptr_mode, true), 0);
733 /* If we can perform the copy efficiently with first doing all loads
734 and then all stores inline it that way. Currently efficiently
735 means that we can load all the memory into a single integer
736 register which is what MOVE_MAX gives us. */
737 src_align = get_pointer_alignment (src);
738 dest_align = get_pointer_alignment (dest);
739 if (tree_fits_uhwi_p (len)
740 && compare_tree_int (len, MOVE_MAX) <= 0
741 /* ??? Don't transform copies from strings with known length this
742 confuses the tree-ssa-strlen.c. This doesn't handle
743 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
744 reason. */
745 && !c_strlen (src, 2))
747 unsigned ilen = tree_to_uhwi (len);
748 if (pow2p_hwi (ilen))
750 /* Detect invalid bounds and overlapping copies and issue
751 either -Warray-bounds or -Wrestrict. */
752 if (!nowarn
753 && check_bounds_or_overlap (as_a <gcall *>(stmt),
754 dest, src, len, len))
755 gimple_set_no_warning (stmt, true);
757 scalar_int_mode mode;
758 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
759 if (type
760 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
761 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
762 /* If the destination pointer is not aligned we must be able
763 to emit an unaligned store. */
764 && (dest_align >= GET_MODE_ALIGNMENT (mode)
765 || !targetm.slow_unaligned_access (mode, dest_align)
766 || (optab_handler (movmisalign_optab, mode)
767 != CODE_FOR_nothing)))
769 tree srctype = type;
770 tree desttype = type;
771 if (src_align < GET_MODE_ALIGNMENT (mode))
772 srctype = build_aligned_type (type, src_align);
773 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
774 tree tem = fold_const_aggregate_ref (srcmem);
775 if (tem)
776 srcmem = tem;
777 else if (src_align < GET_MODE_ALIGNMENT (mode)
778 && targetm.slow_unaligned_access (mode, src_align)
779 && (optab_handler (movmisalign_optab, mode)
780 == CODE_FOR_nothing))
781 srcmem = NULL_TREE;
782 if (srcmem)
784 gimple *new_stmt;
785 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
787 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
788 srcmem
789 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
790 new_stmt);
791 gimple_assign_set_lhs (new_stmt, srcmem);
792 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
793 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
795 if (dest_align < GET_MODE_ALIGNMENT (mode))
796 desttype = build_aligned_type (type, dest_align);
797 new_stmt
798 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
799 dest, off0),
800 srcmem);
801 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
802 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
803 if (gimple_vdef (new_stmt)
804 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
805 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
806 if (!lhs)
808 gsi_replace (gsi, new_stmt, false);
809 return true;
811 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
812 goto done;
818 if (endp == 3)
820 /* Both DEST and SRC must be pointer types.
821 ??? This is what old code did. Is the testing for pointer types
822 really mandatory?
824 If either SRC is readonly or length is 1, we can use memcpy. */
825 if (!dest_align || !src_align)
826 return false;
827 if (readonly_data_expr (src)
828 || (tree_fits_uhwi_p (len)
829 && (MIN (src_align, dest_align) / BITS_PER_UNIT
830 >= tree_to_uhwi (len))))
832 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
833 if (!fn)
834 return false;
835 gimple_call_set_fndecl (stmt, fn);
836 gimple_call_set_arg (stmt, 0, dest);
837 gimple_call_set_arg (stmt, 1, src);
838 fold_stmt (gsi);
839 return true;
842 /* If *src and *dest can't overlap, optimize into memcpy as well. */
843 if (TREE_CODE (src) == ADDR_EXPR
844 && TREE_CODE (dest) == ADDR_EXPR)
846 tree src_base, dest_base, fn;
847 poly_int64 src_offset = 0, dest_offset = 0;
848 poly_uint64 maxsize;
850 srcvar = TREE_OPERAND (src, 0);
851 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
852 if (src_base == NULL)
853 src_base = srcvar;
854 destvar = TREE_OPERAND (dest, 0);
855 dest_base = get_addr_base_and_unit_offset (destvar,
856 &dest_offset);
857 if (dest_base == NULL)
858 dest_base = destvar;
859 if (!poly_int_tree_p (len, &maxsize))
860 maxsize = -1;
861 if (SSA_VAR_P (src_base)
862 && SSA_VAR_P (dest_base))
864 if (operand_equal_p (src_base, dest_base, 0)
865 && ranges_maybe_overlap_p (src_offset, maxsize,
866 dest_offset, maxsize))
867 return false;
869 else if (TREE_CODE (src_base) == MEM_REF
870 && TREE_CODE (dest_base) == MEM_REF)
872 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
873 TREE_OPERAND (dest_base, 0), 0))
874 return false;
875 poly_offset_int full_src_offset
876 = mem_ref_offset (src_base) + src_offset;
877 poly_offset_int full_dest_offset
878 = mem_ref_offset (dest_base) + dest_offset;
879 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
880 full_dest_offset, maxsize))
881 return false;
883 else
884 return false;
886 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
887 if (!fn)
888 return false;
889 gimple_call_set_fndecl (stmt, fn);
890 gimple_call_set_arg (stmt, 0, dest);
891 gimple_call_set_arg (stmt, 1, src);
892 fold_stmt (gsi);
893 return true;
896 /* If the destination and source do not alias optimize into
897 memcpy as well. */
898 if ((is_gimple_min_invariant (dest)
899 || TREE_CODE (dest) == SSA_NAME)
900 && (is_gimple_min_invariant (src)
901 || TREE_CODE (src) == SSA_NAME))
903 ao_ref destr, srcr;
904 ao_ref_init_from_ptr_and_size (&destr, dest, len);
905 ao_ref_init_from_ptr_and_size (&srcr, src, len);
906 if (!refs_may_alias_p_1 (&destr, &srcr, false))
908 tree fn;
909 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
910 if (!fn)
911 return false;
912 gimple_call_set_fndecl (stmt, fn);
913 gimple_call_set_arg (stmt, 0, dest);
914 gimple_call_set_arg (stmt, 1, src);
915 fold_stmt (gsi);
916 return true;
920 return false;
923 if (!tree_fits_shwi_p (len))
924 return false;
925 if (!POINTER_TYPE_P (TREE_TYPE (src))
926 || !POINTER_TYPE_P (TREE_TYPE (dest)))
927 return false;
928 /* In the following try to find a type that is most natural to be
929 used for the memcpy source and destination and that allows
930 the most optimization when memcpy is turned into a plain assignment
931 using that type. In theory we could always use a char[len] type
932 but that only gains us that the destination and source possibly
933 no longer will have their address taken. */
934 srctype = TREE_TYPE (TREE_TYPE (src));
935 if (TREE_CODE (srctype) == ARRAY_TYPE
936 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
937 srctype = TREE_TYPE (srctype);
938 desttype = TREE_TYPE (TREE_TYPE (dest));
939 if (TREE_CODE (desttype) == ARRAY_TYPE
940 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
941 desttype = TREE_TYPE (desttype);
942 if (TREE_ADDRESSABLE (srctype)
943 || TREE_ADDRESSABLE (desttype))
944 return false;
946 /* Make sure we are not copying using a floating-point mode or
947 a type whose size possibly does not match its precision. */
948 if (FLOAT_MODE_P (TYPE_MODE (desttype))
949 || TREE_CODE (desttype) == BOOLEAN_TYPE
950 || TREE_CODE (desttype) == ENUMERAL_TYPE)
951 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
952 if (FLOAT_MODE_P (TYPE_MODE (srctype))
953 || TREE_CODE (srctype) == BOOLEAN_TYPE
954 || TREE_CODE (srctype) == ENUMERAL_TYPE)
955 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
956 if (!srctype)
957 srctype = desttype;
958 if (!desttype)
959 desttype = srctype;
960 if (!srctype)
961 return false;
963 src_align = get_pointer_alignment (src);
964 dest_align = get_pointer_alignment (dest);
965 if (dest_align < TYPE_ALIGN (desttype)
966 || src_align < TYPE_ALIGN (srctype))
967 return false;
969 destvar = NULL_TREE;
970 if (TREE_CODE (dest) == ADDR_EXPR
971 && var_decl_component_p (TREE_OPERAND (dest, 0))
972 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
973 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
975 srcvar = NULL_TREE;
976 if (TREE_CODE (src) == ADDR_EXPR
977 && var_decl_component_p (TREE_OPERAND (src, 0))
978 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
980 if (!destvar
981 || src_align >= TYPE_ALIGN (desttype))
982 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
983 src, off0);
984 else if (!STRICT_ALIGNMENT)
986 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
987 src_align);
988 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
992 if (srcvar == NULL_TREE && destvar == NULL_TREE)
993 return false;
995 if (srcvar == NULL_TREE)
997 if (src_align >= TYPE_ALIGN (desttype))
998 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
999 else
1001 if (STRICT_ALIGNMENT)
1002 return false;
1003 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1004 src_align);
1005 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1008 else if (destvar == NULL_TREE)
1010 if (dest_align >= TYPE_ALIGN (srctype))
1011 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1012 else
1014 if (STRICT_ALIGNMENT)
1015 return false;
1016 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1017 dest_align);
1018 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1022 /* Detect invalid bounds and overlapping copies and issue either
1023 -Warray-bounds or -Wrestrict. */
1024 if (!nowarn)
1025 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1027 gimple *new_stmt;
1028 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1030 tree tem = fold_const_aggregate_ref (srcvar);
1031 if (tem)
1032 srcvar = tem;
1033 if (! is_gimple_min_invariant (srcvar))
1035 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1036 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1037 new_stmt);
1038 gimple_assign_set_lhs (new_stmt, srcvar);
1039 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1040 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1042 new_stmt = gimple_build_assign (destvar, srcvar);
1043 goto set_vop_and_replace;
1046 /* We get an aggregate copy. Use an unsigned char[] type to
1047 perform the copying to preserve padding and to avoid any issues
1048 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1049 desttype = build_array_type_nelts (unsigned_char_type_node,
1050 tree_to_uhwi (len));
1051 srctype = desttype;
1052 if (src_align > TYPE_ALIGN (srctype))
1053 srctype = build_aligned_type (srctype, src_align);
1054 if (dest_align > TYPE_ALIGN (desttype))
1055 desttype = build_aligned_type (desttype, dest_align);
1056 new_stmt
1057 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1058 fold_build2 (MEM_REF, srctype, src, off0));
1059 set_vop_and_replace:
1060 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1061 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1062 if (gimple_vdef (new_stmt)
1063 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1064 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1065 if (!lhs)
1067 gsi_replace (gsi, new_stmt, false);
1068 return true;
1070 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1073 done:
1074 gimple_seq stmts = NULL;
1075 if (endp == 0 || endp == 3)
1076 len = NULL_TREE;
1077 else if (endp == 2)
1078 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1079 ssize_int (1));
1080 if (endp == 2 || endp == 1)
1082 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1083 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1084 TREE_TYPE (dest), dest, len);
1087 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1088 gimple *repl = gimple_build_assign (lhs, dest);
1089 gsi_replace (gsi, repl, false);
1090 return true;
1093 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1094 to built-in memcmp (a, b, len). */
1096 static bool
1097 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1099 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1101 if (!fn)
1102 return false;
1104 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1106 gimple *stmt = gsi_stmt (*gsi);
1107 tree a = gimple_call_arg (stmt, 0);
1108 tree b = gimple_call_arg (stmt, 1);
1109 tree len = gimple_call_arg (stmt, 2);
1111 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1112 replace_call_with_call_and_fold (gsi, repl);
1114 return true;
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  Return true if the call was
   replaced, false when no memmove built-in is available.  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  Note that the first two
     arguments swap places relative to bcopy.  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  /* NOTE(review): the fntype is updated on the original STMT rather
     than on REPL — presumably so the replacement machinery sees a
     signature consistent with memmove; confirm against
     replace_call_with_call_and_fold.  */
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
1144 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1145 to built-in memset (dest, 0, len). */
1147 static bool
1148 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1150 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1152 if (!fn)
1153 return false;
1155 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1157 gimple *stmt = gsi_stmt (*gsi);
1158 tree dest = gimple_call_arg (stmt, 0);
1159 tree len = gimple_call_arg (stmt, 1);
1161 gimple_seq seq = NULL;
1162 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1163 gimple_seq_add_stmt_without_update (&seq, repl);
1164 gsi_replace_with_seq_vops (gsi, seq);
1165 fold_stmt (gsi);
1167 return true;
1170 /* Fold function call to builtin memset or bzero at *GSI setting the
1171 memory of size LEN to VAL. Return whether a simplification was made. */
1173 static bool
1174 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1176 gimple *stmt = gsi_stmt (*gsi);
1177 tree etype;
1178 unsigned HOST_WIDE_INT length, cval;
1180 /* If the LEN parameter is zero, return DEST. */
1181 if (integer_zerop (len))
1183 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1184 return true;
1187 if (! tree_fits_uhwi_p (len))
1188 return false;
1190 if (TREE_CODE (c) != INTEGER_CST)
1191 return false;
1193 tree dest = gimple_call_arg (stmt, 0);
1194 tree var = dest;
1195 if (TREE_CODE (var) != ADDR_EXPR)
1196 return false;
1198 var = TREE_OPERAND (var, 0);
1199 if (TREE_THIS_VOLATILE (var))
1200 return false;
1202 etype = TREE_TYPE (var);
1203 if (TREE_CODE (etype) == ARRAY_TYPE)
1204 etype = TREE_TYPE (etype);
1206 if (!INTEGRAL_TYPE_P (etype)
1207 && !POINTER_TYPE_P (etype))
1208 return NULL_TREE;
1210 if (! var_decl_component_p (var))
1211 return NULL_TREE;
1213 length = tree_to_uhwi (len);
1214 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1215 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1216 return NULL_TREE;
1218 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1219 return NULL_TREE;
1221 if (integer_zerop (c))
1222 cval = 0;
1223 else
1225 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1226 return NULL_TREE;
1228 cval = TREE_INT_CST_LOW (c);
1229 cval &= 0xff;
1230 cval |= cval << 8;
1231 cval |= cval << 16;
1232 cval |= (cval << 31) << 1;
1235 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1236 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1237 gimple_set_vuse (store, gimple_vuse (stmt));
1238 tree vdef = gimple_vdef (stmt);
1239 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1241 gimple_set_vdef (store, gimple_vdef (stmt));
1242 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1244 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1245 if (gimple_call_lhs (stmt))
1247 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1248 gsi_replace (gsi, asgn, false);
1250 else
1252 gimple_stmt_iterator gsi2 = *gsi;
1253 gsi_prev (gsi);
1254 gsi_remove (&gsi2, true);
1257 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it will handle
   PHIs and COND_EXPRs optimistically, if we can determine string length
   minimum and maximum, it will use the minimum from the ones where it
   can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  int fuzzy, bool *flexp)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &a[0]: recurse on the SSA name behind *iftmp_1.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0),
					 length, visited, type, fuzzy, flexp);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      /* Fail when a constant index exceeds a constant array
		 bound: the reference is out of bounds.  */
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; it must be a
	     non-negative integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      /* In fuzzy mode, fall back from an unknown string length to the
	 size of a character array ARG may refer to.  */
      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      /* The longest string the array can hold is one byte
		 shorter than the array (for the terminating nul).  */
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	      if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
		  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
		  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
		*flexp = true;
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		       == ARRAY_TYPE))
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg))
	    {
	      /* A (pointer to an) array variable: bound the length by
		 the array size minus one.  */
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Merge VAL into the running minimum.  */
      if (!*minlen
	  || (type > 0
	      && TREE_CODE (*minlen) == INTEGER_CST
	      && TREE_CODE (val) == INTEGER_CST
	      && tree_int_cst_lt (val, *minlen)))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* For TYPE > 0 keep the larger of the two maxima; both
		 must be constants to compare.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* For TYPE == 0 all determined lengths must agree exactly.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Merge both arms of the COND_EXPR; in optimistic mode
	     (FUZZY == 2) an undetermined arm clamps the maximum to
	     "unbounded" instead of failing.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
				   flexp))
	      {
		if (fuzzy == 2)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
	 length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
	    {
	      if (fuzzy == 2)
		*maxlen = build_all_ones_cst (size_type_node);
	      else
		return false;
	    }
	}
      return true;

    default:
      return false;
    }
}
1531 /* Determine the minimum and maximum value or string length that ARG
1532 refers to and store each in the first two elements of MINMAXLEN.
1533 For expressions that point to strings of unknown lengths that are
1534 character arrays, use the upper bound of the array as the maximum
1535 length. For example, given an expression like 'x ? array : "xyz"'
1536 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1537 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1538 stored in array.
1539 Return true if the range of the string lengths has been obtained
1540 from the upper bound of an array at the end of a struct. Such
1541 an array may hold a string that's longer than its upper bound
1542 due to it being used as a poor-man's flexible array member.
1544 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1545 and false if PHIs and COND_EXPRs are to be handled optimistically,
1546 if we can determine string length minimum and maximum; it will use
1547 the minimum from the ones where it can be determined.
1548 STRICT false should be only used for warning code. */
1550 bool
1551 get_range_strlen (tree arg, tree minmaxlen[2], bool strict)
1553 bitmap visited = NULL;
1555 minmaxlen[0] = NULL_TREE;
1556 minmaxlen[1] = NULL_TREE;
1558 bool flexarray = false;
1559 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1560 &flexarray))
1562 minmaxlen[0] = NULL_TREE;
1563 minmaxlen[1] = NULL_TREE;
1566 if (visited)
1567 BITMAP_FREE (visited);
1569 return flexarray;
/* Return the maximum string length or maximum value of ARG, or
   NULL_TREE if it cannot be determined.  TYPE has the same meaning
   as for the recursive get_range_strlen above: 0 for an exact string
   length, 1 for a maximum string length and 2 for a maximum value.  */

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  /* FUZZY is 0: no array-size fallback; only an exact determination
     counts.  The flex-array flag is not needed here.  */
  bool dummy;
  if (!get_range_strlen (arg, len, &visited, type, 0, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return true if a simplification was made (the call replaced by its
   value or by an equivalent memcpy), false otherwise.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* The strlen+memcpy expansion below trades size for speed; skip it
     when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Only transform when the source length is a known constant.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Copy strlen (SRC) + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and
   LEN.  Return true if a simplification was made (the call replaced by
   its value or by an equivalent memcpy), false otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   Return true if a simplification was made, false otherwise.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without an lhs the call's result is unused; nothing to fold.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: compute the result at compile time using
     the host's strchr/strrchr on the constant string.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found at offset P1 - P: fold to STR p+ offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The remaining transforms only apply when searching for nul.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1800 /* Fold function call to builtin strstr.
1801 If both arguments are constant, evaluate and fold the result,
1802 additionally fold strstr (x, "") into x and strstr (x, "c")
1803 into strchr (x, 'c'). */
1804 static bool
1805 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1807 gimple *stmt = gsi_stmt (*gsi);
1808 tree haystack = gimple_call_arg (stmt, 0);
1809 tree needle = gimple_call_arg (stmt, 1);
1810 const char *p, *q;
1812 if (!gimple_call_lhs (stmt))
1813 return false;
1815 q = c_getstr (needle);
1816 if (q == NULL)
1817 return false;
1819 if ((p = c_getstr (haystack)))
1821 const char *r = strstr (p, q);
1823 if (r == NULL)
1825 replace_call_with_value (gsi, integer_zero_node);
1826 return true;
1829 tree len = build_int_cst (size_type_node, r - p);
1830 gimple_seq stmts = NULL;
1831 gimple *new_stmt
1832 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1833 haystack, len);
1834 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1835 gsi_replace_with_seq_vops (gsi, stmts);
1836 return true;
1839 /* For strstr (x, "") return x. */
1840 if (q[0] == '\0')
1842 replace_call_with_value (gsi, haystack);
1843 return true;
1846 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1847 if (q[1] == '\0')
1849 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1850 if (strchr_fn)
1852 tree c = build_int_cst (integer_type_node, q[0]);
1853 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1854 replace_call_with_call_and_fold (gsi, repl);
1855 return true;
1859 return false;
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if a simplification was made: either the call is replaced
   by DST when SRC is the empty string, or, when optimizing for speed
   and the length of SRC is a known constant, by an equivalent
   strlen (dst) + memcpy sequence.  Return false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The expansion below trades size for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument: forward DST to the lhs.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
1954 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1955 are the arguments to the call. */
1957 static bool
1958 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1960 gimple *stmt = gsi_stmt (*gsi);
1961 tree dest = gimple_call_arg (stmt, 0);
1962 tree src = gimple_call_arg (stmt, 1);
1963 tree size = gimple_call_arg (stmt, 2);
1964 tree fn;
1965 const char *p;
1968 p = c_getstr (src);
1969 /* If the SRC parameter is "", return DEST. */
1970 if (p && *p == '\0')
1972 replace_call_with_value (gsi, dest);
1973 return true;
1976 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1977 return false;
1979 /* If __builtin_strcat_chk is used, assume strcat is available. */
1980 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1981 if (!fn)
1982 return false;
1984 gimple *repl = gimple_build_call (fn, 2, dest, src);
1985 replace_call_with_call_and_fold (gsi, repl);
1986 return true;
/* Simplify a call to the strncat builtin.  Replace it by DST when the
   bound or the source length is zero, or by a plain strcat call when
   the constant bound covers the whole constant source string,
   diagnosing suspicious bounds along the way.  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The transform below needs both a constant bound and a constant
     source string.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  /* Suppress any further diagnostics for this statement once
	     one has been issued.  */
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2078 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2079 LEN, and SIZE. */
2081 static bool
2082 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2084 gimple *stmt = gsi_stmt (*gsi);
2085 tree dest = gimple_call_arg (stmt, 0);
2086 tree src = gimple_call_arg (stmt, 1);
2087 tree len = gimple_call_arg (stmt, 2);
2088 tree size = gimple_call_arg (stmt, 3);
2089 tree fn;
2090 const char *p;
2092 p = c_getstr (src);
2093 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2094 if ((p && *p == '\0')
2095 || integer_zerop (len))
2097 replace_call_with_value (gsi, dest);
2098 return true;
2101 if (! tree_fits_uhwi_p (size))
2102 return false;
2104 if (! integer_all_onesp (size))
2106 tree src_len = c_strlen (src, 1);
2107 if (src_len
2108 && tree_fits_uhwi_p (src_len)
2109 && tree_fits_uhwi_p (len)
2110 && ! tree_int_cst_lt (len, src_len))
2112 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2113 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2114 if (!fn)
2115 return false;
2117 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2118 replace_call_with_call_and_fold (gsi, repl);
2119 return true;
2121 return false;
2124 /* If __builtin_strncat_chk is used, assume strncat is available. */
2125 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2126 if (!fn)
2127 return false;
2129 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2130 replace_call_with_call_and_fold (gsi, repl);
2131 return true;
2134 /* Build and append gimple statements to STMTS that would load a first
2135 character of a memory location identified by STR. LOC is location
2136 of the statement. */
2138 static tree
2139 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2141 tree var;
2143 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2144 tree cst_uchar_ptr_node
2145 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2146 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2148 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2149 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2150 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2152 gimple_assign_set_lhs (stmt, var);
2153 gimple_seq_add_stmt_without_update (stmts, stmt);
2155 return var;
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
   FCODE is the BUILT_IN_* code of the builtin.  Return true and
   replace the call when a simplification was applied, false
   otherwise.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  /* Constant bound of a strn{case}cmp call, or -1 if not known.  */
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Try to fetch both arguments as constant strings.  */
  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the string are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    /* A case-sensitive comparison yielding 0 implies the
	       case-insensitive result is 0 too; any other result
	       tells us nothing about the case-insensitive one.  */
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  /* build_cmp_result normalizes the host result to -1/0/1.  */
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  /* True when at least one character is certainly compared, so that
     reading the first byte of either argument is valid.  */
  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCMP_EQ
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Widen the byte to int first, then negate.  */
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  /* Convert both bytes to int before subtracting so the
	     difference has the type callers of strncmp expect.  */
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the memchr pointed by GSI iterator.  Return true iff
   the call was simplified.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  /* Both the searched-for character and the length must be constants,
     and the character must be representable in the target's char.  */
  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      /* Only the first STRING_LENGTH bytes of ARG1 are known, so
	 search no further than that even if LENGTH is larger.  */
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  /* The character is certainly absent only when the whole
	     requested range was covered by the constant string;
	     otherwise the match might be in the unknown tail.  */
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  /* Found a match: fold to ARG1 + OFFSET of the first hit.  */
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    /* The result is unused; replace the call with a no-op.  */
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible,
   true if the call was replaced.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely .  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	/* The single character is only known when the string contents
	   are; otherwise fall through to the fwrite transformation.  */
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true iff the
   call was simplified.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  /* __mempcpy_chk returns DEST + LEN; materialize that sum.  */
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Upper bound on the value of LEN, when known.  */
  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      /* Punt if the copy might not fit in the destination.  */
      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true iff the
   call was simplified.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  /* True when the call's return value is unused.  */
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  /* Upper bound on strlen (SRC), when known.  */
  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      /* Copy LEN + 1 bytes to include the terminating nul.  */
	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      /* Punt unless the copy is known to fit in the destination.  */
      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2677 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2678 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2679 length passed as third argument. IGNORE is true if return value can be
2680 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2682 static bool
2683 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2684 tree dest, tree src,
2685 tree len, tree size,
2686 enum built_in_function fcode)
2688 gimple *stmt = gsi_stmt (*gsi);
2689 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2690 tree fn;
2692 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2694 /* If return value of __stpncpy_chk is ignored,
2695 optimize into __strncpy_chk. */
2696 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2697 if (fn)
2699 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2700 replace_call_with_call_and_fold (gsi, repl);
2701 return true;
2705 if (! tree_fits_uhwi_p (size))
2706 return false;
2708 tree maxlen = get_maxval_strlen (len, 2);
2709 if (! integer_all_onesp (size))
2711 if (! tree_fits_uhwi_p (len))
2713 /* If LEN is not constant, try MAXLEN too.
2714 For MAXLEN only allow optimizing into non-_ocs function
2715 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2716 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2717 return false;
2719 else
2720 maxlen = len;
2722 if (tree_int_cst_lt (size, maxlen))
2723 return false;
2726 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2727 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2728 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2729 if (!fn)
2730 return false;
2732 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2733 replace_call_with_call_and_fold (gsi, repl);
2734 return true;
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return true iff a simplification was made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Build LEN + 1 so the terminating nul is copied as well.  */
  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Transfer the virtual operands from the original call to the new
     memcpy call and make the latter the defining statement of the
     virtual definition.  */
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call to __{,v}snprintf_chk.  Return true if the call was
   simplified into a plain {,v}snprintf call, false if a normal call
   should be emitted instead.  FCODE is either BUILT_IN_SNPRINTF_CHK
   or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      /* SIZE is a real object size; only transform when the bound LEN
	 is known not to exceed it.  */
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  /* Shift any trailing varargs down over the dropped FLAG and SIZE.  */
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk.  Return true if the call was
   simplified into a plain {,v}sprintf call, false if a normal call
   should be emitted instead.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  /* LEN is the statically-known output length, when determinable.  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      /* With a real object size, only transform when the output is
	 known to fit strictly within it.  */
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  /* Shift any trailing varargs down over the dropped FLAG and SIZE.  */
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* sprintf returns the number of characters written, which
	     here is simply the length of the format string.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* When the result is used we also need the length of ORIG to
	 produce the return value of sprintf.  */
      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* The destination size must be a known constant; otherwise we
     cannot prove the output is not truncated.  */
  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  /* snprintf returns the would-be output length, here simply
	     the length of the format string.  */
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
3238 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3239 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3240 more than 3 arguments, and ARG may be null in the 2-argument case.
3242 Return NULL_TREE if no simplification was possible, otherwise return the
3243 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3244 code of the function to be simplified. */
3246 static bool
3247 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3248 tree fp, tree fmt, tree arg,
3249 enum built_in_function fcode)
3251 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3252 tree fn_fputc, fn_fputs;
3253 const char *fmt_str = NULL;
3255 /* If the return value is used, don't do the transformation. */
3256 if (gimple_call_lhs (stmt) != NULL_TREE)
3257 return false;
3259 /* Check whether the format is a literal string constant. */
3260 fmt_str = c_getstr (fmt);
3261 if (fmt_str == NULL)
3262 return false;
3264 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3266 /* If we're using an unlocked function, assume the other
3267 unlocked functions exist explicitly. */
3268 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3269 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3271 else
3273 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3274 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3277 if (!init_target_chars ())
3278 return false;
3280 /* If the format doesn't contain % args or %%, use strcpy. */
3281 if (strchr (fmt_str, target_percent) == NULL)
3283 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3284 && arg)
3285 return false;
3287 /* If the format specifier was "", fprintf does nothing. */
3288 if (fmt_str[0] == '\0')
3290 replace_call_with_value (gsi, NULL_TREE);
3291 return true;
3294 /* When "string" doesn't contain %, replace all cases of
3295 fprintf (fp, string) with fputs (string, fp). The fputs
3296 builtin will take care of special cases like length == 1. */
3297 if (fn_fputs)
3299 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3300 replace_call_with_call_and_fold (gsi, repl);
3301 return true;
3305 /* The other optimizations can be done only on the non-va_list variants. */
3306 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3307 return false;
3309 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3310 else if (strcmp (fmt_str, target_percent_s) == 0)
3312 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3313 return false;
3314 if (fn_fputs)
3316 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3317 replace_call_with_call_and_fold (gsi, repl);
3318 return true;
3322 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3323 else if (strcmp (fmt_str, target_percent_c) == 0)
3325 if (!arg
3326 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3327 return false;
3328 if (fn_fputc)
3330 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3331 replace_call_with_call_and_fold (gsi, repl);
3332 return true;
3336 return false;
3339 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3340 FMT and ARG are the arguments to the call; we don't fold cases with
3341 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3343 Return NULL_TREE if no simplification was possible, otherwise return the
3344 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3345 code of the function to be simplified. */
3347 static bool
3348 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3349 tree arg, enum built_in_function fcode)
3351 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3352 tree fn_putchar, fn_puts, newarg;
3353 const char *fmt_str = NULL;
3355 /* If the return value is used, don't do the transformation. */
3356 if (gimple_call_lhs (stmt) != NULL_TREE)
3357 return false;
3359 /* Check whether the format is a literal string constant. */
3360 fmt_str = c_getstr (fmt);
3361 if (fmt_str == NULL)
3362 return false;
3364 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3366 /* If we're using an unlocked function, assume the other
3367 unlocked functions exist explicitly. */
3368 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3369 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3371 else
3373 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3374 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3377 if (!init_target_chars ())
3378 return false;
3380 if (strcmp (fmt_str, target_percent_s) == 0
3381 || strchr (fmt_str, target_percent) == NULL)
3383 const char *str;
3385 if (strcmp (fmt_str, target_percent_s) == 0)
3387 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3388 return false;
3390 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3391 return false;
3393 str = c_getstr (arg);
3394 if (str == NULL)
3395 return false;
3397 else
3399 /* The format specifier doesn't contain any '%' characters. */
3400 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3401 && arg)
3402 return false;
3403 str = fmt_str;
3406 /* If the string was "", printf does nothing. */
3407 if (str[0] == '\0')
3409 replace_call_with_value (gsi, NULL_TREE);
3410 return true;
3413 /* If the string has length of 1, call putchar. */
3414 if (str[1] == '\0')
3416 /* Given printf("c"), (where c is any one character,)
3417 convert "c"[0] to an int and pass that to the replacement
3418 function. */
3419 newarg = build_int_cst (integer_type_node, str[0]);
3420 if (fn_putchar)
3422 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3423 replace_call_with_call_and_fold (gsi, repl);
3424 return true;
3427 else
3429 /* If the string was "string\n", call puts("string"). */
3430 size_t len = strlen (str);
3431 if ((unsigned char)str[len - 1] == target_newline
3432 && (size_t) (int) len == len
3433 && (int) len > 0)
3435 char *newstr;
3436 tree offset_node, string_cst;
3438 /* Create a NUL-terminated string that's one char shorter
3439 than the original, stripping off the trailing '\n'. */
3440 newarg = build_string_literal (len, str);
3441 string_cst = string_constant (newarg, &offset_node);
3442 gcc_checking_assert (string_cst
3443 && (TREE_STRING_LENGTH (string_cst)
3444 == (int) len)
3445 && integer_zerop (offset_node)
3446 && (unsigned char)
3447 TREE_STRING_POINTER (string_cst)[len - 1]
3448 == target_newline);
3449 /* build_string_literal creates a new STRING_CST,
3450 modify it in place to avoid double copying. */
3451 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
3452 newstr[len - 1] = '\0';
3453 if (fn_puts)
3455 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3456 replace_call_with_call_and_fold (gsi, repl);
3457 return true;
3460 else
3461 /* We'd like to arrange to call fputs(string,stdout) here,
3462 but we need stdout and don't have a way to get it yet. */
3463 return false;
3467 /* The other optimizations can be done only on the non-va_list variants. */
3468 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3469 return false;
3471 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3472 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3474 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3475 return false;
3476 if (fn_puts)
3478 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3479 replace_call_with_call_and_fold (gsi, repl);
3480 return true;
3484 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3485 else if (strcmp (fmt_str, target_percent_c) == 0)
3487 if (!arg || ! useless_type_conversion_p (integer_type_node,
3488 TREE_TYPE (arg)))
3489 return false;
3490 if (fn_putchar)
3492 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3493 replace_call_with_call_and_fold (gsi, repl);
3494 return true;
3498 return false;
3503 /* Fold a call to __builtin_strlen with known length LEN. */
3505 static bool
3506 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3508 gimple *stmt = gsi_stmt (*gsi);
3510 wide_int minlen;
3511 wide_int maxlen;
3513 tree lenrange[2];
3514 if (!get_range_strlen (gimple_call_arg (stmt, 0), lenrange, true)
3515 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3516 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3518 /* The range of lengths refers to either a single constant
3519 string or to the longest and shortest constant string
3520 referenced by the argument of the strlen() call, or to
3521 the strings that can possibly be stored in the arrays
3522 the argument refers to. */
3523 minlen = wi::to_wide (lenrange[0]);
3524 maxlen = wi::to_wide (lenrange[1]);
3526 else
3528 unsigned prec = TYPE_PRECISION (sizetype);
3530 minlen = wi::shwi (0, prec);
3531 maxlen = wi::to_wide (max_object_size (), prec) - 2;
3534 if (minlen == maxlen)
3536 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3537 true, GSI_SAME_STMT);
3538 replace_call_with_value (gsi, lenrange[0]);
3539 return true;
3542 if (tree lhs = gimple_call_lhs (stmt))
3543 if (TREE_CODE (lhs) == SSA_NAME
3544 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3545 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3547 return false;
3550 /* Fold a call to __builtin_acc_on_device. */
3552 static bool
3553 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3555 /* Defer folding until we know which compiler we're in. */
3556 if (symtab->state != EXPANSION)
3557 return false;
3559 unsigned val_host = GOMP_DEVICE_HOST;
3560 unsigned val_dev = GOMP_DEVICE_NONE;
3562 #ifdef ACCEL_COMPILER
3563 val_host = GOMP_DEVICE_NOT_HOST;
3564 val_dev = ACCEL_COMPILER_acc_device;
3565 #endif
3567 location_t loc = gimple_location (gsi_stmt (*gsi));
3569 tree host_eq = make_ssa_name (boolean_type_node);
3570 gimple *host_ass = gimple_build_assign
3571 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3572 gimple_set_location (host_ass, loc);
3573 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3575 tree dev_eq = make_ssa_name (boolean_type_node);
3576 gimple *dev_ass = gimple_build_assign
3577 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3578 gimple_set_location (dev_ass, loc);
3579 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3581 tree result = make_ssa_name (boolean_type_node);
3582 gimple *result_ass = gimple_build_assign
3583 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3584 gimple_set_location (result_ass, loc);
3585 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3587 replace_call_with_value (gsi, result);
3589 return true;
3592 /* Fold realloc (0, n) -> malloc (n). */
3594 static bool
3595 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3597 gimple *stmt = gsi_stmt (*gsi);
3598 tree arg = gimple_call_arg (stmt, 0);
3599 tree size = gimple_call_arg (stmt, 1);
3601 if (operand_equal_p (arg, null_pointer_node, 0))
3603 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3604 if (fn_malloc)
3606 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3607 replace_call_with_call_and_fold (gsi, repl);
3608 return true;
3611 return false;
3614 /* Fold the non-target builtin at *GSI and return whether any simplification
3615 was made. */
3617 static bool
3618 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3620 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3621 tree callee = gimple_call_fndecl (stmt);
3623 /* Give up for always_inline inline builtins until they are
3624 inlined. */
3625 if (avoid_folding_inline_builtin (callee))
3626 return false;
3628 unsigned n = gimple_call_num_args (stmt);
3629 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
3630 switch (fcode)
3632 case BUILT_IN_BCMP:
3633 return gimple_fold_builtin_bcmp (gsi);
3634 case BUILT_IN_BCOPY:
3635 return gimple_fold_builtin_bcopy (gsi);
3636 case BUILT_IN_BZERO:
3637 return gimple_fold_builtin_bzero (gsi);
3639 case BUILT_IN_MEMSET:
3640 return gimple_fold_builtin_memset (gsi,
3641 gimple_call_arg (stmt, 1),
3642 gimple_call_arg (stmt, 2));
3643 case BUILT_IN_MEMCPY:
3644 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3645 gimple_call_arg (stmt, 1), 0);
3646 case BUILT_IN_MEMPCPY:
3647 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3648 gimple_call_arg (stmt, 1), 1);
3649 case BUILT_IN_MEMMOVE:
3650 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3651 gimple_call_arg (stmt, 1), 3);
3652 case BUILT_IN_SPRINTF_CHK:
3653 case BUILT_IN_VSPRINTF_CHK:
3654 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3655 case BUILT_IN_STRCAT_CHK:
3656 return gimple_fold_builtin_strcat_chk (gsi);
3657 case BUILT_IN_STRNCAT_CHK:
3658 return gimple_fold_builtin_strncat_chk (gsi);
3659 case BUILT_IN_STRLEN:
3660 return gimple_fold_builtin_strlen (gsi);
3661 case BUILT_IN_STRCPY:
3662 return gimple_fold_builtin_strcpy (gsi,
3663 gimple_call_arg (stmt, 0),
3664 gimple_call_arg (stmt, 1));
3665 case BUILT_IN_STRNCPY:
3666 return gimple_fold_builtin_strncpy (gsi,
3667 gimple_call_arg (stmt, 0),
3668 gimple_call_arg (stmt, 1),
3669 gimple_call_arg (stmt, 2));
3670 case BUILT_IN_STRCAT:
3671 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3672 gimple_call_arg (stmt, 1));
3673 case BUILT_IN_STRNCAT:
3674 return gimple_fold_builtin_strncat (gsi);
3675 case BUILT_IN_INDEX:
3676 case BUILT_IN_STRCHR:
3677 return gimple_fold_builtin_strchr (gsi, false);
3678 case BUILT_IN_RINDEX:
3679 case BUILT_IN_STRRCHR:
3680 return gimple_fold_builtin_strchr (gsi, true);
3681 case BUILT_IN_STRSTR:
3682 return gimple_fold_builtin_strstr (gsi);
3683 case BUILT_IN_STRCMP:
3684 case BUILT_IN_STRCMP_EQ:
3685 case BUILT_IN_STRCASECMP:
3686 case BUILT_IN_STRNCMP:
3687 case BUILT_IN_STRNCMP_EQ:
3688 case BUILT_IN_STRNCASECMP:
3689 return gimple_fold_builtin_string_compare (gsi);
3690 case BUILT_IN_MEMCHR:
3691 return gimple_fold_builtin_memchr (gsi);
3692 case BUILT_IN_FPUTS:
3693 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3694 gimple_call_arg (stmt, 1), false);
3695 case BUILT_IN_FPUTS_UNLOCKED:
3696 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3697 gimple_call_arg (stmt, 1), true);
3698 case BUILT_IN_MEMCPY_CHK:
3699 case BUILT_IN_MEMPCPY_CHK:
3700 case BUILT_IN_MEMMOVE_CHK:
3701 case BUILT_IN_MEMSET_CHK:
3702 return gimple_fold_builtin_memory_chk (gsi,
3703 gimple_call_arg (stmt, 0),
3704 gimple_call_arg (stmt, 1),
3705 gimple_call_arg (stmt, 2),
3706 gimple_call_arg (stmt, 3),
3707 fcode);
3708 case BUILT_IN_STPCPY:
3709 return gimple_fold_builtin_stpcpy (gsi);
3710 case BUILT_IN_STRCPY_CHK:
3711 case BUILT_IN_STPCPY_CHK:
3712 return gimple_fold_builtin_stxcpy_chk (gsi,
3713 gimple_call_arg (stmt, 0),
3714 gimple_call_arg (stmt, 1),
3715 gimple_call_arg (stmt, 2),
3716 fcode);
3717 case BUILT_IN_STRNCPY_CHK:
3718 case BUILT_IN_STPNCPY_CHK:
3719 return gimple_fold_builtin_stxncpy_chk (gsi,
3720 gimple_call_arg (stmt, 0),
3721 gimple_call_arg (stmt, 1),
3722 gimple_call_arg (stmt, 2),
3723 gimple_call_arg (stmt, 3),
3724 fcode);
3725 case BUILT_IN_SNPRINTF_CHK:
3726 case BUILT_IN_VSNPRINTF_CHK:
3727 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3729 case BUILT_IN_FPRINTF:
3730 case BUILT_IN_FPRINTF_UNLOCKED:
3731 case BUILT_IN_VFPRINTF:
3732 if (n == 2 || n == 3)
3733 return gimple_fold_builtin_fprintf (gsi,
3734 gimple_call_arg (stmt, 0),
3735 gimple_call_arg (stmt, 1),
3736 n == 3
3737 ? gimple_call_arg (stmt, 2)
3738 : NULL_TREE,
3739 fcode);
3740 break;
3741 case BUILT_IN_FPRINTF_CHK:
3742 case BUILT_IN_VFPRINTF_CHK:
3743 if (n == 3 || n == 4)
3744 return gimple_fold_builtin_fprintf (gsi,
3745 gimple_call_arg (stmt, 0),
3746 gimple_call_arg (stmt, 2),
3747 n == 4
3748 ? gimple_call_arg (stmt, 3)
3749 : NULL_TREE,
3750 fcode);
3751 break;
3752 case BUILT_IN_PRINTF:
3753 case BUILT_IN_PRINTF_UNLOCKED:
3754 case BUILT_IN_VPRINTF:
3755 if (n == 1 || n == 2)
3756 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3757 n == 2
3758 ? gimple_call_arg (stmt, 1)
3759 : NULL_TREE, fcode);
3760 break;
3761 case BUILT_IN_PRINTF_CHK:
3762 case BUILT_IN_VPRINTF_CHK:
3763 if (n == 2 || n == 3)
3764 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3765 n == 3
3766 ? gimple_call_arg (stmt, 2)
3767 : NULL_TREE, fcode);
3768 break;
3769 case BUILT_IN_ACC_ON_DEVICE:
3770 return gimple_fold_builtin_acc_on_device (gsi,
3771 gimple_call_arg (stmt, 0));
3772 case BUILT_IN_REALLOC:
3773 return gimple_fold_builtin_realloc (gsi);
3775 default:;
3778 /* Try the generic builtin folder. */
3779 bool ignore = (gimple_call_lhs (stmt) == NULL);
3780 tree result = fold_call_stmt (stmt, ignore);
3781 if (result)
3783 if (ignore)
3784 STRIP_NOPS (result);
3785 else
3786 result = fold_convert (gimple_call_return_type (stmt), result);
3787 if (!update_call_from_tree (gsi, result))
3788 gimplify_and_update_call_from_tree (gsi, result);
3789 return true;
3792 return false;
3795 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3796 function calls to constants, where possible. */
3798 static tree
3799 fold_internal_goacc_dim (const gimple *call)
3801 int axis = oacc_get_ifn_dim_arg (call);
3802 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3803 tree result = NULL_TREE;
3804 tree type = TREE_TYPE (gimple_call_lhs (call));
3806 switch (gimple_call_internal_fn (call))
3808 case IFN_GOACC_DIM_POS:
3809 /* If the size is 1, we know the answer. */
3810 if (size == 1)
3811 result = build_int_cst (type, 0);
3812 break;
3813 case IFN_GOACC_DIM_SIZE:
3814 /* If the size is not dynamic, we know the answer. */
3815 if (size)
3816 result = build_int_cst (type, size);
3817 break;
3818 default:
3819 break;
3822 return result;
3825 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3826 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3827 &var where var is only addressable because of such calls. */
3829 bool
3830 optimize_atomic_compare_exchange_p (gimple *stmt)
3832 if (gimple_call_num_args (stmt) != 6
3833 || !flag_inline_atomics
3834 || !optimize
3835 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3836 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3837 || !gimple_vdef (stmt)
3838 || !gimple_vuse (stmt))
3839 return false;
3841 tree fndecl = gimple_call_fndecl (stmt);
3842 switch (DECL_FUNCTION_CODE (fndecl))
3844 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3845 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3846 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3847 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3848 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3849 break;
3850 default:
3851 return false;
3854 tree expected = gimple_call_arg (stmt, 1);
3855 if (TREE_CODE (expected) != ADDR_EXPR
3856 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3857 return false;
3859 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3860 if (!is_gimple_reg_type (etype)
3861 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3862 || TREE_THIS_VOLATILE (etype)
3863 || VECTOR_TYPE_P (etype)
3864 || TREE_CODE (etype) == COMPLEX_TYPE
3865 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3866 might not preserve all the bits. See PR71716. */
3867 || SCALAR_FLOAT_TYPE_P (etype)
3868 || maybe_ne (TYPE_PRECISION (etype),
3869 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3870 return false;
3872 tree weak = gimple_call_arg (stmt, 3);
3873 if (!integer_zerop (weak) && !integer_onep (weak))
3874 return false;
3876 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3877 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3878 machine_mode mode = TYPE_MODE (itype);
3880 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3881 == CODE_FOR_nothing
3882 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3883 return false;
3885 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3886 return false;
3888 return true;
3891 /* Fold
3892 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3893 into
3894 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3895 i = IMAGPART_EXPR <t>;
3896 r = (_Bool) i;
3897 e = REALPART_EXPR <t>; */
3899 void
3900 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3902 gimple *stmt = gsi_stmt (*gsi);
3903 tree fndecl = gimple_call_fndecl (stmt);
3904 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3905 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3906 tree ctype = build_complex_type (itype);
3907 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3908 bool throws = false;
3909 edge e = NULL;
3910 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3911 expected);
3912 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3913 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
3914 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
3916 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
3917 build1 (VIEW_CONVERT_EXPR, itype,
3918 gimple_assign_lhs (g)));
3919 gsi_insert_before (gsi, g, GSI_SAME_STMT);
3921 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
3922 + int_size_in_bytes (itype);
3923 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
3924 gimple_call_arg (stmt, 0),
3925 gimple_assign_lhs (g),
3926 gimple_call_arg (stmt, 2),
3927 build_int_cst (integer_type_node, flag),
3928 gimple_call_arg (stmt, 4),
3929 gimple_call_arg (stmt, 5));
3930 tree lhs = make_ssa_name (ctype);
3931 gimple_call_set_lhs (g, lhs);
3932 gimple_set_vdef (g, gimple_vdef (stmt));
3933 gimple_set_vuse (g, gimple_vuse (stmt));
3934 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
3935 tree oldlhs = gimple_call_lhs (stmt);
3936 if (stmt_can_throw_internal (stmt))
3938 throws = true;
3939 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
3941 gimple_call_set_nothrow (as_a <gcall *> (g),
3942 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
3943 gimple_call_set_lhs (stmt, NULL_TREE);
3944 gsi_replace (gsi, g, true);
3945 if (oldlhs)
3947 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
3948 build1 (IMAGPART_EXPR, itype, lhs));
3949 if (throws)
3951 gsi_insert_on_edge_immediate (e, g);
3952 *gsi = gsi_for_stmt (g);
3954 else
3955 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3956 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
3957 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3959 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
3960 build1 (REALPART_EXPR, itype, lhs));
3961 if (throws && oldlhs == NULL_TREE)
3963 gsi_insert_on_edge_immediate (e, g);
3964 *gsi = gsi_for_stmt (g);
3966 else
3967 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3968 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
3970 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3971 VIEW_CONVERT_EXPR,
3972 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
3973 gimple_assign_lhs (g)));
3974 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3976 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
3977 gsi_insert_after (gsi, g, GSI_NEW_STMT);
3978 *gsi = gsiret;
3981 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
3982 doesn't fit into TYPE. The test for overflow should be regardless of
3983 -fwrapv, and even for unsigned types. */
3985 bool
3986 arith_overflowed_p (enum tree_code code, const_tree type,
3987 const_tree arg0, const_tree arg1)
3989 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
3990 typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
3991 widest2_int_cst;
3992 widest2_int warg0 = widest2_int_cst (arg0);
3993 widest2_int warg1 = widest2_int_cst (arg1);
3994 widest2_int wres;
3995 switch (code)
3997 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
3998 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
3999 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4000 default: gcc_unreachable ();
4002 signop sign = TYPE_SIGN (type);
4003 if (sign == UNSIGNED && wi::neg_p (wres))
4004 return true;
4005 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4008 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4009 The statement may be replaced by another statement, e.g., if the call
4010 simplifies to a constant value. Return true if any changes were made.
4011 It is assumed that the operands have been previously folded. */
4013 static bool
4014 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4016 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4017 tree callee;
4018 bool changed = false;
4019 unsigned i;
4021 /* Fold *& in call arguments. */
4022 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4023 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4025 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4026 if (tmp)
4028 gimple_call_set_arg (stmt, i, tmp);
4029 changed = true;
4033 /* Check for virtual calls that became direct calls. */
4034 callee = gimple_call_fn (stmt);
4035 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4037 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4039 if (dump_file && virtual_method_call_p (callee)
4040 && !possible_polymorphic_call_target_p
4041 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4042 (OBJ_TYPE_REF_EXPR (callee)))))
4044 fprintf (dump_file,
4045 "Type inheritance inconsistent devirtualization of ");
4046 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4047 fprintf (dump_file, " to ");
4048 print_generic_expr (dump_file, callee, TDF_SLIM);
4049 fprintf (dump_file, "\n");
4052 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4053 changed = true;
4055 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4057 bool final;
4058 vec <cgraph_node *>targets
4059 = possible_polymorphic_call_targets (callee, stmt, &final);
4060 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4062 tree lhs = gimple_call_lhs (stmt);
4063 if (dump_enabled_p ())
4065 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4066 "folding virtual function call to %s\n",
4067 targets.length () == 1
4068 ? targets[0]->name ()
4069 : "__builtin_unreachable");
4071 if (targets.length () == 1)
4073 tree fndecl = targets[0]->decl;
4074 gimple_call_set_fndecl (stmt, fndecl);
4075 changed = true;
4076 /* If changing the call to __cxa_pure_virtual
4077 or similar noreturn function, adjust gimple_call_fntype
4078 too. */
4079 if (gimple_call_noreturn_p (stmt)
4080 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4081 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4082 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4083 == void_type_node))
4084 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4085 /* If the call becomes noreturn, remove the lhs. */
4086 if (lhs
4087 && gimple_call_noreturn_p (stmt)
4088 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4089 || should_remove_lhs_p (lhs)))
4091 if (TREE_CODE (lhs) == SSA_NAME)
4093 tree var = create_tmp_var (TREE_TYPE (lhs));
4094 tree def = get_or_create_ssa_default_def (cfun, var);
4095 gimple *new_stmt = gimple_build_assign (lhs, def);
4096 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4098 gimple_call_set_lhs (stmt, NULL_TREE);
4100 maybe_remove_unused_call_args (cfun, stmt);
4102 else
4104 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4105 gimple *new_stmt = gimple_build_call (fndecl, 0);
4106 gimple_set_location (new_stmt, gimple_location (stmt));
4107 /* If the call had a SSA name as lhs morph that into
4108 an uninitialized value. */
4109 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4111 tree var = create_tmp_var (TREE_TYPE (lhs));
4112 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4113 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4114 set_ssa_default_def (cfun, var, lhs);
4116 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4117 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4118 gsi_replace (gsi, new_stmt, false);
4119 return true;
4125 /* Check for indirect calls that became direct calls, and then
4126 no longer require a static chain. */
4127 if (gimple_call_chain (stmt))
4129 tree fn = gimple_call_fndecl (stmt);
4130 if (fn && !DECL_STATIC_CHAIN (fn))
4132 gimple_call_set_chain (stmt, NULL);
4133 changed = true;
4135 else
4137 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4138 if (tmp)
4140 gimple_call_set_chain (stmt, tmp);
4141 changed = true;
4146 if (inplace)
4147 return changed;
4149 /* Check for builtins that CCP can handle using information not
4150 available in the generic fold routines. */
4151 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4153 if (gimple_fold_builtin (gsi))
4154 changed = true;
4156 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4158 changed |= targetm.gimple_fold_builtin (gsi);
4160 else if (gimple_call_internal_p (stmt))
4162 enum tree_code subcode = ERROR_MARK;
4163 tree result = NULL_TREE;
4164 bool cplx_result = false;
4165 tree overflow = NULL_TREE;
4166 switch (gimple_call_internal_fn (stmt))
4168 case IFN_BUILTIN_EXPECT:
4169 result = fold_builtin_expect (gimple_location (stmt),
4170 gimple_call_arg (stmt, 0),
4171 gimple_call_arg (stmt, 1),
4172 gimple_call_arg (stmt, 2));
4173 break;
4174 case IFN_UBSAN_OBJECT_SIZE:
4176 tree offset = gimple_call_arg (stmt, 1);
4177 tree objsize = gimple_call_arg (stmt, 2);
4178 if (integer_all_onesp (objsize)
4179 || (TREE_CODE (offset) == INTEGER_CST
4180 && TREE_CODE (objsize) == INTEGER_CST
4181 && tree_int_cst_le (offset, objsize)))
4183 replace_call_with_value (gsi, NULL_TREE);
4184 return true;
4187 break;
4188 case IFN_UBSAN_PTR:
4189 if (integer_zerop (gimple_call_arg (stmt, 1)))
4191 replace_call_with_value (gsi, NULL_TREE);
4192 return true;
4194 break;
4195 case IFN_UBSAN_BOUNDS:
4197 tree index = gimple_call_arg (stmt, 1);
4198 tree bound = gimple_call_arg (stmt, 2);
4199 if (TREE_CODE (index) == INTEGER_CST
4200 && TREE_CODE (bound) == INTEGER_CST)
4202 index = fold_convert (TREE_TYPE (bound), index);
4203 if (TREE_CODE (index) == INTEGER_CST
4204 && tree_int_cst_le (index, bound))
4206 replace_call_with_value (gsi, NULL_TREE);
4207 return true;
4211 break;
4212 case IFN_GOACC_DIM_SIZE:
4213 case IFN_GOACC_DIM_POS:
4214 result = fold_internal_goacc_dim (stmt);
4215 break;
4216 case IFN_UBSAN_CHECK_ADD:
4217 subcode = PLUS_EXPR;
4218 break;
4219 case IFN_UBSAN_CHECK_SUB:
4220 subcode = MINUS_EXPR;
4221 break;
4222 case IFN_UBSAN_CHECK_MUL:
4223 subcode = MULT_EXPR;
4224 break;
4225 case IFN_ADD_OVERFLOW:
4226 subcode = PLUS_EXPR;
4227 cplx_result = true;
4228 break;
4229 case IFN_SUB_OVERFLOW:
4230 subcode = MINUS_EXPR;
4231 cplx_result = true;
4232 break;
4233 case IFN_MUL_OVERFLOW:
4234 subcode = MULT_EXPR;
4235 cplx_result = true;
4236 break;
4237 default:
4238 break;
4240 if (subcode != ERROR_MARK)
4242 tree arg0 = gimple_call_arg (stmt, 0);
4243 tree arg1 = gimple_call_arg (stmt, 1);
4244 tree type = TREE_TYPE (arg0);
4245 if (cplx_result)
4247 tree lhs = gimple_call_lhs (stmt);
4248 if (lhs == NULL_TREE)
4249 type = NULL_TREE;
4250 else
4251 type = TREE_TYPE (TREE_TYPE (lhs));
4253 if (type == NULL_TREE)
4255 /* x = y + 0; x = y - 0; x = y * 0; */
4256 else if (integer_zerop (arg1))
4257 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4258 /* x = 0 + y; x = 0 * y; */
4259 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4260 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4261 /* x = y - y; */
4262 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4263 result = integer_zero_node;
4264 /* x = y * 1; x = 1 * y; */
4265 else if (subcode == MULT_EXPR && integer_onep (arg1))
4266 result = arg0;
4267 else if (subcode == MULT_EXPR && integer_onep (arg0))
4268 result = arg1;
4269 else if (TREE_CODE (arg0) == INTEGER_CST
4270 && TREE_CODE (arg1) == INTEGER_CST)
4272 if (cplx_result)
4273 result = int_const_binop (subcode, fold_convert (type, arg0),
4274 fold_convert (type, arg1));
4275 else
4276 result = int_const_binop (subcode, arg0, arg1);
4277 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4279 if (cplx_result)
4280 overflow = build_one_cst (type);
4281 else
4282 result = NULL_TREE;
4285 if (result)
4287 if (result == integer_zero_node)
4288 result = build_zero_cst (type);
4289 else if (cplx_result && TREE_TYPE (result) != type)
4291 if (TREE_CODE (result) == INTEGER_CST)
4293 if (arith_overflowed_p (PLUS_EXPR, type, result,
4294 integer_zero_node))
4295 overflow = build_one_cst (type);
4297 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4298 && TYPE_UNSIGNED (type))
4299 || (TYPE_PRECISION (type)
4300 < (TYPE_PRECISION (TREE_TYPE (result))
4301 + (TYPE_UNSIGNED (TREE_TYPE (result))
4302 && !TYPE_UNSIGNED (type)))))
4303 result = NULL_TREE;
4304 if (result)
4305 result = fold_convert (type, result);
4310 if (result)
4312 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4313 result = drop_tree_overflow (result);
4314 if (cplx_result)
4316 if (overflow == NULL_TREE)
4317 overflow = build_zero_cst (TREE_TYPE (result));
4318 tree ctype = build_complex_type (TREE_TYPE (result));
4319 if (TREE_CODE (result) == INTEGER_CST
4320 && TREE_CODE (overflow) == INTEGER_CST)
4321 result = build_complex (ctype, result, overflow);
4322 else
4323 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4324 ctype, result, overflow);
4326 if (!update_call_from_tree (gsi, result))
4327 gimplify_and_update_call_from_tree (gsi, result);
4328 changed = true;
4332 return changed;
4336 /* Return true whether NAME has a use on STMT. */
4338 static bool
4339 has_use_on_stmt (tree name, gimple *stmt)
4341 imm_use_iterator iter;
4342 use_operand_p use_p;
4343 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4344 if (USE_STMT (use_p) == stmt)
4345 return true;
4346 return false;
4349 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4350 gimple_simplify.
4352 Replaces *GSI with the simplification result in RES_OP
4353 and the associated statements in *SEQ. Does the replacement
4354 according to INPLACE and returns true if the operation succeeded. */
4356 static bool
4357 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4358 gimple_match_op *res_op,
4359 gimple_seq *seq, bool inplace)
4361 gimple *stmt = gsi_stmt (*gsi);
4362 tree *ops = res_op->ops;
4363 unsigned int num_ops = res_op->num_ops;
4365 /* Play safe and do not allow abnormals to be mentioned in
4366 newly created statements. See also maybe_push_res_to_seq.
4367 As an exception allow such uses if there was a use of the
4368 same SSA name on the old stmt. */
4369 for (unsigned int i = 0; i < num_ops; ++i)
4370 if (TREE_CODE (ops[i]) == SSA_NAME
4371 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4372 && !has_use_on_stmt (ops[i], stmt))
4373 return false;
/* The first operand may itself be a comparison tree; check its two
   operands for abnormal SSA names as well.  */
4375 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4376 for (unsigned int i = 0; i < 2; ++i)
4377 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4378 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4379 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4380 return false;
4382 /* Don't insert new statements when INPLACE is true, even if we could
4383 reuse STMT for the final statement. */
4384 if (inplace && !gimple_seq_empty_p (*seq))
4385 return false;
/* GIMPLE_CONDs must keep a boolean condition; handle the possible
   result shapes (comparison, SSA name, constant) specially.  */
4387 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4389 gcc_assert (res_op->code.is_tree_code ());
4390 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4391 /* GIMPLE_CONDs condition may not throw. */
4392 && (!flag_exceptions
4393 || !cfun->can_throw_non_call_exceptions
4394 || !operation_could_trap_p (res_op->code,
4395 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4396 false, NULL_TREE)))
4397 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4398 else if (res_op->code == SSA_NAME)
4399 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4400 build_zero_cst (TREE_TYPE (ops[0])));
4401 else if (res_op->code == INTEGER_CST)
4403 if (integer_zerop (ops[0]))
4404 gimple_cond_make_false (cond_stmt);
4405 else
4406 gimple_cond_make_true (cond_stmt);
4408 else if (!inplace)
4410 tree res = maybe_push_res_to_seq (res_op, seq);
4411 if (!res)
4412 return false;
4413 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4414 build_zero_cst (TREE_TYPE (res)));
4416 else
4417 return false;
4418 if (dump_file && (dump_flags & TDF_DETAILS))
4420 fprintf (dump_file, "gimple_simplified to ");
4421 if (!gimple_seq_empty_p (*seq))
4422 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4423 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4424 0, TDF_SLIM);
4426 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4427 return true;
/* For assignments, rewrite the RHS with the simplified operation when
   it fits the existing statement (always allowed when !INPLACE).  */
4429 else if (is_gimple_assign (stmt)
4430 && res_op->code.is_tree_code ())
4432 if (!inplace
4433 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4435 maybe_build_generic_op (res_op);
4436 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4437 res_op->op_or_null (0),
4438 res_op->op_or_null (1),
4439 res_op->op_or_null (2));
4440 if (dump_file && (dump_flags & TDF_DETAILS))
4442 fprintf (dump_file, "gimple_simplified to ");
4443 if (!gimple_seq_empty_p (*seq))
4444 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4445 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4446 0, TDF_SLIM);
4448 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4449 return true;
/* When the simplification resolves to the same (combined) function,
   only the arguments changed; update them in place.  */
4452 else if (res_op->code.is_fn_code ()
4453 && gimple_call_combined_fn (stmt) == res_op->code)
4455 gcc_assert (num_ops == gimple_call_num_args (stmt));
4456 for (unsigned int i = 0; i < num_ops; ++i)
4457 gimple_call_set_arg (stmt, i, ops[i]);
4458 if (dump_file && (dump_flags & TDF_DETAILS))
4460 fprintf (dump_file, "gimple_simplified to ");
4461 if (!gimple_seq_empty_p (*seq))
4462 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4463 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4465 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4466 return true;
/* Otherwise materialize the result into the statement's LHS via SEQ,
   replacing the whole statement.  */
4468 else if (!inplace)
4470 if (gimple_has_lhs (stmt))
4472 tree lhs = gimple_get_lhs (stmt);
4473 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4474 return false;
4475 if (dump_file && (dump_flags & TDF_DETAILS))
4477 fprintf (dump_file, "gimple_simplified to ");
4478 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4480 gsi_replace_with_seq_vops (gsi, *seq);
4481 return true;
4483 else
4484 gcc_unreachable ();
4487 return false;
4490 /* Canonicalize MEM_REFs invariant address operand after propagation.
   T points at the outermost reference tree (or an ADDR_EXPR of one);
   it is rewritten in place.  Returns true if *T was changed.  */
4492 static bool
4493 maybe_canonicalize_mem_ref_addr (tree *t)
4495 bool res = false;
4497 if (TREE_CODE (*t) == ADDR_EXPR)
4498 t = &TREE_OPERAND (*t, 0);
4500 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4501 generic vector extension. The actual vector referenced is
4502 view-converted to an array type for this purpose. If the index
4503 is constant the canonical representation in the middle-end is a
4504 BIT_FIELD_REF so re-write the former to the latter here. */
4505 if (TREE_CODE (*t) == ARRAY_REF
4506 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4507 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4508 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4510 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4511 if (VECTOR_TYPE_P (vtype))
4513 tree low = array_ref_low_bound (*t);
4514 if (TREE_CODE (low) == INTEGER_CST)
4516 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the element's bit offset and make sure the access
   stays within the vector before rewriting.  */
4518 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4519 wi::to_widest (low));
4520 idx = wi::mul (idx, wi::to_widest
4521 (TYPE_SIZE (TREE_TYPE (*t))));
4522 widest_int ext
4523 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4524 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4526 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4527 TREE_TYPE (*t),
4528 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4529 TYPE_SIZE (TREE_TYPE (*t)),
4530 wide_int_to_tree (bitsizetype, idx));
4531 res = true;
/* Strip handled components to reach the innermost base object.  */
4538 while (handled_component_p (*t))
4539 t = &TREE_OPERAND (*t, 0);
4541 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4542 of invariant addresses into a SSA name MEM_REF address. */
4543 if (TREE_CODE (*t) == MEM_REF
4544 || TREE_CODE (*t) == TARGET_MEM_REF)
4546 tree addr = TREE_OPERAND (*t, 0);
4547 if (TREE_CODE (addr) == ADDR_EXPR
4548 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4549 || handled_component_p (TREE_OPERAND (addr, 0))))
4551 tree base;
4552 poly_int64 coffset;
4553 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4554 &coffset);
4555 if (!base)
4556 gcc_unreachable ();
4558 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4559 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4560 TREE_OPERAND (*t, 1),
4561 size_int (coffset));
4562 res = true;
4564 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4565 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4568 /* Canonicalize back MEM_REFs to plain reference trees if the object
4569 accessed is a decl that has the same access semantics as the MEM_REF. */
4570 if (TREE_CODE (*t) == MEM_REF
4571 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4572 && integer_zerop (TREE_OPERAND (*t, 1))
4573 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4575 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4576 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4577 if (/* Same volatile qualification. */
4578 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4579 /* Same TBAA behavior with -fstrict-aliasing. */
4580 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4581 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4582 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4583 /* Same alignment. */
4584 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4585 /* We have to look out here to not drop a required conversion
4586 from the rhs to the lhs if *t appears on the lhs or vice-versa
4587 if it appears on the rhs. Thus require strict type
4588 compatibility. */
4589 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4591 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4592 res = true;
4596 /* Canonicalize TARGET_MEM_REF in particular with respect to
4597 the indexes becoming constant. */
4598 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4600 tree tem = maybe_fold_tmr (*t);
4601 if (tem)
4603 *t = tem;
4604 res = true;
4608 return res;
4611 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4612 distinguishes both cases. */
/* GSI points at the statement to fold; VALUEIZE maps SSA names to
   replacement values (e.g. no_follow_ssa_edges).  Returns true iff the
   statement was changed.  */
4614 static bool
4615 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4617 bool changed = false;
4618 gimple *stmt = gsi_stmt (*gsi);
4619 bool nowarning = gimple_no_warning_p (stmt);
4620 unsigned i;
/* Overflow warnings are deferred until we know whether folding
   changed anything; see fold_undefer_overflow_warnings below.  */
4621 fold_defer_overflow_warnings ();
4623 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4624 after propagation.
4625 ??? This shouldn't be done in generic folding but in the
4626 propagation helpers which also know whether an address was
4627 propagated.
4628 Also canonicalize operand order. */
4629 switch (gimple_code (stmt))
4631 case GIMPLE_ASSIGN:
4632 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4634 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4635 if ((REFERENCE_CLASS_P (*rhs)
4636 || TREE_CODE (*rhs) == ADDR_EXPR)
4637 && maybe_canonicalize_mem_ref_addr (rhs))
4638 changed = true;
4639 tree *lhs = gimple_assign_lhs_ptr (stmt);
4640 if (REFERENCE_CLASS_P (*lhs)
4641 && maybe_canonicalize_mem_ref_addr (lhs))
4642 changed = true;
4644 else
4646 /* Canonicalize operand order. */
4647 enum tree_code code = gimple_assign_rhs_code (stmt);
4648 if (TREE_CODE_CLASS (code) == tcc_comparison
4649 || commutative_tree_code (code)
4650 || commutative_ternary_tree_code (code))
4652 tree rhs1 = gimple_assign_rhs1 (stmt);
4653 tree rhs2 = gimple_assign_rhs2 (stmt);
4654 if (tree_swap_operands_p (rhs1, rhs2))
4656 gimple_assign_set_rhs1 (stmt, rhs2);
4657 gimple_assign_set_rhs2 (stmt, rhs1);
4658 if (TREE_CODE_CLASS (code) == tcc_comparison)
4659 gimple_assign_set_rhs_code (stmt,
4660 swap_tree_comparison (code));
4661 changed = true;
4665 break;
4666 case GIMPLE_CALL:
4668 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4670 tree *arg = gimple_call_arg_ptr (stmt, i);
4671 if (REFERENCE_CLASS_P (*arg)
4672 && maybe_canonicalize_mem_ref_addr (arg))
4673 changed = true;
4675 tree *lhs = gimple_call_lhs_ptr (stmt);
4676 if (*lhs
4677 && REFERENCE_CLASS_P (*lhs)
4678 && maybe_canonicalize_mem_ref_addr (lhs))
4679 changed = true;
4680 break;
4682 case GIMPLE_ASM:
4684 gasm *asm_stmt = as_a <gasm *> (stmt);
4685 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4687 tree link = gimple_asm_output_op (asm_stmt, i);
4688 tree op = TREE_VALUE (link);
4689 if (REFERENCE_CLASS_P (op)
4690 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4691 changed = true;
4693 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4695 tree link = gimple_asm_input_op (asm_stmt, i);
4696 tree op = TREE_VALUE (link);
4697 if ((REFERENCE_CLASS_P (op)
4698 || TREE_CODE (op) == ADDR_EXPR)
4699 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4700 changed = true;
4703 break;
4704 case GIMPLE_DEBUG:
4705 if (gimple_debug_bind_p (stmt))
4707 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4708 if (*val
4709 && (REFERENCE_CLASS_P (*val)
4710 || TREE_CODE (*val) == ADDR_EXPR)
4711 && maybe_canonicalize_mem_ref_addr (val))
4712 changed = true;
4714 break;
4715 case GIMPLE_COND:
4717 /* Canonicalize operand order. */
4718 tree lhs = gimple_cond_lhs (stmt);
4719 tree rhs = gimple_cond_rhs (stmt);
4720 if (tree_swap_operands_p (lhs, rhs))
4722 gcond *gc = as_a <gcond *> (stmt);
4723 gimple_cond_set_lhs (gc, rhs);
4724 gimple_cond_set_rhs (gc, lhs);
4725 gimple_cond_set_code (gc,
4726 swap_tree_comparison (gimple_cond_code (gc)));
4727 changed = true;
4730 default:;
4733 /* Dispatch to pattern-based folding. */
4734 if (!inplace
4735 || is_gimple_assign (stmt)
4736 || gimple_code (stmt) == GIMPLE_COND)
4738 gimple_seq seq = NULL;
4739 gimple_match_op res_op;
4740 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4741 valueize, valueize))
4743 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4744 changed = true;
4745 else
4746 gimple_seq_discard (seq);
/* The simplification above may have replaced the statement; refetch.  */
4750 stmt = gsi_stmt (*gsi);
4752 /* Fold the main computation performed by the statement. */
4753 switch (gimple_code (stmt))
4755 case GIMPLE_ASSIGN:
4757 /* Try to canonicalize for boolean-typed X the comparisons
4758 X == 0, X == 1, X != 0, and X != 1. */
4759 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4760 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4762 tree lhs = gimple_assign_lhs (stmt);
4763 tree op1 = gimple_assign_rhs1 (stmt);
4764 tree op2 = gimple_assign_rhs2 (stmt);
4765 tree type = TREE_TYPE (op1);
4767 /* Check whether the comparison operands are of the same boolean
4768 type as the result type is.
4769 Check that second operand is an integer-constant with value
4770 one or zero. */
4771 if (TREE_CODE (op2) == INTEGER_CST
4772 && (integer_zerop (op2) || integer_onep (op2))
4773 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4775 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4776 bool is_logical_not = false;
4778 /* X == 0 and X != 1 is a logical-not of X
4779 X == 1 and X != 0 is X */
4780 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4781 || (cmp_code == NE_EXPR && integer_onep (op2)))
4782 is_logical_not = true;
4784 if (is_logical_not == false)
4785 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4786 /* Only for one-bit precision typed X the transformation
4787 !X -> ~X is valid. */
4788 else if (TYPE_PRECISION (type) == 1)
4789 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4790 /* Otherwise we use !X -> X ^ 1. */
4791 else
4792 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4793 build_int_cst (type, 1));
4794 changed = true;
4795 break;
4799 unsigned old_num_ops = gimple_num_ops (stmt);
4800 tree lhs = gimple_assign_lhs (stmt);
4801 tree new_rhs = fold_gimple_assign (gsi);
4802 if (new_rhs
4803 && !useless_type_conversion_p (TREE_TYPE (lhs),
4804 TREE_TYPE (new_rhs)))
4805 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4806 if (new_rhs
4807 && (!inplace
4808 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4810 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4811 changed = true;
4813 break;
4816 case GIMPLE_CALL:
4817 changed |= gimple_fold_call (gsi, inplace);
4818 break;
4820 case GIMPLE_ASM:
4821 /* Fold *& in asm operands. */
4823 gasm *asm_stmt = as_a <gasm *> (stmt);
4824 size_t noutputs;
4825 const char **oconstraints;
4826 const char *constraint;
4827 bool allows_mem, allows_reg;
4829 noutputs = gimple_asm_noutputs (asm_stmt);
4830 oconstraints = XALLOCAVEC (const char *, noutputs);
4832 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4834 tree link = gimple_asm_output_op (asm_stmt, i);
4835 tree op = TREE_VALUE (link);
4836 oconstraints[i]
4837 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4838 if (REFERENCE_CLASS_P (op)
4839 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4841 TREE_VALUE (link) = op;
4842 changed = true;
4845 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4847 tree link = gimple_asm_input_op (asm_stmt, i);
4848 tree op = TREE_VALUE (link);
4849 constraint
4850 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4851 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4852 oconstraints, &allows_mem, &allows_reg);
4853 if (REFERENCE_CLASS_P (op)
4854 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4855 != NULL_TREE)
4857 TREE_VALUE (link) = op;
4858 changed = true;
4862 break;
4864 case GIMPLE_DEBUG:
4865 if (gimple_debug_bind_p (stmt))
4867 tree val = gimple_debug_bind_get_value (stmt);
4868 if (val
4869 && REFERENCE_CLASS_P (val))
4871 tree tem = maybe_fold_reference (val, false);
4872 if (tem)
4874 gimple_debug_bind_set_value (stmt, tem);
4875 changed = true;
4878 else if (val
4879 && TREE_CODE (val) == ADDR_EXPR)
4881 tree ref = TREE_OPERAND (val, 0);
4882 tree tem = maybe_fold_reference (ref, false);
4883 if (tem)
4885 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4886 gimple_debug_bind_set_value (stmt, tem);
4887 changed = true;
4891 break;
4893 case GIMPLE_RETURN:
4895 greturn *ret_stmt = as_a<greturn *> (stmt);
4896 tree ret = gimple_return_retval(ret_stmt);
4898 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4900 tree val = valueize (ret);
4901 if (val && val != ret
4902 && may_propagate_copy (ret, val))
4904 gimple_return_set_retval (ret_stmt, val);
4905 changed = true;
4909 break;
4911 default:;
/* Folding above may again have replaced the statement; refetch.  */
4914 stmt = gsi_stmt (*gsi);
4916 /* Fold *& on the lhs. */
4917 if (gimple_has_lhs (stmt))
4919 tree lhs = gimple_get_lhs (stmt);
4920 if (lhs && REFERENCE_CLASS_P (lhs))
4922 tree new_lhs = maybe_fold_reference (lhs, true);
4923 if (new_lhs)
4925 gimple_set_lhs (stmt, new_lhs);
4926 changed = true;
4931 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
4932 return changed;
4935 /* Valueziation callback that ends up not following SSA edges. */
4937 tree
4938 no_follow_ssa_edges (tree)
4940 return NULL_TREE;
4943 /* Valueization callback that ends up following single-use SSA edges only. */
4945 tree
4946 follow_single_use_edges (tree val)
4948 if (TREE_CODE (val) == SSA_NAME
4949 && !has_single_use (val))
4950 return NULL_TREE;
4951 return val;
4954 /* Valueization callback that follows all SSA edges. */
4956 tree
4957 follow_all_ssa_edges (tree val)
4959 return val;
4962 /* Fold the statement pointed to by GSI. In some cases, this function may
4963 replace the whole statement with a new one. Returns true iff folding
4964 makes any changes.
4965 The statement pointed to by GSI should be in valid gimple form but may
4966 be in unfolded state as resulting from for example constant propagation
4967 which can produce *&x = 0. */
4969 bool
4970 fold_stmt (gimple_stmt_iterator *gsi)
4972 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
4975 bool
4976 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
4978 return fold_stmt_1 (gsi, false, valueize);
4981 /* Perform the minimal folding on statement *GSI. Only operations like
4982 *&x created by constant propagation are handled. The statement cannot
4983 be replaced with a new one. Return true if the statement was
4984 changed, false otherwise.
4985 The statement *GSI should be in valid gimple form but may
4986 be in unfolded state as resulting from for example constant propagation
4987 which can produce *&x = 0. */
4989 bool
4990 fold_stmt_inplace (gimple_stmt_iterator *gsi)
4992 gimple *stmt = gsi_stmt (*gsi);
4993 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
4994 gcc_assert (gsi_stmt (*gsi) == stmt);
4995 return changed;
4998 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
4999 if EXPR is null or we don't know how.
5000 If non-null, the result always has boolean type. */
5002 static tree
5003 canonicalize_bool (tree expr, bool invert)
5005 if (!expr)
5006 return NULL_TREE;
5007 else if (invert)
5009 if (integer_nonzerop (expr))
5010 return boolean_false_node;
5011 else if (integer_zerop (expr))
5012 return boolean_true_node;
5013 else if (TREE_CODE (expr) == SSA_NAME)
5014 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5015 build_int_cst (TREE_TYPE (expr), 0));
5016 else if (COMPARISON_CLASS_P (expr))
5017 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5018 boolean_type_node,
5019 TREE_OPERAND (expr, 0),
5020 TREE_OPERAND (expr, 1));
5021 else
5022 return NULL_TREE;
5024 else
5026 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5027 return expr;
5028 if (integer_nonzerop (expr))
5029 return boolean_true_node;
5030 else if (integer_zerop (expr))
5031 return boolean_false_node;
5032 else if (TREE_CODE (expr) == SSA_NAME)
5033 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5034 build_int_cst (TREE_TYPE (expr), 0));
5035 else if (COMPARISON_CLASS_P (expr))
5036 return fold_build2 (TREE_CODE (expr),
5037 boolean_type_node,
5038 TREE_OPERAND (expr, 0),
5039 TREE_OPERAND (expr, 1));
5040 else
5041 return NULL_TREE;
5045 /* Check to see if a boolean expression EXPR is logically equivalent to the
5046 comparison (OP1 CODE OP2). Check for various identities involving
5047 SSA_NAMEs. */
5049 static bool
5050 same_bool_comparison_p (const_tree expr, enum tree_code code,
5051 const_tree op1, const_tree op2)
5053 gimple *s;
5055 /* The obvious case. */
5056 if (TREE_CODE (expr) == code
5057 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5058 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5059 return true;
5061 /* Check for comparing (name, name != 0) and the case where expr
5062 is an SSA_NAME with a definition matching the comparison. */
5063 if (TREE_CODE (expr) == SSA_NAME
5064 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5066 if (operand_equal_p (expr, op1, 0))
5067 return ((code == NE_EXPR && integer_zerop (op2))
5068 || (code == EQ_EXPR && integer_nonzerop (op2)))
5069 s = SSA_NAME_DEF_STMT (expr);
5070 if (is_gimple_assign (s)
5071 && gimple_assign_rhs_code (s) == code
5072 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5073 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5074 return true;
5077 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5078 of name is a comparison, recurse. */
5079 if (TREE_CODE (op1) == SSA_NAME
5080 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5082 s = SSA_NAME_DEF_STMT (op1)
5083 if (is_gimple_assign (s)
5084 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5086 enum tree_code c = gimple_assign_rhs_code (s);
/* name != 0 / name == 1 tests name itself: compare EXPR against
   name's defining comparison directly.  */
5087 if ((c == NE_EXPR && integer_zerop (op2))
5088 || (c == EQ_EXPR && integer_nonzerop (op2)))
5089 return same_bool_comparison_p (expr, c,
5090 gimple_assign_rhs1 (s),
5091 gimple_assign_rhs2 (s));
/* name == 0 / name != 1 tests the negation: invert the defining
   comparison before recursing.  */
5092 if ((c == EQ_EXPR && integer_zerop (op2))
5093 || (c == NE_EXPR && integer_nonzerop (op2)))
5094 return same_bool_comparison_p (expr,
5095 invert_tree_comparison (c, false),
5096 gimple_assign_rhs1 (s),
5097 gimple_assign_rhs2 (s));
5100 return false;
5103 /* Check to see if two boolean expressions OP1 and OP2 are logically
5104 equivalent. */
5106 static bool
5107 same_bool_result_p (const_tree op1, const_tree op2)
5109 /* Simple cases first. */
5110 if (operand_equal_p (op1, op2, 0))
5111 return true;
5113 /* Check the cases where at least one of the operands is a comparison.
5114 These are a bit smarter than operand_equal_p in that they apply some
5115 identifies on SSA_NAMEs. */
5116 if (COMPARISON_CLASS_P (op2)
5117 && same_bool_comparison_p (op1, TREE_CODE (op2),
5118 TREE_OPERAND (op2, 0),
5119 TREE_OPERAND (op2, 1)))
5120 return true;
5121 if (COMPARISON_CLASS_P (op1)
5122 && same_bool_comparison_p (op2, TREE_CODE (op1),
5123 TREE_OPERAND (op1, 0),
5124 TREE_OPERAND (op1, 1)))
5125 return true;
5127 /* Default case. */
5128 return false;
5131 /* Forward declarations for some mutually recursive functions. */
5133 static tree
5134 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5135 enum tree_code code2, tree op2a, tree op2b);
5136 static tree
5137 and_var_with_comparison (tree var, bool invert,
5138 enum tree_code code2, tree op2a, tree op2b);
5139 static tree
5140 and_var_with_comparison_1 (gimple *stmt,
5141 enum tree_code code2, tree op2a, tree op2b);
5142 static tree
5143 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5144 enum tree_code code2, tree op2a, tree op2b);
5145 static tree
5146 or_var_with_comparison (tree var, bool invert,
5147 enum tree_code code2, tree op2a, tree op2b);
5148 static tree
5149 or_var_with_comparison_1 (gimple *stmt,
5150 enum tree_code code2, tree op2a, tree op2b);
5152 /* Helper function for and_comparisons_1: try to simplify the AND of the
5153 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5154 If INVERT is true, invert the value of the VAR before doing the AND.
5155 Return NULL_EXPR if we can't simplify this to a single expression. */
5157 static tree
5158 and_var_with_comparison (tree var, bool invert,
5159 enum tree_code code2, tree op2a, tree op2b)
5161 tree t;
5162 gimple *stmt = SSA_NAME_DEF_STMT (var);
5164 /* We can only deal with variables whose definitions are assignments. */
5165 if (!is_gimple_assign (stmt))
5166 return NULL_TREE;
5168 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5169 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5170 Then we only have to consider the simpler non-inverted cases. */
5171 if (invert)
5172 t = or_var_with_comparison_1 (stmt,
5173 invert_tree_comparison (code2, false),
5174 op2a, op2b);
5175 else
5176 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5177 return canonicalize_bool (t, invert);
5180 /* Try to simplify the AND of the ssa variable defined by the assignment
5181 STMT with the comparison specified by (OP2A CODE2 OP2B).
5182 Return NULL_TREE if we can't simplify this to a single expression. */
5184 static tree
5185 and_var_with_comparison_1 (gimple *stmt,
5186 enum tree_code code2, tree op2a, tree op2b)
5188 tree var = gimple_assign_lhs (stmt);
5189 tree true_test_var = NULL_TREE;
5190 tree false_test_var = NULL_TREE;
5191 enum tree_code innercode = gimple_assign_rhs_code (stmt);
5193 /* Check for identities like (var AND (var == 0)) => false. */
5194 if (TREE_CODE (op2a) == SSA_NAME
5195 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
5197 if ((code2 == NE_EXPR && integer_zerop (op2b))
5198 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
5200 true_test_var = op2a;
5201 if (var == true_test_var)
5202 return var;
5204 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
5205 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
5207 false_test_var = op2a;
5208 if (var == false_test_var)
5209 return boolean_false_node;
5213 /* If the definition is a comparison, recurse on it. */
5214 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
5216 tree t = and_comparisons_1 (innercode,
5217 gimple_assign_rhs1 (stmt),
5218 gimple_assign_rhs2 (stmt),
5219 code2,
5220 op2a,
5221 op2b);
5222 if (t)
5223 return t;
5226 /* If the definition is an AND or OR expression, we may be able to
5227 simplify by reassociating. */
5228 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
5229 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
5231 tree inner1 = gimple_assign_rhs1 (stmt);
5232 tree inner2 = gimple_assign_rhs2 (stmt);
5233 gimple *s;
5234 tree t;
5235 tree partial = NULL_TREE;
5236 bool is_and = (innercode == BIT_AND_EXPR);
5238 /* Check for boolean identities that don't require recursive examination
5239 of inner1/inner2:
5240 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5241 inner1 AND (inner1 OR inner2) => inner1
5242 !inner1 AND (inner1 AND inner2) => false
5243 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5244 Likewise for similar cases involving inner2. */
5245 if (inner1 == true_test_var)
5246 return (is_and ? var : inner1);
5247 else if (inner2 == true_test_var)
5248 return (is_and ? var : inner2);
5249 else if (inner1 == false_test_var)
5250 return (is_and
5251 ? boolean_false_node
5252 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
5253 else if (inner2 == false_test_var)
5254 return (is_and
5255 ? boolean_false_node
5256 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
5258 /* Next, redistribute/reassociate the AND across the inner tests.
5259 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5260 if (TREE_CODE (inner1) == SSA_NAME
5261 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
5262 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5263 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5264 gimple_assign_rhs1 (s),
5265 gimple_assign_rhs2 (s),
5266 code2, op2a, op2b)))
5268 /* Handle the AND case, where we are reassociating:
5269 (inner1 AND inner2) AND (op2a code2 op2b)
5270 => (t AND inner2)
5271 If the partial result t is a constant, we win. Otherwise
5272 continue on to try reassociating with the other inner test. */
5273 if (is_and)
5275 if (integer_onep (t))
5276 return inner2;
5277 else if (integer_zerop (t))
5278 return boolean_false_node;
5281 /* Handle the OR case, where we are redistributing:
5282 (inner1 OR inner2) AND (op2a code2 op2b)
5283 => (t OR (inner2 AND (op2a code2 op2b))) */
5284 else if (integer_onep (t))
5285 return boolean_true_node;
5287 /* Save partial result for later. */
5288 partial = t;
5291 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5292 if (TREE_CODE (inner2) == SSA_NAME
5293 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
5294 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
5295 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
5296 gimple_assign_rhs1 (s),
5297 gimple_assign_rhs2 (s),
5298 code2, op2a, op2b)))
5300 /* Handle the AND case, where we are reassociating:
5301 (inner1 AND inner2) AND (op2a code2 op2b)
5302 => (inner1 AND t) */
5303 if (is_and)
5305 if (integer_onep (t))
5306 return inner1;
5307 else if (integer_zerop (t))
5308 return boolean_false_node;
5309 /* If both are the same, we can apply the identity
5310 (x AND x) == x. */
5311 else if (partial && same_bool_result_p (t, partial))
5312 return t;
5315 /* Handle the OR case. where we are redistributing:
5316 (inner1 OR inner2) AND (op2a code2 op2b)
5317 => (t OR (inner1 AND (op2a code2 op2b)))
5318 => (t OR partial) */
5319 else
5321 if (integer_onep (t))
5322 return boolean_true_node;
5323 else if (partial)
5325 /* We already got a simplification for the other
5326 operand to the redistributed OR expression. The
5327 interesting case is when at least one is false.
5328 Or, if both are the same, we can apply the identity
5329 (x OR x) == x. */
5330 if (integer_zerop (partial))
5331 return t;
5332 else if (integer_zerop (t))
5333 return partial;
5334 else if (same_bool_result_p (t, partial))
5335 return t;
5340 return NULL_TREE;
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constant bounds: <0, 0 or >0.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* The NE test is redundant; keep only the second comparison.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  /* The NE test is redundant; keep only the first comparison.  */
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges: (x <= c AND x >= c) => x == c.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5597 /* Try to simplify the AND of two comparisons, specified by
5598 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5599 If this can be simplified to a single expression (without requiring
5600 introducing more SSA variables to hold intermediate values),
5601 return the resulting tree. Otherwise return NULL_TREE.
5602 If the result expression is non-null, it has boolean type. */
5604 tree
5605 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5606 enum tree_code code2, tree op2a, tree op2b)
5608 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5609 if (t)
5610 return t;
5611 else
5612 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5615 /* Helper function for or_comparisons_1: try to simplify the OR of the
5616 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5617 If INVERT is true, invert the value of VAR before doing the OR.
5618 Return NULL_EXPR if we can't simplify this to a single expression. */
5620 static tree
5621 or_var_with_comparison (tree var, bool invert,
5622 enum tree_code code2, tree op2a, tree op2b)
5624 tree t;
5625 gimple *stmt = SSA_NAME_DEF_STMT (var);
5627 /* We can only deal with variables whose definitions are assignments. */
5628 if (!is_gimple_assign (stmt))
5629 return NULL_TREE;
5631 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5632 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5633 Then we only have to consider the simpler non-inverted cases. */
5634 if (invert)
5635 t = and_var_with_comparison_1 (stmt,
5636 invert_tree_comparison (code2, false),
5637 op2a, op2b);
5638 else
5639 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5640 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (NAME != 0) or (NAME == 1) tests NAME for true.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (NAME == 0) or (NAME != 1) tests NAME for false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp orders the two constant bounds: <0, 0 or >0.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  /* The EQ test is subsumed; keep only the second comparison.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  /* The EQ test is subsumed; keep only the first comparison.  */
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges: (x < c OR x > c) => x != c.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for false.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6061 /* Try to simplify the OR of two comparisons, specified by
6062 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6063 If this can be simplified to a single expression (without requiring
6064 introducing more SSA variables to hold intermediate values),
6065 return the resulting tree. Otherwise return NULL_TREE.
6066 If the result expression is non-null, it has boolean type. */
6068 tree
6069 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6070 enum tree_code code2, tree op2a, tree op2b)
6072 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6073 if (t)
6074 return t;
6075 else
6076 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  /* Fall back to code-specific folding below.  */
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  poly_int64 offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              /* Fold a fully-valueized constant vector CONSTRUCTOR to a
                 VECTOR_CST.  */
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && known_eq (CONSTRUCTOR_NELTS (rhs),
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i, nelts;
                  tree val;

                  nelts = CONSTRUCTOR_NELTS (rhs);
                  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec.quick_push (val);
                      else
                        return NULL_TREE;
                    }

                  return vec.build ();
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we can't reduce to a SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    /* Put a constant, if any, on the RHS.  */
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        gcall *call_stmt = as_a <gcall *> (stmt);

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              case IFN_BUILTIN_EXPECT:
                  {
                    tree arg0 = gimple_call_arg (stmt, 0);
                    tree op0 = (*valueize) (arg0);
                    if (TREE_CODE (op0) == INTEGER_CST)
                      return op0;
                    return NULL_TREE;
                  }
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
              }
            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        /* Not an internal call: fold a builtin call with all-constant
           arguments via fold_builtin_call_array.  */
        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            retval = fold_builtin_call_array (loc,
                                              gimple_call_return_type (call_stmt),
                                              fn, gimple_call_num_args (stmt), args);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (call_stmt),
                                       retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
6386 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6387 Returns NULL_TREE if folding to a constant is not possible, otherwise
6388 returns a constant according to is_gimple_min_invariant. */
6390 tree
6391 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6393 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6394 if (res && is_gimple_min_invariant (res))
6395 return res;
6396 return NULL_TREE;
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's byte offset into *BIT_OFFSET; give up when
	 the combined offset does not fit a poly_int64.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not move the storage; look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      /* Only exact, known-size accesses can be accumulated safely.  */
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access not larger than size of array element.  */
  if (type
      && (!TYPE_SIZE_UNIT (type)
	  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
	  || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
	  || elt_size == 0))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_size.to_uhwi () * BITS_PER_UNIT;
	}

      *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  /* Memory not explicitly mentioned in constructor is 0.  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which POLY_OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, increment *SUBOFF by
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Callers that do not care about the sub-object offset still
	 need a place for the helpers to accumulate it.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl, suboff);

      return fold_nonarray_ctor_reference (type, ctor, offset, size,
					   from_decl, suboff);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
6851 tree
6852 fold_const_aggregate_ref (tree t)
6854 return fold_const_aggregate_ref_1 (t, NULL);
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
6966 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
6967 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
6968 KNOWN_BINFO carries the binfo describing the true type of
6969 OBJ_TYPE_REF_OBJECT(REF).
6970 Set CAN_REFER if non-NULL to false if method
6971 is not referable or if the virtual table is ill-formed (such as rewriten
6972 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
6974 tree
6975 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
6976 bool *can_refer)
6978 unsigned HOST_WIDE_INT offset;
6979 tree v;
6981 v = BINFO_VTABLE (known_binfo);
6982 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
6983 if (!v)
6984 return NULL_TREE;
6986 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
6988 if (can_refer)
6989 *can_refer = false;
6990 return NULL_TREE;
6992 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the access stays within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7116 /* Return true if CODE is an operation that when operating on signed
7117 integer types involves undefined behavior on overflow and the
7118 operation can be expressed with unsigned arithmetic. */
7120 bool
7121 arith_code_with_undefined_signed_overflow (tree_code code)
7123 switch (code)
7125 case PLUS_EXPR:
7126 case MINUS_EXPR:
7127 case MULT_EXPR:
7128 case NEGATE_EXPR:
7129 case POINTER_PLUS_EXPR:
7130 return true;
7131 default:
7132 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each operand of STMT to the unsigned counterpart type.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* With an unsigned offset type POINTER_PLUS_EXPR degenerates to
     plain PLUS_EXPR.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original signed/pointer
     type, preserving the original LHS for any users.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7174 /* The valueization hook we use for the gimple_build API simplification.
7175 This makes us match fold_buildN behavior by only combining with
7176 statements in the sequence(s) we are currently building. */
7178 static tree
7179 gimple_build_valueize (tree op)
7181 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7182 return op;
7183 return NULL_TREE;
7186 /* Build the expression CODE OP0 of type TYPE with location LOC,
7187 simplifying it first if possible. Returns the built
7188 expression value and appends statements possibly defining it
7189 to SEQ. */
7191 tree
7192 gimple_build (gimple_seq *seq, location_t loc,
7193 enum tree_code code, tree type, tree op0)
7195 tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
7196 if (!res)
7198 res = create_tmp_reg_or_ssa_name (type);
7199 gimple *stmt;
7200 if (code == REALPART_EXPR
7201 || code == IMAGPART_EXPR
7202 || code == VIEW_CONVERT_EXPR)
7203 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
7204 else
7205 stmt = gimple_build_assign (res, code, op0);
7206 gimple_set_location (stmt, loc);
7207 gimple_seq_add_stmt_without_update (seq, stmt);
7209 return res;
7212 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7213 simplifying it first if possible. Returns the built
7214 expression value and appends statements possibly defining it
7215 to SEQ. */
7217 tree
7218 gimple_build (gimple_seq *seq, location_t loc,
7219 enum tree_code code, tree type, tree op0, tree op1)
7221 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7222 if (!res)
7224 res = create_tmp_reg_or_ssa_name (type);
7225 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7226 gimple_set_location (stmt, loc);
7227 gimple_seq_add_stmt_without_update (seq, stmt);
7229 return res;
7232 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7233 simplifying it first if possible. Returns the built
7234 expression value and appends statements possibly defining it
7235 to SEQ. */
7237 tree
7238 gimple_build (gimple_seq *seq, location_t loc,
7239 enum tree_code code, tree type, tree op0, tree op1, tree op2)
7241 tree res = gimple_simplify (code, type, op0, op1, op2,
7242 seq, gimple_build_valueize);
7243 if (!res)
7245 res = create_tmp_reg_or_ssa_name (type);
7246 gimple *stmt;
7247 if (code == BIT_FIELD_REF)
7248 stmt = gimple_build_assign (res, code,
7249 build3 (code, type, op0, op1, op2));
7250 else
7251 stmt = gimple_build_assign (res, code, op0, op1, op2);
7252 gimple_set_location (stmt, loc);
7253 gimple_seq_add_stmt_without_update (seq, stmt);
7255 return res;
7258 /* Build the call FN (ARG0) with a result of type TYPE
7259 (or no result if TYPE is void) with location LOC,
7260 simplifying it first if possible. Returns the built
7261 expression value (or NULL_TREE if TYPE is void) and appends
7262 statements possibly defining it to SEQ. */
7264 tree
7265 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7266 tree type, tree arg0)
7268 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7269 if (!res)
7271 gcall *stmt;
7272 if (internal_fn_p (fn))
7273 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7274 else
7276 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7277 stmt = gimple_build_call (decl, 1, arg0);
7279 if (!VOID_TYPE_P (type))
7281 res = create_tmp_reg_or_ssa_name (type);
7282 gimple_call_set_lhs (stmt, res);
7284 gimple_set_location (stmt, loc);
7285 gimple_seq_add_stmt_without_update (seq, stmt);
7287 return res;
7290 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7291 (or no result if TYPE is void) with location LOC,
7292 simplifying it first if possible. Returns the built
7293 expression value (or NULL_TREE if TYPE is void) and appends
7294 statements possibly defining it to SEQ. */
7296 tree
7297 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7298 tree type, tree arg0, tree arg1)
7300 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7301 if (!res)
7303 gcall *stmt;
7304 if (internal_fn_p (fn))
7305 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7306 else
7308 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7309 stmt = gimple_build_call (decl, 2, arg0, arg1);
7311 if (!VOID_TYPE_P (type))
7313 res = create_tmp_reg_or_ssa_name (type);
7314 gimple_call_set_lhs (stmt, res);
7316 gimple_set_location (stmt, loc);
7317 gimple_seq_add_stmt_without_update (seq, stmt);
7319 return res;
7322 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7323 (or no result if TYPE is void) with location LOC,
7324 simplifying it first if possible. Returns the built
7325 expression value (or NULL_TREE if TYPE is void) and appends
7326 statements possibly defining it to SEQ. */
7328 tree
7329 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7330 tree type, tree arg0, tree arg1, tree arg2)
7332 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7333 seq, gimple_build_valueize);
7334 if (!res)
7336 gcall *stmt;
7337 if (internal_fn_p (fn))
7338 stmt = gimple_build_call_internal (as_internal_fn (fn),
7339 3, arg0, arg1, arg2);
7340 else
7342 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7343 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7345 if (!VOID_TYPE_P (type))
7347 res = create_tmp_reg_or_ssa_name (type);
7348 gimple_call_set_lhs (stmt, res);
7350 gimple_set_location (stmt, loc);
7351 gimple_seq_add_stmt_without_update (seq, stmt);
7353 return res;
7356 /* Build the conversion (TYPE) OP with a result of type TYPE
7357 with location LOC if such conversion is neccesary in GIMPLE,
7358 simplifying it first.
7359 Returns the built expression value and appends
7360 statements possibly defining it to SEQ. */
7362 tree
7363 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7365 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7366 return op;
7367 return gimple_build (seq, loc, NOP_EXPR, type, op);
7370 /* Build the conversion (ptrofftype) OP with a result of a type
7371 compatible with ptrofftype with location LOC if such conversion
7372 is neccesary in GIMPLE, simplifying it first.
7373 Returns the built expression value and appends
7374 statements possibly defining it to SEQ. */
7376 tree
7377 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7379 if (ptrofftype_p (TREE_TYPE (op)))
7380 return op;
7381 return gimple_convert (seq, loc, sizetype, op);
7384 /* Build a vector of type TYPE in which each element has the value OP.
7385 Return a gimple value for the result, appending any new statements
7386 to SEQ. */
7388 tree
7389 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7390 tree op)
7392 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7393 && !CONSTANT_CLASS_P (op))
7394 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7396 tree res, vec = build_vector_from_val (type, op);
7397 if (is_gimple_val (vec))
7398 return vec;
7399 if (gimple_in_ssa_p (cfun))
7400 res = make_ssa_name (type);
7401 else
7402 res = create_tmp_reg (type);
7403 gimple *stmt = gimple_build_assign (res, vec);
7404 gimple_set_location (stmt, loc);
7405 gimple_seq_add_stmt_without_update (seq, stmt);
7406 return res;
7409 /* Build a vector from BUILDER, handling the case in which some elements
7410 are non-constant. Return a gimple value for the result, appending any
7411 new instructions to SEQ.
7413 BUILDER must not have a stepped encoding on entry. This is because
7414 the function is not geared up to handle the arithmetic that would
7415 be needed in the variable case, and any code building a vector that
7416 is known to be constant should use BUILDER->build () directly. */
7418 tree
7419 gimple_build_vector (gimple_seq *seq, location_t loc,
7420 tree_vector_builder *builder)
7422 gcc_assert (builder->nelts_per_pattern () <= 2);
7423 unsigned int encoded_nelts = builder->encoded_nelts ();
7424 for (unsigned int i = 0; i < encoded_nelts; ++i)
7425 if (!TREE_CONSTANT ((*builder)[i]))
7427 tree type = builder->type ();
7428 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7429 vec<constructor_elt, va_gc> *v;
7430 vec_alloc (v, nelts);
7431 for (i = 0; i < nelts; ++i)
7432 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7434 tree res;
7435 if (gimple_in_ssa_p (cfun))
7436 res = make_ssa_name (type);
7437 else
7438 res = create_tmp_reg (type);
7439 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7440 gimple_set_location (stmt, loc);
7441 gimple_seq_add_stmt_without_update (seq, stmt);
7442 return res;
7444 return builder->build ();
7447 /* Return true if the result of assignment STMT is known to be non-negative.
7448 If the return value is based on the assumption that signed overflow is
7449 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7450 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7452 static bool
7453 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7454 int depth)
7456 enum tree_code code = gimple_assign_rhs_code (stmt);
7457 switch (get_gimple_rhs_class (code))
7459 case GIMPLE_UNARY_RHS:
7460 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7461 gimple_expr_type (stmt),
7462 gimple_assign_rhs1 (stmt),
7463 strict_overflow_p, depth);
7464 case GIMPLE_BINARY_RHS:
7465 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7466 gimple_expr_type (stmt),
7467 gimple_assign_rhs1 (stmt),
7468 gimple_assign_rhs2 (stmt),
7469 strict_overflow_p, depth);
7470 case GIMPLE_TERNARY_RHS:
7471 return false;
7472 case GIMPLE_SINGLE_RHS:
7473 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7474 strict_overflow_p, depth);
7475 case GIMPLE_INVALID_RHS:
7476 break;
7478 gcc_unreachable ();
7481 /* Return true if return value of call STMT is known to be non-negative.
7482 If the return value is based on the assumption that signed overflow is
7483 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7484 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7486 static bool
7487 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7488 int depth)
7490 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7491 gimple_call_arg (stmt, 0) : NULL_TREE;
7492 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7493 gimple_call_arg (stmt, 1) : NULL_TREE;
7495 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7496 gimple_call_combined_fn (stmt),
7497 arg0,
7498 arg1,
7499 strict_overflow_p, depth);
7502 /* Return true if return value of call STMT is known to be non-negative.
7503 If the return value is based on the assumption that signed overflow is
7504 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7505 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7507 static bool
7508 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7509 int depth)
7511 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7513 tree arg = gimple_phi_arg_def (stmt, i);
7514 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7515 return false;
7517 return true;
7520 /* Return true if STMT is known to compute a non-negative value.
7521 If the return value is based on the assumption that signed overflow is
7522 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7523 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7525 bool
7526 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7527 int depth)
7529 switch (gimple_code (stmt))
7531 case GIMPLE_ASSIGN:
7532 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7533 depth);
7534 case GIMPLE_CALL:
7535 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7536 depth);
7537 case GIMPLE_PHI:
7538 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7539 depth);
7540 default:
7541 return false;
7545 /* Return true if the floating-point value computed by assignment STMT
7546 is known to have an integer value. We also allow +Inf, -Inf and NaN
7547 to be considered integer values. Return false for signaling NaN.
7549 DEPTH is the current nesting depth of the query. */
7551 static bool
7552 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7554 enum tree_code code = gimple_assign_rhs_code (stmt);
7555 switch (get_gimple_rhs_class (code))
7557 case GIMPLE_UNARY_RHS:
7558 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7559 gimple_assign_rhs1 (stmt), depth);
7560 case GIMPLE_BINARY_RHS:
7561 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7562 gimple_assign_rhs1 (stmt),
7563 gimple_assign_rhs2 (stmt), depth);
7564 case GIMPLE_TERNARY_RHS:
7565 return false;
7566 case GIMPLE_SINGLE_RHS:
7567 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7568 case GIMPLE_INVALID_RHS:
7569 break;
7571 gcc_unreachable ();
7574 /* Return true if the floating-point value computed by call STMT is known
7575 to have an integer value. We also allow +Inf, -Inf and NaN to be
7576 considered integer values. Return false for signaling NaN.
7578 DEPTH is the current nesting depth of the query. */
7580 static bool
7581 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7583 tree arg0 = (gimple_call_num_args (stmt) > 0
7584 ? gimple_call_arg (stmt, 0)
7585 : NULL_TREE);
7586 tree arg1 = (gimple_call_num_args (stmt) > 1
7587 ? gimple_call_arg (stmt, 1)
7588 : NULL_TREE);
7589 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7590 arg0, arg1, depth);
7593 /* Return true if the floating-point result of phi STMT is known to have
7594 an integer value. We also allow +Inf, -Inf and NaN to be considered
7595 integer values. Return false for signaling NaN.
7597 DEPTH is the current nesting depth of the query. */
7599 static bool
7600 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7602 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7604 tree arg = gimple_phi_arg_def (stmt, i);
7605 if (!integer_valued_real_single_p (arg, depth + 1))
7606 return false;
7608 return true;
7611 /* Return true if the floating-point value computed by STMT is known
7612 to have an integer value. We also allow +Inf, -Inf and NaN to be
7613 considered integer values. Return false for signaling NaN.
7615 DEPTH is the current nesting depth of the query. */
7617 bool
7618 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7620 switch (gimple_code (stmt))
7622 case GIMPLE_ASSIGN:
7623 return gimple_assign_integer_valued_real_p (stmt, depth);
7624 case GIMPLE_CALL:
7625 return gimple_call_integer_valued_real_p (stmt, depth);
7626 case GIMPLE_PHI:
7627 return gimple_phi_integer_valued_real_p (stmt, depth);
7628 default:
7629 return false;