PR rtl-optimization/87817
[official-gcc.git] / gcc / gimple-fold.c
blob67c8cfa4f64a95258cad0bafa5b157cf883afe6f
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
35 #include "stmt.h"
36 #include "expr.h"
37 #include "stor-layout.h"
38 #include "dumpfile.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
43 #include "tree-dfa.h"
44 #include "tree-object-size.h"
45 #include "tree-ssa.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
51 #include "dbgcnt.h"
52 #include "builtins.h"
53 #include "tree-eh.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
58 #include "tree-cfg.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "asan.h"
63 #include "diagnostic-core.h"
64 #include "intl.h"
65 #include "calls.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 /* Return true when DECL can be referenced from current unit.
70 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
71 We can get declarations that are not possible to reference for various
72 reasons:
74 1) When analyzing C++ virtual tables.
75 C++ virtual tables do have known constructors even
76 when they are keyed to other compilation unit.
77 Those tables can contain pointers to methods and vars
78 in other units. Those methods have both STATIC and EXTERNAL
79 set.
80 2) In WHOPR mode devirtualization might lead to reference
81 to method that was partitioned elsehwere.
82 In this case we have static VAR_DECL or FUNCTION_DECL
83 that has no corresponding callgraph/varpool node
84 declaring the body.
85 3) COMDAT functions referred by external vtables that
86 we devirtualize only during final compilation stage.
87 At this time we already decided that we will not output
88 the function body and thus we can't reference the symbol
89 directly. */
91 static bool
92 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
94 varpool_node *vnode;
95 struct cgraph_node *node;
96 symtab_node *snode;
98 if (DECL_ABSTRACT_P (decl))
99 return false;
101 /* We are concerned only about static/external vars and functions. */
102 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
103 || !VAR_OR_FUNCTION_DECL_P (decl))
104 return true;
106 /* Static objects can be referred only if they was not optimized out yet. */
107 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
109 /* Before we start optimizing unreachable code we can be sure all
110 static objects are defined. */
111 if (symtab->function_flags_ready)
112 return true;
113 snode = symtab_node::get (decl);
114 if (!snode || !snode->definition)
115 return false;
116 node = dyn_cast <cgraph_node *> (snode);
117 return !node || !node->global.inlined_to;
120 /* We will later output the initializer, so we can refer to it.
121 So we are concerned only when DECL comes from initializer of
122 external var or var that has been optimized out. */
123 if (!from_decl
124 || !VAR_P (from_decl)
125 || (!DECL_EXTERNAL (from_decl)
126 && (vnode = varpool_node::get (from_decl)) != NULL
127 && vnode->definition)
128 || (flag_ltrans
129 && (vnode = varpool_node::get (from_decl)) != NULL
130 && vnode->in_other_partition))
131 return true;
132 /* We are folding reference from external vtable. The vtable may reffer
133 to a symbol keyed to other compilation unit. The other compilation
134 unit may be in separate DSO and the symbol may be hidden. */
135 if (DECL_VISIBILITY_SPECIFIED (decl)
136 && DECL_EXTERNAL (decl)
137 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
138 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
139 return false;
140 /* When function is public, we always can introduce new reference.
141 Exception are the COMDAT functions where introducing a direct
142 reference imply need to include function body in the curren tunit. */
143 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
144 return true;
145 /* We have COMDAT. We are going to check if we still have definition
146 or if the definition is going to be output in other partition.
147 Bypass this when gimplifying; all needed functions will be produced.
149 As observed in PR20991 for already optimized out comdat virtual functions
150 it may be tempting to not necessarily give up because the copy will be
151 output elsewhere when corresponding vtable is output.
152 This is however not possible - ABI specify that COMDATs are output in
153 units where they are used and when the other unit was compiled with LTO
154 it is possible that vtable was kept public while the function itself
155 was privatized. */
156 if (!symtab->function_flags_ready)
157 return true;
159 snode = symtab_node::get (decl);
160 if (!snode
161 || ((!snode->definition || DECL_EXTERNAL (decl))
162 && (!snode->in_other_partition
163 || (!snode->forced_by_abi && !snode->force_output))))
164 return false;
165 node = dyn_cast <cgraph_node *> (snode);
166 return !node || !node->global.inlined_to;
169 /* Create a temporary for TYPE for a statement STMT. If the current function
170 is in SSA form, a SSA name is created. Otherwise a temporary register
171 is made. */
173 tree
174 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
176 if (gimple_in_ssa_p (cfun))
177 return make_ssa_name (type, stmt);
178 else
179 return create_tmp_reg (type);
182 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
183 acceptable form for is_gimple_min_invariant.
184 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 tree
187 canonicalize_constructor_val (tree cval, tree from_decl)
189 tree orig_cval = cval;
190 STRIP_NOPS (cval);
191 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
192 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
194 tree ptr = TREE_OPERAND (cval, 0);
195 if (is_gimple_min_invariant (ptr))
196 cval = build1_loc (EXPR_LOCATION (cval),
197 ADDR_EXPR, TREE_TYPE (ptr),
198 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
199 ptr,
200 fold_convert (ptr_type_node,
201 TREE_OPERAND (cval, 1))));
203 if (TREE_CODE (cval) == ADDR_EXPR)
205 tree base = NULL_TREE;
206 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
208 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
209 if (base)
210 TREE_OPERAND (cval, 0) = base;
212 else
213 base = get_base_address (TREE_OPERAND (cval, 0));
214 if (!base)
215 return NULL_TREE;
217 if (VAR_OR_FUNCTION_DECL_P (base)
218 && !can_refer_decl_in_current_unit_p (base, from_decl))
219 return NULL_TREE;
220 if (TREE_TYPE (base) == error_mark_node)
221 return NULL_TREE;
222 if (VAR_P (base))
223 TREE_ADDRESSABLE (base) = 1;
224 else if (TREE_CODE (base) == FUNCTION_DECL)
226 /* Make sure we create a cgraph node for functions we'll reference.
227 They can be non-existent if the reference comes from an entry
228 of an external vtable for example. */
229 cgraph_node::get_create (base);
231 /* Fixup types in global initializers. */
232 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
233 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
235 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
236 cval = fold_convert (TREE_TYPE (orig_cval), cval);
237 return cval;
239 if (TREE_OVERFLOW_P (cval))
240 return drop_tree_overflow (cval);
241 return orig_cval;
244 /* If SYM is a constant variable with known value, return the value.
245 NULL_TREE is returned otherwise. */
247 tree
248 get_symbol_constant_value (tree sym)
250 tree val = ctor_for_folding (sym);
251 if (val != error_mark_node)
253 if (val)
255 val = canonicalize_constructor_val (unshare_expr (val), sym);
256 if (val && is_gimple_min_invariant (val))
257 return val;
258 else
259 return NULL_TREE;
261 /* Variables declared 'const' without an initializer
262 have zero as the initializer if they may not be
263 overridden at link or run time. */
264 if (!val
265 && is_gimple_reg_type (TREE_TYPE (sym)))
266 return build_zero_cst (TREE_TYPE (sym));
269 return NULL_TREE;
274 /* Subroutine of fold_stmt. We perform several simplifications of the
275 memory reference tree EXPR and make sure to re-gimplify them properly
276 after propagation of constant addresses. IS_LHS is true if the
277 reference is supposed to be an lvalue. */
279 static tree
280 maybe_fold_reference (tree expr, bool is_lhs)
282 tree result;
284 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
285 || TREE_CODE (expr) == REALPART_EXPR
286 || TREE_CODE (expr) == IMAGPART_EXPR)
287 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
288 return fold_unary_loc (EXPR_LOCATION (expr),
289 TREE_CODE (expr),
290 TREE_TYPE (expr),
291 TREE_OPERAND (expr, 0));
292 else if (TREE_CODE (expr) == BIT_FIELD_REF
293 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
294 return fold_ternary_loc (EXPR_LOCATION (expr),
295 TREE_CODE (expr),
296 TREE_TYPE (expr),
297 TREE_OPERAND (expr, 0),
298 TREE_OPERAND (expr, 1),
299 TREE_OPERAND (expr, 2));
301 if (!is_lhs
302 && (result = fold_const_aggregate_ref (expr))
303 && is_gimple_min_invariant (result))
304 return result;
306 return NULL_TREE;
310 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
311 replacement rhs for the statement or NULL_TREE if no simplification
312 could be made. It is assumed that the operands have been previously
313 folded. */
315 static tree
316 fold_gimple_assign (gimple_stmt_iterator *si)
318 gimple *stmt = gsi_stmt (*si);
319 enum tree_code subcode = gimple_assign_rhs_code (stmt);
320 location_t loc = gimple_location (stmt);
322 tree result = NULL_TREE;
324 switch (get_gimple_rhs_class (subcode))
326 case GIMPLE_SINGLE_RHS:
328 tree rhs = gimple_assign_rhs1 (stmt);
330 if (TREE_CLOBBER_P (rhs))
331 return NULL_TREE;
333 if (REFERENCE_CLASS_P (rhs))
334 return maybe_fold_reference (rhs, false);
336 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
338 tree val = OBJ_TYPE_REF_EXPR (rhs);
339 if (is_gimple_min_invariant (val))
340 return val;
341 else if (flag_devirtualize && virtual_method_call_p (rhs))
343 bool final;
344 vec <cgraph_node *>targets
345 = possible_polymorphic_call_targets (rhs, stmt, &final);
346 if (final && targets.length () <= 1 && dbg_cnt (devirt))
348 if (dump_enabled_p ())
350 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
351 "resolving virtual function address "
352 "reference to function %s\n",
353 targets.length () == 1
354 ? targets[0]->name ()
355 : "NULL");
357 if (targets.length () == 1)
359 val = fold_convert (TREE_TYPE (val),
360 build_fold_addr_expr_loc
361 (loc, targets[0]->decl));
362 STRIP_USELESS_TYPE_CONVERSION (val);
364 else
365 /* We can not use __builtin_unreachable here because it
366 can not have address taken. */
367 val = build_int_cst (TREE_TYPE (val), 0);
368 return val;
373 else if (TREE_CODE (rhs) == ADDR_EXPR)
375 tree ref = TREE_OPERAND (rhs, 0);
376 tree tem = maybe_fold_reference (ref, true);
377 if (tem
378 && TREE_CODE (tem) == MEM_REF
379 && integer_zerop (TREE_OPERAND (tem, 1)))
380 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
381 else if (tem)
382 result = fold_convert (TREE_TYPE (rhs),
383 build_fold_addr_expr_loc (loc, tem));
384 else if (TREE_CODE (ref) == MEM_REF
385 && integer_zerop (TREE_OPERAND (ref, 1)))
386 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
388 if (result)
390 /* Strip away useless type conversions. Both the
391 NON_LVALUE_EXPR that may have been added by fold, and
392 "useless" type conversions that might now be apparent
393 due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result);
396 if (result != rhs && valid_gimple_rhs_p (result))
397 return result;
401 else if (TREE_CODE (rhs) == CONSTRUCTOR
402 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
404 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
405 unsigned i;
406 tree val;
408 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
409 if (! CONSTANT_CLASS_P (val))
410 return NULL_TREE;
412 return build_vector_from_ctor (TREE_TYPE (rhs),
413 CONSTRUCTOR_ELTS (rhs));
416 else if (DECL_P (rhs))
417 return get_symbol_constant_value (rhs);
419 break;
421 case GIMPLE_UNARY_RHS:
422 break;
424 case GIMPLE_BINARY_RHS:
425 break;
427 case GIMPLE_TERNARY_RHS:
428 result = fold_ternary_loc (loc, subcode,
429 TREE_TYPE (gimple_assign_lhs (stmt)),
430 gimple_assign_rhs1 (stmt),
431 gimple_assign_rhs2 (stmt),
432 gimple_assign_rhs3 (stmt));
434 if (result)
436 STRIP_USELESS_TYPE_CONVERSION (result);
437 if (valid_gimple_rhs_p (result))
438 return result;
440 break;
442 case GIMPLE_INVALID_RHS:
443 gcc_unreachable ();
446 return NULL_TREE;
450 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
451 adjusting the replacement stmts location and virtual operands.
452 If the statement has a lhs the last stmt in the sequence is expected
453 to assign to that lhs. */
455 static void
456 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
458 gimple *stmt = gsi_stmt (*si_p);
460 if (gimple_has_location (stmt))
461 annotate_all_with_location (stmts, gimple_location (stmt));
463 /* First iterate over the replacement statements backward, assigning
464 virtual operands to their defining statements. */
465 gimple *laststore = NULL;
466 for (gimple_stmt_iterator i = gsi_last (stmts);
467 !gsi_end_p (i); gsi_prev (&i))
469 gimple *new_stmt = gsi_stmt (i);
470 if ((gimple_assign_single_p (new_stmt)
471 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
472 || (is_gimple_call (new_stmt)
473 && (gimple_call_flags (new_stmt)
474 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
476 tree vdef;
477 if (!laststore)
478 vdef = gimple_vdef (stmt);
479 else
480 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
481 gimple_set_vdef (new_stmt, vdef);
482 if (vdef && TREE_CODE (vdef) == SSA_NAME)
483 SSA_NAME_DEF_STMT (vdef) = new_stmt;
484 laststore = new_stmt;
488 /* Second iterate over the statements forward, assigning virtual
489 operands to their uses. */
490 tree reaching_vuse = gimple_vuse (stmt);
491 for (gimple_stmt_iterator i = gsi_start (stmts);
492 !gsi_end_p (i); gsi_next (&i))
494 gimple *new_stmt = gsi_stmt (i);
495 /* If the new statement possibly has a VUSE, update it with exact SSA
496 name we know will reach this one. */
497 if (gimple_has_mem_ops (new_stmt))
498 gimple_set_vuse (new_stmt, reaching_vuse);
499 gimple_set_modified (new_stmt, true);
500 if (gimple_vdef (new_stmt))
501 reaching_vuse = gimple_vdef (new_stmt);
504 /* If the new sequence does not do a store release the virtual
505 definition of the original statement. */
506 if (reaching_vuse
507 && reaching_vuse == gimple_vuse (stmt))
509 tree vdef = gimple_vdef (stmt);
510 if (vdef
511 && TREE_CODE (vdef) == SSA_NAME)
513 unlink_stmt_vdef (stmt);
514 release_ssa_name (vdef);
518 /* Finally replace the original statement with the sequence. */
519 gsi_replace_with_seq (si_p, stmts, false);
522 /* Convert EXPR into a GIMPLE value suitable for substitution on the
523 RHS of an assignment. Insert the necessary statements before
524 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
525 is replaced. If the call is expected to produces a result, then it
526 is replaced by an assignment of the new RHS to the result variable.
527 If the result is to be ignored, then the call is replaced by a
528 GIMPLE_NOP. A proper VDEF chain is retained by making the first
529 VUSE and the last VDEF of the whole sequence be the same as the replaced
530 statement and using new SSA names for stores in between. */
532 void
533 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 tree lhs;
536 gimple *stmt, *new_stmt;
537 gimple_stmt_iterator i;
538 gimple_seq stmts = NULL;
540 stmt = gsi_stmt (*si_p);
542 gcc_assert (is_gimple_call (stmt));
544 push_gimplify_context (gimple_in_ssa_p (cfun));
546 lhs = gimple_call_lhs (stmt);
547 if (lhs == NULL_TREE)
549 gimplify_and_add (expr, &stmts);
550 /* We can end up with folding a memcpy of an empty class assignment
551 which gets optimized away by C++ gimplification. */
552 if (gimple_seq_empty_p (stmts))
554 pop_gimplify_context (NULL);
555 if (gimple_in_ssa_p (cfun))
557 unlink_stmt_vdef (stmt);
558 release_defs (stmt);
560 gsi_replace (si_p, gimple_build_nop (), false);
561 return;
564 else
566 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
567 new_stmt = gimple_build_assign (lhs, tmp);
568 i = gsi_last (stmts);
569 gsi_insert_after_without_update (&i, new_stmt,
570 GSI_CONTINUE_LINKING);
573 pop_gimplify_context (NULL);
575 gsi_replace_with_seq_vops (si_p, stmts);
579 /* Replace the call at *GSI with the gimple value VAL. */
581 void
582 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
584 gimple *stmt = gsi_stmt (*gsi);
585 tree lhs = gimple_call_lhs (stmt);
586 gimple *repl;
587 if (lhs)
589 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
590 val = fold_convert (TREE_TYPE (lhs), val);
591 repl = gimple_build_assign (lhs, val);
593 else
594 repl = gimple_build_nop ();
595 tree vdef = gimple_vdef (stmt);
596 if (vdef && TREE_CODE (vdef) == SSA_NAME)
598 unlink_stmt_vdef (stmt);
599 release_ssa_name (vdef);
601 gsi_replace (gsi, repl, false);
604 /* Replace the call at *GSI with the new call REPL and fold that
605 again. */
607 static void
608 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
610 gimple *stmt = gsi_stmt (*gsi);
611 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
612 gimple_set_location (repl, gimple_location (stmt));
613 if (gimple_vdef (stmt)
614 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
616 gimple_set_vdef (repl, gimple_vdef (stmt));
617 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
619 if (gimple_vuse (stmt))
620 gimple_set_vuse (repl, gimple_vuse (stmt));
621 gsi_replace (gsi, repl, false);
622 fold_stmt (gsi);
625 /* Return true if VAR is a VAR_DECL or a component thereof. */
627 static bool
628 var_decl_component_p (tree var)
630 tree inner = var;
631 while (handled_component_p (inner))
632 inner = TREE_OPERAND (inner, 0);
633 return (DECL_P (inner)
634 || (TREE_CODE (inner) == MEM_REF
635 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
638 /* Return TRUE if the SIZE argument, representing the size of an
639 object, is in a range of values of which exactly zero is valid. */
641 static bool
642 size_must_be_zero_p (tree size)
644 if (integer_zerop (size))
645 return true;
647 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
648 return false;
650 tree type = TREE_TYPE (size);
651 int prec = TYPE_PRECISION (type);
653 /* Compute the value of SSIZE_MAX, the largest positive value that
654 can be stored in ssize_t, the signed counterpart of size_t. */
655 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
656 value_range valid_range (VR_RANGE,
657 build_int_cst (type, 0),
658 wide_int_to_tree (type, ssize_max));
659 value_range vr;
660 get_range_info (size, vr);
661 vr.intersect (&valid_range);
662 return vr.zero_p ();
665 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
666 diagnose (otherwise undefined) overlapping copies without preventing
667 folding. When folded, GCC guarantees that overlapping memcpy has
668 the same semantics as memmove. Call to the library memcpy need not
669 provide the same guarantee. Return false if no simplification can
670 be made. */
672 static bool
673 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
674 tree dest, tree src, int endp)
676 gimple *stmt = gsi_stmt (*gsi);
677 tree lhs = gimple_call_lhs (stmt);
678 tree len = gimple_call_arg (stmt, 2);
679 tree destvar, srcvar;
680 location_t loc = gimple_location (stmt);
682 bool nowarn = gimple_no_warning_p (stmt);
684 /* If the LEN parameter is a constant zero or in range where
685 the only valid value is zero, return DEST. */
686 if (size_must_be_zero_p (len))
688 gimple *repl;
689 if (gimple_call_lhs (stmt))
690 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
691 else
692 repl = gimple_build_nop ();
693 tree vdef = gimple_vdef (stmt);
694 if (vdef && TREE_CODE (vdef) == SSA_NAME)
696 unlink_stmt_vdef (stmt);
697 release_ssa_name (vdef);
699 gsi_replace (gsi, repl, false);
700 return true;
703 /* If SRC and DEST are the same (and not volatile), return
704 DEST{,+LEN,+LEN-1}. */
705 if (operand_equal_p (src, dest, 0))
707 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
708 It's safe and may even be emitted by GCC itself (see bug
709 32667). */
710 unlink_stmt_vdef (stmt);
711 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
712 release_ssa_name (gimple_vdef (stmt));
713 if (!lhs)
715 gsi_replace (gsi, gimple_build_nop (), false);
716 return true;
718 goto done;
720 else
722 tree srctype, desttype;
723 unsigned int src_align, dest_align;
724 tree off0;
725 const char *tmp_str;
726 unsigned HOST_WIDE_INT tmp_len;
728 /* Build accesses at offset zero with a ref-all character type. */
729 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
730 ptr_mode, true), 0);
732 /* If we can perform the copy efficiently with first doing all loads
733 and then all stores inline it that way. Currently efficiently
734 means that we can load all the memory into a single integer
735 register which is what MOVE_MAX gives us. */
736 src_align = get_pointer_alignment (src);
737 dest_align = get_pointer_alignment (dest);
738 if (tree_fits_uhwi_p (len)
739 && compare_tree_int (len, MOVE_MAX) <= 0
740 /* ??? Don't transform copies from strings with known length this
741 confuses the tree-ssa-strlen.c. This doesn't handle
742 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
743 reason. */
744 && !c_strlen (src, 2)
745 && !((tmp_str = c_getstr (src, &tmp_len)) != NULL
746 && memchr (tmp_str, 0, tmp_len) == NULL))
748 unsigned ilen = tree_to_uhwi (len);
749 if (pow2p_hwi (ilen))
751 /* Detect invalid bounds and overlapping copies and issue
752 either -Warray-bounds or -Wrestrict. */
753 if (!nowarn
754 && check_bounds_or_overlap (as_a <gcall *>(stmt),
755 dest, src, len, len))
756 gimple_set_no_warning (stmt, true);
758 scalar_int_mode mode;
759 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
760 if (type
761 && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
762 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
763 /* If the destination pointer is not aligned we must be able
764 to emit an unaligned store. */
765 && (dest_align >= GET_MODE_ALIGNMENT (mode)
766 || !targetm.slow_unaligned_access (mode, dest_align)
767 || (optab_handler (movmisalign_optab, mode)
768 != CODE_FOR_nothing)))
770 tree srctype = type;
771 tree desttype = type;
772 if (src_align < GET_MODE_ALIGNMENT (mode))
773 srctype = build_aligned_type (type, src_align);
774 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
775 tree tem = fold_const_aggregate_ref (srcmem);
776 if (tem)
777 srcmem = tem;
778 else if (src_align < GET_MODE_ALIGNMENT (mode)
779 && targetm.slow_unaligned_access (mode, src_align)
780 && (optab_handler (movmisalign_optab, mode)
781 == CODE_FOR_nothing))
782 srcmem = NULL_TREE;
783 if (srcmem)
785 gimple *new_stmt;
786 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
788 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
789 srcmem
790 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
791 new_stmt);
792 gimple_assign_set_lhs (new_stmt, srcmem);
793 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
794 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
796 if (dest_align < GET_MODE_ALIGNMENT (mode))
797 desttype = build_aligned_type (type, dest_align);
798 new_stmt
799 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
800 dest, off0),
801 srcmem);
802 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
803 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
804 if (gimple_vdef (new_stmt)
805 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
806 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
807 if (!lhs)
809 gsi_replace (gsi, new_stmt, false);
810 return true;
812 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
813 goto done;
819 if (endp == 3)
821 /* Both DEST and SRC must be pointer types.
822 ??? This is what old code did. Is the testing for pointer types
823 really mandatory?
825 If either SRC is readonly or length is 1, we can use memcpy. */
826 if (!dest_align || !src_align)
827 return false;
828 if (readonly_data_expr (src)
829 || (tree_fits_uhwi_p (len)
830 && (MIN (src_align, dest_align) / BITS_PER_UNIT
831 >= tree_to_uhwi (len))))
833 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
834 if (!fn)
835 return false;
836 gimple_call_set_fndecl (stmt, fn);
837 gimple_call_set_arg (stmt, 0, dest);
838 gimple_call_set_arg (stmt, 1, src);
839 fold_stmt (gsi);
840 return true;
843 /* If *src and *dest can't overlap, optimize into memcpy as well. */
844 if (TREE_CODE (src) == ADDR_EXPR
845 && TREE_CODE (dest) == ADDR_EXPR)
847 tree src_base, dest_base, fn;
848 poly_int64 src_offset = 0, dest_offset = 0;
849 poly_uint64 maxsize;
851 srcvar = TREE_OPERAND (src, 0);
852 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
853 if (src_base == NULL)
854 src_base = srcvar;
855 destvar = TREE_OPERAND (dest, 0);
856 dest_base = get_addr_base_and_unit_offset (destvar,
857 &dest_offset);
858 if (dest_base == NULL)
859 dest_base = destvar;
860 if (!poly_int_tree_p (len, &maxsize))
861 maxsize = -1;
862 if (SSA_VAR_P (src_base)
863 && SSA_VAR_P (dest_base))
865 if (operand_equal_p (src_base, dest_base, 0)
866 && ranges_maybe_overlap_p (src_offset, maxsize,
867 dest_offset, maxsize))
868 return false;
870 else if (TREE_CODE (src_base) == MEM_REF
871 && TREE_CODE (dest_base) == MEM_REF)
873 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
874 TREE_OPERAND (dest_base, 0), 0))
875 return false;
876 poly_offset_int full_src_offset
877 = mem_ref_offset (src_base) + src_offset;
878 poly_offset_int full_dest_offset
879 = mem_ref_offset (dest_base) + dest_offset;
880 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
881 full_dest_offset, maxsize))
882 return false;
884 else
885 return false;
887 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
888 if (!fn)
889 return false;
890 gimple_call_set_fndecl (stmt, fn);
891 gimple_call_set_arg (stmt, 0, dest);
892 gimple_call_set_arg (stmt, 1, src);
893 fold_stmt (gsi);
894 return true;
897 /* If the destination and source do not alias optimize into
898 memcpy as well. */
899 if ((is_gimple_min_invariant (dest)
900 || TREE_CODE (dest) == SSA_NAME)
901 && (is_gimple_min_invariant (src)
902 || TREE_CODE (src) == SSA_NAME))
904 ao_ref destr, srcr;
905 ao_ref_init_from_ptr_and_size (&destr, dest, len);
906 ao_ref_init_from_ptr_and_size (&srcr, src, len);
907 if (!refs_may_alias_p_1 (&destr, &srcr, false))
909 tree fn;
910 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
911 if (!fn)
912 return false;
913 gimple_call_set_fndecl (stmt, fn);
914 gimple_call_set_arg (stmt, 0, dest);
915 gimple_call_set_arg (stmt, 1, src);
916 fold_stmt (gsi);
917 return true;
921 return false;
924 if (!tree_fits_shwi_p (len))
925 return false;
926 if (!POINTER_TYPE_P (TREE_TYPE (src))
927 || !POINTER_TYPE_P (TREE_TYPE (dest)))
928 return false;
929 /* In the following try to find a type that is most natural to be
930 used for the memcpy source and destination and that allows
931 the most optimization when memcpy is turned into a plain assignment
932 using that type. In theory we could always use a char[len] type
933 but that only gains us that the destination and source possibly
934 no longer will have their address taken. */
935 srctype = TREE_TYPE (TREE_TYPE (src));
936 if (TREE_CODE (srctype) == ARRAY_TYPE
937 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
938 srctype = TREE_TYPE (srctype);
939 desttype = TREE_TYPE (TREE_TYPE (dest));
940 if (TREE_CODE (desttype) == ARRAY_TYPE
941 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
942 desttype = TREE_TYPE (desttype);
943 if (TREE_ADDRESSABLE (srctype)
944 || TREE_ADDRESSABLE (desttype))
945 return false;
947 /* Make sure we are not copying using a floating-point mode or
948 a type whose size possibly does not match its precision. */
949 if (FLOAT_MODE_P (TYPE_MODE (desttype))
950 || TREE_CODE (desttype) == BOOLEAN_TYPE
951 || TREE_CODE (desttype) == ENUMERAL_TYPE)
952 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
953 if (FLOAT_MODE_P (TYPE_MODE (srctype))
954 || TREE_CODE (srctype) == BOOLEAN_TYPE
955 || TREE_CODE (srctype) == ENUMERAL_TYPE)
956 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
957 if (!srctype)
958 srctype = desttype;
959 if (!desttype)
960 desttype = srctype;
961 if (!srctype)
962 return false;
964 src_align = get_pointer_alignment (src);
965 dest_align = get_pointer_alignment (dest);
966 if (dest_align < TYPE_ALIGN (desttype)
967 || src_align < TYPE_ALIGN (srctype))
968 return false;
970 destvar = NULL_TREE;
971 if (TREE_CODE (dest) == ADDR_EXPR
972 && var_decl_component_p (TREE_OPERAND (dest, 0))
973 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
974 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
976 srcvar = NULL_TREE;
977 if (TREE_CODE (src) == ADDR_EXPR
978 && var_decl_component_p (TREE_OPERAND (src, 0))
979 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
981 if (!destvar
982 || src_align >= TYPE_ALIGN (desttype))
983 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
984 src, off0);
985 else if (!STRICT_ALIGNMENT)
987 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
988 src_align);
989 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
993 if (srcvar == NULL_TREE && destvar == NULL_TREE)
994 return false;
996 if (srcvar == NULL_TREE)
998 if (src_align >= TYPE_ALIGN (desttype))
999 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1000 else
1002 if (STRICT_ALIGNMENT)
1003 return false;
1004 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1005 src_align);
1006 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1009 else if (destvar == NULL_TREE)
1011 if (dest_align >= TYPE_ALIGN (srctype))
1012 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1013 else
1015 if (STRICT_ALIGNMENT)
1016 return false;
1017 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1018 dest_align);
1019 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1023 /* Detect invalid bounds and overlapping copies and issue either
1024 -Warray-bounds or -Wrestrict. */
1025 if (!nowarn)
1026 check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);
1028 gimple *new_stmt;
1029 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1031 tree tem = fold_const_aggregate_ref (srcvar);
1032 if (tem)
1033 srcvar = tem;
1034 if (! is_gimple_min_invariant (srcvar))
1036 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1037 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1038 new_stmt);
1039 gimple_assign_set_lhs (new_stmt, srcvar);
1040 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1041 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1043 new_stmt = gimple_build_assign (destvar, srcvar);
1044 goto set_vop_and_replace;
1047 /* We get an aggregate copy. Use an unsigned char[] type to
1048 perform the copying to preserve padding and to avoid any issues
1049 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1050 desttype = build_array_type_nelts (unsigned_char_type_node,
1051 tree_to_uhwi (len));
1052 srctype = desttype;
1053 if (src_align > TYPE_ALIGN (srctype))
1054 srctype = build_aligned_type (srctype, src_align);
1055 if (dest_align > TYPE_ALIGN (desttype))
1056 desttype = build_aligned_type (desttype, dest_align);
1057 new_stmt
1058 = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
1059 fold_build2 (MEM_REF, srctype, src, off0));
1060 set_vop_and_replace:
1061 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1062 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1063 if (gimple_vdef (new_stmt)
1064 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1065 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1066 if (!lhs)
1068 gsi_replace (gsi, new_stmt, false);
1069 return true;
1071 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1074 done:
1075 gimple_seq stmts = NULL;
1076 if (endp == 0 || endp == 3)
1077 len = NULL_TREE;
1078 else if (endp == 2)
1079 len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
1080 ssize_int (1));
1081 if (endp == 2 || endp == 1)
1083 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1084 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1085 TREE_TYPE (dest), dest, len);
1088 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1089 gimple *repl = gimple_build_assign (lhs, dest);
1090 gsi_replace (gsi, repl, false);
1091 return true;
1094 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1095 to built-in memcmp (a, b, len). */
1097 static bool
1098 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1100 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1102 if (!fn)
1103 return false;
1105 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1107 gimple *stmt = gsi_stmt (*gsi);
1108 tree a = gimple_call_arg (stmt, 0);
1109 tree b = gimple_call_arg (stmt, 1);
1110 tree len = gimple_call_arg (stmt, 2);
1112 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1113 replace_call_with_call_and_fold (gsi, repl);
1115 return true;
1118 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1119 to built-in memmove (dest, src, len). */
1121 static bool
1122 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1124 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1126 if (!fn)
1127 return false;
1129 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1130 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1131 len) into memmove (dest, src, len). */
1133 gimple *stmt = gsi_stmt (*gsi);
1134 tree src = gimple_call_arg (stmt, 0);
1135 tree dest = gimple_call_arg (stmt, 1);
1136 tree len = gimple_call_arg (stmt, 2);
1138 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1139 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1140 replace_call_with_call_and_fold (gsi, repl);
1142 return true;
1145 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1146 to built-in memset (dest, 0, len). */
1148 static bool
1149 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1151 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1153 if (!fn)
1154 return false;
1156 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1158 gimple *stmt = gsi_stmt (*gsi);
1159 tree dest = gimple_call_arg (stmt, 0);
1160 tree len = gimple_call_arg (stmt, 1);
1162 gimple_seq seq = NULL;
1163 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1164 gimple_seq_add_stmt_without_update (&seq, repl);
1165 gsi_replace_with_seq_vops (gsi, seq);
1166 fold_stmt (gsi);
1168 return true;
1171 /* Fold function call to builtin memset or bzero at *GSI setting the
1172 memory of size LEN to VAL. Return whether a simplification was made. */
1174 static bool
1175 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1177 gimple *stmt = gsi_stmt (*gsi);
1178 tree etype;
1179 unsigned HOST_WIDE_INT length, cval;
1181 /* If the LEN parameter is zero, return DEST. */
1182 if (integer_zerop (len))
1184 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1185 return true;
1188 if (! tree_fits_uhwi_p (len))
1189 return false;
1191 if (TREE_CODE (c) != INTEGER_CST)
1192 return false;
1194 tree dest = gimple_call_arg (stmt, 0);
1195 tree var = dest;
1196 if (TREE_CODE (var) != ADDR_EXPR)
1197 return false;
1199 var = TREE_OPERAND (var, 0);
1200 if (TREE_THIS_VOLATILE (var))
1201 return false;
1203 etype = TREE_TYPE (var);
1204 if (TREE_CODE (etype) == ARRAY_TYPE)
1205 etype = TREE_TYPE (etype);
1207 if (!INTEGRAL_TYPE_P (etype)
1208 && !POINTER_TYPE_P (etype))
1209 return NULL_TREE;
1211 if (! var_decl_component_p (var))
1212 return NULL_TREE;
1214 length = tree_to_uhwi (len);
1215 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1216 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1217 return NULL_TREE;
1219 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1220 return NULL_TREE;
1222 if (integer_zerop (c))
1223 cval = 0;
1224 else
1226 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1227 return NULL_TREE;
1229 cval = TREE_INT_CST_LOW (c);
1230 cval &= 0xff;
1231 cval |= cval << 8;
1232 cval |= cval << 16;
1233 cval |= (cval << 31) << 1;
1236 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1237 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1238 gimple_set_vuse (store, gimple_vuse (stmt));
1239 tree vdef = gimple_vdef (stmt);
1240 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1242 gimple_set_vdef (store, gimple_vdef (stmt));
1243 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1245 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1246 if (gimple_call_lhs (stmt))
1248 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1249 gsi_replace (gsi, asgn, false);
1251 else
1253 gimple_stmt_iterator gsi2 = *gsi;
1254 gsi_prev (gsi);
1255 gsi_remove (&gsi2, true);
1258 return true;
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is non-zero and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  If FUZZY is 2, it will handle
   PHIs and COND_EXPRs optimistically: when we can determine string length
   minimum and maximum, it will use the minimum from the ones where it
   can be determined.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.
   Pass NONSTR through to children.
   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  int fuzzy, bool *flexp, unsigned eltsize, tree *nonstr)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  */
  tree *const minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  tree op = TREE_OPERAND (arg, 0);
	  if (integer_zerop (TREE_OPERAND (op, 1)))
	    {
	      /* &(*p)[0]: recurse on the SSA pointer P itself.  */
	      tree aop0 = TREE_OPERAND (op, 0);
	      if (TREE_CODE (aop0) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
		return get_range_strlen (TREE_OPERAND (aop0, 0), length,
					 visited, type, fuzzy, flexp,
					 eltsize, nonstr);
	    }
	  else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF && fuzzy)
	    {
	      /* Fail if an array is the last member of a struct object
		 since it could be treated as a (fake) flexible array
		 member.  */
	      tree idx = TREE_OPERAND (op, 1);

	      arg = TREE_OPERAND (op, 0);
	      tree optype = TREE_TYPE (arg);
	      /* Fail only when the constant index provably exceeds the
		 array bound; otherwise fall through to the fuzzy
		 handling below.  */
	      if (tree dom = TYPE_DOMAIN (optype))
		if (tree bound = TYPE_MAX_VALUE (dom))
		  if (TREE_CODE (bound) == INTEGER_CST
		      && TREE_CODE (idx) == INTEGER_CST
		      && tree_int_cst_lt (bound, idx))
		    return false;
	    }
	}

      if (type == 2)
	{
	  /* TYPE == 2: ARG itself is the value; require a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	{
	  c_strlen_data data;
	  memset (&data, 0, sizeof (c_strlen_data));
	  val = c_strlen (arg, 1, &data, eltsize);

	  /* If we potentially had a non-terminated string, then
	     bubble that information up to the caller.  */
	  if (!val && data.decl)
	    {
	      *nonstr = data.decl;
	      *minlen = data.len;
	      *maxlen = data.len;
	      /* An unterminated array only counts as a success when the
		 caller asked for a maximum length (TYPE != 0).  */
	      return type == 0 ? false : true;
	    }
	}

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp,
				     eltsize, nonstr);

	  if (TREE_CODE (arg) == ARRAY_REF)
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (arg, 0));

	      /* Determine the "innermost" array type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Avoid arrays of pointers.  */
	      tree eltype = TREE_TYPE (type);
	      if (TREE_CODE (type) != ARRAY_TYPE
		  || !INTEGRAL_TYPE_P (eltype))
		return false;

	      /* Use the array size minus one as the conservative bound
		 on the longest string it could hold.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;

	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);

	      if (TREE_CODE (TREE_OPERAND (arg, 0)) == COMPONENT_REF
		  && type == TREE_TYPE (TREE_OPERAND (arg, 0))
		  && array_at_struct_end_p (TREE_OPERAND (arg, 0)))
		*flexp = true;
	    }
	  else if (TREE_CODE (arg) == COMPONENT_REF
		   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		       == ARRAY_TYPE))
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL but that would only be considered valid if
		 the array were the last member of a struct.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);

	      tree type = TREE_TYPE (arg);

	      /* Strip down to the innermost array element type.  */
	      while (TREE_CODE (type) == ARRAY_TYPE
		     && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
		type = TREE_TYPE (type);

	      /* Fail when the array bound is unknown or zero.  */
	      val = TYPE_SIZE_UNIT (type);
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }

	  if (VAR_P (arg))
	    {
	      /* Fall back on the size of the declared array (or of the
		 array a pointer variable's type points to).  */
	      tree type = TREE_TYPE (arg);
	      if (POINTER_TYPE_P (type))
		type = TREE_TYPE (type);

	      if (TREE_CODE (type) == ARRAY_TYPE)
		{
		  val = TYPE_SIZE_UNIT (type);
		  if (!val
		      || TREE_CODE (val) != INTEGER_CST
		      || integer_zerop (val))
		    return false;
		  val = wide_int_to_tree (TREE_TYPE (val),
					  wi::sub (wi::to_wide (val), 1));
		  /* Set the minimum size to zero since the string in
		     the array could have zero length.  */
		  *minlen = ssize_int (0);
		}
	    }
	}

      if (!val)
	return false;

      /* Merge VAL into the running minimum.  */
      if (!*minlen
	  || (type > 0
	      && TREE_CODE (*minlen) == INTEGER_CST
	      && TREE_CODE (val) == INTEGER_CST
	      && tree_int_cst_lt (val, *minlen)))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      /* For maximum queries both bounds must be constants so
		 they can be compared; keep the larger one.  */
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  /* For exact-length queries (TYPE == 0) a differing length is
	     a failure.  */
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy, flexp,
				   eltsize, nonstr);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  /* Merge the ranges of both arms of the conditional.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], length, visited, type, fuzzy,
				   flexp, eltsize, nonstr))
	      {
		/* FUZZY == 2: be optimistic and record "unbounded"
		   rather than failing outright.  */
		if (fuzzy == 2)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  return true;
	}
      return false;

    case GIMPLE_PHI:
      /* All the arguments of the PHI node must have the same constant
	 length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	{
	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))
	    continue;

	  if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp,
				 eltsize, nonstr))
	    {
	      if (fuzzy == 2)
		*maxlen = build_all_ones_cst (size_type_node);
	      else
		return false;
	    }
	}
      return true;

    default:
      return false;
    }
}
1553 /* Determine the minimum and maximum value or string length that ARG
1554 refers to and store each in the first two elements of MINMAXLEN.
1555 For expressions that point to strings of unknown lengths that are
1556 character arrays, use the upper bound of the array as the maximum
1557 length. For example, given an expression like 'x ? array : "xyz"'
1558 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1559 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1560 stored in array.
1561 Return true if the range of the string lengths has been obtained
1562 from the upper bound of an array at the end of a struct. Such
1563 an array may hold a string that's longer than its upper bound
1564 due to it being used as a poor-man's flexible array member.
1566 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1567 and false if PHIs and COND_EXPRs are to be handled optimistically,
1568 if we can determine string length minimum and maximum; it will use
1569 the minimum from the ones where it can be determined.
1570 STRICT false should be only used for warning code.
1571 When non-null, clear *NONSTR if ARG refers to a constant array
1572 that is known not be nul-terminated. Otherwise set it to
1573 the declaration of the constant non-terminated array.
1575 ELTSIZE is 1 for normal single byte character strings, and 2 or
1576 4 for wide characer strings. ELTSIZE is by default 1. */
1578 bool
1579 get_range_strlen (tree arg, tree minmaxlen[2], unsigned eltsize,
1580 bool strict, tree *nonstr /* = NULL */)
1582 bitmap visited = NULL;
1584 minmaxlen[0] = NULL_TREE;
1585 minmaxlen[1] = NULL_TREE;
1587 tree nonstrbuf;
1588 if (!nonstr)
1589 nonstr = &nonstrbuf;
1590 *nonstr = NULL_TREE;
1592 bool flexarray = false;
1593 if (!get_range_strlen (arg, minmaxlen, &visited, 1, strict ? 1 : 2,
1594 &flexarray, eltsize, nonstr))
1596 minmaxlen[0] = NULL_TREE;
1597 minmaxlen[1] = NULL_TREE;
1600 if (visited)
1601 BITMAP_FREE (visited);
1603 return flexarray;
1606 /* Return the maximum string length for ARG, counting by TYPE
1607 (1, 2 or 4 for normal or wide chars). NONSTR indicates
1608 if the caller is prepared to handle unterminated strings.
1610 If an unterminated string is discovered and our caller handles
1611 unterminated strings, then bubble up the offending DECL and
1612 return the maximum size. Otherwise return NULL. */
1614 tree
1615 get_maxval_strlen (tree arg, int type, tree *nonstr /* = NULL */)
1617 bitmap visited = NULL;
1618 tree len[2] = { NULL_TREE, NULL_TREE };
1620 bool dummy;
1621 /* Set to non-null if ARG refers to an untermianted array. */
1622 tree mynonstr = NULL_TREE;
1623 if (!get_range_strlen (arg, len, &visited, type, 0, &dummy, 1, &mynonstr))
1624 len[1] = NULL_TREE;
1625 if (visited)
1626 BITMAP_FREE (visited);
1628 if (nonstr)
1630 /* For callers prepared to handle unterminated arrays set
1631 *NONSTR to point to the declaration of the array and return
1632 the maximum length/size. */
1633 *nonstr = mynonstr;
1634 return len[1];
1637 /* Fail if the constant array isn't nul-terminated. */
1638 return mynonstr ? NULL_TREE : len[1];
1642 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1643 If LEN is not NULL, it represents the length of the string to be
1644 copied. Return NULL_TREE if no simplification can be made. */
1646 static bool
1647 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1648 tree dest, tree src)
1650 gimple *stmt = gsi_stmt (*gsi);
1651 location_t loc = gimple_location (stmt);
1652 tree fn;
1654 /* If SRC and DEST are the same (and not volatile), return DEST. */
1655 if (operand_equal_p (src, dest, 0))
1657 /* Issue -Wrestrict unless the pointers are null (those do
1658 not point to objects and so do not indicate an overlap;
1659 such calls could be the result of sanitization and jump
1660 threading). */
1661 if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
1663 tree func = gimple_call_fndecl (stmt);
1665 warning_at (loc, OPT_Wrestrict,
1666 "%qD source argument is the same as destination",
1667 func);
1670 replace_call_with_value (gsi, dest);
1671 return true;
1674 if (optimize_function_for_size_p (cfun))
1675 return false;
1677 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1678 if (!fn)
1679 return false;
1681 /* Set to non-null if ARG refers to an unterminated array. */
1682 tree nonstr = NULL;
1683 tree len = get_maxval_strlen (src, 0, &nonstr);
1685 if (nonstr)
1687 /* Avoid folding calls with unterminated arrays. */
1688 if (!gimple_no_warning_p (stmt))
1689 warn_string_no_nul (loc, "strcpy", src, nonstr);
1690 gimple_set_no_warning (stmt, true);
1691 return false;
1694 if (!len)
1695 return false;
1697 len = fold_convert_loc (loc, size_type_node, len);
1698 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1699 len = force_gimple_operand_gsi (gsi, len, true,
1700 NULL_TREE, true, GSI_SAME_STMT);
1701 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1702 replace_call_with_call_and_fold (gsi, repl);
1703 return true;
/* Fold a call to the strncpy builtin at *GSI with arguments DEST,
   SRC, and LEN.  Return true if a simplification was made, false
   otherwise.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			stmt, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			stmt, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  LEN >= SSIZE here so the copy
     includes the terminating nul.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Fold a call to the strchr or strrchr builtin at *GSI.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  Return true if the call was
   simplified.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  /* Without a used result the call has no observable effect to fold.  */
  if (!gimple_call_lhs (stmt))
    return false;

  /* Both arguments constant: do the search at compile time using the
     host's strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  /* Character not found: result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      /* Found: result is STR plus the offset of the match.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* The strlen transformation below is only valid when searching for
     the terminating nul itself.  */
  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
1865 /* Fold function call to builtin strstr.
1866 If both arguments are constant, evaluate and fold the result,
1867 additionally fold strstr (x, "") into x and strstr (x, "c")
1868 into strchr (x, 'c'). */
1869 static bool
1870 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
1872 gimple *stmt = gsi_stmt (*gsi);
1873 tree haystack = gimple_call_arg (stmt, 0);
1874 tree needle = gimple_call_arg (stmt, 1);
1875 const char *p, *q;
1877 if (!gimple_call_lhs (stmt))
1878 return false;
1880 q = c_getstr (needle);
1881 if (q == NULL)
1882 return false;
1884 if ((p = c_getstr (haystack)))
1886 const char *r = strstr (p, q);
1888 if (r == NULL)
1890 replace_call_with_value (gsi, integer_zero_node);
1891 return true;
1894 tree len = build_int_cst (size_type_node, r - p);
1895 gimple_seq stmts = NULL;
1896 gimple *new_stmt
1897 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
1898 haystack, len);
1899 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
1900 gsi_replace_with_seq_vops (gsi, stmts);
1901 return true;
1904 /* For strstr (x, "") return x. */
1905 if (q[0] == '\0')
1907 replace_call_with_value (gsi, haystack);
1908 return true;
1911 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1912 if (q[1] == '\0')
1914 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
1915 if (strchr_fn)
1917 tree c = build_int_cst (integer_type_node, q[0]);
1918 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
1919 replace_call_with_call_and_fold (gsi, repl);
1920 return true;
1924 return false;
/* Simplify a call to the strcat builtin at *GSI.  DST and SRC are the
   arguments to the call.

   strcat (dst, "") folds to DST; otherwise, when the source length is
   known and the block is optimized for speed, the call is split into
   strlen (dst) followed by memcpy into dst + strlen (dst).

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* The strlen + memcpy split is larger code; only worth it when
     optimizing this block for speed.  */
  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  /* Copy strlen (SRC) + 1 bytes so the terminating nul comes along.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      /* strcat returns its first argument.  */
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
2019 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2020 are the arguments to the call. */
2022 static bool
2023 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2025 gimple *stmt = gsi_stmt (*gsi);
2026 tree dest = gimple_call_arg (stmt, 0);
2027 tree src = gimple_call_arg (stmt, 1);
2028 tree size = gimple_call_arg (stmt, 2);
2029 tree fn;
2030 const char *p;
2033 p = c_getstr (src);
2034 /* If the SRC parameter is "", return DEST. */
2035 if (p && *p == '\0')
2037 replace_call_with_value (gsi, dest);
2038 return true;
2041 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2042 return false;
2044 /* If __builtin_strcat_chk is used, assume strcat is available. */
2045 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2046 if (!fn)
2047 return false;
2049 gimple *repl = gimple_build_call (fn, 2, dest, src);
2050 replace_call_with_call_and_fold (gsi, repl);
2051 return true;
/* Simplify a call to the strncat builtin at *GSI: when the bound is
   known to cover the whole constant source string, replace the call
   with strcat, diagnosing suspicious bounds along the way.  Return
   true if a simplification was made.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Both the bound and the source must be known constants below.  */
  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
2143 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2144    LEN, and SIZE, taken from the statement at GSI.  Return true if the
     call was simplified.  */
2146 static bool
2147 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2149   gimple *stmt = gsi_stmt (*gsi);
2150   tree dest = gimple_call_arg (stmt, 0);
2151   tree src = gimple_call_arg (stmt, 1);
2152   tree len = gimple_call_arg (stmt, 2);
2153   tree size = gimple_call_arg (stmt, 3);
2154   tree fn;
2155   const char *p;
2157   p = c_getstr (src);
2158   /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
2159   if ((p && *p == '\0')
2160       || integer_zerop (len))
2162       replace_call_with_value (gsi, dest);
2163       return true;
2166   if (! tree_fits_uhwi_p (size))
2167     return false;
  /* A SIZE other than the all-ones "unknown object size" sentinel means
     the check is live; only strip the bound, not the size check.  */
2169   if (! integer_all_onesp (size))
2171       tree src_len = c_strlen (src, 1);
2172       if (src_len
2173           && tree_fits_uhwi_p (src_len)
2174           && tree_fits_uhwi_p (len)
2175           && ! tree_int_cst_lt (len, src_len))
2177           /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
2178           fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2179           if (!fn)
2180             return false;
2182           gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2183           replace_call_with_call_and_fold (gsi, repl);
2184           return true;
2186       return false;
2189   /* If __builtin_strncat_chk is used, assume strncat is available.  */
2190   fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2191   if (!fn)
2192     return false;
2194   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2195   replace_call_with_call_and_fold (gsi, repl);
2196   return true;
2199 /* Build and append gimple statements to STMTS that would load a first
2200    character of a memory location identified by STR.  LOC is location
2201    of the statement.  Return the SSA name or temporary holding the
     loaded byte.  */
2203 static tree
2204 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2206   tree var;
  /* Load through "const unsigned char *" so the callers get the raw
     byte value, matching the semantics of the str* builtins.  */
2208   tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2209   tree cst_uchar_ptr_node
2210     = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2211   tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2213   tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2214   gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2215   var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2217   gimple_assign_set_lhs (stmt, var);
2218   gimple_seq_add_stmt_without_update (stmts, stmt);
2220   return var;
2223 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2224    The builtin's function code is derived from the call's fndecl.
     Return true if the call was simplified.  */
2226 static bool
2227 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2229   gimple *stmt = gsi_stmt (*gsi);
2230   tree callee = gimple_call_fndecl (stmt);
2231   enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2233   tree type = integer_type_node;
2234   tree str1 = gimple_call_arg (stmt, 0);
2235   tree str2 = gimple_call_arg (stmt, 1);
2236   tree lhs = gimple_call_lhs (stmt);
  /* -1 means the bound (third argument) is absent or unknown.  */
2237   HOST_WIDE_INT length = -1;
2239   /* Handle strncmp and strncasecmp functions.  */
2240   if (gimple_call_num_args (stmt) == 3)
2242       tree len = gimple_call_arg (stmt, 2);
2243       if (tree_fits_uhwi_p (len))
2244         length = tree_to_uhwi (len);
2247   /* If the LEN parameter is zero, return zero.  */
2248   if (length == 0)
2250       replace_call_with_value (gsi, integer_zero_node);
2251       return true;
2254   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
2255   if (operand_equal_p (str1, str2, 0))
2257       replace_call_with_value (gsi, integer_zero_node);
2258       return true;
2261   const char *p1 = c_getstr (str1);
2262   const char *p2 = c_getstr (str2);
2264   /* For known strings, return an immediate value.  */
2265   if (p1 && p2)
2267       int r = 0;
2268       bool known_result = false;
2270       switch (fcode)
2272         case BUILT_IN_STRCMP:
2273         case BUILT_IN_STRCMP_EQ:
2275             r = strcmp (p1, p2);
2276             known_result = true;
2277             break;
2279         case BUILT_IN_STRNCMP:
2280         case BUILT_IN_STRNCMP_EQ:
2282             if (length == -1)
2283               break;
2284             r = strncmp (p1, p2, length);
2285             known_result = true;
2286             break;
2288         /* Only handleable situation is where the string are equal (result 0),
2289            which is already handled by operand_equal_p case.  */
2290         case BUILT_IN_STRCASECMP:
2291           break;
2292         case BUILT_IN_STRNCASECMP:
2294             if (length == -1)
2295               break;
            /* If a case-sensitive prefix compare is 0, the
               case-insensitive result is 0 as well; any other result
               is not conclusive for strncasecmp.  */
2296             r = strncmp (p1, p2, length);
2297             if (r == 0)
2298               known_result = true;
2299             break;
2301         default:
2302           gcc_unreachable ();
2305       if (known_result)
2307           replace_call_with_value (gsi, build_cmp_result (type, r));
2308           return true;
  /* True when at least one character is known to be compared.  */
2312   bool nonzero_length = length >= 1
2313     || fcode == BUILT_IN_STRCMP
2314     || fcode == BUILT_IN_STRCMP_EQ
2315     || fcode == BUILT_IN_STRCASECMP;
2317   location_t loc = gimple_location (stmt);
2319   /* If the second arg is "", return *(const unsigned char*)arg1.  */
2320   if (p2 && *p2 == '\0' && nonzero_length)
2322       gimple_seq stmts = NULL;
2323       tree var = gimple_load_first_char (loc, str1, &stmts);
2324       if (lhs)
2326           stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2327           gimple_seq_add_stmt_without_update (&stmts, stmt);
2330       gsi_replace_with_seq_vops (gsi, stmts);
2331       return true;
2334   /* If the first arg is "", return -*(const unsigned char*)arg2.  */
2335   if (p1 && *p1 == '\0' && nonzero_length)
2337       gimple_seq stmts = NULL;
2338       tree var = gimple_load_first_char (loc, str2, &stmts);
2340       if (lhs)
2342           tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2343           stmt = gimple_build_assign (c, NOP_EXPR, var);
2344           gimple_seq_add_stmt_without_update (&stmts, stmt);
2346           stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2347           gimple_seq_add_stmt_without_update (&stmts, stmt);
2350       gsi_replace_with_seq_vops (gsi, stmts);
2351       return true;
2354   /* If len parameter is one, return an expression corresponding to
2355      (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
2356   if (fcode == BUILT_IN_STRNCMP && length == 1)
2358       gimple_seq stmts = NULL;
2359       tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2360       tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2362       if (lhs)
2364           tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2365           gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2366           gimple_seq_add_stmt_without_update (&stmts, convert1);
2368           tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2369           gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2370           gimple_seq_add_stmt_without_update (&stmts, convert2);
2372           stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2373           gimple_seq_add_stmt_without_update (&stmts, stmt);
2376       gsi_replace_with_seq_vops (gsi, stmts);
2377       return true;
2380   /* If length is larger than the length of one constant string,
2381      replace strncmp with corresponding strcmp */
2382   if (fcode == BUILT_IN_STRNCMP
2383       && length > 0
2384       && ((p2 && (size_t) length > strlen (p2))
2385           || (p1 && (size_t) length > strlen (p1))))
2387       tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2388       if (!fn)
2389         return false;
2390       gimple *repl = gimple_build_call (fn, 2, str1, str2);
2391       replace_call_with_call_and_fold (gsi, repl);
2392       return true;
2395   return false;
2398 /* Fold a call to the memchr pointed by GSI iterator.  Return true if
     the call was simplified.  */
2400 static bool
2401 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2403   gimple *stmt = gsi_stmt (*gsi);
2404   tree lhs = gimple_call_lhs (stmt);
2405   tree arg1 = gimple_call_arg (stmt, 0);
2406   tree arg2 = gimple_call_arg (stmt, 1);
2407   tree len = gimple_call_arg (stmt, 2);
2409   /* If the LEN parameter is zero, return zero.  */
2410   if (integer_zerop (len))
2412       replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2413       return true;
2416   char c;
  /* Need a constant bound and a searched-for value that fits in a
     target character to evaluate the search at compile time.  */
2417   if (TREE_CODE (arg2) != INTEGER_CST
2418       || !tree_fits_uhwi_p (len)
2419       || !target_char_cst_p (arg2, &c))
2420     return false;
2422   unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2423   unsigned HOST_WIDE_INT string_length;
2424   const char *p1 = c_getstr (arg1, &string_length);
2426   if (p1)
2428       const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2429       if (r == NULL)
2431           /* The character was not found within the known contents.
2432              Only fold to null when the bound does not reach past the
             known length; otherwise the outcome depends on bytes we
             cannot see.  */
2431           if (length <= string_length)
2433               replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2434               return true;
2437       else
2439           unsigned HOST_WIDE_INT offset = r - p1;
2440           gimple_seq stmts = NULL;
2441           if (lhs != NULL_TREE)
2443               tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
2444               gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2445                                                    arg1, offset_cst);
2446               gimple_seq_add_stmt_without_update (&stmts, stmt);
2448           else
2449             gimple_seq_add_stmt_without_update (&stmts,
2450                                                 gimple_build_nop ());
2452           gsi_replace_with_seq_vops (gsi, stmts);
2453           return true;
2457   return false;
2460 /* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
2461    to the call (the string and the stream).  UNLOCKED is true if this
2462    is actually a call to fputs_unlocked.  The transformation only
2463    applies when the call's return value is unused.  Return true if the
2464    call was simplified, false if no simplification was possible.
2465    (Previous comment mentioned IGNORE/LEN parameters and a NULL_TREE
     return; those described an older tree-level interface.)  */
2467 static bool
2468 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2469                            tree arg0, tree arg1,
2470                            bool unlocked)
2472   gimple *stmt = gsi_stmt (*gsi);
2474   /* If we're using an unlocked function, assume the other unlocked
2475      functions exist explicitly.  */
2476   tree const fn_fputc = (unlocked
2477                          ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2478                          : builtin_decl_implicit (BUILT_IN_FPUTC));
2479   tree const fn_fwrite = (unlocked
2480                           ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2481                           : builtin_decl_implicit (BUILT_IN_FWRITE));
2483   /* If the return value is used, don't do the transformation.  */
2484   if (gimple_call_lhs (stmt))
2485     return false;
2487   /* Get the length of the string passed to fputs.  If the length
2488      can't be determined, punt.  */
2489   tree len = get_maxval_strlen (arg0, 0);
2490   if (!len
2491       || TREE_CODE (len) != INTEGER_CST)
2492     return false;
2494   switch (compare_tree_int (len, 1))
2496     case -1: /* length is 0, delete the call entirely .  */
2497       replace_call_with_value (gsi, integer_zero_node);
2498       return true;
2500     case 0: /* length is 1, call fputc.  */
2502         const char *p = c_getstr (arg0);
2503         if (p != NULL)
2505             if (!fn_fputc)
2506               return false;
2508             gimple *repl = gimple_build_call (fn_fputc, 2,
2509                                               build_int_cst
2510                                               (integer_type_node, p[0]), arg1);
2511             replace_call_with_call_and_fold (gsi, repl);
2512             return true;
2515       /* FALLTHROUGH */
2516     case 1: /* length is greater than 1, call fwrite.  */
2518         /* If optimizing for size keep fputs.  */
2519         if (optimize_function_for_size_p (cfun))
2520           return false;
2521         /* New argument list transforming fputs(string, stream) to
2522            fwrite(string, 1, len, stream).  */
2523         if (!fn_fwrite)
2524           return false;
2526         gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2527                                           size_one_node, len, arg1);
2528         replace_call_with_call_and_fold (gsi, repl);
2529         return true;
2531     default:
2532       gcc_unreachable ();
2534   return false;
2537 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2538    DEST, SRC, LEN, and SIZE are the arguments to the call.
2539    FCODE is the BUILT_IN_* code of the builtin.  Whether the result is
2540    used is derived from the statement's lhs.  Return true if the call
2541    was simplified.  (IGNORE/MAXLEN mentioned before are not parameters
     of this gimple-level variant.)  */
2543 static bool
2544 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
2545                                 tree dest, tree src, tree len, tree size,
2546                                 enum built_in_function fcode)
2548   gimple *stmt = gsi_stmt (*gsi);
2549   location_t loc = gimple_location (stmt);
2550   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2551   tree fn;
2553   /* If SRC and DEST are the same (and not volatile), return DEST
2554      (resp. DEST+LEN for __mempcpy_chk).  */
2555   if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
2557       if (fcode != BUILT_IN_MEMPCPY_CHK)
2559           replace_call_with_value (gsi, dest);
2560           return true;
2562       else
2564           gimple_seq stmts = NULL;
2565           len = gimple_convert_to_ptrofftype (&stmts, loc, len);
2566           tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
2567                                     TREE_TYPE (dest), dest, len);
2568           gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2569           replace_call_with_value (gsi, temp);
2570           return true;
2574   if (! tree_fits_uhwi_p (size))
2575     return false;
  /* MAXLEN is an upper bound on LEN usable when LEN itself is not a
     constant.  */
2577   tree maxlen = get_maxval_strlen (len, 2);
2578   if (! integer_all_onesp (size))
2580       if (! tree_fits_uhwi_p (len))
2582           /* If LEN is not constant, try MAXLEN too.
2583              For MAXLEN only allow optimizing into non-_ocs function
2584              if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2585           if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2587               if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
2589                   /* (void) __mempcpy_chk () can be optimized into
2590                      (void) __memcpy_chk ().  */
2591                   fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2592                   if (!fn)
2593                     return false;
2595                   gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2596                   replace_call_with_call_and_fold (gsi, repl);
2597                   return true;
2599               return false;
2602       else
2603         maxlen = len;
      /* Keep the checking call when the copy could exceed SIZE.  */
2605       if (tree_int_cst_lt (size, maxlen))
2606         return false;
2609   fn = NULL_TREE;
2610   /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2611      mem{cpy,pcpy,move,set} is available.  */
2612   switch (fcode)
2614     case BUILT_IN_MEMCPY_CHK:
2615       fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
2616       break;
2617     case BUILT_IN_MEMPCPY_CHK:
2618       fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
2619       break;
2620     case BUILT_IN_MEMMOVE_CHK:
2621       fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
2622       break;
2623     case BUILT_IN_MEMSET_CHK:
2624       fn = builtin_decl_explicit (BUILT_IN_MEMSET);
2625       break;
2626     default:
2627       break;
2630   if (!fn)
2631     return false;
2633   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2634   replace_call_with_call_and_fold (gsi, repl);
2635   return true;
2638 /* Fold a call to the __st[rp]cpy_chk builtin.
2639    DEST, SRC, and SIZE are the arguments to the call.
2640    FCODE is the BUILT_IN_* code of the builtin.  Whether the result is
2641    used is derived from the statement's lhs.  Return true if the call
2642    was simplified.  (IGNORE/MAXLEN mentioned before are not parameters
     of this gimple-level variant.)  */
2644 static bool
2645 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
2646                                 tree dest,
2647                                 tree src, tree size,
2648                                 enum built_in_function fcode)
2650   gimple *stmt = gsi_stmt (*gsi);
2651   location_t loc = gimple_location (stmt);
2652   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2653   tree len, fn;
2655   /* If SRC and DEST are the same (and not volatile), return DEST.  */
2656   if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
2658       /* Issue -Wrestrict unless the pointers are null (those do
2659          not point to objects and so do not indicate an overlap;
2660          such calls could be the result of sanitization and jump
2661          threading).  */
2662       if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
2664           tree func = gimple_call_fndecl (stmt);
2666           warning_at (loc, OPT_Wrestrict,
2667                       "%qD source argument is the same as destination",
2668                       func);
2671       replace_call_with_value (gsi, dest);
2672       return true;
2675   if (! tree_fits_uhwi_p (size))
2676     return false;
  /* Upper bound on the source length, usable when c_strlen fails.  */
2678   tree maxlen = get_maxval_strlen (src, 1);
2679   if (! integer_all_onesp (size))
2681       len = c_strlen (src, 1);
2682       if (! len || ! tree_fits_uhwi_p (len))
2684           /* If LEN is not constant, try MAXLEN too.
2685              For MAXLEN only allow optimizing into non-_ocs function
2686              if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2687           if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2689               if (fcode == BUILT_IN_STPCPY_CHK)
2691                   if (! ignore)
2692                     return false;
2694                   /* If return value of __stpcpy_chk is ignored,
2695                      optimize into __strcpy_chk.  */
2696                   fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
2697                   if (!fn)
2698                     return false;
2700                   gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2701                   replace_call_with_call_and_fold (gsi, repl);
2702                   return true;
2705               if (! len || TREE_SIDE_EFFECTS (len))
2706                 return false;
2708               /* If c_strlen returned something, but not a constant,
2709                  transform __strcpy_chk into __memcpy_chk.  */
2710               fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
2711               if (!fn)
2712                 return false;
2714               gimple_seq stmts = NULL;
2715               len = force_gimple_operand (len, &stmts, true, NULL_TREE);
2716               len = gimple_convert (&stmts, loc, size_type_node, len);
              /* Copy LEN + 1 bytes to include the terminating NUL.  */
2717               len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
2718                                   build_int_cst (size_type_node, 1));
2719               gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2720               gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2721               replace_call_with_call_and_fold (gsi, repl);
2722               return true;
2725       else
2726         maxlen = len;
      /* The copy (plus NUL) must fit: require MAXLEN < SIZE.  */
2728       if (! tree_int_cst_lt (maxlen, size))
2729         return false;
2732   /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
2733   fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
2734                               ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
2735   if (!fn)
2736     return false;
2738   gimple *repl = gimple_build_call (fn, 2, dest, src);
2739   replace_call_with_call_and_fold (gsi, repl);
2740   return true;
2743 /* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
2744    are the arguments to the call.  FCODE is the BUILT_IN_* code of the
2745    builtin.  Whether the result is used is derived from the statement's
2746    lhs.  Return true if the call was simplified.  */
2748 static bool
2749 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2750                                  tree dest, tree src,
2751                                  tree len, tree size,
2752                                  enum built_in_function fcode)
2754   gimple *stmt = gsi_stmt (*gsi);
2755   bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
2756   tree fn;
2758   if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
2760       /* If return value of __stpncpy_chk is ignored,
2761          optimize into __strncpy_chk.  */
2762       fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2763       if (fn)
2765           gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
2766           replace_call_with_call_and_fold (gsi, repl);
2767           return true;
2771   if (! tree_fits_uhwi_p (size))
2772     return false;
  /* Upper bound on LEN usable when LEN itself is not a constant.  */
2774   tree maxlen = get_maxval_strlen (len, 2);
2775   if (! integer_all_onesp (size))
2777       if (! tree_fits_uhwi_p (len))
2779           /* If LEN is not constant, try MAXLEN too.
2780              For MAXLEN only allow optimizing into non-_ocs function
2781              if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2782           if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2783             return false;
2785       else
2786         maxlen = len;
2788       if (tree_int_cst_lt (size, maxlen))
2789         return false;
2792   /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
2793   fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2794                               ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2795   if (!fn)
2796     return false;
2798   gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2799   replace_call_with_call_and_fold (gsi, repl);
2800   return true;
2803 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2804    Return true if the call was simplified, false if no simplification
     can be made.  */
2806 static bool
2807 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2809   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2810   location_t loc = gimple_location (stmt);
2811   tree dest = gimple_call_arg (stmt, 0);
2812   tree src = gimple_call_arg (stmt, 1);
2813   tree fn, lenp1;
2815   /* If the result is unused, replace stpcpy with strcpy.  */
2816   if (gimple_call_lhs (stmt) == NULL_TREE)
2818       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2819       if (!fn)
2820         return false;
2821       gimple_call_set_fndecl (stmt, fn);
2822       fold_stmt (gsi);
2823       return true;
2826   /* Set to non-null if ARG refers to an unterminated array.  */
2827   c_strlen_data data;
2828   memset (&data, 0, sizeof (c_strlen_data));
2829   tree len = c_strlen (src, 1, &data, 1);
2830   if (!len
2831       || TREE_CODE (len) != INTEGER_CST)
2833       data.decl = unterminated_array (src);
2834       if (!data.decl)
2835         return false;
2838   if (data.decl)
2840       /* Avoid folding calls with unterminated arrays.  */
2841       if (!gimple_no_warning_p (stmt))
2842         warn_string_no_nul (loc, "stpcpy", src, data.decl);
      /* Suppress repeat diagnostics for this statement.  */
2843       gimple_set_no_warning (stmt, true);
2844       return false;
2847   if (optimize_function_for_size_p (cfun)
2848       /* If length is zero it's small enough.  */
2849       && !integer_zerop (len))
2850     return false;
2852   /* If the source has a known length replace stpcpy with memcpy.  */
2853   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2854   if (!fn)
2855     return false;
2857   gimple_seq stmts = NULL;
2858   tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  /* Copy LEN + 1 bytes to include the terminating NUL.  */
2859   lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2860                         tem, build_int_cst (size_type_node, 1));
2861   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2862   gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  /* Carry the virtual operands over to the new memcpy call by hand
     since it is inserted rather than replacing the old statement.  */
2863   gimple_set_vuse (repl, gimple_vuse (stmt));
2864   gimple_set_vdef (repl, gimple_vdef (stmt));
2865   if (gimple_vdef (repl)
2866       && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2867     SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2868   gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2869   /* Replace the result with dest + len.  */
2870   stmts = NULL;
2871   tem = gimple_convert (&stmts, loc, sizetype, len);
2872   gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2873   gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2874                                       POINTER_PLUS_EXPR, dest, tem);
2875   gsi_replace (gsi, ret, false);
2876   /* Finally fold the memcpy call.  */
2877   gimple_stmt_iterator gsi2 = *gsi;
2878   gsi_prev (&gsi2);
2879   fold_stmt (&gsi2);
2880   return true;
2883 /* Fold the __{,v}snprintf_chk call at GSI into a plain {,v}snprintf
2884    call when the object-size check is provably satisfied.  FCODE is
2885    either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Return
2886    true if the call was simplified, false if a normal call should be
2887    emitted instead.  */
2889 static bool
2890 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
2891                                   enum built_in_function fcode)
2893   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2894   tree dest, size, len, fn, fmt, flag;
2895   const char *fmt_str;
2897   /* Verify the required arguments in the original call.  */
2898   if (gimple_call_num_args (stmt) < 5)
2899     return false;
2901   dest = gimple_call_arg (stmt, 0);
2902   len = gimple_call_arg (stmt, 1);
2903   flag = gimple_call_arg (stmt, 2);
2904   size = gimple_call_arg (stmt, 3);
2905   fmt = gimple_call_arg (stmt, 4);
2907   if (! tree_fits_uhwi_p (size))
2908     return false;
2910   if (! integer_all_onesp (size))
2912       tree maxlen = get_maxval_strlen (len, 2);
2913       if (! tree_fits_uhwi_p (len))
2915           /* If LEN is not constant, try MAXLEN too.
2916              For MAXLEN only allow optimizing into non-_ocs function
2917              if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
2918           if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2919             return false;
2921       else
2922         maxlen = len;
2924       if (tree_int_cst_lt (size, maxlen))
2925         return false;
2928   if (!init_target_chars ())
2929     return false;
2931   /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2932      or if format doesn't contain % chars or is "%s".  */
2933   if (! integer_zerop (flag))
2935       fmt_str = c_getstr (fmt);
2936       if (fmt_str == NULL)
2937         return false;
2938       if (strchr (fmt_str, target_percent) != NULL
2939           && strcmp (fmt_str, target_percent_s))
2940         return false;
2943   /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2944      available.  */
2945   fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2946                               ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2947   if (!fn)
2948     return false;
2950   /* Replace the called function and the first 5 argument by 3 retaining
2951      trailing varargs.  */
2952   gimple_call_set_fndecl (stmt, fn);
2953   gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2954   gimple_call_set_arg (stmt, 0, dest);
2955   gimple_call_set_arg (stmt, 1, len);
2956   gimple_call_set_arg (stmt, 2, fmt);
  /* Shift the varargs down over the dropped FLAG and SIZE slots.  */
2957   for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2958     gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2959   gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2960   fold_stmt (gsi);
2961   return true;
2964 /* Fold the __{,v}sprintf_chk call at GSI into a plain {,v}sprintf
2965    call when the object-size check is provably satisfied.  FCODE is
2966    either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Return true
2967    if the call was simplified, false otherwise.  */
2969 static bool
2970 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2971                                  enum built_in_function fcode)
2973   gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2974   tree dest, size, len, fn, fmt, flag;
2975   const char *fmt_str;
2976   unsigned nargs = gimple_call_num_args (stmt);
2978   /* Verify the required arguments in the original call.  */
2979   if (nargs < 4)
2980     return false;
2981   dest = gimple_call_arg (stmt, 0);
2982   flag = gimple_call_arg (stmt, 1);
2983   size = gimple_call_arg (stmt, 2);
2984   fmt = gimple_call_arg (stmt, 3);
2986   if (! tree_fits_uhwi_p (size))
2987     return false;
2989   len = NULL_TREE;
2991   if (!init_target_chars ())
2992     return false;
2994   /* Check whether the format is a literal string constant.  */
2995   fmt_str = c_getstr (fmt);
2996   if (fmt_str != NULL)
2998       /* If the format doesn't contain % args or %%, we know the size.  */
2999       if (strchr (fmt_str, target_percent) == 0)
3001           if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3002             len = build_int_cstu (size_type_node, strlen (fmt_str));
3004       /* If the format is "%s" and first ... argument is a string literal,
3005          we know the size too.  */
3006       else if (fcode == BUILT_IN_SPRINTF_CHK
3007                && strcmp (fmt_str, target_percent_s) == 0)
3009           tree arg;
3011           if (nargs == 5)
3013               arg = gimple_call_arg (stmt, 4);
3014               if (POINTER_TYPE_P (TREE_TYPE (arg)))
3016                   len = c_strlen (arg, 1);
3017                   if (! len || ! tree_fits_uhwi_p (len))
3018                     len = NULL_TREE;
  /* With a known destination size, only fold when the output provably
     fits (LEN < SIZE leaves room for the terminating NUL).  */
3024   if (! integer_all_onesp (size))
3026       if (! len || ! tree_int_cst_lt (len, size))
3027         return false;
3030   /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3031      or if format doesn't contain % chars or is "%s".  */
3032   if (! integer_zerop (flag))
3034       if (fmt_str == NULL)
3035         return false;
3036       if (strchr (fmt_str, target_percent) != NULL
3037           && strcmp (fmt_str, target_percent_s))
3038         return false;
3041   /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
3042   fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3043                               ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3044   if (!fn)
3045     return false;
3047   /* Replace the called function and the first 4 argument by 2 retaining
3048      trailing varargs.  */
3049   gimple_call_set_fndecl (stmt, fn);
3050   gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3051   gimple_call_set_arg (stmt, 0, dest);
3052   gimple_call_set_arg (stmt, 1, fmt);
  /* Shift the varargs down over the dropped FLAG and SIZE slots.  */
3053   for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3054     gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3055   gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3056   fold_stmt (gsi);
3057   return true;
3060 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3061    ORIG may be null if this is a 2-argument call.  We don't attempt to
3062    simplify calls with more than 3 arguments.
3064    Return true if simplification was possible, otherwise false.  */
3066 bool
3067 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3069   gimple *stmt = gsi_stmt (*gsi);
3070   tree dest = gimple_call_arg (stmt, 0);
3071   tree fmt = gimple_call_arg (stmt, 1);
3072   tree orig = NULL_TREE;
3073   const char *fmt_str = NULL;
3075   /* Verify the required arguments in the original call.  We deal with two
3076      types of sprintf() calls: 'sprintf (str, fmt)' and
3077      'sprintf (dest, "%s", orig)'.  */
3078   if (gimple_call_num_args (stmt) > 3)
3079     return false;
3081   if (gimple_call_num_args (stmt) == 3)
3082     orig = gimple_call_arg (stmt, 2);
3084   /* Check whether the format is a literal string constant.  */
3085   fmt_str = c_getstr (fmt);
3086   if (fmt_str == NULL)
3087     return false;
3089   if (!init_target_chars ())
3090     return false;
3092   /* If the format doesn't contain % args or %%, use strcpy.  */
3093   if (strchr (fmt_str, target_percent) == NULL)
3095       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3097       if (!fn)
3098         return false;
3100       /* Don't optimize sprintf (buf, "abc", ptr++).  */
3101       if (orig)
3102         return false;
3104       /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3105          'format' is known to contain no % formats.  */
3106       gimple_seq stmts = NULL;
3107       gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3109       /* Propagate the NO_WARNING bit to avoid issuing the same
3110          warning more than once.  */
3111       if (gimple_no_warning_p (stmt))
3112         gimple_set_no_warning (repl, true);
3114       gimple_seq_add_stmt_without_update (&stmts, repl);
      /* sprintf returns the number of characters written, which for a
         %-free format is simply the format's length.  */
3115       if (gimple_call_lhs (stmt))
3117           repl = gimple_build_assign (gimple_call_lhs (stmt),
3118                                       build_int_cst (integer_type_node,
3119                                                      strlen (fmt_str)));
3120           gimple_seq_add_stmt_without_update (&stmts, repl);
3121           gsi_replace_with_seq_vops (gsi, stmts);
3122           /* gsi now points at the assignment to the lhs, get a
3123              stmt iterator to the memcpy call.
3124              ??? We can't use gsi_for_stmt as that doesn't work when the
3125              CFG isn't built yet.  */
3126           gimple_stmt_iterator gsi2 = *gsi;
3127           gsi_prev (&gsi2);
3128           fold_stmt (&gsi2);
3130       else
3132           gsi_replace_with_seq_vops (gsi, stmts);
3133           fold_stmt (gsi);
3135       return true;
3138   /* If the format is "%s", use strcpy if the result isn't used.  */
3139   else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3141       tree fn;
3142       fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3144       if (!fn)
3145         return false;
3147       /* Don't crash on sprintf (str1, "%s").  */
3148       if (!orig)
3149         return false;
      /* The result (number of characters written) is only needed when
         the lhs is used; it is the length of ORIG.  */
3151       tree orig_len = NULL_TREE;
3152       if (gimple_call_lhs (stmt))
3154           orig_len = get_maxval_strlen (orig, 0);
3155           if (!orig_len)
3156             return false;
3159       /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
3160       gimple_seq stmts = NULL;
3161       gimple *repl = gimple_build_call (fn, 2, dest, orig);
3163       /* Propagate the NO_WARNING bit to avoid issuing the same
3164          warning more than once.  */
3165       if (gimple_no_warning_p (stmt))
3166         gimple_set_no_warning (repl, true);
3168       gimple_seq_add_stmt_without_update (&stmts, repl);
3169       if (gimple_call_lhs (stmt))
3171           if (!useless_type_conversion_p (integer_type_node,
3172                                           TREE_TYPE (orig_len)))
3173             orig_len = fold_convert (integer_type_node, orig_len);
3174           repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
3175           gimple_seq_add_stmt_without_update (&stmts, repl);
3176           gsi_replace_with_seq_vops (gsi, stmts);
3177           /* gsi now points at the assignment to the lhs, get a
3178              stmt iterator to the memcpy call.
3179              ??? We can't use gsi_for_stmt as that doesn't work when the
3180              CFG isn't built yet.  */
3181           gimple_stmt_iterator gsi2 = *gsi;
3182           gsi_prev (&gsi2);
3183           fold_stmt (&gsi2);
3185       else
3187           gsi_replace_with_seq_vops (gsi, stmts);
3188           fold_stmt (gsi);
3190       return true;
3192   return false;
3195 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3196 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3197 attempt to simplify calls with more than 4 arguments.
3199 Return true if simplification was possible, otherwise false. */
3201 bool
3202 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3204 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3205 tree dest = gimple_call_arg (stmt, 0);
3206 tree destsize = gimple_call_arg (stmt, 1);
3207 tree fmt = gimple_call_arg (stmt, 2);
3208 tree orig = NULL_TREE;
3209 const char *fmt_str = NULL;
/* Only the 3-argument and 4-argument forms are handled; anything with
   more trailing varargs punts.  */
3211 if (gimple_call_num_args (stmt) > 4)
3212 return false;
3214 if (gimple_call_num_args (stmt) == 4)
3215 orig = gimple_call_arg (stmt, 3);
/* DESTSIZE must be a compile-time constant so we can prove below that
   no truncation takes place.  */
3217 if (!tree_fits_uhwi_p (destsize))
3218 return false;
3219 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
3221 /* Check whether the format is a literal string constant. */
3222 fmt_str = c_getstr (fmt);
3223 if (fmt_str == NULL)
3224 return false;
3226 if (!init_target_chars ())
3227 return false;
3229 /* If the format doesn't contain % args or %%, use strcpy. */
3230 if (strchr (fmt_str, target_percent) == NULL)
3232 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3233 if (!fn)
3234 return false;
3236 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3237 if (orig)
3238 return false;
3240 /* We could expand this as
3241 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3242 or to
3243 memcpy (str, fmt_with_nul_at_cstm1, cst);
3244 but in the former case that might increase code size
3245 and in the latter case grow .rodata section too much.
3246 So punt for now. */
3247 size_t len = strlen (fmt_str);
3248 if (len >= destlen)
3249 return false;
/* At this point strlen (fmt_str) < destlen, so snprintf would not
   truncate and strcpy is an exact replacement.  */
3251 gimple_seq stmts = NULL;
3252 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3253 gimple_seq_add_stmt_without_update (&stmts, repl);
/* If the result is used, also materialize snprintf's return value,
   which is the length of the output string.  */
3254 if (gimple_call_lhs (stmt))
3256 repl = gimple_build_assign (gimple_call_lhs (stmt),
3257 build_int_cst (integer_type_node, len));
3258 gimple_seq_add_stmt_without_update (&stmts, repl);
3259 gsi_replace_with_seq_vops (gsi, stmts);
3260 /* gsi now points at the assignment to the lhs, get a
3261 stmt iterator to the memcpy call.
3262 ??? We can't use gsi_for_stmt as that doesn't work when the
3263 CFG isn't built yet. */
3264 gimple_stmt_iterator gsi2 = *gsi;
3265 gsi_prev (&gsi2);
3266 fold_stmt (&gsi2);
3268 else
3270 gsi_replace_with_seq_vops (gsi, stmts);
3271 fold_stmt (gsi);
3273 return true;
3276 /* If the format is "%s", use strcpy if the result isn't used. */
3277 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3279 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3280 if (!fn)
3281 return false;
3283 /* Don't crash on snprintf (str1, cst, "%s"). */
3284 if (!orig)
3285 return false;
/* The maximal length of the copied string must be a known constant
   strictly below DESTLEN, again to rule out truncation.  */
3287 tree orig_len = get_maxval_strlen (orig, 0);
3288 if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
3289 return false;
3291 /* We could expand this as
3292 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3293 or to
3294 memcpy (str1, str2_with_nul_at_cstm1, cst);
3295 but in the former case that might increase code size
3296 and in the latter case grow .rodata section too much.
3297 So punt for now. */
3298 if (compare_tree_int (orig_len, destlen) >= 0)
3299 return false;
3301 /* Convert snprintf (str1, cst, "%s", str2) into
3302 strcpy (str1, str2) if strlen (str2) < cst. */
3303 gimple_seq stmts = NULL;
3304 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3305 gimple_seq_add_stmt_without_update (&stmts, repl);
/* If the result is used, assign ORIG's length (converted to int if
   needed) to the original lhs.  */
3306 if (gimple_call_lhs (stmt))
3308 if (!useless_type_conversion_p (integer_type_node,
3309 TREE_TYPE (orig_len)))
3310 orig_len = fold_convert (integer_type_node, orig_len);
3311 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len)
3312 gimple_seq_add_stmt_without_update (&stmts, repl);
3313 gsi_replace_with_seq_vops (gsi, stmts);
3314 /* gsi now points at the assignment to the lhs, get a
3315 stmt iterator to the memcpy call.
3316 ??? We can't use gsi_for_stmt as that doesn't work when the
3317 CFG isn't built yet. */
3318 gimple_stmt_iterator gsi2 = *gsi;
3319 gsi_prev (&gsi2);
3320 fold_stmt (&gsi2);
3322 else
3324 gsi_replace_with_seq_vops (gsi, stmts);
3325 fold_stmt (gsi);
3327 return true;
3329 return false;
3332 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3333 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3334 more than 3 arguments, and ARG may be null in the 2-argument case.
3336 Return NULL_TREE if no simplification was possible, otherwise return the
3337 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3338 code of the function to be simplified. */
3340 static bool
3341 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3342 tree fp, tree fmt, tree arg,
3343 enum built_in_function fcode)
3345 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3346 tree fn_fputc, fn_fputs;
3347 const char *fmt_str = NULL;
3349 /* If the return value is used, don't do the transformation. */
3350 if (gimple_call_lhs (stmt) != NULL_TREE)
3351 return false;
3353 /* Check whether the format is a literal string constant. */
3354 fmt_str = c_getstr (fmt);
3355 if (fmt_str == NULL)
3356 return false;
3358 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3360 /* If we're using an unlocked function, assume the other
3361 unlocked functions exist explicitly. */
3362 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3363 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3365 else
3367 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3368 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3371 if (!init_target_chars ())
3372 return false;
3374 /* If the format doesn't contain % args or %%, use strcpy. */
3375 if (strchr (fmt_str, target_percent) == NULL)
3377 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3378 && arg)
3379 return false;
3381 /* If the format specifier was "", fprintf does nothing. */
3382 if (fmt_str[0] == '\0')
3384 replace_call_with_value (gsi, NULL_TREE);
3385 return true;
3388 /* When "string" doesn't contain %, replace all cases of
3389 fprintf (fp, string) with fputs (string, fp). The fputs
3390 builtin will take care of special cases like length == 1. */
3391 if (fn_fputs)
3393 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3394 replace_call_with_call_and_fold (gsi, repl);
3395 return true;
3399 /* The other optimizations can be done only on the non-va_list variants. */
3400 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3401 return false;
3403 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3404 else if (strcmp (fmt_str, target_percent_s) == 0)
3406 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3407 return false;
3408 if (fn_fputs)
3410 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3411 replace_call_with_call_and_fold (gsi, repl);
3412 return true;
3416 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3417 else if (strcmp (fmt_str, target_percent_c) == 0)
3419 if (!arg
3420 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3421 return false;
3422 if (fn_fputc)
3424 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3425 replace_call_with_call_and_fold (gsi, repl);
3426 return true;
3430 return false;
3433 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3434 FMT and ARG are the arguments to the call; we don't fold cases with
3435 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3437 Return true if simplification was possible, otherwise false.
3438 FCODE is the BUILT_IN_*
3439 code of the function to be simplified. */
3441 static bool
3442 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3443 tree arg, enum built_in_function fcode)
3445 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3446 tree fn_putchar, fn_puts, newarg;
3447 const char *fmt_str = NULL;
3449 /* If the return value is used, don't do the transformation. */
3450 if (gimple_call_lhs (stmt) != NULL_TREE)
3451 return false;
3453 /* Check whether the format is a literal string constant. */
3454 fmt_str = c_getstr (fmt);
3455 if (fmt_str == NULL)
3456 return false;
3458 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3460 /* If we're using an unlocked function, assume the other
3461 unlocked functions exist explicitly. */
3462 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3463 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3465 else
3467 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3468 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3471 if (!init_target_chars ())
3472 return false;
/* Handle both printf ("literal") and printf ("%s", "literal"); in
   either case STR below ends up being the constant string that is
   actually printed.  */
3474 if (strcmp (fmt_str, target_percent_s) == 0
3475 || strchr (fmt_str, target_percent) == NULL)
3477 const char *str;
3479 if (strcmp (fmt_str, target_percent_s) == 0)
3481 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3482 return false;
3484 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3485 return false;
3487 str = c_getstr (arg);
3488 if (str == NULL)
3489 return false;
3491 else
3493 /* The format specifier doesn't contain any '%' characters. */
3494 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3495 && arg)
3496 return false;
3497 str = fmt_str;
3500 /* If the string was "", printf does nothing. */
3501 if (str[0] == '\0')
3503 replace_call_with_value (gsi, NULL_TREE);
3504 return true;
3507 /* If the string has length of 1, call putchar. */
3508 if (str[1] == '\0')
3510 /* Given printf("c"), (where c is any one character,)
3511 convert "c"[0] to an int and pass that to the replacement
3512 function. */
3513 newarg = build_int_cst (integer_type_node, str[0]);
3514 if (fn_putchar)
3516 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3517 replace_call_with_call_and_fold (gsi, repl);
3518 return true;
3521 else
3523 /* If the string was "string\n", call puts("string").
   NOTE(review): the int round-trip checks below presumably guard the
   int-typed length parameter of build_string_literal -- confirm
   against its declaration.  */
3524 size_t len = strlen (str);
3525 if ((unsigned char)str[len - 1] == target_newline
3526 && (size_t) (int) len == len
3527 && (int) len > 0)
3529 char *newstr;
3531 /* Create a NUL-terminated string that's one char shorter
3532 than the original, stripping off the trailing '\n'. */
3533 newstr = xstrdup (str);
3534 newstr[len - 1] = '\0';
3535 newarg = build_string_literal (len, newstr);
3536 free (newstr);
3537 if (fn_puts)
3539 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3540 replace_call_with_call_and_fold (gsi, repl);
3541 return true;
3544 else
3545 /* We'd like to arrange to call fputs(string,stdout) here,
3546 but we need stdout and don't have a way to get it yet. */
3547 return false;
3551 /* The other optimizations can be done only on the non-va_list variants. */
3552 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3553 return false;
3555 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3556 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3558 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3559 return false;
3560 if (fn_puts)
3562 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3563 replace_call_with_call_and_fold (gsi, repl);
3564 return true;
3568 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3569 else if (strcmp (fmt_str, target_percent_c) == 0)
3571 if (!arg || ! useless_type_conversion_p (integer_type_node,
3572 TREE_TYPE (arg)))
3573 return false;
3574 if (fn_putchar)
3576 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3577 replace_call_with_call_and_fold (gsi, repl);
3578 return true;
3582 return false;
3587 /* Fold a call to __builtin_strlen when the length of the argument,
   or at least a range of possible lengths, can be determined.
   Returns true only when the call itself was replaced; when only a
   value range could be recorded on the lhs, returns false. */
3589 static bool
3590 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3592 gimple *stmt = gsi_stmt (*gsi);
3593 tree arg = gimple_call_arg (stmt, 0);
3595 wide_int minlen;
3596 wide_int maxlen;
3598 /* Set to non-null if ARG refers to an unterminated array. */
3599 tree nonstr;
3600 tree lenrange[2];
/* NOTE(review): a false return from get_range_strlen is treated here as
   having populated LENRANGE with usable constant bounds -- confirm
   against that function's return-value convention.  */
3601 if (!get_range_strlen (arg, lenrange, 1, true, &nonstr)
3602 && !nonstr
3603 && lenrange[0] && TREE_CODE (lenrange[0]) == INTEGER_CST
3604 && lenrange[1] && TREE_CODE (lenrange[1]) == INTEGER_CST)
3606 /* The range of lengths refers to either a single constant
3607 string or to the longest and shortest constant string
3608 referenced by the argument of the strlen() call, or to
3609 the strings that can possibly be stored in the arrays
3610 the argument refers to. */
3611 minlen = wi::to_wide (lenrange[0])
3612 maxlen = wi::to_wide (lenrange[1]);
3614 else
/* No usable bounds: fall back to the conservative range
   [0, max_object_size () - 2] in sizetype precision.  */
3616 unsigned prec = TYPE_PRECISION (sizetype);
3618 minlen = wi::shwi (0, prec);
3619 maxlen = wi::to_wide (max_object_size (), prec) - 2;
/* An exact length (min == max) lets us replace the whole call with
   the constant, gimplified into place before the statement.  */
3622 if (minlen == maxlen)
3624 lenrange[0] = force_gimple_operand_gsi (gsi, lenrange[0], true, NULL,
3625 true, GSI_SAME_STMT);
3626 replace_call_with_value (gsi, lenrange[0]);
3627 return true;
/* Otherwise at least annotate an integral SSA lhs with the computed
   value range for later passes.  */
3630 if (tree lhs = gimple_call_lhs (stmt))
3631 if (TREE_CODE (lhs) == SSA_NAME
3632 && INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
3633 set_range_info (lhs, VR_RANGE, minlen, maxlen);
3635 return false;
3638 /* Fold a call to __builtin_acc_on_device. */
3640 static bool
3641 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
3643 /* Defer folding until we know which compiler we're in. */
3644 if (symtab->state != EXPANSION)
3645 return false;
3647 unsigned val_host = GOMP_DEVICE_HOST;
3648 unsigned val_dev = GOMP_DEVICE_NONE;
3650 #ifdef ACCEL_COMPILER
3651 val_host = GOMP_DEVICE_NOT_HOST;
3652 val_dev = ACCEL_COMPILER_acc_device;
3653 #endif
3655 location_t loc = gimple_location (gsi_stmt (*gsi));
3657 tree host_eq = make_ssa_name (boolean_type_node);
3658 gimple *host_ass = gimple_build_assign
3659 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
3660 gimple_set_location (host_ass, loc);
3661 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
3663 tree dev_eq = make_ssa_name (boolean_type_node);
3664 gimple *dev_ass = gimple_build_assign
3665 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
3666 gimple_set_location (dev_ass, loc);
3667 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
3669 tree result = make_ssa_name (boolean_type_node);
3670 gimple *result_ass = gimple_build_assign
3671 (result, BIT_IOR_EXPR, host_eq, dev_eq);
3672 gimple_set_location (result_ass, loc);
3673 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
3675 replace_call_with_value (gsi, result);
3677 return true;
3680 /* Fold realloc (0, n) -> malloc (n). */
3682 static bool
3683 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
3685 gimple *stmt = gsi_stmt (*gsi);
3686 tree arg = gimple_call_arg (stmt, 0);
3687 tree size = gimple_call_arg (stmt, 1);
3689 if (operand_equal_p (arg, null_pointer_node, 0))
3691 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
3692 if (fn_malloc)
3694 gcall *repl = gimple_build_call (fn_malloc, 1, size);
3695 replace_call_with_call_and_fold (gsi, repl);
3696 return true;
3699 return false;
3702 /* Fold the non-target builtin at *GSI and return whether any simplification
3703 was made. */
3705 static bool
3706 gimple_fold_builtin (gimple_stmt_iterator *gsi)
3708 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
3709 tree callee = gimple_call_fndecl (stmt);
3711 /* Give up for always_inline inline builtins until they are
3712 inlined. */
3713 if (avoid_folding_inline_builtin (callee))
3714 return false;
/* N is only consulted by the printf/fprintf family cases below, whose
   builtins accept a variable number of arguments.  */
3716 unsigned n = gimple_call_num_args (stmt);
3717 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
/* Dispatch on the builtin's function code; each helper returns true
   iff it replaced or simplified the call.  */
3718 switch (fcode)
3720 case BUILT_IN_BCMP:
3721 return gimple_fold_builtin_bcmp (gsi);
3722 case BUILT_IN_BCOPY:
3723 return gimple_fold_builtin_bcopy (gsi);
3724 case BUILT_IN_BZERO:
3725 return gimple_fold_builtin_bzero (gsi);
3727 case BUILT_IN_MEMSET:
3728 return gimple_fold_builtin_memset (gsi,
3729 gimple_call_arg (stmt, 1),
3730 gimple_call_arg (stmt, 2));
3731 case BUILT_IN_MEMCPY:
3732 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3733 gimple_call_arg (stmt, 1), 0);
3734 case BUILT_IN_MEMPCPY:
3735 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3736 gimple_call_arg (stmt, 1), 1);
3737 case BUILT_IN_MEMMOVE:
3738 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
3739 gimple_call_arg (stmt, 1), 3);
3740 case BUILT_IN_SPRINTF_CHK:
3741 case BUILT_IN_VSPRINTF_CHK:
3742 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
3743 case BUILT_IN_STRCAT_CHK:
3744 return gimple_fold_builtin_strcat_chk (gsi);
3745 case BUILT_IN_STRNCAT_CHK:
3746 return gimple_fold_builtin_strncat_chk (gsi);
3747 case BUILT_IN_STRLEN:
3748 return gimple_fold_builtin_strlen (gsi);
3749 case BUILT_IN_STRCPY:
3750 return gimple_fold_builtin_strcpy (gsi,
3751 gimple_call_arg (stmt, 0),
3752 gimple_call_arg (stmt, 1));
3753 case BUILT_IN_STRNCPY:
3754 return gimple_fold_builtin_strncpy (gsi,
3755 gimple_call_arg (stmt, 0),
3756 gimple_call_arg (stmt, 1),
3757 gimple_call_arg (stmt, 2));
3758 case BUILT_IN_STRCAT:
3759 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
3760 gimple_call_arg (stmt, 1));
3761 case BUILT_IN_STRNCAT:
3762 return gimple_fold_builtin_strncat (gsi);
3763 case BUILT_IN_INDEX:
3764 case BUILT_IN_STRCHR:
3765 return gimple_fold_builtin_strchr (gsi, false);
3766 case BUILT_IN_RINDEX:
3767 case BUILT_IN_STRRCHR:
3768 return gimple_fold_builtin_strchr (gsi, true);
3769 case BUILT_IN_STRSTR:
3770 return gimple_fold_builtin_strstr (gsi);
3771 case BUILT_IN_STRCMP:
3772 case BUILT_IN_STRCMP_EQ:
3773 case BUILT_IN_STRCASECMP:
3774 case BUILT_IN_STRNCMP:
3775 case BUILT_IN_STRNCMP_EQ:
3776 case BUILT_IN_STRNCASECMP:
3777 return gimple_fold_builtin_string_compare (gsi);
3778 case BUILT_IN_MEMCHR:
3779 return gimple_fold_builtin_memchr (gsi);
3780 case BUILT_IN_FPUTS:
3781 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3782 gimple_call_arg (stmt, 1), false);
3783 case BUILT_IN_FPUTS_UNLOCKED:
3784 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
3785 gimple_call_arg (stmt, 1), true);
3786 case BUILT_IN_MEMCPY_CHK:
3787 case BUILT_IN_MEMPCPY_CHK:
3788 case BUILT_IN_MEMMOVE_CHK:
3789 case BUILT_IN_MEMSET_CHK:
3790 return gimple_fold_builtin_memory_chk (gsi,
3791 gimple_call_arg (stmt, 0),
3792 gimple_call_arg (stmt, 1),
3793 gimple_call_arg (stmt, 2),
3794 gimple_call_arg (stmt, 3),
3795 fcode);
3796 case BUILT_IN_STPCPY:
3797 return gimple_fold_builtin_stpcpy (gsi);
3798 case BUILT_IN_STRCPY_CHK:
3799 case BUILT_IN_STPCPY_CHK:
3800 return gimple_fold_builtin_stxcpy_chk (gsi,
3801 gimple_call_arg (stmt, 0),
3802 gimple_call_arg (stmt, 1),
3803 gimple_call_arg (stmt, 2),
3804 fcode);
3805 case BUILT_IN_STRNCPY_CHK:
3806 case BUILT_IN_STPNCPY_CHK:
3807 return gimple_fold_builtin_stxncpy_chk (gsi,
3808 gimple_call_arg (stmt, 0),
3809 gimple_call_arg (stmt, 1),
3810 gimple_call_arg (stmt, 2),
3811 gimple_call_arg (stmt, 3),
3812 fcode);
3813 case BUILT_IN_SNPRINTF_CHK:
3814 case BUILT_IN_VSNPRINTF_CHK:
3815 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
3817 case BUILT_IN_FPRINTF:
3818 case BUILT_IN_FPRINTF_UNLOCKED:
3819 case BUILT_IN_VFPRINTF:
3820 if (n == 2 || n == 3)
3821 return gimple_fold_builtin_fprintf (gsi,
3822 gimple_call_arg (stmt, 0),
3823 gimple_call_arg (stmt, 1),
3824 n == 3
3825 ? gimple_call_arg (stmt, 2)
3826 : NULL_TREE,
3827 fcode);
3828 break;
3829 case BUILT_IN_FPRINTF_CHK:
3830 case BUILT_IN_VFPRINTF_CHK:
3831 if (n == 3 || n == 4)
3832 return gimple_fold_builtin_fprintf (gsi,
3833 gimple_call_arg (stmt, 0),
3834 gimple_call_arg (stmt, 2),
3835 n == 4
3836 ? gimple_call_arg (stmt, 3)
3837 : NULL_TREE,
3838 fcode);
3839 break;
3840 case BUILT_IN_PRINTF:
3841 case BUILT_IN_PRINTF_UNLOCKED:
3842 case BUILT_IN_VPRINTF:
3843 if (n == 1 || n == 2)
3844 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
3845 n == 2
3846 ? gimple_call_arg (stmt, 1)
3847 : NULL_TREE, fcode);
3848 break;
3849 case BUILT_IN_PRINTF_CHK:
3850 case BUILT_IN_VPRINTF_CHK:
3851 if (n == 2 || n == 3)
3852 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
3853 n == 3
3854 ? gimple_call_arg (stmt, 2)
3855 : NULL_TREE, fcode);
3856 break;
3857 case BUILT_IN_ACC_ON_DEVICE:
3858 return gimple_fold_builtin_acc_on_device (gsi,
3859 gimple_call_arg (stmt, 0));
3860 case BUILT_IN_REALLOC:
3861 return gimple_fold_builtin_realloc (gsi);
/* Everything else falls through to the generic folder below.  */
3863 default:;
3866 /* Try the generic builtin folder. */
3867 bool ignore = (gimple_call_lhs (stmt) == NULL);
3868 tree result = fold_call_stmt (stmt, ignore);
3869 if (result)
/* If the value is ignored, strip conversions; otherwise coerce the
   folded tree to the call's declared return type before substituting
   it for the call.  */
3871 if (ignore)
3872 STRIP_NOPS (result);
3873 else
3874 result = fold_convert (gimple_call_return_type (stmt), result);
3875 if (!update_call_from_tree (gsi, result))
3876 gimplify_and_update_call_from_tree (gsi, result);
3877 return true;
3880 return false;
3883 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3884 function calls to constants, where possible. */
3886 static tree
3887 fold_internal_goacc_dim (const gimple *call)
3889 int axis = oacc_get_ifn_dim_arg (call);
3890 int size = oacc_get_fn_dim_size (current_function_decl, axis);
3891 tree result = NULL_TREE;
3892 tree type = TREE_TYPE (gimple_call_lhs (call));
3894 switch (gimple_call_internal_fn (call))
3896 case IFN_GOACC_DIM_POS:
3897 /* If the size is 1, we know the answer. */
3898 if (size == 1)
3899 result = build_int_cst (type, 0);
3900 break;
3901 case IFN_GOACC_DIM_SIZE:
3902 /* If the size is not dynamic, we know the answer. */
3903 if (size)
3904 result = build_int_cst (type, size);
3905 break;
3906 default:
3907 break;
3910 return result;
3913 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3914 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3915 &var where var is only addressable because of such calls. */
3917 bool
3918 optimize_atomic_compare_exchange_p (gimple *stmt)
/* Shape checks: exactly 6 arguments, optimization and atomic inlining
   enabled, no thread/address sanitizing, a normal builtin call that
   carries virtual operands.  */
3920 if (gimple_call_num_args (stmt) != 6
3921 || !flag_inline_atomics
3922 || !optimize
3923 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
3924 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3925 || !gimple_vdef (stmt)
3926 || !gimple_vuse (stmt))
3927 return false;
/* Only the fixed-size __atomic_compare_exchange_{1,2,4,8,16} builtins
   qualify.  */
3929 tree fndecl = gimple_call_fndecl (stmt);
3930 switch (DECL_FUNCTION_CODE (fndecl))
3932 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
3933 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
3934 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
3935 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
3936 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
3937 break;
3938 default:
3939 return false;
/* The 'expected' argument must literally be the address of a local
   SSA-able variable of this function.  */
3942 tree expected = gimple_call_arg (stmt, 1);
3943 if (TREE_CODE (expected) != ADDR_EXPR
3944 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
3945 return false;
3947 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
3948 if (!is_gimple_reg_type (etype)
3949 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
3950 || TREE_THIS_VOLATILE (etype)
3951 || VECTOR_TYPE_P (etype)
3952 || TREE_CODE (etype) == COMPLEX_TYPE
3953 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
3954 might not preserve all the bits. See PR71716. */
3955 || SCALAR_FLOAT_TYPE_P (etype)
3956 || maybe_ne (TYPE_PRECISION (etype),
3957 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3958 return false;
/* The 'weak' argument must be a literal 0 or 1.  */
3960 tree weak = gimple_call_arg (stmt, 3);
3961 if (!integer_zerop (weak) && !integer_onep (weak))
3962 return false;
/* The target must provide a compare-and-swap pattern (atomic or sync)
   for the integral mode of the builtin's third parameter type.  */
3964 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3965 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3966 machine_mode mode = TYPE_MODE (itype);
3968 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
3969 == CODE_FOR_nothing
3970 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
3971 return false;
/* The expected variable's size must exactly match the access mode.  */
3973 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
3974 return false;
3976 return true;
3979 /* Fold
3980 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
3981 into
3982 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
3983 i = IMAGPART_EXPR <t>;
3984 r = (_Bool) i;
3985 e = REALPART_EXPR <t>; */
3987 void
3988 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
3990 gimple *stmt = gsi_stmt (*gsi);
3991 tree fndecl = gimple_call_fndecl (stmt);
3992 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3993 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
3994 tree ctype = build_complex_type (itype);
3995 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3996 bool throws = false;
3997 edge e = NULL;
/* Load the current value of EXPECTED so it can be passed by value to
   the internal function; GSIRET remembers this load so *GSI can be
   restored to it on exit.  */
3998 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
3999 expected);
4000 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4001 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
4002 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
4004 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
4005 build1 (VIEW_CONVERT_EXPR, itype,
4006 gimple_assign_lhs (g)));
4007 gsi_insert_before (gsi, g, GSI_SAME_STMT);
/* FLAG encodes weakness in bit 8 and the access size in bytes in the
   low bits (the "w * 256 + N" from the comment above).  */
4009 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
4010 + int_size_in_bytes (itype);
4011 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
4012 gimple_call_arg (stmt, 0),
4013 gimple_assign_lhs (g),
4014 gimple_call_arg (stmt, 2),
4015 build_int_cst (integer_type_node, flag),
4016 gimple_call_arg (stmt, 4),
4017 gimple_call_arg (stmt, 5));
/* The internal call produces a complex value and takes over the
   original call's virtual operands.  */
4018 tree lhs = make_ssa_name (ctype);
4019 gimple_call_set_lhs (g, lhs);
4020 gimple_set_vdef (g, gimple_vdef (stmt));
4021 gimple_set_vuse (g, gimple_vuse (stmt));
4022 SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
4023 tree oldlhs = gimple_call_lhs (stmt);
/* When the call can throw internally, the first statement that must
   run after it has to be inserted on the fallthru edge instead of
   directly after the (block-terminating) call.  */
4024 if (stmt_can_throw_internal (cfun, stmt))
4026 throws = true;
4027 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
4029 gimple_call_set_nothrow (as_a <gcall *> (g),
4030 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
4031 gimple_call_set_lhs (stmt, NULL_TREE);
4032 gsi_replace (gsi, g, true);
4033 if (oldlhs)
/* r = (_Bool) IMAGPART_EXPR <t>;  the success flag.  */
4035 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
4036 build1 (IMAGPART_EXPR, itype, lhs));
4037 if (throws)
4039 gsi_insert_on_edge_immediate (e, g);
4040 *gsi = gsi_for_stmt (g);
4042 else
4043 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4044 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
4045 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* e = REALPART_EXPR <t>;  the value actually seen in memory.  */
4047 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
4048 build1 (REALPART_EXPR, itype, lhs));
4049 if (throws && oldlhs == NULL_TREE)
4051 gsi_insert_on_edge_immediate (e, g);
4052 *gsi = gsi_for_stmt (g);
4054 else
4055 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4056 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
4058 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
4059 VIEW_CONVERT_EXPR,
4060 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
4061 gimple_assign_lhs (g)));
4062 gsi_insert_after (gsi, g, GSI_NEW_STMT);
/* Finally store the (possibly converted) real part back into the
   user's expected variable and restore *GSI to the initial load.  */
4064 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
4065 gsi_insert_after (gsi, g, GSI_NEW_STMT);
4066 *gsi = gsiret;
4069 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4070 doesn't fit into TYPE. The test for overflow should be regardless of
4071 -fwrapv, and even for unsigned types. */
4073 bool
4074 arith_overflowed_p (enum tree_code code, const_tree type,
4075 const_tree arg0, const_tree arg1)
4077 widest2_int warg0 = widest2_int_cst (arg0);
4078 widest2_int warg1 = widest2_int_cst (arg1);
4079 widest2_int wres;
4080 switch (code)
4082 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
4083 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
4084 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
4085 default: gcc_unreachable ();
4087 signop sign = TYPE_SIGN (type);
4088 if (sign == UNSIGNED && wi::neg_p (wres))
4089 return true;
4090 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
4093 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4094 The statement may be replaced by another statement, e.g., if the call
4095 simplifies to a constant value. Return true if any changes were made.
4096 It is assumed that the operands have been previously folded. */
4098 static bool
4099 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
4101 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
4102 tree callee;
4103 bool changed = false;
4104 unsigned i;
4106 /* Fold *& in call arguments. */
4107 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4108 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
4110 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
4111 if (tmp)
4113 gimple_call_set_arg (stmt, i, tmp);
4114 changed = true;
4118 /* Check for virtual calls that became direct calls. */
4119 callee = gimple_call_fn (stmt);
4120 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
4122 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
4124 if (dump_file && virtual_method_call_p (callee)
4125 && !possible_polymorphic_call_target_p
4126 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
4127 (OBJ_TYPE_REF_EXPR (callee)))))
4129 fprintf (dump_file,
4130 "Type inheritance inconsistent devirtualization of ");
4131 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4132 fprintf (dump_file, " to ");
4133 print_generic_expr (dump_file, callee, TDF_SLIM);
4134 fprintf (dump_file, "\n");
4137 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
4138 changed = true;
4140 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
4142 bool final;
4143 vec <cgraph_node *>targets
4144 = possible_polymorphic_call_targets (callee, stmt, &final);
4145 if (final && targets.length () <= 1 && dbg_cnt (devirt))
4147 tree lhs = gimple_call_lhs (stmt);
4148 if (dump_enabled_p ())
4150 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
4151 "folding virtual function call to %s\n",
4152 targets.length () == 1
4153 ? targets[0]->name ()
4154 : "__builtin_unreachable");
4156 if (targets.length () == 1)
4158 tree fndecl = targets[0]->decl;
4159 gimple_call_set_fndecl (stmt, fndecl);
4160 changed = true;
4161 /* If changing the call to __cxa_pure_virtual
4162 or similar noreturn function, adjust gimple_call_fntype
4163 too. */
4164 if (gimple_call_noreturn_p (stmt)
4165 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
4166 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
4167 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
4168 == void_type_node))
4169 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
4170 /* If the call becomes noreturn, remove the lhs. */
4171 if (lhs
4172 && gimple_call_noreturn_p (stmt)
4173 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
4174 || should_remove_lhs_p (lhs)))
4176 if (TREE_CODE (lhs) == SSA_NAME)
4178 tree var = create_tmp_var (TREE_TYPE (lhs));
4179 tree def = get_or_create_ssa_default_def (cfun, var);
4180 gimple *new_stmt = gimple_build_assign (lhs, def);
4181 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
4183 gimple_call_set_lhs (stmt, NULL_TREE);
4185 maybe_remove_unused_call_args (cfun, stmt);
4187 else
4189 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
4190 gimple *new_stmt = gimple_build_call (fndecl, 0);
4191 gimple_set_location (new_stmt, gimple_location (stmt));
4192 /* If the call had a SSA name as lhs morph that into
4193 an uninitialized value. */
4194 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4196 tree var = create_tmp_var (TREE_TYPE (lhs));
4197 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
4198 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
4199 set_ssa_default_def (cfun, var, lhs);
4201 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
4202 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
4203 gsi_replace (gsi, new_stmt, false);
4204 return true;
4210 /* Check for indirect calls that became direct calls, and then
4211 no longer require a static chain. */
4212 if (gimple_call_chain (stmt))
4214 tree fn = gimple_call_fndecl (stmt);
4215 if (fn && !DECL_STATIC_CHAIN (fn))
4217 gimple_call_set_chain (stmt, NULL);
4218 changed = true;
4220 else
4222 tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
4223 if (tmp)
4225 gimple_call_set_chain (stmt, tmp);
4226 changed = true;
4231 if (inplace)
4232 return changed;
4234 /* Check for builtins that CCP can handle using information not
4235 available in the generic fold routines. */
4236 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4238 if (gimple_fold_builtin (gsi))
4239 changed = true;
4241 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
4243 changed |= targetm.gimple_fold_builtin (gsi);
4245 else if (gimple_call_internal_p (stmt))
4247 enum tree_code subcode = ERROR_MARK;
4248 tree result = NULL_TREE;
4249 bool cplx_result = false;
4250 tree overflow = NULL_TREE;
4251 switch (gimple_call_internal_fn (stmt))
4253 case IFN_BUILTIN_EXPECT:
4254 result = fold_builtin_expect (gimple_location (stmt),
4255 gimple_call_arg (stmt, 0),
4256 gimple_call_arg (stmt, 1),
4257 gimple_call_arg (stmt, 2),
4258 NULL_TREE);
4259 break;
4260 case IFN_UBSAN_OBJECT_SIZE:
4262 tree offset = gimple_call_arg (stmt, 1);
4263 tree objsize = gimple_call_arg (stmt, 2);
4264 if (integer_all_onesp (objsize)
4265 || (TREE_CODE (offset) == INTEGER_CST
4266 && TREE_CODE (objsize) == INTEGER_CST
4267 && tree_int_cst_le (offset, objsize)))
4269 replace_call_with_value (gsi, NULL_TREE);
4270 return true;
4273 break;
4274 case IFN_UBSAN_PTR:
4275 if (integer_zerop (gimple_call_arg (stmt, 1)))
4277 replace_call_with_value (gsi, NULL_TREE);
4278 return true;
4280 break;
4281 case IFN_UBSAN_BOUNDS:
4283 tree index = gimple_call_arg (stmt, 1);
4284 tree bound = gimple_call_arg (stmt, 2);
4285 if (TREE_CODE (index) == INTEGER_CST
4286 && TREE_CODE (bound) == INTEGER_CST)
4288 index = fold_convert (TREE_TYPE (bound), index);
4289 if (TREE_CODE (index) == INTEGER_CST
4290 && tree_int_cst_le (index, bound))
4292 replace_call_with_value (gsi, NULL_TREE);
4293 return true;
4297 break;
4298 case IFN_GOACC_DIM_SIZE:
4299 case IFN_GOACC_DIM_POS:
4300 result = fold_internal_goacc_dim (stmt);
4301 break;
4302 case IFN_UBSAN_CHECK_ADD:
4303 subcode = PLUS_EXPR;
4304 break;
4305 case IFN_UBSAN_CHECK_SUB:
4306 subcode = MINUS_EXPR;
4307 break;
4308 case IFN_UBSAN_CHECK_MUL:
4309 subcode = MULT_EXPR;
4310 break;
4311 case IFN_ADD_OVERFLOW:
4312 subcode = PLUS_EXPR;
4313 cplx_result = true;
4314 break;
4315 case IFN_SUB_OVERFLOW:
4316 subcode = MINUS_EXPR;
4317 cplx_result = true;
4318 break;
4319 case IFN_MUL_OVERFLOW:
4320 subcode = MULT_EXPR;
4321 cplx_result = true;
4322 break;
4323 default:
4324 break;
4326 if (subcode != ERROR_MARK)
4328 tree arg0 = gimple_call_arg (stmt, 0);
4329 tree arg1 = gimple_call_arg (stmt, 1);
4330 tree type = TREE_TYPE (arg0);
4331 if (cplx_result)
4333 tree lhs = gimple_call_lhs (stmt);
4334 if (lhs == NULL_TREE)
4335 type = NULL_TREE;
4336 else
4337 type = TREE_TYPE (TREE_TYPE (lhs));
4339 if (type == NULL_TREE)
4341 /* x = y + 0; x = y - 0; x = y * 0; */
4342 else if (integer_zerop (arg1))
4343 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
4344 /* x = 0 + y; x = 0 * y; */
4345 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
4346 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
4347 /* x = y - y; */
4348 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
4349 result = integer_zero_node;
4350 /* x = y * 1; x = 1 * y; */
4351 else if (subcode == MULT_EXPR && integer_onep (arg1))
4352 result = arg0;
4353 else if (subcode == MULT_EXPR && integer_onep (arg0))
4354 result = arg1;
4355 else if (TREE_CODE (arg0) == INTEGER_CST
4356 && TREE_CODE (arg1) == INTEGER_CST)
4358 if (cplx_result)
4359 result = int_const_binop (subcode, fold_convert (type, arg0),
4360 fold_convert (type, arg1));
4361 else
4362 result = int_const_binop (subcode, arg0, arg1);
4363 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
4365 if (cplx_result)
4366 overflow = build_one_cst (type);
4367 else
4368 result = NULL_TREE;
4371 if (result)
4373 if (result == integer_zero_node)
4374 result = build_zero_cst (type);
4375 else if (cplx_result && TREE_TYPE (result) != type)
4377 if (TREE_CODE (result) == INTEGER_CST)
4379 if (arith_overflowed_p (PLUS_EXPR, type, result,
4380 integer_zero_node))
4381 overflow = build_one_cst (type);
4383 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
4384 && TYPE_UNSIGNED (type))
4385 || (TYPE_PRECISION (type)
4386 < (TYPE_PRECISION (TREE_TYPE (result))
4387 + (TYPE_UNSIGNED (TREE_TYPE (result))
4388 && !TYPE_UNSIGNED (type)))))
4389 result = NULL_TREE;
4390 if (result)
4391 result = fold_convert (type, result);
4396 if (result)
4398 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
4399 result = drop_tree_overflow (result);
4400 if (cplx_result)
4402 if (overflow == NULL_TREE)
4403 overflow = build_zero_cst (TREE_TYPE (result));
4404 tree ctype = build_complex_type (TREE_TYPE (result));
4405 if (TREE_CODE (result) == INTEGER_CST
4406 && TREE_CODE (overflow) == INTEGER_CST)
4407 result = build_complex (ctype, result, overflow);
4408 else
4409 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
4410 ctype, result, overflow);
4412 if (!update_call_from_tree (gsi, result))
4413 gimplify_and_update_call_from_tree (gsi, result);
4414 changed = true;
4418 return changed;
4422 /* Return true whether NAME has a use on STMT. */
4424 static bool
4425 has_use_on_stmt (tree name, gimple *stmt)
4427 imm_use_iterator iter;
4428 use_operand_p use_p;
4429 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
4430 if (USE_STMT (use_p) == stmt)
4431 return true;
4432 return false;
4435 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4436 gimple_simplify.
4438 Replaces *GSI with the simplification result in RCODE and OPS
4439 and the associated statements in *SEQ. Does the replacement
4440 according to INPLACE and returns true if the operation succeeded. */
4442 static bool
4443 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
4444 gimple_match_op *res_op,
4445 gimple_seq *seq, bool inplace)
4447 gimple *stmt = gsi_stmt (*gsi);
4448 tree *ops = res_op->ops;
4449 unsigned int num_ops = res_op->num_ops;
4451 /* Play safe and do not allow abnormals to be mentioned in
4452 newly created statements. See also maybe_push_res_to_seq.
4453 As an exception allow such uses if there was a use of the
4454 same SSA name on the old stmt. */
4455 for (unsigned int i = 0; i < num_ops; ++i)
4456 if (TREE_CODE (ops[i]) == SSA_NAME
4457 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
4458 && !has_use_on_stmt (ops[i], stmt))
4459 return false;
/* Likewise check SSA names appearing inside a comparison tree in ops[0].  */
4461 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
4462 for (unsigned int i = 0; i < 2; ++i)
4463 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
4464 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
4465 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
4466 return false;
4468 /* Don't insert new statements when INPLACE is true, even if we could
4469 reuse STMT for the final statement. */
4470 if (inplace && !gimple_seq_empty_p (*seq))
4471 return false;
/* Case 1: the statement is a GIMPLE_COND; rewrite its condition from the
   simplification result.  */
4473 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
4475 gcc_assert (res_op->code.is_tree_code ());
4476 if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
4477 /* GIMPLE_CONDs condition may not throw. */
4478 && (!flag_exceptions
4479 || !cfun->can_throw_non_call_exceptions
4480 || !operation_could_trap_p (res_op->code,
4481 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
4482 false, NULL_TREE)))
4483 gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
4484 else if (res_op->code == SSA_NAME)
4485 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
4486 build_zero_cst (TREE_TYPE (ops[0])));
4487 else if (res_op->code == INTEGER_CST)
4489 if (integer_zerop (ops[0]))
4490 gimple_cond_make_false (cond_stmt);
4491 else
4492 gimple_cond_make_true (cond_stmt);
4494 else if (!inplace)
4496 tree res = maybe_push_res_to_seq (res_op, seq);
4497 if (!res)
4498 return false;
4499 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
4500 build_zero_cst (TREE_TYPE (res)));
4502 else
4503 return false;
4504 if (dump_file && (dump_flags & TDF_DETAILS))
4506 fprintf (dump_file, "gimple_simplified to ");
4507 if (!gimple_seq_empty_p (*seq))
4508 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4509 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4510 0, TDF_SLIM);
4512 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4513 return true;
/* Case 2: an assignment whose simplified form is a plain tree code;
   replace the RHS in place.  */
4515 else if (is_gimple_assign (stmt)
4516 && res_op->code.is_tree_code ())
4518 if (!inplace
4519 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
4521 maybe_build_generic_op (res_op);
4522 gimple_assign_set_rhs_with_ops (gsi, res_op->code,
4523 res_op->op_or_null (0),
4524 res_op->op_or_null (1),
4525 res_op->op_or_null (2));
4526 if (dump_file && (dump_flags & TDF_DETAILS))
4528 fprintf (dump_file, "gimple_simplified to ");
4529 if (!gimple_seq_empty_p (*seq))
4530 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4531 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
4532 0, TDF_SLIM);
4534 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4535 return true;
/* Case 3: the simplification kept the same (combined) function call and
   only changed its arguments; update them in place.  */
4538 else if (res_op->code.is_fn_code ()
4539 && gimple_call_combined_fn (stmt) == res_op->code)
4541 gcc_assert (num_ops == gimple_call_num_args (stmt));
4542 for (unsigned int i = 0; i < num_ops; ++i)
4543 gimple_call_set_arg (stmt, i, ops[i]);
4544 if (dump_file && (dump_flags & TDF_DETAILS))
4546 fprintf (dump_file, "gimple_simplified to ");
4547 if (!gimple_seq_empty_p (*seq))
4548 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4549 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
4551 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
4552 return true;
/* Case 4: otherwise materialize the result into a fresh statement
   sequence replacing *GSI; only valid when not folding in place.  */
4554 else if (!inplace)
4556 if (gimple_has_lhs (stmt))
4558 tree lhs = gimple_get_lhs (stmt);
4559 if (!maybe_push_res_to_seq (res_op, seq, lhs))
4560 return false;
4561 if (dump_file && (dump_flags & TDF_DETAILS))
4563 fprintf (dump_file, "gimple_simplified to ");
4564 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
4566 gsi_replace_with_seq_vops (gsi, *seq);
4567 return true;
4569 else
4570 gcc_unreachable ();
4573 return false;
4576 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4578 static bool
4579 maybe_canonicalize_mem_ref_addr (tree *t)
4581 bool res = false;
/* Look through a wrapping ADDR_EXPR and canonicalize its operand.  */
4583 if (TREE_CODE (*t) == ADDR_EXPR)
4584 t = &TREE_OPERAND (*t, 0);
4586 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4587 generic vector extension. The actual vector referenced is
4588 view-converted to an array type for this purpose. If the index
4589 is constant the canonical representation in the middle-end is a
4590 BIT_FIELD_REF so re-write the former to the latter here. */
4591 if (TREE_CODE (*t) == ARRAY_REF
4592 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
4593 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
4594 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
4596 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
4597 if (VECTOR_TYPE_P (vtype))
4599 tree low = array_ref_low_bound (*t);
4600 if (TREE_CODE (low) == INTEGER_CST)
4602 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
/* Compute the bit offset of the accessed element and verify that the
   access does not extend past the end of the vector.  */
4604 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
4605 wi::to_widest (low));
4606 idx = wi::mul (idx, wi::to_widest
4607 (TYPE_SIZE (TREE_TYPE (*t))));
4608 widest_int ext
4609 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
4610 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
4612 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
4613 TREE_TYPE (*t),
4614 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
4615 TYPE_SIZE (TREE_TYPE (*t)),
4616 wide_int_to_tree (bitsizetype, idx));
4617 res = true;
/* Strip handled components to get at the base of the reference.  */
4624 while (handled_component_p (*t))
4625 t = &TREE_OPERAND (*t, 0);
4627 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4628 of invariant addresses into a SSA name MEM_REF address. */
4629 if (TREE_CODE (*t) == MEM_REF
4630 || TREE_CODE (*t) == TARGET_MEM_REF)
4632 tree addr = TREE_OPERAND (*t, 0);
4633 if (TREE_CODE (addr) == ADDR_EXPR
4634 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
4635 || handled_component_p (TREE_OPERAND (addr, 0))))
4637 tree base;
4638 poly_int64 coffset;
4639 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
4640 &coffset);
/* An invariant address component always has a recognizable base.  */
4641 if (!base)
4642 gcc_unreachable ();
/* Fold the component offset into the MEM_REF's constant offset.  */
4644 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
4645 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
4646 TREE_OPERAND (*t, 1),
4647 size_int (coffset));
4648 res = true;
4650 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
4651 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
4654 /* Canonicalize back MEM_REFs to plain reference trees if the object
4655 accessed is a decl that has the same access semantics as the MEM_REF. */
4656 if (TREE_CODE (*t) == MEM_REF
4657 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
4658 && integer_zerop (TREE_OPERAND (*t, 1))
4659 && MR_DEPENDENCE_CLIQUE (*t) == 0)
4661 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4662 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
4663 if (/* Same volatile qualification. */
4664 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
4665 /* Same TBAA behavior with -fstrict-aliasing. */
4666 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
4667 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
4668 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
4669 /* Same alignment. */
4670 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
4671 /* We have to look out here to not drop a required conversion
4672 from the rhs to the lhs if *t appears on the lhs or vice-versa
4673 if it appears on the rhs. Thus require strict type
4674 compatibility. */
4675 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
4677 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
4678 res = true;
4682 /* Canonicalize TARGET_MEM_REF in particular with respect to
4683 the indexes becoming constant. */
4684 else if (TREE_CODE (*t) == TARGET_MEM_REF)
4686 tree tem = maybe_fold_tmr (*t);
4687 if (tem)
4689 *t = tem;
4690 res = true;
4694 return res;
4697 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4698 distinguishes both cases. */
4700 static bool
4701 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
4703 bool changed = false;
4704 gimple *stmt = gsi_stmt (*gsi);
4705 bool nowarning = gimple_no_warning_p (stmt);
4706 unsigned i;
4707 fold_defer_overflow_warnings ();
4709 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4710 after propagation.
4711 ??? This shouldn't be done in generic folding but in the
4712 propagation helpers which also know whether an address was
4713 propagated.
4714 Also canonicalize operand order. */
4715 switch (gimple_code (stmt))
4717 case GIMPLE_ASSIGN:
4718 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
4720 tree *rhs = gimple_assign_rhs1_ptr (stmt);
4721 if ((REFERENCE_CLASS_P (*rhs)
4722 || TREE_CODE (*rhs) == ADDR_EXPR)
4723 && maybe_canonicalize_mem_ref_addr (rhs))
4724 changed = true;
4725 tree *lhs = gimple_assign_lhs_ptr (stmt);
4726 if (REFERENCE_CLASS_P (*lhs)
4727 && maybe_canonicalize_mem_ref_addr (lhs))
4728 changed = true;
4730 else
4732 /* Canonicalize operand order. */
4733 enum tree_code code = gimple_assign_rhs_code (stmt);
4734 if (TREE_CODE_CLASS (code) == tcc_comparison
4735 || commutative_tree_code (code)
4736 || commutative_ternary_tree_code (code))
4738 tree rhs1 = gimple_assign_rhs1 (stmt);
4739 tree rhs2 = gimple_assign_rhs2 (stmt);
4740 if (tree_swap_operands_p (rhs1, rhs2))
4742 gimple_assign_set_rhs1 (stmt, rhs2);
4743 gimple_assign_set_rhs2 (stmt, rhs1);
4744 if (TREE_CODE_CLASS (code) == tcc_comparison)
4745 gimple_assign_set_rhs_code (stmt,
4746 swap_tree_comparison (code));
4747 changed = true;
4751 break;
4752 case GIMPLE_CALL:
4754 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4756 tree *arg = gimple_call_arg_ptr (stmt, i);
4757 if (REFERENCE_CLASS_P (*arg)
4758 && maybe_canonicalize_mem_ref_addr (arg))
4759 changed = true;
4761 tree *lhs = gimple_call_lhs_ptr (stmt);
4762 if (*lhs
4763 && REFERENCE_CLASS_P (*lhs)
4764 && maybe_canonicalize_mem_ref_addr (lhs))
4765 changed = true;
4766 break;
4768 case GIMPLE_ASM:
4770 gasm *asm_stmt = as_a <gasm *> (stmt);
4771 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4773 tree link = gimple_asm_output_op (asm_stmt, i);
4774 tree op = TREE_VALUE (link);
4775 if (REFERENCE_CLASS_P (op)
4776 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4777 changed = true;
4779 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4781 tree link = gimple_asm_input_op (asm_stmt, i);
4782 tree op = TREE_VALUE (link);
4783 if ((REFERENCE_CLASS_P (op)
4784 || TREE_CODE (op) == ADDR_EXPR)
4785 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
4786 changed = true;
4789 break;
4790 case GIMPLE_DEBUG:
4791 if (gimple_debug_bind_p (stmt))
4793 tree *val = gimple_debug_bind_get_value_ptr (stmt);
4794 if (*val
4795 && (REFERENCE_CLASS_P (*val)
4796 || TREE_CODE (*val) == ADDR_EXPR)
4797 && maybe_canonicalize_mem_ref_addr (val))
4798 changed = true;
4800 break;
4801 case GIMPLE_COND:
4803 /* Canonicalize operand order. */
4804 tree lhs = gimple_cond_lhs (stmt);
4805 tree rhs = gimple_cond_rhs (stmt);
4806 if (tree_swap_operands_p (lhs, rhs))
4808 gcond *gc = as_a <gcond *> (stmt);
4809 gimple_cond_set_lhs (gc, rhs);
4810 gimple_cond_set_rhs (gc, lhs);
4811 gimple_cond_set_code (gc,
4812 swap_tree_comparison (gimple_cond_code (gc)));
4813 changed = true;
4816 default:;
4819 /* Dispatch to pattern-based folding. */
4820 if (!inplace
4821 || is_gimple_assign (stmt)
4822 || gimple_code (stmt) == GIMPLE_COND)
4824 gimple_seq seq = NULL;
4825 gimple_match_op res_op;
4826 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
4827 valueize, valueize))
4829 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
4830 changed = true;
4831 else
4832 gimple_seq_discard (seq);
/* The pattern-based folding may have replaced the statement; re-fetch.  */
4836 stmt = gsi_stmt (*gsi);
4838 /* Fold the main computation performed by the statement. */
4839 switch (gimple_code (stmt))
4841 case GIMPLE_ASSIGN:
4843 /* Try to canonicalize for boolean-typed X the comparisons
4844 X == 0, X == 1, X != 0, and X != 1. */
4845 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
4846 || gimple_assign_rhs_code (stmt) == NE_EXPR)
4848 tree lhs = gimple_assign_lhs (stmt);
4849 tree op1 = gimple_assign_rhs1 (stmt);
4850 tree op2 = gimple_assign_rhs2 (stmt);
4851 tree type = TREE_TYPE (op1);
4853 /* Check whether the comparison operands are of the same boolean
4854 type as the result type is.
4855 Check that second operand is an integer-constant with value
4856 one or zero. */
4857 if (TREE_CODE (op2) == INTEGER_CST
4858 && (integer_zerop (op2) || integer_onep (op2))
4859 && useless_type_conversion_p (TREE_TYPE (lhs), type))
4861 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
4862 bool is_logical_not = false;
4864 /* X == 0 and X != 1 is a logical-not of X
4865 X == 1 and X != 0 is X */
4866 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
4867 || (cmp_code == NE_EXPR && integer_onep (op2)))
4868 is_logical_not = true;
4870 if (is_logical_not == false)
4871 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
4872 /* Only for one-bit precision typed X the transformation
4873 !X -> ~X is valid. */
4874 else if (TYPE_PRECISION (type) == 1)
4875 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
4876 /* Otherwise we use !X -> X ^ 1. */
4877 else
4878 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
4879 build_int_cst (type, 1));
4880 changed = true;
4881 break;
/* Generic RHS folding via fold_gimple_assign; only install the new RHS
   in-place when it does not need more operand slots than available.  */
4885 unsigned old_num_ops = gimple_num_ops (stmt);
4886 tree lhs = gimple_assign_lhs (stmt);
4887 tree new_rhs = fold_gimple_assign (gsi);
4888 if (new_rhs
4889 && !useless_type_conversion_p (TREE_TYPE (lhs),
4890 TREE_TYPE (new_rhs)))
4891 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
4892 if (new_rhs
4893 && (!inplace
4894 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
4896 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
4897 changed = true;
4899 break;
4902 case GIMPLE_CALL:
4903 changed |= gimple_fold_call (gsi, inplace);
4904 break;
4906 case GIMPLE_ASM:
4907 /* Fold *& in asm operands. */
4909 gasm *asm_stmt = as_a <gasm *> (stmt);
4910 size_t noutputs;
4911 const char **oconstraints;
4912 const char *constraint;
4913 bool allows_mem, allows_reg;
4915 noutputs = gimple_asm_noutputs (asm_stmt);
4916 oconstraints = XALLOCAVEC (const char *, noutputs);
4918 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
4920 tree link = gimple_asm_output_op (asm_stmt, i);
4921 tree op = TREE_VALUE (link);
4922 oconstraints[i]
4923 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4924 if (REFERENCE_CLASS_P (op)
4925 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
4927 TREE_VALUE (link) = op;
4928 changed = true;
4931 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
4933 tree link = gimple_asm_input_op (asm_stmt, i);
4934 tree op = TREE_VALUE (link);
4935 constraint
4936 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4937 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4938 oconstraints, &allows_mem, &allows_reg);
4939 if (REFERENCE_CLASS_P (op)
4940 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
4941 != NULL_TREE)
4943 TREE_VALUE (link) = op;
4944 changed = true;
4948 break;
4950 case GIMPLE_DEBUG:
4951 if (gimple_debug_bind_p (stmt))
4953 tree val = gimple_debug_bind_get_value (stmt);
4954 if (val
4955 && REFERENCE_CLASS_P (val))
4957 tree tem = maybe_fold_reference (val, false);
4958 if (tem)
4960 gimple_debug_bind_set_value (stmt, tem);
4961 changed = true;
4964 else if (val
4965 && TREE_CODE (val) == ADDR_EXPR)
4967 tree ref = TREE_OPERAND (val, 0);
4968 tree tem = maybe_fold_reference (ref, false);
4969 if (tem)
4971 tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
4972 gimple_debug_bind_set_value (stmt, tem);
4973 changed = true;
4977 break;
4979 case GIMPLE_RETURN:
4981 greturn *ret_stmt = as_a<greturn *> (stmt);
4982 tree ret = gimple_return_retval(ret_stmt);
4984 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
4986 tree val = valueize (ret);
4987 if (val && val != ret
4988 && may_propagate_copy (ret, val))
4990 gimple_return_set_retval (ret_stmt, val);
4991 changed = true;
4995 break;
4997 default:;
/* The folding above may again have replaced the statement; re-fetch.  */
5000 stmt = gsi_stmt (*gsi);
5002 /* Fold *& on the lhs. */
5003 if (gimple_has_lhs (stmt))
5005 tree lhs = gimple_get_lhs (stmt);
5006 if (lhs && REFERENCE_CLASS_P (lhs))
5008 tree new_lhs = maybe_fold_reference (lhs, true);
5009 if (new_lhs)
5011 gimple_set_lhs (stmt, new_lhs);
5012 changed = true;
/* Issue deferred overflow warnings only if folding changed the statement
   and warnings are not suppressed on it.  */
5017 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
5018 return changed;
5021 /* Valueziation callback that ends up not following SSA edges. */
5023 tree
5024 no_follow_ssa_edges (tree)
5026 return NULL_TREE;
5029 /* Valueization callback that ends up following single-use SSA edges only. */
5031 tree
5032 follow_single_use_edges (tree val)
5034 if (TREE_CODE (val) == SSA_NAME
5035 && !has_single_use (val))
5036 return NULL_TREE;
5037 return val;
5040 /* Valueization callback that follows all SSA edges. */
5042 tree
5043 follow_all_ssa_edges (tree val)
5045 return val;
5048 /* Fold the statement pointed to by GSI. In some cases, this function may
5049 replace the whole statement with a new one. Returns true iff folding
5050 makes any changes.
5051 The statement pointed to by GSI should be in valid gimple form but may
5052 be in unfolded state as resulting from for example constant propagation
5053 which can produce *&x = 0. */
5055 bool
5056 fold_stmt (gimple_stmt_iterator *gsi)
5058 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
5061 bool
5062 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
5064 return fold_stmt_1 (gsi, false, valueize);
5067 /* Perform the minimal folding on statement *GSI. Only operations like
5068 *&x created by constant propagation are handled. The statement cannot
5069 be replaced with a new one. Return true if the statement was
5070 changed, false otherwise.
5071 The statement *GSI should be in valid gimple form but may
5072 be in unfolded state as resulting from for example constant propagation
5073 which can produce *&x = 0. */
5075 bool
5076 fold_stmt_inplace (gimple_stmt_iterator *gsi)
5078 gimple *stmt = gsi_stmt (*gsi);
5079 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
5080 gcc_assert (gsi_stmt (*gsi) == stmt);
5081 return changed;
5084 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5085 if EXPR is null or we don't know how.
5086 If non-null, the result always has boolean type. */
5088 static tree
5089 canonicalize_bool (tree expr, bool invert)
5091 if (!expr)
5092 return NULL_TREE;
5093 else if (invert)
5095 if (integer_nonzerop (expr))
5096 return boolean_false_node;
5097 else if (integer_zerop (expr))
5098 return boolean_true_node;
5099 else if (TREE_CODE (expr) == SSA_NAME)
5100 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
5101 build_int_cst (TREE_TYPE (expr), 0));
5102 else if (COMPARISON_CLASS_P (expr))
5103 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
5104 boolean_type_node,
5105 TREE_OPERAND (expr, 0),
5106 TREE_OPERAND (expr, 1));
5107 else
5108 return NULL_TREE;
5110 else
5112 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5113 return expr;
5114 if (integer_nonzerop (expr))
5115 return boolean_true_node;
5116 else if (integer_zerop (expr))
5117 return boolean_false_node;
5118 else if (TREE_CODE (expr) == SSA_NAME)
5119 return fold_build2 (NE_EXPR, boolean_type_node, expr,
5120 build_int_cst (TREE_TYPE (expr), 0));
5121 else if (COMPARISON_CLASS_P (expr))
5122 return fold_build2 (TREE_CODE (expr),
5123 boolean_type_node,
5124 TREE_OPERAND (expr, 0),
5125 TREE_OPERAND (expr, 1));
5126 else
5127 return NULL_TREE;
5131 /* Check to see if a boolean expression EXPR is logically equivalent to the
5132 comparison (OP1 CODE OP2). Check for various identities involving
5133 SSA_NAMEs. */
5135 static bool
5136 same_bool_comparison_p (const_tree expr, enum tree_code code,
5137 const_tree op1, const_tree op2)
5139 gimple *s;
5141 /* The obvious case. */
5142 if (TREE_CODE (expr) == code
5143 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
5144 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
5145 return true;
5147 /* Check for comparing (name, name != 0) and the case where expr
5148 is an SSA_NAME with a definition matching the comparison. */
5149 if (TREE_CODE (expr) == SSA_NAME
5150 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
5152 if (operand_equal_p (expr, op1, 0))
5153 return ((code == NE_EXPR && integer_zerop (op2))
5154 || (code == EQ_EXPR && integer_nonzerop (op2)));
/* Otherwise inspect EXPR's defining statement for the same comparison.  */
5155 s = SSA_NAME_DEF_STMT (expr);
5156 if (is_gimple_assign (s)
5157 && gimple_assign_rhs_code (s) == code
5158 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
5159 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
5160 return true;
5163 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5164 of name is a comparison, recurse. */
5165 if (TREE_CODE (op1) == SSA_NAME
5166 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
5168 s = SSA_NAME_DEF_STMT (op1);
5169 if (is_gimple_assign (s)
5170 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
5172 enum tree_code c = gimple_assign_rhs_code (s);
/* (name != 0) or (name == nonzero) keeps the comparison's sense.  */
5173 if ((c == NE_EXPR && integer_zerop (op2))
5174 || (c == EQ_EXPR && integer_nonzerop (op2)))
5175 return same_bool_comparison_p (expr, c,
5176 gimple_assign_rhs1 (s),
5177 gimple_assign_rhs2 (s));
/* (name == 0) or (name != nonzero) inverts the comparison's sense.  */
5178 if ((c == EQ_EXPR && integer_zerop (op2))
5179 || (c == NE_EXPR && integer_nonzerop (op2)))
5180 return same_bool_comparison_p (expr,
5181 invert_tree_comparison (c, false),
5182 gimple_assign_rhs1 (s),
5183 gimple_assign_rhs2 (s));
5186 return false;
5189 /* Check to see if two boolean expressions OP1 and OP2 are logically
5190 equivalent. */
5192 static bool
5193 same_bool_result_p (const_tree op1, const_tree op2)
5195 /* Simple cases first. */
5196 if (operand_equal_p (op1, op2, 0))
5197 return true;
5199 /* Check the cases where at least one of the operands is a comparison.
5200 These are a bit smarter than operand_equal_p in that they apply some
5201 identifies on SSA_NAMEs. */
5202 if (COMPARISON_CLASS_P (op2)
5203 && same_bool_comparison_p (op1, TREE_CODE (op2),
5204 TREE_OPERAND (op2, 0),
5205 TREE_OPERAND (op2, 1)))
5206 return true;
5207 if (COMPARISON_CLASS_P (op1)
5208 && same_bool_comparison_p (op2, TREE_CODE (op1),
5209 TREE_OPERAND (op1, 0),
5210 TREE_OPERAND (op1, 1)))
5211 return true;
5213 /* Default case. */
5214 return false;
5217 /* Forward declarations for some mutually recursive functions. */
5219 static tree
5220 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5221 enum tree_code code2, tree op2a, tree op2b);
5222 static tree
5223 and_var_with_comparison (tree var, bool invert,
5224 enum tree_code code2, tree op2a, tree op2b);
5225 static tree
5226 and_var_with_comparison_1 (gimple *stmt,
5227 enum tree_code code2, tree op2a, tree op2b);
5228 static tree
5229 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
5230 enum tree_code code2, tree op2a, tree op2b);
5231 static tree
5232 or_var_with_comparison (tree var, bool invert,
5233 enum tree_code code2, tree op2a, tree op2b);
5234 static tree
5235 or_var_with_comparison_1 (gimple *stmt,
5236 enum tree_code code2, tree op2a, tree op2b);
5238 /* Helper function for and_comparisons_1: try to simplify the AND of the
5239 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5240 If INVERT is true, invert the value of the VAR before doing the AND.
5241 Return NULL_EXPR if we can't simplify this to a single expression. */
5243 static tree
5244 and_var_with_comparison (tree var, bool invert,
5245 enum tree_code code2, tree op2a, tree op2b)
5247 tree t;
5248 gimple *stmt = SSA_NAME_DEF_STMT (var);
5250 /* We can only deal with variables whose definitions are assignments. */
5251 if (!is_gimple_assign (stmt))
5252 return NULL_TREE;
5254 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5255 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5256 Then we only have to consider the simpler non-inverted cases. */
5257 if (invert)
5258 t = or_var_with_comparison_1 (stmt,
5259 invert_tree_comparison (code2, false),
5260 op2a, op2b);
5261 else
5262 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
5263 return canonicalize_bool (t, invert);
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) both test op2a for truth.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) both test op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp is <0, 0 or >0 as op1b is less than, equal to or greater
	 than op2b.  All the range logic below keys off this.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges. */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood
	 ((NAME == 0) or (NAME != 1)) rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through and give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
5683 /* Try to simplify the AND of two comparisons, specified by
5684 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5685 If this can be simplified to a single expression (without requiring
5686 introducing more SSA variables to hold intermediate values),
5687 return the resulting tree. Otherwise return NULL_TREE.
5688 If the result expression is non-null, it has boolean type. */
5690 tree
5691 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
5692 enum tree_code code2, tree op2a, tree op2b)
5694 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
5695 if (t)
5696 return t;
5697 else
5698 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
5701 /* Helper function for or_comparisons_1: try to simplify the OR of the
5702 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5703 If INVERT is true, invert the value of VAR before doing the OR.
5704 Return NULL_EXPR if we can't simplify this to a single expression. */
5706 static tree
5707 or_var_with_comparison (tree var, bool invert,
5708 enum tree_code code2, tree op2a, tree op2b)
5710 tree t;
5711 gimple *stmt = SSA_NAME_DEF_STMT (var);
5713 /* We can only deal with variables whose definitions are assignments. */
5714 if (!is_gimple_assign (stmt))
5715 return NULL_TREE;
5717 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5718 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5719 Then we only have to consider the simpler non-inverted cases. */
5720 if (invert)
5721 t = and_var_with_comparison_1 (stmt,
5722 invert_tree_comparison (code2, false),
5723 op2a, op2b);
5724 else
5725 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
5726 return canonicalize_bool (t, invert);
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) and (op2a == 1) both test op2a for truth.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) and (op2a != 1) both test op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* cmp is <0, 0 or >0 as op1b is less than, equal to or greater
	 than op2b.  All the range logic below keys off this.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* INVERT is true when the first comparison tests NAME for falsehood
	 ((NAME == 0) or (NAME != 1)) rather than truth.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  /* Non-boolean PHI: fall through and give up.  */

	default:
	  break;
	}
    }
  return NULL_TREE;
}
6147 /* Try to simplify the OR of two comparisons, specified by
6148 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6149 If this can be simplified to a single expression (without requiring
6150 introducing more SSA variables to hold intermediate values),
6151 return the resulting tree. Otherwise return NULL_TREE.
6152 If the result expression is non-null, it has boolean type. */
6154 tree
6155 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
6156 enum tree_code code2, tree op2a, tree op2b)
6158 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
6159 if (t)
6160 return t;
6161 else
6162 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
6166 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6168 Either NULL_TREE, a simplified but non-constant or a constant
6169 is returned.
6171 ??? This should go into a gimple-fold-inline.h file to be eventually
6172 privatized with the single valueize function used in the various TUs
6173 to avoid the indirect function call overhead. */
6175 tree
6176 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
6177 tree (*gvalueize) (tree))
6179 gimple_match_op res_op;
6180 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6181 edges if there are intermediate VARYING defs. For this reason
6182 do not follow SSA edges here even though SCCVN can technically
6183 just deal fine with that. */
6184 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
6186 tree res = NULL_TREE;
6187 if (gimple_simplified_result_is_gimple_val (&res_op))
6188 res = res_op.ops[0];
6189 else if (mprts_hook)
6190 res = mprts_hook (&res_op);
6191 if (res)
6193 if (dump_file && dump_flags & TDF_DETAILS)
6195 fprintf (dump_file, "Match-and-simplified ");
6196 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
6197 fprintf (dump_file, " to ");
6198 print_generic_expr (dump_file, res);
6199 fprintf (dump_file, "\n");
6201 return res;
6205 location_t loc = gimple_location (stmt);
6206 switch (gimple_code (stmt))
6208 case GIMPLE_ASSIGN:
6210 enum tree_code subcode = gimple_assign_rhs_code (stmt);
6212 switch (get_gimple_rhs_class (subcode))
6214 case GIMPLE_SINGLE_RHS:
6216 tree rhs = gimple_assign_rhs1 (stmt);
6217 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
6219 if (TREE_CODE (rhs) == SSA_NAME)
6221 /* If the RHS is an SSA_NAME, return its known constant value,
6222 if any. */
6223 return (*valueize) (rhs);
6225 /* Handle propagating invariant addresses into address
6226 operations. */
6227 else if (TREE_CODE (rhs) == ADDR_EXPR
6228 && !is_gimple_min_invariant (rhs))
6230 poly_int64 offset = 0;
6231 tree base;
6232 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
6233 &offset,
6234 valueize);
6235 if (base
6236 && (CONSTANT_CLASS_P (base)
6237 || decl_address_invariant_p (base)))
6238 return build_invariant_address (TREE_TYPE (rhs),
6239 base, offset);
6241 else if (TREE_CODE (rhs) == CONSTRUCTOR
6242 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
6243 && known_eq (CONSTRUCTOR_NELTS (rhs),
6244 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
6246 unsigned i, nelts;
6247 tree val;
6249 nelts = CONSTRUCTOR_NELTS (rhs);
6250 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
6251 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
6253 val = (*valueize) (val);
6254 if (TREE_CODE (val) == INTEGER_CST
6255 || TREE_CODE (val) == REAL_CST
6256 || TREE_CODE (val) == FIXED_CST)
6257 vec.quick_push (val);
6258 else
6259 return NULL_TREE;
6262 return vec.build ();
6264 if (subcode == OBJ_TYPE_REF)
6266 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
6267 /* If callee is constant, we can fold away the wrapper. */
6268 if (is_gimple_min_invariant (val))
6269 return val;
6272 if (kind == tcc_reference)
6274 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
6275 || TREE_CODE (rhs) == REALPART_EXPR
6276 || TREE_CODE (rhs) == IMAGPART_EXPR)
6277 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6279 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6280 return fold_unary_loc (EXPR_LOCATION (rhs),
6281 TREE_CODE (rhs),
6282 TREE_TYPE (rhs), val);
6284 else if (TREE_CODE (rhs) == BIT_FIELD_REF
6285 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6287 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6288 return fold_ternary_loc (EXPR_LOCATION (rhs),
6289 TREE_CODE (rhs),
6290 TREE_TYPE (rhs), val,
6291 TREE_OPERAND (rhs, 1),
6292 TREE_OPERAND (rhs, 2));
6294 else if (TREE_CODE (rhs) == MEM_REF
6295 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
6297 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
6298 if (TREE_CODE (val) == ADDR_EXPR
6299 && is_gimple_min_invariant (val))
6301 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
6302 unshare_expr (val),
6303 TREE_OPERAND (rhs, 1));
6304 if (tem)
6305 rhs = tem;
6308 return fold_const_aggregate_ref_1 (rhs, valueize);
6310 else if (kind == tcc_declaration)
6311 return get_symbol_constant_value (rhs);
6312 return rhs;
6315 case GIMPLE_UNARY_RHS:
6316 return NULL_TREE;
6318 case GIMPLE_BINARY_RHS:
6319 /* Translate &x + CST into an invariant form suitable for
6320 further propagation. */
6321 if (subcode == POINTER_PLUS_EXPR)
6323 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6324 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6325 if (TREE_CODE (op0) == ADDR_EXPR
6326 && TREE_CODE (op1) == INTEGER_CST)
6328 tree off = fold_convert (ptr_type_node, op1);
6329 return build_fold_addr_expr_loc
6330 (loc,
6331 fold_build2 (MEM_REF,
6332 TREE_TYPE (TREE_TYPE (op0)),
6333 unshare_expr (op0), off));
6336 /* Canonicalize bool != 0 and bool == 0 appearing after
6337 valueization. While gimple_simplify handles this
6338 it can get confused by the ~X == 1 -> X == 0 transform
6339 which we cant reduce to a SSA name or a constant
6340 (and we have no way to tell gimple_simplify to not
6341 consider those transforms in the first place). */
6342 else if (subcode == EQ_EXPR
6343 || subcode == NE_EXPR)
6345 tree lhs = gimple_assign_lhs (stmt);
6346 tree op0 = gimple_assign_rhs1 (stmt);
6347 if (useless_type_conversion_p (TREE_TYPE (lhs),
6348 TREE_TYPE (op0)))
6350 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6351 op0 = (*valueize) (op0);
6352 if (TREE_CODE (op0) == INTEGER_CST)
6353 std::swap (op0, op1);
6354 if (TREE_CODE (op1) == INTEGER_CST
6355 && ((subcode == NE_EXPR && integer_zerop (op1))
6356 || (subcode == EQ_EXPR && integer_onep (op1))))
6357 return op0;
6360 return NULL_TREE;
6362 case GIMPLE_TERNARY_RHS:
6364 /* Handle ternary operators that can appear in GIMPLE form. */
6365 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
6366 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
6367 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
6368 return fold_ternary_loc (loc, subcode,
6369 gimple_expr_type (stmt), op0, op1, op2);
6372 default:
6373 gcc_unreachable ();
6377 case GIMPLE_CALL:
6379 tree fn;
6380 gcall *call_stmt = as_a <gcall *> (stmt);
6382 if (gimple_call_internal_p (stmt))
6384 enum tree_code subcode = ERROR_MARK;
6385 switch (gimple_call_internal_fn (stmt))
6387 case IFN_UBSAN_CHECK_ADD:
6388 subcode = PLUS_EXPR;
6389 break;
6390 case IFN_UBSAN_CHECK_SUB:
6391 subcode = MINUS_EXPR;
6392 break;
6393 case IFN_UBSAN_CHECK_MUL:
6394 subcode = MULT_EXPR;
6395 break;
6396 case IFN_BUILTIN_EXPECT:
6398 tree arg0 = gimple_call_arg (stmt, 0);
6399 tree op0 = (*valueize) (arg0);
6400 if (TREE_CODE (op0) == INTEGER_CST)
6401 return op0;
6402 return NULL_TREE;
6404 default:
6405 return NULL_TREE;
6407 tree arg0 = gimple_call_arg (stmt, 0);
6408 tree arg1 = gimple_call_arg (stmt, 1);
6409 tree op0 = (*valueize) (arg0);
6410 tree op1 = (*valueize) (arg1);
6412 if (TREE_CODE (op0) != INTEGER_CST
6413 || TREE_CODE (op1) != INTEGER_CST)
6415 switch (subcode)
6417 case MULT_EXPR:
6418 /* x * 0 = 0 * x = 0 without overflow. */
6419 if (integer_zerop (op0) || integer_zerop (op1))
6420 return build_zero_cst (TREE_TYPE (arg0));
6421 break;
6422 case MINUS_EXPR:
6423 /* y - y = 0 without overflow. */
6424 if (operand_equal_p (op0, op1, 0))
6425 return build_zero_cst (TREE_TYPE (arg0));
6426 break;
6427 default:
6428 break;
6431 tree res
6432 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
6433 if (res
6434 && TREE_CODE (res) == INTEGER_CST
6435 && !TREE_OVERFLOW (res))
6436 return res;
6437 return NULL_TREE;
6440 fn = (*valueize) (gimple_call_fn (stmt));
6441 if (TREE_CODE (fn) == ADDR_EXPR
6442 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
6443 && gimple_builtin_call_types_compatible_p (stmt,
6444 TREE_OPERAND (fn, 0)))
6446 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
6447 tree retval;
6448 unsigned i;
6449 for (i = 0; i < gimple_call_num_args (stmt); ++i)
6450 args[i] = (*valueize) (gimple_call_arg (stmt, i));
6451 retval = fold_builtin_call_array (loc,
6452 gimple_call_return_type (call_stmt),
6453 fn, gimple_call_num_args (stmt), args);
6454 if (retval)
6456 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6457 STRIP_NOPS (retval);
6458 retval = fold_convert (gimple_call_return_type (call_stmt),
6459 retval);
6461 return retval;
6463 return NULL_TREE;
6466 default:
6467 return NULL_TREE;
6471 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6472 Returns NULL_TREE if folding to a constant is not possible, otherwise
6473 returns a constant according to is_gimple_min_invariant. */
6475 tree
6476 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
6478 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
6479 if (res && is_gimple_min_invariant (res))
6480 return res;
6481 return NULL_TREE;
6485 /* The following set of functions are supposed to fold references using
6486 their constant initializers. */
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize)(tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF offset into *BIT_OFFSET; bail out if the
	 combined offset does not fit a signed HWI.  */
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      /* Only a known address lets us look at the underlying decl.  */
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert does not change the underlying bytes; just
	 look through it.  */
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Peel one level of reference, accumulate its constant bit
	 offset and recurse on the remaining base.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     accessed not larger than size of array element.  */
  if (type
      && (!TYPE_SIZE_UNIT (type)
	  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
	  || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
	  || elt_size == 0))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_size.to_uhwi () * BITS_PER_UNIT;
	}

      *suboff += (access_index * elt_size * BITS_PER_UNIT).to_uhwi ();
      /* Recurse: the element initializer may itself be an aggregate.  */
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset
   of the accessed member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  /* Walk all initialized members looking for the one that covers
     the requested bit range.  */
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  /* Recurse with the offset rebased to the start of the field.  */
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  /* Memory not explicitly mentioned in constructor is 0.  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which POLY_OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      /* Callers that do not care about *SUBOFF still need the
	 recursion to have somewhere to accumulate it.  */
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl, suboff);

      return fold_nonarray_ctor_reference (type, ctor, offset, size,
					   from_decl, suboff);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  /* Volatile accesses must be preserved, never folded away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the underlying complex value, then extract the part.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_const_aggregate_ref_1, but without valueization of
   SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* Decompose the vtable pointer expression into the vtable VAR_DECL
     and a byte offset into it.  */
  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* May-alias-all references must keep their indirection.  */
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold if the access stays within the vector.  */
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
7202 /* Return true if CODE is an operation that when operating on signed
7203 integer types involves undefined behavior on overflow and the
7204 operation can be expressed with unsigned arithmetic. */
7206 bool
7207 arith_code_with_undefined_signed_overflow (tree_code code)
7209 switch (code)
7211 case PLUS_EXPR:
7212 case MINUS_EXPR:
7213 case MULT_EXPR:
7214 case NEGATE_EXPR:
7215 case POINTER_PLUS_EXPR:
7216 return true;
7217 default:
7218 return false;
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each RHS operand to the unsigned type, collecting the
     conversion statements in STMTS.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original LHS type.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
7260 /* The valueization hook we use for the gimple_build API simplification.
7261 This makes us match fold_buildN behavior by only combining with
7262 statements in the sequence(s) we are currently building. */
7264 static tree
7265 gimple_build_valueize (tree op)
7267 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
7268 return op;
7269 return NULL_TREE;
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* These codes are represented as a reference wrapped around the
	 operand rather than as a plain unary assignment.  */
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
7298 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7299 simplifying it first if possible. Returns the built
7300 expression value and appends statements possibly defining it
7301 to SEQ. */
7303 tree
7304 gimple_build (gimple_seq *seq, location_t loc,
7305 enum tree_code code, tree type, tree op0, tree op1)
7307 tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
7308 if (!res)
7310 res = create_tmp_reg_or_ssa_name (type);
7311 gimple *stmt = gimple_build_assign (res, code, op0, op1);
7312 gimple_set_location (stmt, loc);
7313 gimple_seq_add_stmt_without_update (seq, stmt);
7315 return res;
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      /* BIT_FIELD_REF is represented as a reference on the RHS rather
	 than as a plain ternary operation.  */
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
7344 /* Build the call FN (ARG0) with a result of type TYPE
7345 (or no result if TYPE is void) with location LOC,
7346 simplifying it first if possible. Returns the built
7347 expression value (or NULL_TREE if TYPE is void) and appends
7348 statements possibly defining it to SEQ. */
7350 tree
7351 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7352 tree type, tree arg0)
7354 tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
7355 if (!res)
7357 gcall *stmt;
7358 if (internal_fn_p (fn))
7359 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
7360 else
7362 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7363 stmt = gimple_build_call (decl, 1, arg0);
7365 if (!VOID_TYPE_P (type))
7367 res = create_tmp_reg_or_ssa_name (type);
7368 gimple_call_set_lhs (stmt, res);
7370 gimple_set_location (stmt, loc);
7371 gimple_seq_add_stmt_without_update (seq, stmt);
7373 return res;
7376 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7377 (or no result if TYPE is void) with location LOC,
7378 simplifying it first if possible. Returns the built
7379 expression value (or NULL_TREE if TYPE is void) and appends
7380 statements possibly defining it to SEQ. */
7382 tree
7383 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7384 tree type, tree arg0, tree arg1)
7386 tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
7387 if (!res)
7389 gcall *stmt;
7390 if (internal_fn_p (fn))
7391 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
7392 else
7394 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7395 stmt = gimple_build_call (decl, 2, arg0, arg1);
7397 if (!VOID_TYPE_P (type))
7399 res = create_tmp_reg_or_ssa_name (type);
7400 gimple_call_set_lhs (stmt, res);
7402 gimple_set_location (stmt, loc);
7403 gimple_seq_add_stmt_without_update (seq, stmt);
7405 return res;
7408 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7409 (or no result if TYPE is void) with location LOC,
7410 simplifying it first if possible. Returns the built
7411 expression value (or NULL_TREE if TYPE is void) and appends
7412 statements possibly defining it to SEQ. */
7414 tree
7415 gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
7416 tree type, tree arg0, tree arg1, tree arg2)
7418 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
7419 seq, gimple_build_valueize);
7420 if (!res)
7422 gcall *stmt;
7423 if (internal_fn_p (fn))
7424 stmt = gimple_build_call_internal (as_internal_fn (fn),
7425 3, arg0, arg1, arg2);
7426 else
7428 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
7429 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
7431 if (!VOID_TYPE_P (type))
7433 res = create_tmp_reg_or_ssa_name (type);
7434 gimple_call_set_lhs (stmt, res);
7436 gimple_set_location (stmt, loc);
7437 gimple_seq_add_stmt_without_update (seq, stmt);
7439 return res;
7442 /* Build the conversion (TYPE) OP with a result of type TYPE
7443 with location LOC if such conversion is neccesary in GIMPLE,
7444 simplifying it first.
7445 Returns the built expression value and appends
7446 statements possibly defining it to SEQ. */
7448 tree
7449 gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
7451 if (useless_type_conversion_p (type, TREE_TYPE (op)))
7452 return op;
7453 return gimple_build (seq, loc, NOP_EXPR, type, op);
7456 /* Build the conversion (ptrofftype) OP with a result of a type
7457 compatible with ptrofftype with location LOC if such conversion
7458 is neccesary in GIMPLE, simplifying it first.
7459 Returns the built expression value and appends
7460 statements possibly defining it to SEQ. */
7462 tree
7463 gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
7465 if (ptrofftype_p (TREE_TYPE (op)))
7466 return op;
7467 return gimple_convert (seq, loc, sizetype, op);
7470 /* Build a vector of type TYPE in which each element has the value OP.
7471 Return a gimple value for the result, appending any new statements
7472 to SEQ. */
7474 tree
7475 gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
7476 tree op)
7478 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
7479 && !CONSTANT_CLASS_P (op))
7480 return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);
7482 tree res, vec = build_vector_from_val (type, op);
7483 if (is_gimple_val (vec))
7484 return vec;
7485 if (gimple_in_ssa_p (cfun))
7486 res = make_ssa_name (type);
7487 else
7488 res = create_tmp_reg (type);
7489 gimple *stmt = gimple_build_assign (res, vec);
7490 gimple_set_location (stmt, loc);
7491 gimple_seq_add_stmt_without_update (seq, stmt);
7492 return res;
7495 /* Build a vector from BUILDER, handling the case in which some elements
7496 are non-constant. Return a gimple value for the result, appending any
7497 new instructions to SEQ.
7499 BUILDER must not have a stepped encoding on entry. This is because
7500 the function is not geared up to handle the arithmetic that would
7501 be needed in the variable case, and any code building a vector that
7502 is known to be constant should use BUILDER->build () directly. */
7504 tree
7505 gimple_build_vector (gimple_seq *seq, location_t loc,
7506 tree_vector_builder *builder)
7508 gcc_assert (builder->nelts_per_pattern () <= 2);
7509 unsigned int encoded_nelts = builder->encoded_nelts ();
7510 for (unsigned int i = 0; i < encoded_nelts; ++i)
7511 if (!TREE_CONSTANT ((*builder)[i]))
7513 tree type = builder->type ();
7514 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
7515 vec<constructor_elt, va_gc> *v;
7516 vec_alloc (v, nelts);
7517 for (i = 0; i < nelts; ++i)
7518 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
7520 tree res;
7521 if (gimple_in_ssa_p (cfun))
7522 res = make_ssa_name (type);
7523 else
7524 res = create_tmp_reg (type);
7525 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
7526 gimple_set_location (stmt, loc);
7527 gimple_seq_add_stmt_without_update (seq, stmt);
7528 return res;
7530 return builder->build ();
7533 /* Return true if the result of assignment STMT is known to be non-negative.
7534 If the return value is based on the assumption that signed overflow is
7535 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7536 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7538 static bool
7539 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7540 int depth)
7542 enum tree_code code = gimple_assign_rhs_code (stmt);
7543 switch (get_gimple_rhs_class (code))
7545 case GIMPLE_UNARY_RHS:
7546 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7547 gimple_expr_type (stmt),
7548 gimple_assign_rhs1 (stmt),
7549 strict_overflow_p, depth);
7550 case GIMPLE_BINARY_RHS:
7551 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
7552 gimple_expr_type (stmt),
7553 gimple_assign_rhs1 (stmt),
7554 gimple_assign_rhs2 (stmt),
7555 strict_overflow_p, depth);
7556 case GIMPLE_TERNARY_RHS:
7557 return false;
7558 case GIMPLE_SINGLE_RHS:
7559 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
7560 strict_overflow_p, depth);
7561 case GIMPLE_INVALID_RHS:
7562 break;
7564 gcc_unreachable ();
7567 /* Return true if return value of call STMT is known to be non-negative.
7568 If the return value is based on the assumption that signed overflow is
7569 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7570 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7572 static bool
7573 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7574 int depth)
7576 tree arg0 = gimple_call_num_args (stmt) > 0 ?
7577 gimple_call_arg (stmt, 0) : NULL_TREE;
7578 tree arg1 = gimple_call_num_args (stmt) > 1 ?
7579 gimple_call_arg (stmt, 1) : NULL_TREE;
7581 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
7582 gimple_call_combined_fn (stmt),
7583 arg0,
7584 arg1,
7585 strict_overflow_p, depth);
7588 /* Return true if return value of call STMT is known to be non-negative.
7589 If the return value is based on the assumption that signed overflow is
7590 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7591 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7593 static bool
7594 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7595 int depth)
7597 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7599 tree arg = gimple_phi_arg_def (stmt, i);
7600 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
7601 return false;
7603 return true;
7606 /* Return true if STMT is known to compute a non-negative value.
7607 If the return value is based on the assumption that signed overflow is
7608 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7609 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7611 bool
7612 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
7613 int depth)
7615 switch (gimple_code (stmt))
7617 case GIMPLE_ASSIGN:
7618 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
7619 depth);
7620 case GIMPLE_CALL:
7621 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
7622 depth);
7623 case GIMPLE_PHI:
7624 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
7625 depth);
7626 default:
7627 return false;
7631 /* Return true if the floating-point value computed by assignment STMT
7632 is known to have an integer value. We also allow +Inf, -Inf and NaN
7633 to be considered integer values. Return false for signaling NaN.
7635 DEPTH is the current nesting depth of the query. */
7637 static bool
7638 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
7640 enum tree_code code = gimple_assign_rhs_code (stmt);
7641 switch (get_gimple_rhs_class (code))
7643 case GIMPLE_UNARY_RHS:
7644 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
7645 gimple_assign_rhs1 (stmt), depth);
7646 case GIMPLE_BINARY_RHS:
7647 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
7648 gimple_assign_rhs1 (stmt),
7649 gimple_assign_rhs2 (stmt), depth);
7650 case GIMPLE_TERNARY_RHS:
7651 return false;
7652 case GIMPLE_SINGLE_RHS:
7653 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
7654 case GIMPLE_INVALID_RHS:
7655 break;
7657 gcc_unreachable ();
7660 /* Return true if the floating-point value computed by call STMT is known
7661 to have an integer value. We also allow +Inf, -Inf and NaN to be
7662 considered integer values. Return false for signaling NaN.
7664 DEPTH is the current nesting depth of the query. */
7666 static bool
7667 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
7669 tree arg0 = (gimple_call_num_args (stmt) > 0
7670 ? gimple_call_arg (stmt, 0)
7671 : NULL_TREE);
7672 tree arg1 = (gimple_call_num_args (stmt) > 1
7673 ? gimple_call_arg (stmt, 1)
7674 : NULL_TREE);
7675 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
7676 arg0, arg1, depth);
7679 /* Return true if the floating-point result of phi STMT is known to have
7680 an integer value. We also allow +Inf, -Inf and NaN to be considered
7681 integer values. Return false for signaling NaN.
7683 DEPTH is the current nesting depth of the query. */
7685 static bool
7686 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
7688 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
7690 tree arg = gimple_phi_arg_def (stmt, i);
7691 if (!integer_valued_real_single_p (arg, depth + 1))
7692 return false;
7694 return true;
7697 /* Return true if the floating-point value computed by STMT is known
7698 to have an integer value. We also allow +Inf, -Inf and NaN to be
7699 considered integer values. Return false for signaling NaN.
7701 DEPTH is the current nesting depth of the query. */
7703 bool
7704 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
7706 switch (gimple_code (stmt))
7708 case GIMPLE_ASSIGN:
7709 return gimple_assign_integer_valued_real_p (stmt, depth);
7710 case GIMPLE_CALL:
7711 return gimple_call_integer_valued_real_p (stmt, depth);
7712 case GIMPLE_PHI:
7713 return gimple_phi_integer_valued_real_p (stmt, depth);
7714 default:
7715 return false;